diff --git a/private/buf/buflsp/completion.go b/private/buf/buflsp/completion.go
index ef155ff088..bd8a374e61 100644
--- a/private/buf/buflsp/completion.go
+++ b/private/buf/buflsp/completion.go
@@ -27,7 +27,9 @@ import (
 	"github.com/bufbuild/buf/private/pkg/normalpath"
 	"github.com/bufbuild/protocompile/experimental/ast"
 	"github.com/bufbuild/protocompile/experimental/ast/syntax"
+	"github.com/bufbuild/protocompile/experimental/report"
 	"github.com/bufbuild/protocompile/experimental/seq"
+	"github.com/bufbuild/protocompile/experimental/token"
 	"github.com/bufbuild/protocompile/experimental/token/keyword"
 	"go.lsp.dev/protocol"
 )
@@ -168,7 +170,7 @@ func completionItemsForDef(ctx context.Context, file *file, declPath []ast.DeclA
 		file.lsp.logger.DebugContext(
 			ctx,
 			"completion: ignoring definition name",
-			slog.String("def_kind", def.Classify().String()),
+			slog.String("kind", def.Classify().String()),
 		)
 		return nil
 	}
@@ -181,12 +183,148 @@ func completionItemsForDef(ctx context.Context, file *file, declPath []ast.DeclA
 			file.lsp.logger.DebugContext(
 				ctx,
 				"completion: ignoring definition passed type",
-				slog.String("def_kind", def.Classify().String()),
+				slog.String("kind", def.Classify().String()),
 			)
 			return nil
 		}
 	}

+	switch def.Classify() {
+	case ast.DefKindMessage:
+		return nil
+	case ast.DefKindService:
+		return nil
+	case ast.DefKindEnum:
+		return nil
+	case ast.DefKindField:
+		return completionItemsForField(ctx, file, declPath, def.AsField(), position)
+	case ast.DefKindInvalid:
+		return completionItemsForKeyword(ctx, file, declPath, def, position)
+	default:
+		file.lsp.logger.DebugContext(ctx, "completion: unknown definition type", slog.String("kind", def.Classify().String()))
+		return nil
+	}
+}
+
+// completionItemsForKeyword returns completion items for a declaration expecting keywords.
+func completionItemsForKeyword(ctx context.Context, file *file, declPath []ast.DeclAny, def ast.DeclDef, position protocol.Position) []protocol.CompletionItem {
+	positionLocation := file.file.InverseLocation(int(position.Line)+1, int(position.Character)+1, positionalEncoding)
+	offset := positionLocation.Offset
+
+	span := def.Span()
+
+	// Check if at newline or end of span. Keywords are restricted to the first identifier.
+	if !isNewlineOrEndOfSpan(span, offset) {
+		file.lsp.logger.Debug("completion: keyword skip on span bounds", slog.String("span", span.Text()))
+		return nil
+	}
+
+	tokenSpan := extractAroundToken(file, offset)
+	file.lsp.logger.DebugContext(ctx, "completion: keyword items", slog.String("span", span.Text()))
+
+	// If this is an invalid definition at the top level, return top-level keywords.
+	if len(declPath) == 1 {
+		file.lsp.logger.DebugContext(ctx, "completion: keyword returning top-level")
+		return slices.Collect(keywordToCompletionItem(
+			topLevelKeywords(),
+			protocol.CompletionItemKindKeyword,
+			tokenSpan,
+			offset,
+		))
+	}
+
+	parent := declPath[len(declPath)-2]
+	file.lsp.logger.DebugContext(
+		ctx, "completion: keyword nested definition",
+		slog.String("kind", parent.Kind().String()),
+	)
+	if parent.Kind() != ast.DeclKindDef {
+		return nil
+	}
+
+	var items iter.Seq[protocol.CompletionItem]
+	parentDef := parent.AsDef()
+	switch parentDef.Classify() {
+	case ast.DefKindMessage:
+		isProto2 := isProto2(file)
+		items = joinCompletionItems(
+			keywordToCompletionItem(
+				messageLevelKeywords(isProto2),
+				protocol.CompletionItemKindKeyword,
+				tokenSpan,
+				offset,
+			),
+			keywordToCompletionItem(
+				messageLevelFieldKeywords(),
+				protocol.CompletionItemKindKeyword,
+				tokenSpan,
+				offset,
+			),
+			keywordToCompletionItem(
+				predeclaredTypeKeywords(),
+				protocol.CompletionItemKindClass,
+				tokenSpan,
+				offset,
+			),
+			// TODO: add custom types.
+		)
+	case ast.DefKindService:
+		items = keywordToCompletionItem(
+			serviceLevelKeywords(),
+			protocol.CompletionItemKindKeyword,
+			tokenSpan,
+			offset,
+		)
+	case ast.DefKindEnum:
+		items = keywordToCompletionItem(
+			enumLevelKeywords(),
+			protocol.CompletionItemKindKeyword,
+			tokenSpan,
+			offset,
+		)
+	default:
+		return nil
+	}
+	return slices.Collect(items)
+}
+
+// completionItemsForField returns completion items for a field.
+func completionItemsForField(ctx context.Context, file *file, declPath []ast.DeclAny, defField ast.DefField, position protocol.Position) []protocol.CompletionItem {
+	if len(declPath) == 1 {
+		file.lsp.logger.DebugContext(ctx, "completion: field top level, going to keywords")
+		return completionItemsForKeyword(ctx, file, declPath, defField.Decl, position)
+	}
+
+	parent := declPath[len(declPath)-2]
+	file.lsp.logger.DebugContext(
+		ctx, "completion: field within definition",
+		slog.String("kind", parent.Kind().String()),
+	)
+	if parent.Kind() != ast.DeclKindDef {
+		return nil
+	}
+
+	positionLocation := file.file.InverseLocation(int(position.Line)+1, int(position.Character)+1, positionalEncoding)
+	offset := positionLocation.Offset
+
+	// If on a newline before the current field, or at the end of the span, return keywords.
+	if isNewlineOrEndOfSpan(defField.Span(), offset) {
+		file.lsp.logger.DebugContext(ctx, "completion: field on newline, return keywords")
+		return completionItemsForKeyword(ctx, file, declPath, defField.Decl, position)
+	}
+
+	typeSpan := defField.Type.Span()
+	start, end := typeSpan.Start, typeSpan.End
+
+	if start > offset || offset > end {
+		file.lsp.logger.DebugContext(
+			ctx, "completion: field outside definition",
+			slog.String("kind", parent.Kind().String()),
+		)
+		return nil
+	}
+
+	def := parent.AsDef()
 	switch def.Classify() {
 	case ast.DefKindMessage:
 		return nil
@@ -195,12 +333,6 @@ func completionItemsForDef(ctx context.Context, file *file, declPath []ast.DeclA
 	case ast.DefKindEnum:
 		return nil
 	default:
-		// If this is an invalid definition at the top level, return top-level keywords.
-		if len(declPath) == 1 {
-			file.lsp.logger.DebugContext(ctx, "completion: unknown definition at top level, returning top-level keywords")
-			return slices.Collect(topLevelCompletionItems())
-		}
-		file.lsp.logger.DebugContext(ctx, "completion: unknown definition type (not at top level)")
 		return nil
 	}
 }
@@ -243,22 +375,16 @@ func completionItemsForImport(ctx context.Context, file *file, declImport ast.De
 	var items []protocol.CompletionItem
 	for importPath := range file.importToFile {
 		suggest := fmt.Sprintf("%q", importPath)
-		if !strings.HasPrefix(suggest, prefix) {
-			file.lsp.logger.Debug("completion: skipping on prefix",
+		if !strings.HasPrefix(suggest, prefix) || !strings.HasSuffix(suggest, suffix) {
+			file.lsp.logger.Debug("completion: skipping on prefix/suffix",
 				slog.String("import", importPathText),
-				slog.String("prefix", prefix),
-				slog.String("suggest", suggest),
-			)
-			continue
-		}
-		if !strings.HasSuffix(suggest, suffix) {
-			file.lsp.logger.Debug("completion: skipping on suffix",
-				slog.String("import", importPathText),
-				slog.String("suffix", prefix),
 				slog.String("suggest", suggest),
+				slog.String("prefix", prefix),
+				slog.String("suffix", suffix),
 			)
 			continue
 		}
+
 		items = append(items, protocol.CompletionItem{
 			Label: importPath,
 			Kind:  protocol.CompletionItemKindFile,
@@ -274,26 +400,120 @@ func completionItemsForImport(ctx context.Context, file *file, declImport ast.De
 	return items
 }

-// topLevelCompletionItems returns completion items for top-level proto keywords.
-func topLevelCompletionItems() iter.Seq[protocol.CompletionItem] {
-	return func(yield func(protocol.CompletionItem) bool) {
-		_ = yield(keywordToCompletionItem(keyword.Syntax)) &&
-			yield(keywordToCompletionItem(keyword.Edition)) &&
-			yield(keywordToCompletionItem(keyword.Import)) &&
-			yield(keywordToCompletionItem(keyword.Package)) &&
-			yield(keywordToCompletionItem(keyword.Message)) &&
-			yield(keywordToCompletionItem(keyword.Service)) &&
-			yield(keywordToCompletionItem(keyword.Option)) &&
-			yield(keywordToCompletionItem(keyword.Enum)) &&
-			yield(keywordToCompletionItem(keyword.Extend))
+// topLevelKeywords returns keywords for the top level of a file.
+func topLevelKeywords() iter.Seq[keyword.Keyword] {
+	return func(yield func(keyword.Keyword) bool) {
+		_ = yield(keyword.Syntax) &&
+			yield(keyword.Edition) &&
+			yield(keyword.Import) &&
+			yield(keyword.Package) &&
+			yield(keyword.Message) &&
+			yield(keyword.Service) &&
+			yield(keyword.Option) &&
+			yield(keyword.Enum) &&
+			yield(keyword.Extend)
+	}
+}
+
+// messageLevelKeywords returns keywords for message bodies.
+func messageLevelKeywords(isProto2 bool) iter.Seq[keyword.Keyword] {
+	return func(yield func(keyword.Keyword) bool) {
+		ok := yield(keyword.Message) &&
+			yield(keyword.Enum) &&
+			yield(keyword.Option) &&
+			yield(keyword.Extend) &&
+			yield(keyword.Oneof) &&
+			yield(keyword.Extensions) &&
+			yield(keyword.Reserved)
+		_ = ok && isProto2 &&
+			yield(keyword.Group)
+	}
+}
+
+// messageLevelFieldKeywords returns keywords for field type modifiers.
+func messageLevelFieldKeywords() iter.Seq[keyword.Keyword] {
+	return func(yield func(keyword.Keyword) bool) {
+		_ = yield(keyword.Repeated) &&
+			yield(keyword.Optional) &&
+			yield(keyword.Required)
+	}
+}
+
+// predeclaredTypeKeywords returns keywords for all predeclared types.
+func predeclaredTypeKeywords() iter.Seq[keyword.Keyword] {
+	return func(yield func(keyword.Keyword) bool) {
+		_ = yield(keyword.Int32) &&
+			yield(keyword.Int64) &&
+			yield(keyword.UInt32) &&
+			yield(keyword.UInt64) &&
+			yield(keyword.SInt32) &&
+			yield(keyword.SInt64) &&
+			yield(keyword.Fixed32) &&
+			yield(keyword.Fixed64) &&
+			yield(keyword.SFixed32) &&
+			yield(keyword.SFixed64) &&
+			yield(keyword.Float) &&
+			yield(keyword.Double) &&
+			yield(keyword.Bool) &&
+			yield(keyword.String) &&
+			yield(keyword.Bytes)
+	}
+}
+
+// serviceLevelKeywords returns keywords for services.
+func serviceLevelKeywords() iter.Seq[keyword.Keyword] {
+	return func(yield func(keyword.Keyword) bool) {
+		_ = yield(keyword.RPC) &&
+			yield(keyword.Option)
+	}
+}
+
+// enumLevelKeywords returns keywords for enums.
+func enumLevelKeywords() iter.Seq[keyword.Keyword] {
+	return func(yield func(keyword.Keyword) bool) {
+		_ = yield(keyword.Option)
 	}
 }

 // keywordToCompletionItem converts a keyword to a completion item.
-func keywordToCompletionItem(kw keyword.Keyword) protocol.CompletionItem {
-	return protocol.CompletionItem{
-		Label: kw.String(),
-		Kind:  protocol.CompletionItemKindKeyword,
+func keywordToCompletionItem(
+	keywords iter.Seq[keyword.Keyword],
+	kind protocol.CompletionItemKind,
+	span report.Span,
+	offset int,
+) iter.Seq[protocol.CompletionItem] {
+	return func(yield func(protocol.CompletionItem) bool) {
+		editRange := reportSpanToProtocolRange(span)
+		prefix, suffix := splitSpan(span, offset)
+		for kw := range keywords {
+			suggest := kw.String()
+			if !strings.HasPrefix(suggest, prefix) || !strings.HasSuffix(suggest, suffix) {
+				continue
+			}
+			if !yield(protocol.CompletionItem{
+				Label:    suggest,
+				Kind:     kind,
+				TextEdit: &protocol.TextEdit{
+					Range:   editRange,
+					NewText: suggest,
+				},
+			}) {
+				break
+			}
+		}
+	}
+}
+
+// joinCompletionItems concatenates multiple completion item sequences into a single sequence.
+func joinCompletionItems(itemIters ...iter.Seq[protocol.CompletionItem]) iter.Seq[protocol.CompletionItem] {
+	return func(yield func(protocol.CompletionItem) bool) {
+		for _, items := range itemIters {
+			for item := range items {
+				if !yield(item) {
+					return
+				}
+			}
+		}
 	}
 }

@@ -345,6 +565,94 @@ func getDeclForPositionHelper(body ast.DeclBody, position protocol.Position, pat
 	return bestPath
 }

+// extractAroundToken returns the span of the contiguous identifier and punctuation tokens around the offset by querying the token stream.
+func extractAroundToken(file *file, offset int) report.Span {
+	if file.ir.AST().IsZero() {
+		return report.Span{}
+	}
+	stream := file.ir.AST().Context().Stream()
+	if stream == nil {
+		return report.Span{}
+	}
+	before, after := stream.Around(offset)
+	if before.IsZero() && after.IsZero() {
+		return report.Span{}
+	}
+
+	isToken := func(tok token.Token) bool {
+		switch kind := tok.Kind(); kind {
+		case token.Ident, token.Punct:
+			return true
+		default:
+			return false
+		}
+	}
+	span := report.Span{
+		File:  file.file,
+		Start: offset,
+		End:   offset,
+	}
+	if !before.IsZero() {
+		cursor := token.NewCursorAt(before)
+		for tok := cursor.PrevSkippable(); isToken(tok); tok = cursor.PrevSkippable() {
+			before = tok
+		}
+		if isToken(before) {
+			span.Start = before.Span().Start
+		}
+	}
+	if !after.IsZero() {
+		cursor := token.NewCursorAt(after)
+		for tok := cursor.NextSkippable(); isToken(tok); tok = cursor.NextSkippable() {
+			after = tok
+		}
+		if isToken(after) {
+			span.End = after.Span().End
+		}
+	}
+	return span
+}
+
+func splitSpan(span report.Span, offset int) (prefix string, suffix string) {
+	if span.Start > offset || offset > span.End {
+		return "", ""
+	}
+	index := offset - span.Start
+	text := span.Text()
+	return text[:index], text[index:]
+}
+
+// isNewlineOrEndOfSpan returns true if this offset is separated from the rest of the span by a newline, or is at the end of the span.
+// This most likely means we are at the start of a new declaration.
+func isNewlineOrEndOfSpan(span report.Span, offset int) bool {
+	if offset == span.End {
+		return true
+	}
+	text := span.Text()
+	index := offset - span.Start
+	if newLine := strings.IndexByte(text[index:], '\n'); newLine >= 0 {
+		// A newline separates the end; check there's no dangling content after us on this line.
+		after := text[index : index+newLine]
+		return len(strings.TrimSpace(after)) == 0
+	}
+	return false
+}
+
+// isProto2 returns true if the file has a syntax declaration of proto2.
+func isProto2(file *file) bool {
+	body := file.ir.AST().DeclBody
+	for decl := range seq.Values(body.Decls()) {
+		if decl.IsZero() {
+			continue
+		}
+		if kind := decl.Kind(); kind == ast.DeclKindSyntax {
+			declSyntax := decl.AsSyntax()
+			return declSyntax.IsSyntax() && declSyntax.Value().Span().Text() == "proto2"
+		}
+	}
+	return false
+}
+
 // resolveCompletionItem resolves additional details for a completion item.
 //
 // This function is called by the CompletionResolve handler in server.go.