diff --git a/src/Server.zig b/src/Server.zig index 3d4833da2..d79921d6e 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -1459,7 +1459,7 @@ fn prepareRenameHandler(server: *Server, arena: std.mem.Allocator, request: type }; const handle = server.document_store.getHandle(document_uri) orelse return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position, server.offset_encoding); - const name_loc = Analyser.identifierLocFromIndex(&handle.tree, source_index) orelse return null; + const name_loc = offsets.identifierLocFromIndex(&handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); return .{ .prepare_rename_placeholder = .{ diff --git a/src/analysis.zig b/src/analysis.zig index d69683fee..cb9708c7e 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -686,97 +686,6 @@ pub fn isSnakeCase(name: []const u8) bool { // ANALYSIS ENGINE -/// if the `source_index` points to `@name`, the source location of `name` without the `@` is returned. -/// if the `source_index` points to `@"name"`, the source location of `name` is returned. -pub fn identifierLocFromIndex(tree: *const Ast, source_index: usize) ?offsets.Loc { - _, const loc = identifierTokenAndLocFromIndex(tree, source_index) orelse return null; - return loc; -} - -pub fn identifierTokenAndLocFromIndex(tree: *const Ast, source_index: usize) ?struct { Ast.TokenIndex, offsets.Loc } { - const token = offsets.sourceIndexToTokenIndex(tree, source_index).pickPreferred(&.{ .identifier, .builtin }, tree) orelse return null; - switch (tree.tokenTag(token)) { - .identifier, - .builtin, - => { - const token_loc = offsets.tokenToLoc(tree, token); - if (!(token_loc.start <= source_index and source_index <= token_loc.end)) return null; - return .{ token, offsets.identifierIndexToLoc(tree.source, tree.tokenStart(token), .name) }; - }, - else => {}, - } - - var start = source_index; - while (start > 0 and isSymbolChar(tree.source[start - 1])) { - start -= 1; - } - - var end = source_index; - while (end < tree.source.len and isSymbolChar(tree.source[end])) { - end += 1; - } - - if (start == end) return null; - return .{ token, .{ .start = start, .end = end } }; -} - -test identifierLocFromIndex { - var tree = try Ast.parse(std.testing.allocator, - \\ name @builtin @"escaped" @"s p a c e" end - , .zig); - defer tree.deinit(std.testing.allocator); - - try std.testing.expectEqualSlices( - std.zig.Token.Tag, - &.{ .identifier, .builtin, .identifier, .identifier, .identifier, .eof }, - tree.tokens.items(.tag), - ); - - { - const expected_loc: offsets.Loc = .{ .start = 1, .end = 5 }; - std.debug.assert(std.mem.eql(u8, "name", offsets.locToSlice(tree.source, expected_loc))); - - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 1)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 2)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 5)); - } - - { - const expected_loc: offsets.Loc = .{ .start = 8, .end = 15 }; - std.debug.assert(std.mem.eql(u8, "builtin", offsets.locToSlice(tree.source, expected_loc))); - - try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(&tree, 6)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 7)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 8)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 11)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 15)); - 
try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(&tree, 16)); - } - - { - const expected_loc: offsets.Loc = .{ .start = 19, .end = 26 }; - std.debug.assert(std.mem.eql(u8, "escaped", offsets.locToSlice(tree.source, expected_loc))); - - try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(&tree, 16)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 17)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 18)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 19)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 23)); - try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 27)); - try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(&tree, 28)); - } - - { - const expected_loc: offsets.Loc = .{ .start = 43, .end = 46 }; - std.debug.assert(std.mem.eql(u8, "end", offsets.locToSlice(tree.source, expected_loc))); - - try std.testing.expectEqual(@as(?offsets.Loc, null), identifierLocFromIndex(&tree, 42)); - try std.testing.expectEqual(@as(?offsets.Loc, expected_loc), identifierLocFromIndex(&tree, 43)); - try std.testing.expectEqual(@as(?offsets.Loc, expected_loc), identifierLocFromIndex(&tree, 45)); - try std.testing.expectEqual(@as(?offsets.Loc, expected_loc), identifierLocFromIndex(&tree, 46)); - } -} - /// Resolves variable declarations consisting of chains of imports and field accesses of containers /// Examples: ///```zig @@ -4898,6 +4807,7 @@ pub const PositionContext = union(enum) { /// - `blk: while` /// - `blk: switch` label_decl: offsets.Loc, + test_doctest_name: offsets.Loc, enum_literal: offsets.Loc, number_literal: offsets.Loc, char_literal: offsets.Loc, @@ -4905,7 +4815,6 @@ pub const PositionContext = union(enum) { parens_expr: offsets.Loc, keyword: Ast.TokenIndex, error_access, - test_doctest_name, comment, other, empty, @@ -4921,6 +4830,7 @@ pub const PositionContext = union(enum) { .var_access, .label_access, .label_decl, + .test_doctest_name, .enum_literal, .number_literal, .char_literal, @@ -4928,7 +4838,6 @@ pub const PositionContext = union(enum) { => |l| return l, .keyword => |token_index| return offsets.tokenToLoc(tree, token_index), .error_access, - .test_doctest_name, .comment, .other, .empty, @@ -4980,6 +4889,7 @@ const Stack = struct { }; pub fn initCapacity(allocator: std.mem.Allocator, capacity: usize) error{OutOfMemory}!Stack { + std.debug.assert(capacity > 0); // See `peek`. return .{ .states = try .initCapacity(allocator, capacity) }; } @@ -4991,9 +4901,9 @@ const Stack = struct { try self.states.append(allocator, state.*); } - pub fn peek(self: *Stack, allocator: std.mem.Allocator) error{OutOfMemory}!*Stack.State { + pub fn peek(self: *Stack) *Stack.State { if (self.states.items.len == 0) { - try self.states.append(allocator, .{ .ctx = .empty, .scope = .global }); + self.states.appendAssumeCapacity(.{ .ctx = .empty, .scope = .global }); } return &self.states.items[self.states.items.len - 1]; } @@ -5001,12 +4911,11 @@ const Stack = struct { /// Pops the last state off the stack. 
Sets previous state's ctx to .empty if !scopes_match pub fn pop( self: *Stack, - allocator: std.mem.Allocator, /// Indicate whether the current state's scope matches the one being closed scopes_match: bool, - ) error{OutOfMemory}!void { + ) void { if (self.states.items.len != 0) self.states.items.len -= 1; - if (!scopes_match) (try self.peek(allocator)).ctx = .empty; + if (!scopes_match) self.peek().ctx = .empty; } }; @@ -5017,10 +4926,6 @@ fn tokenLocAppend(prev: offsets.Loc, token: std.zig.Token) offsets.Loc { }; } -pub fn isSymbolChar(char: u8) bool { - return std.ascii.isAlphanumeric(char) or char == '_'; -} - /// Given a byte index in a document (typically cursor offset), classify what kind of entity is at that index. /// /// Classification is based on the lexical structure -- we fetch the line containing index, and look at the @@ -5097,8 +5002,13 @@ pub fn getPositionContext( // Single '@' do not return a builtin token so we check this on our own. if (tok.tag == .invalid and tree.source[tok.loc.start] == '@') { - tok.tag = .builtin; - tok.loc = .{ .start = tok.loc.start, .end = tok.loc.start + 1 }; + if (std.mem.startsWith(u8, tree.source[tok.loc.start..], "@\"")) { + tok.tag = .identifier; + tok.loc = .{ .start = tok.loc.start, .end = @min(line_loc.end, tree.tokenStart(current_token + 1)) }; + } else if (std.mem.startsWith(u8, tree.source[tok.loc.start..], "@")) { + tok.tag = .builtin; + tok.loc = .{ .start = tok.loc.start, .end = tok.loc.start + 1 }; + } } if (source_index < tok.loc.start) break; @@ -5107,7 +5017,7 @@ pub fn getPositionContext( // `tok` is the latter of the two. if (!should_do_lookahead) break; should_do_lookahead = false; - const curr_ctx = try stack.peek(allocator); + const curr_ctx = stack.peek(); switch (tok.tag) { .identifier, .builtin, @@ -5137,13 +5047,15 @@ pub fn getPositionContext( else => {}, } - // State changes - var curr_ctx = try stack.peek(allocator); - switch (tok.tag) { - .string_literal, .multiline_string_literal_line => string_lit_block: { - curr_ctx.ctx = .{ .string_literal = tok.loc }; - if (tok.tag != .string_literal) break :string_lit_block; - + const curr_ctx: *Stack.State = stack.peek(); + defer switch (stack.peek().ctx) { + .field_access => |*loc| loc.* = tokenLocAppend(loc.*, tok), + else => {}, + }; + const new_state: PositionContext = switch (tok.tag) { + .multiline_string_literal_line => .{ .string_literal = tok.loc }, + .string_literal, + => new_state: { const string_literal_slice = offsets.locToSlice(tree.source, tok.loc); var content_loc = tok.loc; @@ -5154,7 +5066,8 @@ pub fn getPositionContext( } } - if (source_index < content_loc.start or content_loc.end < source_index) break :string_lit_block; + var new_state: PositionContext = .{ .string_literal = tok.loc }; + if (source_index < content_loc.start or content_loc.end < source_index) break :new_state new_state; if (curr_ctx.scope == .parens and stack.states.items.len >= 2) @@ -5165,53 +5078,54 @@ pub fn getPositionContext( .builtin => |loc| { const builtin_name = tree.source[loc.start..loc.end]; if (std.mem.eql(u8, builtin_name, "@import")) { - curr_ctx.ctx = .{ .import_string_literal = tok.loc }; + new_state = .{ .import_string_literal = tok.loc }; } else if (std.mem.eql(u8, builtin_name, "@cInclude")) { - curr_ctx.ctx = .{ .cinclude_string_literal = tok.loc }; + new_state = .{ .cinclude_string_literal = tok.loc }; } else if (std.mem.eql(u8, builtin_name, "@embedFile")) { - curr_ctx.ctx = .{ .embedfile_string_literal = tok.loc }; + new_state = .{ .embedfile_string_literal = 
tok.loc }; } }, else => {}, } } + break :new_state new_state; }, - .identifier => if (curr_ctx.isErrSetDef()) { - // Intent is to skip everything between the `error{...}` braces - } else switch (curr_ctx.ctx) { - .enum_literal => curr_ctx.ctx = .{ .enum_literal = tokenLocAppend(curr_ctx.ctx.loc(tree).?, tok) }, - .field_access => curr_ctx.ctx = .{ .field_access = tokenLocAppend(curr_ctx.ctx.loc(tree).?, tok) }, - .label_access => |loc| curr_ctx.ctx = if (loc.start == loc.end) + .identifier => if (curr_ctx.isErrSetDef()) + continue // Intent is to skip everything between the `error{...}` braces + else switch (curr_ctx.ctx) { + .enum_literal => |loc| .{ .enum_literal = tokenLocAppend(loc, tok) }, + .field_access => |loc| .{ .field_access = tokenLocAppend(loc, tok) }, + .label_access => |loc| if (loc.start == loc.end) .{ .label_access = tok.loc } else .{ .var_access = tok.loc }, - .test_doctest_name => curr_ctx.ctx = .test_doctest_name, - else => curr_ctx.ctx = .{ .var_access = tok.loc }, + .test_doctest_name => .{ .test_doctest_name = tok.loc }, + else => .{ .var_access = tok.loc }, }, - .builtin => curr_ctx.ctx = .{ .builtin = tok.loc }, - .period, .period_asterisk => switch (curr_ctx.ctx) { - // TODO: only set context to enum literal if token tag is "." (not ".*") - .empty, .label_access => curr_ctx.ctx = .{ .enum_literal = tok.loc }, - .enum_literal => curr_ctx.ctx = .empty, + .builtin => .{ .builtin = tok.loc }, + .period => switch (curr_ctx.ctx) { + .empty, .label_access => .{ .enum_literal = tok.loc }, + .enum_literal => .empty, .keyword => |token_index| switch (tree.tokenTag(token_index)) { - .keyword_break => curr_ctx.ctx = .{ .enum_literal = tok.loc }, - else => curr_ctx.ctx = .other, + .keyword_break => .{ .enum_literal = tok.loc }, + else => .other, }, - .comment, .other, .field_access, .error_access => {}, - else => curr_ctx.ctx = .{ .field_access = tokenLocAppend(curr_ctx.ctx.loc(tree) orelse tok.loc, tok) }, + .comment, .other, .error_access => curr_ctx.ctx, + .test_doctest_name, .var_access, .field_access => |loc| .{ .field_access = tokenLocAppend(loc, tok) }, + else => .{ .field_access = tokenLocAppend(curr_ctx.ctx.loc(tree) orelse tok.loc, tok) }, }, - .question_mark => switch (curr_ctx.ctx) { - .field_access => {}, - else => curr_ctx.ctx = .empty, + .question_mark, .period_asterisk => switch (curr_ctx.ctx) { + .var_access, .field_access => |loc| .{ .field_access = tokenLocAppend(loc, tok) }, + else => .empty, }, .colon => switch (curr_ctx.ctx) { .keyword => |token_index| switch (tree.tokenTag(token_index)) { .keyword_break, .keyword_continue, - => curr_ctx.ctx = .{ .label_access = .{ .start = tok.loc.end, .end = tok.loc.end } }, - else => curr_ctx.ctx = .empty, + => .{ .label_access = .{ .start = tok.loc.end, .end = tok.loc.end } }, + else => .empty, }, - else => curr_ctx.ctx = .empty, + else => .empty, }, .l_paren => { if (curr_ctx.ctx == .empty) curr_ctx.ctx = .{ .parens_expr = tok.loc }; @@ -5223,22 +5137,40 @@ pub fn getPositionContext( else => .parens, } else .parens; try stack.push(allocator, &.{ .ctx = .empty, .scope = scope }); + continue; + }, + .r_paren => { + stack.pop(curr_ctx.scope == .parens); + continue; }, - .r_paren => try stack.pop(allocator, curr_ctx.scope == .parens), - .l_bracket => try stack.push(allocator, &.{ .ctx = .empty, .scope = .brackets }), - .r_bracket => try stack.pop(allocator, curr_ctx.scope == .brackets), - .l_brace => try stack.push(allocator, &.{ .ctx = if (curr_ctx.ctx == .error_access) curr_ctx.ctx else .empty, .scope = .braces }), - 
.r_brace => try stack.pop(allocator, curr_ctx.scope == .braces), - .keyword_error => curr_ctx.ctx = .error_access, + .l_bracket => { + try stack.push(allocator, &.{ .ctx = .empty, .scope = .brackets }); + continue; + }, + .r_bracket => { + stack.pop(curr_ctx.scope == .brackets); + continue; + }, + .l_brace => { + try stack.push(allocator, &.{ .ctx = if (curr_ctx.ctx == .error_access) curr_ctx.ctx else .empty, .scope = .braces }); + continue; + }, + .r_brace => { + stack.pop(curr_ctx.scope == .braces); + continue; + }, + .keyword_error => .error_access, .number_literal => { if (tok.loc.start <= source_index and tok.loc.end >= source_index) { return .{ .number_literal = tok.loc }; } + continue; }, .char_literal => { if (tok.loc.start <= source_index and tok.loc.end >= source_index) { return .{ .char_literal = tok.loc }; } + continue; }, .keyword_addrspace, .keyword_break, @@ -5248,26 +5180,23 @@ pub fn getPositionContext( .keyword_if, .keyword_switch, .keyword_while, - => |tag| { + => |tag| new_state: { std.debug.assert(tree.tokenTag(current_token) == tag); - curr_ctx.ctx = .{ .keyword = current_token }; + break :new_state .{ .keyword = current_token }; }, - .keyword_test => curr_ctx.ctx = .test_doctest_name, - .container_doc_comment => curr_ctx.ctx = .comment, - .doc_comment => { - if (!curr_ctx.isErrSetDef()) curr_ctx.ctx = .comment; // Intent is to skip everything between the `error{...}` braces + .keyword_test => .{ .test_doctest_name = .{ .start = tok.loc.end, .end = tok.loc.end } }, + .container_doc_comment => .comment, + .doc_comment => new_state: { + if (!curr_ctx.isErrSetDef()) break :new_state .comment; // Intent is to skip everything between the `error{...}` braces + continue; }, - .comma => { - if (!curr_ctx.isErrSetDef()) curr_ctx.ctx = .empty; // Intent is to skip everything between the `error{...}` braces + .comma => new_state: { + if (!curr_ctx.isErrSetDef()) break :new_state .empty; // Intent is to skip everything between the `error{...}` braces + continue; }, - else => curr_ctx.ctx = .empty, - } - - curr_ctx = try stack.peek(allocator); - switch (curr_ctx.ctx) { - .field_access => |r| curr_ctx.ctx = .{ .field_access = tokenLocAppend(r, tok) }, - else => {}, - } + else => .empty, + }; + curr_ctx.ctx = new_state; } if (stack.states.pop()) |state| { @@ -6594,7 +6523,7 @@ pub fn getSymbolFieldAccessesHighlight( property_types: *std.ArrayList(Type), ) Error!?offsets.Loc { const name_loc, const highlight_loc = blk: { - const name_token, const name_loc = Analyser.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse { + const name_token, const name_loc = offsets.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse { const token = offsets.sourceIndexToTokenIndex(&handle.tree, source_index).pickPreferred(&.{ .question_mark, .period_asterisk }, &handle.tree) orelse return null; switch (handle.tree.tokenTag(token)) { .question_mark => { diff --git a/src/features/completions.zig b/src/features/completions.zig index 2c7ede451..e4e4a92ee 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -473,26 +473,58 @@ fn populateSnippedCompletions(builder: *Builder, kind: enum { generic, top_level } } +fn prepareCompletionLoc(tree: *const Ast, source_index: usize) offsets.Loc { + const fallback_loc: offsets.Loc = .{ .start = source_index, .end = source_index }; + const token = switch (offsets.sourceIndexToTokenIndex(tree, source_index)) { + .none => return fallback_loc, + .one => |token| token, + .between => |data| data.left, + }; + switch 
(tree.tokenTag(token)) { + .identifier, .builtin => |tag| { + if (tag == .builtin and tree.tokenStart(token) == source_index) return fallback_loc; + const token_loc = offsets.tokenToLoc(tree, token); + std.debug.assert(token_loc.start <= source_index and source_index <= token_loc.end); + return offsets.identifierIndexToLoc(tree.source, token_loc.start, if (tag == .builtin) .name else .full); + }, + else => { + const token_start = tree.tokenStart(token); + + var start: usize, var end: usize = start: { + if (std.mem.startsWith(u8, tree.source[token_start..], "@\"")) { + break :start .{ token_start, token_start + 2 }; + } else if (std.mem.startsWith(u8, tree.source[token_start..], "@") or std.mem.startsWith(u8, tree.source[token_start..], ".")) { + if (token_start + 1 < source_index) return fallback_loc; + break :start .{ token_start + 1, token_start + 1 }; + } else { + break :start .{ token_start, token_start }; + } + }; + start = @min(start, source_index); + end = @max(end, source_index); + + while (end < tree.source.len and offsets.isSymbolChar(tree.source[end])) { + end += 1; + } + + return .{ .start = start, .end = end }; + }, + } +} + const FunctionCompletionFormat = enum { snippet, only_name }; const PrepareFunctionCompletionResult = struct { types.Range, types.Range, FunctionCompletionFormat }; fn prepareFunctionCompletion(builder: *Builder) PrepareFunctionCompletionResult { if (builder.cached_prepare_function_completion_result) |result| return result; - const source = builder.orig_handle.tree.source; - - var start_index = builder.source_index; - while (start_index > 0 and Analyser.isSymbolChar(source[start_index - 1])) { - start_index -= 1; - } - - var end_index = builder.source_index; - while (end_index < source.len and Analyser.isSymbolChar(source[end_index])) { - end_index += 1; - } + const tree = &builder.orig_handle.tree; + const source = tree.source; + const source_index = builder.source_index; - var insert_loc: offsets.Loc = .{ .start = start_index, .end = builder.source_index }; - var replace_loc: offsets.Loc = .{ .start = start_index, .end = end_index }; + const identifier_loc = prepareCompletionLoc(tree, source_index); + var insert_loc: offsets.Loc = .{ .start = identifier_loc.start, .end = source_index }; + var replace_loc: offsets.Loc = .{ .start = identifier_loc.start, .end = identifier_loc.end }; var format: FunctionCompletionFormat = .only_name; @@ -505,7 +537,7 @@ fn prepareFunctionCompletion(builder: *Builder) PrepareFunctionCompletionResult format = .snippet; } else if (insert_can_be_snippet or replace_can_be_snippet) { // snippet completions would be possible but insert and replace would need different `newText` - } else if (builder.use_snippets and !std.mem.startsWith(u8, source[end_index..], "(")) { + } else if (builder.use_snippets and !std.mem.startsWith(u8, source[identifier_loc.end..], "(")) { format = .snippet; } @@ -949,18 +981,9 @@ pub fn completionAtIndex( const completions = builder.completions.items; if (completions.len == 0) return null; - var start_index = source_index; - while (start_index > 0 and Analyser.isSymbolChar(source[start_index - 1])) { - start_index -= 1; - } - - var end_index = source_index; - while (end_index < source.len and Analyser.isSymbolChar(source[end_index])) { - end_index += 1; - } - - const insert_range = offsets.locToRange(source, .{ .start = start_index, .end = source_index }, server.offset_encoding); - const replace_range = offsets.locToRange(source, .{ .start = start_index, .end = end_index }, server.offset_encoding); + 
const identifier_loc = prepareCompletionLoc(&handle.tree, source_index); + const insert_range = offsets.locToRange(source, .{ .start = identifier_loc.start, .end = source_index }, server.offset_encoding); + const replace_range = offsets.locToRange(source, .{ .start = identifier_loc.start, .end = identifier_loc.end }, server.offset_encoding); for (completions) |*item| { if (item.textEdit == null) { @@ -1741,7 +1764,7 @@ fn collectFieldAccessContainerNodes( // inconsistent at returning name_loc for methods, ie // `abc.method() == .` => fails, `abc.method(.{}){.}` => ok // it also fails for `abc.xyz.*` ... currently we take advantage of this quirk - const name_loc = Analyser.identifierLocFromIndex(&handle.tree, loc.end) orelse { + const name_loc = offsets.identifierLocFromIndex(&handle.tree, loc.end) orelse { const result = try analyser.getFieldAccessType(handle, loc.end, loc) orelse return; const container = try analyser.resolveDerefType(result) orelse result; if (try analyser.resolveUnwrapErrorUnionType(container, .payload)) |unwrapped| { diff --git a/src/features/goto.zig b/src/features/goto.zig index 447a957f6..18bdc16a9 100644 --- a/src/features/goto.zig +++ b/src/features/goto.zig @@ -79,7 +79,7 @@ fn gotoDefinitionLabel( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_loc = Analyser.identifierLocFromIndex(&handle.tree, pos_index) orelse return null; + const name_loc = offsets.identifierLocFromIndex(&handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try Analyser.lookupLabel(handle, name, pos_index)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.locToRange(handle.tree.source, loc, offset_encoding), decl, kind, offset_encoding); @@ -95,7 +95,7 @@ fn gotoDefinitionGlobal( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_token, const name_loc = Analyser.identifierTokenAndLocFromIndex(&handle.tree, pos_index) orelse return null; + const name_token, const name_loc = offsets.identifierTokenAndLocFromIndex(&handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = (try analyser.lookupSymbolGlobal(handle, name, pos_index)) orelse return null; return try gotoDefinitionSymbol(analyser, offsets.tokenToRange(&handle.tree, name_token, offset_encoding), decl, kind, offset_encoding); @@ -138,7 +138,7 @@ fn gotoDefinitionEnumLiteral( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_token, const name_loc = Analyser.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse { + const name_token, const name_loc = offsets.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse { return gotoDefinitionStructInit(analyser, handle, source_index, kind, offset_encoding); }; const name = offsets.locToSlice(handle.tree.source, name_loc); @@ -209,7 +209,7 @@ fn gotoDefinitionFieldAccess( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_token, const name_loc = Analyser.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse return null; + const name_token, const name_loc = offsets.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const held_loc = offsets.locMerge(loc, name_loc); const accesses = (try analyser.getSymbolFieldAccesses(arena, handle, source_index, held_loc, name)) orelse return null; diff --git a/src/features/hover.zig 
b/src/features/hover.zig index 1c876ec20..0bf7a4477 100644 --- a/src/features/hover.zig +++ b/src/features/hover.zig @@ -296,7 +296,7 @@ fn hoverDefinitionGlobal( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_token, const name_loc = Analyser.identifierTokenAndLocFromIndex(&handle.tree, pos_index) orelse return null; + const name_token, const name_loc = offsets.identifierTokenAndLocFromIndex(&handle.tree, pos_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const hover_text = blk: { const is_escaped_identifier = handle.tree.source[handle.tree.tokenStart(name_token)] == '@'; @@ -372,7 +372,7 @@ fn hoverDefinitionEnumLiteral( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const name_token, const name_loc = Analyser.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse { + const name_token, const name_loc = offsets.identifierTokenAndLocFromIndex(&handle.tree, source_index) orelse { return try hoverDefinitionStructInit(analyser, arena, handle, source_index, markup_kind, offset_encoding); }; const name = offsets.locToSlice(handle.tree.source, name_loc); diff --git a/src/features/references.zig b/src/features/references.zig index e0a074d5d..232df4011 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -679,7 +679,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen ); } - const name_loc = Analyser.identifierLocFromIndex(&handle.tree, source_index) orelse return null; + const name_loc = offsets.identifierLocFromIndex(&handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); const decl = switch (pos_context) { diff --git a/src/features/signature_help.zig b/src/features/signature_help.zig index f000dc75f..7a5eeb264 100644 --- a/src/features/signature_help.zig +++ b/src/features/signature_help.zig @@ -287,7 +287,7 @@ pub fn getSignatureInfo( ); } - const name_loc = Analyser.identifierLocFromIndex(&handle.tree, loc.end - 1) orelse { + const name_loc = offsets.identifierLocFromIndex(&handle.tree, loc.end - 1) orelse { try symbol_stack.append(arena, .l_paren); continue; }; diff --git a/src/offsets.zig b/src/offsets.zig index e5c520b7c..c9b54affa 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -227,7 +227,9 @@ pub const IdentifierIndexRange = enum { full, }; -/// Support formats: +/// The source index must be at the start of the valid identifier. +/// +/// Supported formats: /// - `foo` /// - `@"foo"` /// - `@foo` @@ -251,12 +253,7 @@ pub fn identifierIndexToLoc(text: [:0]const u8, source_index: usize, range: Iden } else { const start: usize = source_index + @intFromBool(text[source_index] == '@'); var index = start; - while (true) : (index += 1) { - switch (text[index]) { - 'a'...'z', 'A'...'Z', '_', '0'...'9' => {}, - else => break, - } - } + while (isSymbolChar(text[index])) : (index += 1) {} return .{ .start = if (range == .full) source_index else start, .end = index }; } } @@ -304,6 +301,93 @@ pub fn identifierTokenToNameSlice(tree: *const Ast, identifier_token: Ast.TokenI return locToSlice(tree.source, identifierTokenToNameLoc(tree, identifier_token)); } +/// See `identifierTokenAndLocFromIndex`. +pub fn identifierLocFromIndex(tree: *const Ast, source_index: usize) ?Loc { + _, const loc = identifierTokenAndLocFromIndex(tree, source_index) orelse return null; + return loc; +} + +/// Returns the source location of `foo` if the source index is on a valid identifier. 
+/// +/// Supported formats: +/// - `foo` (identifier) +/// - `@"foo"` (escaped identifier) +/// - `@foo` (builtin) +pub fn identifierTokenAndLocFromIndex(tree: *const Ast, source_index: usize) ?struct { Ast.TokenIndex, Loc } { + const token = sourceIndexToTokenIndex(tree, source_index).pickPreferred(&.{ .identifier, .builtin }, tree) orelse return null; + switch (tree.tokenTag(token)) { + .identifier, .builtin => {}, + else => return null, + } + const token_loc = tokenToLoc(tree, token); + std.debug.assert(token_loc.start <= source_index and source_index <= token_loc.end); + return .{ token, identifierIndexToLoc(tree.source, token_loc.start, .name) }; +} + +test identifierLocFromIndex { + var tree = try Ast.parse(std.testing.allocator, + \\ name @builtin @"escaped" @"s p a c e" end + , .zig); + defer tree.deinit(std.testing.allocator); + + try std.testing.expectEqualSlices( + std.zig.Token.Tag, + &.{ .identifier, .builtin, .identifier, .identifier, .identifier, .eof }, + tree.tokens.items(.tag), + ); + + { + const expected_loc: Loc = .{ .start = 1, .end = 5 }; + std.debug.assert(std.mem.eql(u8, "name", locToSlice(tree.source, expected_loc))); + + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 1)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 2)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 5)); + } + + { + const expected_loc: Loc = .{ .start = 8, .end = 15 }; + std.debug.assert(std.mem.eql(u8, "builtin", locToSlice(tree.source, expected_loc))); + + try std.testing.expectEqual(@as(?Loc, null), identifierLocFromIndex(&tree, 6)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 7)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 8)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 11)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 15)); + try std.testing.expectEqual(@as(?Loc, null), identifierLocFromIndex(&tree, 16)); + } + + { + const expected_loc: Loc = .{ .start = 19, .end = 26 }; + std.debug.assert(std.mem.eql(u8, "escaped", locToSlice(tree.source, expected_loc))); + + try std.testing.expectEqual(@as(?Loc, null), identifierLocFromIndex(&tree, 16)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 17)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 18)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 19)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 23)); + try std.testing.expectEqual(expected_loc, identifierLocFromIndex(&tree, 27)); + try std.testing.expectEqual(@as(?Loc, null), identifierLocFromIndex(&tree, 28)); + } + + { + const expected_loc: Loc = .{ .start = 43, .end = 46 }; + std.debug.assert(std.mem.eql(u8, "end", locToSlice(tree.source, expected_loc))); + + try std.testing.expectEqual(@as(?Loc, null), identifierLocFromIndex(&tree, 42)); + try std.testing.expectEqual(@as(?Loc, expected_loc), identifierLocFromIndex(&tree, 43)); + try std.testing.expectEqual(@as(?Loc, expected_loc), identifierLocFromIndex(&tree, 45)); + try std.testing.expectEqual(@as(?Loc, expected_loc), identifierLocFromIndex(&tree, 46)); + } +} + +pub fn isSymbolChar(char: u8) bool { + return switch (char) { + 'a'...'z', 'A'...'Z', '_', '0'...'9' => true, + else => false, + }; +} + pub fn tokensToLoc(tree: *const Ast, first_token: Ast.TokenIndex, last_token: Ast.TokenIndex) Loc { return .{ .start = 
tree.tokenStart(first_token), .end = tokenToLoc(tree, last_token).end }; } diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig index 0c4bdf848..c3c1e48ec 100644 --- a/tests/lsp_features/completion.zig +++ b/tests/lsp_features/completion.zig @@ -4097,6 +4097,36 @@ test "insert replace behaviour - function alias" { }); } +test "insert replace behaviour - escaped identifier" { + try testCompletionTextEdit(.{ + .source = + \\const @"foo bar" = 5; + \\const foo = @"foo + , + .label = "@\"foo bar\"", + .expected_insert_line = "const foo = @\"foo bar\"", + .expected_replace_line = "const foo = @\"foo bar\"", + }); + try testCompletionTextEdit(.{ + .source = + \\fn @"foo bar"() void {} + \\const foo = @"foo + , + .label = "@\"foo bar\"", + .expected_insert_line = "const foo = @\"foo bar\"@\"foo", + .expected_replace_line = "const foo = @\"foo bar\"", + }); + try testCompletionTextEdit(.{ + .source = + \\fn @"foo bar"() void {} + \\const foo = @"foo + , + .label = "@\"foo bar\"", + .expected_insert_line = "const foo = @\"foo bar\"", + .expected_replace_line = "const foo = @\"foo bar\"", + }); +} + test "insert replace behaviour - decl literal function" { try testCompletionTextEdit(.{ .source = @@ -4681,7 +4711,6 @@ fn testCompletionTextEdit( ctx.server.config_manager.config.enable_snippets = options.enable_snippets; const test_uri = try ctx.addDocument(.{ .source = text }); - const handle = ctx.server.document_store.getHandle(test_uri).?; const cursor_position = offsets.indexToPosition(options.source, cursor_idx, ctx.server.offset_encoding); const params: types.completion.Params = .{ @@ -4705,21 +4734,7 @@ fn testCompletionTextEdit( const TextEditOrInsertReplace = std.meta.Child(@TypeOf(completion_item.textEdit)); - const text_edit_or_insert_replace: TextEditOrInsertReplace = completion_item.textEdit orelse blk: { - var start_index: usize = cursor_idx; - while (start_index > 0 and zls.Analyser.isSymbolChar(handle.tree.source[start_index - 1])) { - start_index -= 1; - } - - const start_position = offsets.indexToPosition(text, start_index, ctx.server.offset_encoding); - - break :blk .{ - .text_edit = .{ - .newText = completion_item.insertText orelse completion_item.label, - .range = .{ .start = start_position, .end = cursor_position }, - }, - }; - }; + const text_edit_or_insert_replace: TextEditOrInsertReplace = completion_item.textEdit.?; switch (text_edit_or_insert_replace) { .text_edit => |text_edit| { diff --git a/tests/utility/position_context.zig b/tests/utility/position_context.zig index d680158aa..c78f9aae6 100644 --- a/tests/utility/position_context.zig +++ b/tests/utility/position_context.zig @@ -36,6 +36,21 @@ test "var_access" { , .empty, .{}); } +test "var_access escaped identifier" { + try testContext( + \\const foo = @" + , .var_access, .{}); + try testContext( + \\const foo = @"foo + , .var_access, .{}); + try testContext( + \\const foo = @"foo + , .var_access, .{}); + try testContext( + \\const foo = @"foo" + , .var_access, .{}); +} + test "function.payload" { try testContext( \\ fn foo() !Str { @@ -194,6 +209,12 @@ test "field access" { , .field_access, .{}); } +test "field access function call" { + try testContext( + \\Foo.bar() + , .field_access, .{}); +} + test "field access across multiple lines" { // ErrorBuilder doesn't support locs across multiple lines so don't let the test fail :) try testContext( @@ -594,6 +615,19 @@ test "label decl" { , .label_decl, .{}); } +test "doctest name" { + try testContext( + \\test + , .test_doctest_name, .{}); + 
try testContext( + \\test foo {} + , .test_doctest_name, .{}); + + try testContext( + \\test foo.bar, + , .field_access, .{}); +} + test "empty" { try testContext( \\
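A few implementation notes on the refactor above, each with an illustrative sketch. The helper names, types, and tests in these sketches are illustrative only and are not part of the diff.

First, the `Stack` change in `getPositionContext`: `peek` and `pop` no longer take an allocator because the stack is always created via `initCapacity` with a non-zero capacity, and popping never shrinks capacity, so whenever the stack is empty `peek` can re-seed the base `.{ .ctx = .empty, .scope = .global }` state with `appendAssumeCapacity` instead of a fallible `append`. A minimal sketch of that invariant, assuming `std.ArrayListUnmanaged` and a plain `u32` payload in place of `Stack.State`:

```zig
const std = @import("std");

/// Lazily re-seeds a base element so `peek` never has to allocate: the list is
/// created with capacity >= 1, and popping never shrinks capacity, so an empty
/// list always has room for the fallback element.
const LazyStack = struct {
    items: std.ArrayListUnmanaged(u32),

    fn initCapacity(allocator: std.mem.Allocator, capacity: usize) error{OutOfMemory}!LazyStack {
        std.debug.assert(capacity > 0); // makes `appendAssumeCapacity` in `peek` safe
        return .{ .items = try std.ArrayListUnmanaged(u32).initCapacity(allocator, capacity) };
    }

    fn deinit(self: *LazyStack, allocator: std.mem.Allocator) void {
        self.items.deinit(allocator);
    }

    fn peek(self: *LazyStack) *u32 {
        if (self.items.items.len == 0) self.items.appendAssumeCapacity(0);
        return &self.items.items[self.items.items.len - 1];
    }

    fn pop(self: *LazyStack) void {
        if (self.items.items.len != 0) self.items.items.len -= 1;
    }
};

test LazyStack {
    var stack = try LazyStack.initCapacity(std.testing.allocator, 8);
    defer stack.deinit(std.testing.allocator);

    stack.pop(); // popping an empty stack is a no-op
    stack.peek().* = 42; // the base element was re-seeded without allocating
    try std.testing.expectEqual(@as(u32, 42), stack.peek().*);
}
```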
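Second, the completion changes: `prepareCompletionLoc` yields the identifier location under the cursor, and from it the insert range covers only what has been typed so far (identifier start up to the cursor) while the replace range covers the whole identifier, which is what the `expected_insert_line`/`expected_replace_line` pairs in the new escaped-identifier tests exercise. A minimal sketch of that split, using a hypothetical `Loc` struct in place of `offsets.Loc`:

```zig
const std = @import("std");

/// Simplified stand-in for `offsets.Loc` (byte offsets into the source).
const Loc = struct { start: usize, end: usize };

/// Given the location of the identifier under the cursor and the cursor's byte
/// index, derive the two ranges LSP completion text edits distinguish:
/// - insert: replaces only what was typed so far (identifier start .. cursor)
/// - replace: overwrites the whole identifier (identifier start .. identifier end)
fn insertAndReplaceLocs(identifier_loc: Loc, cursor_index: usize) struct { Loc, Loc } {
    const insert: Loc = .{ .start = identifier_loc.start, .end = cursor_index };
    const replace: Loc = .{ .start = identifier_loc.start, .end = identifier_loc.end };
    return .{ insert, replace };
}

test insertAndReplaceLocs {
    // Hypothetical source `const foo = @"foo bar";` with the cursor inside `@"foo bar"`.
    const identifier_loc: Loc = .{ .start = 12, .end = 22 };
    const cursor_index: usize = 17;

    const insert, const replace = insertAndReplaceLocs(identifier_loc, cursor_index);
    try std.testing.expectEqual(Loc{ .start = 12, .end = 17 }, insert);
    try std.testing.expectEqual(Loc{ .start = 12, .end = 22 }, replace);
}
```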
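Finally, the identifier helpers that moved into `offsets.zig` document three spellings: `foo`, `@foo` (builtin), and `@"foo"` (escaped identifier), and the `.name` range strips the leading `@` and the quotes, as the relocated `identifierLocFromIndex` test shows. A simplified, self-contained illustration of that name extraction (not the real `identifierIndexToLoc`, which operates on byte offsets into the whole source):

```zig
const std = @import("std");

/// Simplified illustration of the three identifier spellings documented on
/// `offsets.identifierTokenAndLocFromIndex`: `foo`, `@foo`, `@"foo"`.
/// Returns just the name portion, without the `@` and without quotes.
fn identifierName(text: []const u8) []const u8 {
    if (text.len >= 3 and std.mem.startsWith(u8, text, "@\"") and std.mem.endsWith(u8, text, "\"")) {
        return text[2 .. text.len - 1]; // @"foo" -> foo
    }
    if (std.mem.startsWith(u8, text, "@")) {
        return text[1..]; // @foo -> foo
    }
    return text; // foo -> foo
}

test identifierName {
    try std.testing.expectEqualStrings("foo", identifierName("foo"));
    try std.testing.expectEqualStrings("import", identifierName("@import"));
    try std.testing.expectEqualStrings("s p a c e", identifierName("@\"s p a c e\""));
}
```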