From c4905bfa528ef605703777c0d0b789a7e5321d40 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Tue, 10 Feb 2026 17:02:52 +0100 Subject: [PATCH 1/9] refactor symbol/label/callsite references Preparation for better cross-file references. --- src/analysis.zig | 12 +- src/features/references.zig | 255 +++++++++++++++++------------------- 2 files changed, 121 insertions(+), 146 deletions(-) diff --git a/src/analysis.zig b/src/analysis.zig index cb9708c7e..eb0446eda 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -1666,21 +1666,17 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E } const refs = try references.callsiteReferences( - analyser.arena, analyser, .{ .decl = func_decl, .handle = decl_handle.handle, .container_type = decl_handle.container_type }, false, false, ); - // TODO: Set `workspace` to true; current problems - // - we gather dependencies, not dependents - var possible: std.ArrayList(Type.TypeWithDescriptor) = .empty; for (refs.items) |ref| { var call_buf: [1]Ast.Node.Index = undefined; - const call = tree.fullCall(&call_buf, ref.call_node).?; + const call = tree.fullCall(&call_buf, ref.node).?; const real_param_idx = if (func_params_len != 0 and pay.param_index != 0 and call.ast.params.len == func_params_len - 1) pay.param_index - 1 @@ -1695,15 +1691,13 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E defer analyser.collect_callsite_references = old_collect_callsite_references; analyser.collect_callsite_references = false; - const handle = try analyser.store.getOrLoadHandle(ref.uri) orelse continue; - break :resolve_ty try analyser.resolveTypeOfNode(.of( // TODO?: this is a """heuristic based approach""" // perhaps it would be better to use proper self detection // maybe it'd be a perf issue and this is fine? // you figure it out future contributor <3 call.ast.params[real_param_idx], - handle, + ref.handle, )) orelse continue; }; @@ -1713,7 +1707,7 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E const loc = offsets.tokenToPosition(tree, tree.nodeMainToken(call.ast.params[real_param_idx]), .@"utf-8"); try possible.append(analyser.arena, .{ .type = ty, - .descriptor = try std.fmt.allocPrint(analyser.arena, "{s}:{d}:{d}", .{ ref.uri.raw, loc.line + 1, loc.character + 1 }), + .descriptor = try std.fmt.allocPrint(analyser.arena, "{s}:{d}:{d}", .{ ref.handle.uri.raw, loc.line + 1, loc.character + 1 }), }); } diff --git a/src/features/references.zig b/src/features/references.zig index 232df4011..e8fed5e89 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -15,21 +15,20 @@ const tracy = @import("tracy"); fn labelReferences( allocator: std.mem.Allocator, - decl: Analyser.DeclWithHandle, + handle: *DocumentStore.Handle, + decl: @FieldType(Analyser.Declaration, "label"), encoding: offsets.Encoding, include_decl: bool, ) error{OutOfMemory}!std.ArrayList(types.Location) { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - std.debug.assert(decl.decl == .label); // use `symbolReferences` instead - const handle = decl.handle; const tree = &handle.tree; // Find while / for / block from label -> iterate over children nodes, find break and continues, change their labels if they match. // This case can be implemented just by scanning tokens. 
- const first_tok = decl.decl.label.identifier; - const last_tok = ast.lastToken(tree, decl.decl.label.block); + const first_tok = decl.identifier; + const last_tok = ast.lastToken(tree, decl.block); var locations: std.ArrayList(types.Location) = .empty; errdefer locations.deinit(allocator); @@ -38,7 +37,7 @@ fn labelReferences( // The first token is always going to be the label try locations.append(allocator, .{ .uri = handle.uri.raw, - .range = offsets.tokenToRange(&handle.tree, first_tok, encoding), + .range = offsets.tokenToRange(tree, first_tok, encoding), }); } @@ -54,7 +53,7 @@ fn labelReferences( try locations.append(allocator, .{ .uri = handle.uri.raw, - .range = offsets.tokenToRange(&handle.tree, curr_tok + 2, encoding), + .range = offsets.tokenToRange(tree, curr_tok + 2, encoding), }); } @@ -62,56 +61,51 @@ fn labelReferences( } const Builder = struct { - allocator: std.mem.Allocator, locations: std.ArrayList(types.Location) = .empty, /// this is the declaration we are searching for - decl_handle: Analyser.DeclWithHandle, + target_symbol: Analyser.DeclWithHandle, /// the decl is local to a function, block, etc local_only_decl: bool, - /// Whether the `decl_handle` has been added - did_add_decl_handle: bool = false, + /// Whether the `target_symbol` has been added + did_add_target_symbol: bool = false, analyser: *Analyser, encoding: offsets.Encoding, - const Context = struct { - builder: *Builder, - handle: *DocumentStore.Handle, - }; - - fn deinit(self: *Builder) void { - self.locations.deinit(self.allocator); - } - fn add(self: *Builder, handle: *DocumentStore.Handle, token_index: Ast.TokenIndex) error{OutOfMemory}!void { - if (self.decl_handle.handle == handle and - self.decl_handle.nameToken() == token_index) + if (self.target_symbol.handle == handle and + self.target_symbol.nameToken() == token_index) { - if (self.did_add_decl_handle) return; - self.did_add_decl_handle = true; + if (self.did_add_target_symbol) return; + self.did_add_target_symbol = true; } - try self.locations.append(self.allocator, .{ + try self.locations.append(self.analyser.arena, .{ .uri = handle.uri.raw, .range = offsets.tokenToRange(&handle.tree, token_index, self.encoding), }); } fn collectReferences(self: *Builder, handle: *DocumentStore.Handle, node: Ast.Node.Index) Analyser.Error!void { - const context = Context{ - .builder = self, - .handle = handle, - }; - try referenceNode(&context, &handle.tree, node); - var walker: ast.Walker = try .init(self.allocator, &handle.tree, node); - defer walker.deinit(self.allocator); - while (try walker.nextIgnoreClose(self.allocator, &handle.tree)) |child| try referenceNode(&context, &handle.tree, child); + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + + const arena = self.analyser.arena; + try referenceNode(self, handle, node); + var walker: ast.Walker = try .init(arena, &handle.tree, node); + defer walker.deinit(arena); + while (try walker.nextIgnoreClose(arena, &handle.tree)) |child| { + try referenceNode(self, handle, child); + } } - fn referenceNode(self: *const Context, tree: *const Ast, node: Ast.Node.Index) Analyser.Error!void { - const builder = self.builder; - const handle = self.handle; - const decl_name = offsets.identifierTokenToNameSlice( - &builder.decl_handle.handle.tree, - builder.decl_handle.nameToken(), + fn referenceNode( + builder: *Builder, + handle: *DocumentStore.Handle, + node: Ast.Node.Index, + ) Analyser.Error!void { + const tree = &handle.tree; + const target_symbol_name = offsets.identifierTokenToNameSlice( + 
&builder.target_symbol.handle.tree, + builder.target_symbol.nameToken(), ); switch (tree.nodeTag(node)) { @@ -128,7 +122,7 @@ const Builder = struct { else => unreachable, }; const name = offsets.identifierTokenToNameSlice(tree, name_token); - if (!std.mem.eql(u8, name, decl_name)) return; + if (!std.mem.eql(u8, name, target_symbol_name)) return; const child = try builder.analyser.lookupSymbolGlobal( handle, @@ -136,7 +130,7 @@ const Builder = struct { tree.tokenStart(name_token), ) orelse return; - if (builder.decl_handle.eql(child)) { + if (builder.target_symbol.eql(child)) { try builder.add(handle, name_token); } }, @@ -144,14 +138,14 @@ const Builder = struct { if (builder.local_only_decl) return; const lhs_node, const field_token = tree.nodeData(node).node_and_token; const name = offsets.identifierTokenToNameSlice(tree, field_token); - if (!std.mem.eql(u8, name, decl_name)) return; + if (!std.mem.eql(u8, name, target_symbol_name)) return; const lhs = try builder.analyser.resolveTypeOfNode(.of(lhs_node, handle)) orelse return; const deref_lhs = try builder.analyser.resolveDerefType(lhs) orelse lhs; const child = try deref_lhs.lookupSymbol(builder.analyser, name) orelse return; - if (builder.decl_handle.eql(child)) { + if (builder.target_symbol.eql(child)) { try builder.add(handle, field_token); } }, @@ -170,7 +164,7 @@ const Builder = struct { for (struct_init.ast.fields) |value_node| { // the node of `value` in `.name = value` const name_token = tree.firstToken(value_node) - 2; // math our way two token indexes back to get the `name` const name = offsets.identifierTokenToNameSlice(tree, name_token); - if (!std.mem.eql(u8, name, decl_name)) continue; + if (!std.mem.eql(u8, name, target_symbol_name)) continue; const nodes = switch (tree.nodeTag(node)) { .struct_init_dot, @@ -178,7 +172,7 @@ const Builder = struct { .struct_init_dot_two, .struct_init_dot_two_comma, => try ast.nodesOverlappingIndex( - builder.allocator, + builder.analyser.arena, tree, tree.tokenStart(name_token), ), @@ -198,7 +192,7 @@ const Builder = struct { nodes[1..], ) orelse return; - if (builder.decl_handle.eql(lookup)) { + if (builder.target_symbol.eql(lookup)) { try builder.add(handle, name_token); } // if we get here then we know that the name of the field matched @@ -210,10 +204,10 @@ const Builder = struct { if (builder.local_only_decl) return; const name_token = tree.nodeMainToken(node); const name = offsets.identifierTokenToNameSlice(&handle.tree, name_token); - if (!std.mem.eql(u8, name, decl_name)) return; + if (!std.mem.eql(u8, name, target_symbol_name)) return; const lookup = try builder.analyser.getSymbolEnumLiteral(handle, tree.tokenStart(name_token), name) orelse return; - if (builder.decl_handle.eql(lookup)) { + if (builder.target_symbol.eql(lookup)) { try builder.add(handle, name_token); } }, @@ -222,15 +216,14 @@ const Builder = struct { } }; -fn gatherReferences( - allocator: std.mem.Allocator, - analyser: *Analyser, - curr_handle: *DocumentStore.Handle, +fn gatherWorkspaceReferences( + store: *DocumentStore, + builder: anytype, + root_handle: *DocumentStore.Handle, skip_std_references: bool, include_decl: bool, - builder: anytype, - handle_behavior: enum { get, get_or_load }, ) Analyser.Error!void { + const allocator = store.allocator; var dependencies: Uri.ArrayHashMap(void) = .empty; defer { for (dependencies.keys()) |uri| { @@ -239,16 +232,16 @@ fn gatherReferences( dependencies.deinit(allocator); } - var it: DocumentStore.HandleIterator = .{ .store = analyser.store }; + var it: 
DocumentStore.HandleIterator = .{ .store = store };
     while (it.next()) |handle| {
         if (skip_std_references and DocumentStore.isInStd(handle.uri)) {
-            if (!include_decl or !handle.uri.eql(curr_handle.uri))
+            if (!include_decl or !handle.uri.eql(root_handle.uri))
                 continue;
         }
 
         var handle_dependencies: std.ArrayList(Uri) = .empty;
         defer handle_dependencies.deinit(allocator);
-        try analyser.store.collectDependencies(allocator, handle, &handle_dependencies);
+        try store.collectDependencies(allocator, handle, &handle_dependencies);
 
         try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
         for (handle_dependencies.items) |uri| {
@@ -260,35 +253,32 @@
     }
 
     for (dependencies.keys()) |uri| {
-        if (uri.eql(curr_handle.uri)) continue;
-        const handle = switch (handle_behavior) {
-            .get => analyser.store.getHandle(uri),
-            .get_or_load => try analyser.store.getOrLoadHandle(uri),
-        } orelse continue;
+        if (uri.eql(root_handle.uri)) continue;
+        const handle = try store.getOrLoadHandle(uri) orelse continue;
 
         try builder.collectReferences(handle, .root);
     }
 }
 
 fn symbolReferences(
-    allocator: std.mem.Allocator,
     analyser: *Analyser,
     request: GeneralReferencesRequest,
-    decl_handle: Analyser.DeclWithHandle,
+    target_symbol: Analyser.DeclWithHandle,
     encoding: offsets.Encoding,
-    /// add `decl_handle` as a references
+    /// add `target_symbol` as a reference
     include_decl: bool,
     /// exclude references from the std library
    skip_std_references: bool,
-    curr_handle: *DocumentStore.Handle,
+    /// The file on which the request was initiated.
+    current_handle: *DocumentStore.Handle,
 ) Analyser.Error!std.ArrayList(types.Location) {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    std.debug.assert(decl_handle.decl != .label); // use `labelReferences` instead
+    std.debug.assert(target_symbol.decl != .label); // use `labelReferences` instead
 
-    const doc_scope = try decl_handle.handle.getDocumentScope();
-    const source_index = decl_handle.handle.tree.tokenStart(decl_handle.nameToken());
+    const doc_scope = try target_symbol.handle.getDocumentScope();
+    const source_index = target_symbol.handle.tree.tokenStart(target_symbol.nameToken());
     const scope_index = Analyser.innermostScopeAtIndexWithTag(doc_scope, source_index, .init(.{
         .block = true,
         .container = true,
@@ -299,7 +289,7 @@
 
     // If `local_node != null`, references to the declaration can only be
    // found inside of the given ast node.
- const local_node: ?Ast.Node.Index = switch (decl_handle.decl) { + const local_node: ?Ast.Node.Index = switch (target_symbol.decl) { .ast_node => switch (doc_scope.getScopeTag(scope_index)) { .block => scope_node, .container => null, @@ -320,28 +310,28 @@ fn symbolReferences( }; var builder: Builder = .{ - .allocator = allocator, .analyser = analyser, - .decl_handle = decl_handle, + .target_symbol = target_symbol, .local_only_decl = local_node != null, .encoding = encoding, }; - errdefer builder.deinit(); - if (include_decl) try builder.add(decl_handle.handle, decl_handle.nameToken()); + blk: { + if (!include_decl) break :blk; + if (request == .highlight and !target_symbol.handle.uri.eql(current_handle.uri)) break :blk; + try builder.add(target_symbol.handle, target_symbol.nameToken()); + } - try builder.collectReferences(curr_handle, local_node orelse .root); + try builder.collectReferences(current_handle, local_node orelse .root); - const workspace = local_node == null and request != .highlight and decl_handle.isPublic(); + const workspace = local_node == null and request != .highlight and target_symbol.isPublic(); if (workspace) { - try gatherReferences( - allocator, - analyser, - curr_handle, + try gatherWorkspaceReferences( + analyser.store, + &builder, + current_handle, skip_std_references, include_decl, - &builder, - .get, ); } @@ -499,48 +489,37 @@ fn controlFlowReferences( return locations; } -pub const Callsite = struct { - uri: Uri, - call_node: Ast.Node.Index, -}; - const CallBuilder = struct { - allocator: std.mem.Allocator, - callsites: std.ArrayList(Callsite) = .empty, + callsites: std.ArrayList(Analyser.NodeWithHandle) = .empty, /// this is the declaration we are searching for - decl_handle: Analyser.DeclWithHandle, + target_decl: Analyser.DeclWithHandle, analyser: *Analyser, - const Context = struct { - builder: *CallBuilder, - handle: *DocumentStore.Handle, - }; - - fn deinit(self: *CallBuilder) void { - self.callsites.deinit(self.allocator); - } - fn add(self: *CallBuilder, handle: *DocumentStore.Handle, call_node: Ast.Node.Index) error{OutOfMemory}!void { - try self.callsites.append(self.allocator, .{ - .uri = handle.uri, - .call_node = call_node, + try self.callsites.append(self.analyser.arena, .{ + .handle = handle, + .node = call_node, }); } fn collectReferences(self: *CallBuilder, handle: *DocumentStore.Handle, node: Ast.Node.Index) Analyser.Error!void { - const context = Context{ - .builder = self, - .handle = handle, - }; - var walker: ast.Walker = try .init(self.allocator, &handle.tree, node); - defer walker.deinit(self.allocator); - while (try walker.nextIgnoreClose(self.allocator, &handle.tree)) |child| try referenceNode(&context, &handle.tree, child); + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + + const arena = self.analyser.arena; + var walker: ast.Walker = try .init(arena, &handle.tree, node); + defer walker.deinit(arena); + while (try walker.nextIgnoreClose(arena, &handle.tree)) |child| { + try referenceNode(self, handle, child); + } } - fn referenceNode(self: *const Context, tree: *const Ast, node: Ast.Node.Index) Analyser.Error!void { - const builder = self.builder; - const handle = self.handle; - + fn referenceNode( + builder: *CallBuilder, + handle: *DocumentStore.Handle, + node: Ast.Node.Index, + ) Analyser.Error!void { + const tree = &handle.tree; switch (tree.nodeTag(node)) { .call, .call_comma, @@ -562,7 +541,7 @@ const CallBuilder = struct { tree.tokenStart(identifier_token), )) orelse return; - if 
(builder.decl_handle.eql(child)) { + if (builder.target_decl.eql(child)) { try builder.add(handle, node); } }, @@ -574,7 +553,7 @@ const CallBuilder = struct { const symbol = offsets.tokenToSlice(tree, field_name); const child = (try deref_lhs.lookupSymbol(builder.analyser, symbol)) orelse return; - if (builder.decl_handle.eql(child)) { + if (builder.target_decl.eql(child)) { try builder.add(handle, node); } }, @@ -587,33 +566,34 @@ const CallBuilder = struct { }; pub fn callsiteReferences( - allocator: std.mem.Allocator, analyser: *Analyser, decl_handle: Analyser.DeclWithHandle, /// exclude references from the std library skip_std_references: bool, /// search other files for references workspace: bool, -) Analyser.Error!std.ArrayList(Callsite) { +) Analyser.Error!std.ArrayList(Analyser.NodeWithHandle) { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); std.debug.assert(decl_handle.decl == .ast_node); - var builder = CallBuilder{ - .allocator = allocator, + var builder: CallBuilder = .{ .analyser = analyser, - .decl_handle = decl_handle, + .target_decl = decl_handle, }; - errdefer builder.deinit(); - const curr_handle = decl_handle.handle; + try builder.collectReferences(decl_handle.handle, .root); - try builder.collectReferences(curr_handle, .root); - - if (!workspace) return builder.callsites; - - try gatherReferences(allocator, analyser, curr_handle, skip_std_references, false, &builder, .get_or_load); + if (workspace) { + try gatherWorkspaceReferences( + analyser.store, + &builder, + decl_handle.handle, + skip_std_references, + false, + ); + } return builder.callsites; } @@ -682,7 +662,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen const name_loc = offsets.identifierLocFromIndex(&handle.tree, source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); - const decl = switch (pos_context) { + const target_decl = switch (pos_context) { .var_access, .test_doctest_name => try analyser.lookupSymbolGlobal(handle, name, source_index), .field_access => |loc| z: { const held_loc = offsets.locMerge(loc, name_loc); @@ -699,13 +679,18 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen else => null, } orelse return null; - break :locs switch (decl.decl) { - .label => try labelReferences(arena, decl, server.offset_encoding, include_decl), - else => try symbolReferences( + break :locs switch (target_decl.decl) { + .label => |payload| try labelReferences( arena, + target_decl.handle, + payload, + server.offset_encoding, + include_decl, + ), + else => try symbolReferences( &analyser, request, - decl, + target_decl, server.offset_encoding, include_decl, server.config_manager.config.skip_std_references, @@ -742,11 +727,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen .highlight => { var highlights: std.ArrayList(types.DocumentHighlight) = try .initCapacity(arena, locations.items.len); for (locations.items) |loc| { - const loc_uri = Uri.parse(arena, loc.uri) catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - else => return error.InvalidParams, - }; - if (!loc_uri.eql(handle.uri)) continue; + std.debug.assert(std.mem.eql(u8, handle.uri.raw, loc.uri)); highlights.appendAssumeCapacity(.{ .range = loc.range, .kind = .Text, From ce5e2387758823f780e13e0180c52b0340048dfc Mon Sep 17 00:00:00 2001 From: Techatrix Date: Tue, 10 Feb 2026 17:46:31 +0100 Subject: [PATCH 2/9] fix edge-case in uri percent encoding normalization A percent 
encoding with one upper and one lower case letter would not be normalized
properly.

Also avoids unnecessary loop iterations when encountering an already
normalized percent encoding.
---
 src/Uri.zig | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/src/Uri.zig b/src/Uri.zig
index 3970e8ad2..4b976baa8 100644
--- a/src/Uri.zig
+++ b/src/Uri.zig
@@ -115,9 +115,9 @@ test "parse - always add authority component (posix)" {
 }
 
 test "parse - normalize percent encoding (posix)" {
-    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///foo%5cmain%2ezig", false);
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///f%Aao%5cmain%2ezig", false);
     defer uri.deinit(std.testing.allocator);
-    try std.testing.expectEqualStrings("file:///foo%5Cmain.zig", uri.raw);
+    try std.testing.expectEqualStrings("file:///f%AAo%5Cmain.zig", uri.raw);
 }
 
 test "parse - convert percent encoded '\\' to '/' (windows)" {
@@ -435,21 +435,21 @@
         const lower_value = std.fmt.charToDigit(lower_hex, 16) catch continue;
         const percent_encoded_char = upper_value * 16 + lower_value;
 
-        if (!isValidChar(percent_encoded_char)) {
-            if (std.ascii.isUpper(upper_hex) or std.ascii.isUpper(lower_hex)) continue;
-
+        if (isValidChar(percent_encoded_char)) {
+            // a character has been unnecessarily escaped
+            result.appendSliceAssumeCapacity(percent_encoded[start..percent]);
+            result.appendAssumeCapacity(percent_encoded_char);
+            start = percent + 3;
+        } else if (std.ascii.isLower(upper_hex) or std.ascii.isLower(lower_hex)) {
             // convert percent encoded character to upper case
             result.appendSliceAssumeCapacity(percent_encoded[start..percent]);
             result.appendAssumeCapacity('%');
             result.appendAssumeCapacity(std.ascii.toUpper(upper_hex));
             result.appendAssumeCapacity(std.ascii.toUpper(lower_hex));
+            start = percent + 3;
         } else {
-            // a character has been unnecessarily escaped
-            result.appendSliceAssumeCapacity(percent_encoded[start..percent]);
-            result.appendAssumeCapacity(percent_encoded_char);
+            // skip properly percent encoded character
         }
-
-        start = percent + 3;
         index = percent + 3;
     }
     result.appendSliceAssumeCapacity(percent_encoded[start..]);

From c0058121e3ac1f9b829489595c76240e842caa0b Mon Sep 17 00:00:00 2001
From: Techatrix
Date: Tue, 10 Feb 2026 17:51:08 +0100
Subject: [PATCH 3/9] resolve file import uris without a round trip to file
 paths

This enables imports to be resolved on documents with a non-`file` uri
scheme.
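
For illustration, a hedged sketch of the behavior this unlocks. The test
below is hypothetical (it is not part of the patch) and assumes the
`Uri.resolveImport` signature introduced further down:

    test "resolveImport on a non-file scheme (illustrative)" {
        // hypothetical: an in-memory document that was never saved to disk
        const uri: Uri = try .parse(std.testing.allocator, "untitled:///dir/main.zig");
        defer uri.deinit(std.testing.allocator);

        // re-parse the raw text into a `std.Uri`, as the callers in this patch do
        const parsed_uri = std.Uri.parse(uri.raw) catch unreachable;

        // resolve `@import("../helper.zig")` directly on the uri path,
        // with no round trip through a filesystem path
        const resolved = try Uri.resolveImport(std.testing.allocator, uri, parsed_uri, "../helper.zig");
        defer resolved.deinit(std.testing.allocator);

        try std.testing.expectEqualStrings("untitled:///helper.zig", resolved.raw);
    }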
---
 src/DocumentStore.zig             | 24 +++-------
 src/Uri.zig                       | 57 ++++++++++++++++++++++++++++++-
 tests/context.zig                 | 11 +-----
 tests/lsp_features/references.zig |  5 +--
 4 files changed, 62 insertions(+), 35 deletions(-)

diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig
index 37fa7d05f..51c941f70 100644
--- a/src/DocumentStore.zig
+++ b/src/DocumentStore.zig
@@ -574,11 +574,7 @@ pub const Handle = struct {
             var imports = try analysis.collectImports(allocator, &handle.tree);
             defer imports.deinit(allocator);
 
-            const base_path = handle.uri.toFsPath(allocator) catch |err| switch (err) {
-                error.OutOfMemory => return error.OutOfMemory,
-                error.UnsupportedScheme => return &.{},
-            };
-            defer allocator.free(base_path);
+            const parsed_uri = std.Uri.parse(handle.uri.raw) catch unreachable; // The Uri is guaranteed to be valid
 
             var uris: std.ArrayList(Uri) = try .initCapacity(allocator, imports.items.len);
             errdefer {
@@ -588,7 +584,7 @@
 
             for (imports.items) |import_str| {
                 if (!std.mem.endsWith(u8, import_str, ".zig")) continue;
-                uris.appendAssumeCapacity(try resolveFileImportString(allocator, base_path, import_str) orelse continue);
+                uris.appendAssumeCapacity(try Uri.resolveImport(allocator, handle.uri, parsed_uri, import_str));
             }
 
             return try uris.toOwnedSlice(allocator);
@@ -1873,13 +1869,8 @@ pub fn uriFromImportStr(
     defer tracy_zone.end();
 
     if (std.mem.endsWith(u8, import_str, ".zig") or std.mem.endsWith(u8, import_str, ".zon")) {
-        const base_path = handle.uri.toFsPath(allocator) catch |err| switch (err) {
-            error.OutOfMemory => return error.OutOfMemory,
-            error.UnsupportedScheme => return .none,
-        };
-        defer allocator.free(base_path);
-        const uri = try resolveFileImportString(allocator, base_path, import_str) orelse return .none;
-        return .{ .one = uri };
+        const parsed_uri = std.Uri.parse(handle.uri.raw) catch unreachable; // The Uri is guaranteed to be valid
+        return .{ .one = try Uri.resolveImport(allocator, handle.uri, parsed_uri, import_str) };
     }
 
     if (std.mem.eql(u8, import_str, "std")) {
@@ -1949,10 +1940,3 @@
         },
     }
 }
-
-fn resolveFileImportString(allocator: std.mem.Allocator, base_path: []const u8, import_str: []const u8) error{OutOfMemory}!?Uri {
-    const joined_path = try std.fs.path.resolve(allocator, &.{ base_path, "..", import_str });
-    defer allocator.free(joined_path);
-
-    return try .fromPath(allocator, joined_path);
-}
diff --git a/src/Uri.zig b/src/Uri.zig
index 4b976baa8..26f6fa994 100644
--- a/src/Uri.zig
+++ b/src/Uri.zig
@@ -8,6 +8,7 @@ const Uri = @This();
 /// - consistent casing of the Windows drive letter
 /// - consistent path seperator on Windows (convert '\\' to '/')
 /// - always add an authority component even if unnecessary
+/// - remove query and fragment component
 raw: []const u8,
 
 pub fn parse(allocator: std.mem.Allocator, text: []const u8) (std.Uri.ParseError || error{OutOfMemory})!Uri {
@@ -35,7 +36,7 @@ fn parseWithOs(
         }
         capacity += host.percent_encoded.len;
     }
-    if (uri.port != null) capacity += comptime ":".len + std.math.log10_int(@as(usize, std.math.maxInt(u16))); // TODO check this
+    if (uri.port != null) capacity += comptime ":".len + std.math.log10_int(@as(usize, std.math.maxInt(u16)));
     if (!std.mem.startsWith(u8, uri.path.percent_encoded, "/")) {
         capacity += "/".len;
     }
@@ -419,6 +420,60 @@ test "toFsPath - UNC (windows)" {
     try std.testing.expectEqualStrings(uri.raw, round_trip_uri.raw);
 }
 
+pub fn resolveImport(
+    allocator: std.mem.Allocator,
+    uri: Uri,
+    parsed_uri: std.Uri,
+    sub_path: []const u8,
+) 
error{OutOfMemory}!Uri {
+    var result: std.ArrayList(u8) = try .initCapacity(allocator, uri.raw.len + sub_path.len);
+    {
+        errdefer comptime unreachable;
+        result.printAssumeCapacity("{s}:", .{parsed_uri.scheme});
+        result.appendSliceAssumeCapacity("//");
+        if (parsed_uri.host) |host| {
+            if (parsed_uri.user) |user| {
+                result.appendSliceAssumeCapacity(user.percent_encoded);
+                if (parsed_uri.password) |password| {
+                    result.appendAssumeCapacity(':');
+                    result.appendSliceAssumeCapacity(password.percent_encoded);
+                }
+                result.appendAssumeCapacity('@');
+            }
+            result.appendSliceAssumeCapacity(host.percent_encoded);
+            if (parsed_uri.port) |port| result.printAssumeCapacity(":{d}", .{port});
+        }
+    }
+    var aw: std.Io.Writer.Allocating = .fromArrayList(allocator, &result);
+    defer aw.deinit();
+
+    const percent_encoded_path = parsed_uri.path.percent_encoded;
+
+    const joined_path = try std.fs.path.resolvePosix(allocator, &.{ percent_encoded_path, "..", sub_path });
+    defer allocator.free(joined_path);
+
+    std.Uri.Component.percentEncode(&aw.writer, joined_path, isPathChar) catch unreachable;
+
+    return .{ .raw = try aw.toOwnedSlice() };
+}
+
+test "resolve" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///dir/main.zig", false);
+    defer uri.deinit(std.testing.allocator);
+
+    const parsed_uri = std.Uri.parse(uri.raw) catch unreachable;
+
+    const resolved_uri = try resolveImport(std.testing.allocator, uri, parsed_uri, "foo bar.zig");
+    defer resolved_uri.deinit(std.testing.allocator);
+
+    try std.testing.expectEqualStrings("file:///dir/foo%20bar.zig", resolved_uri.raw);
+
+    const round_trip_uri: Uri = try .parseWithOs(std.testing.allocator, resolved_uri.raw, false);
+    defer round_trip_uri.deinit(std.testing.allocator);
+
+    try std.testing.expectEqualStrings(round_trip_uri.raw, resolved_uri.raw);
+}
+
 fn normalizePercentEncoded(
     result: *std.ArrayList(u8),
     percent_encoded: []const u8,
diff --git a/tests/context.zig b/tests/context.zig
index 53d406669..55c493460 100644
--- a/tests/context.zig
+++ b/tests/context.zig
@@ -82,20 +82,11 @@ pub const Context = struct {
 
     // helper
 
     pub fn addDocument(self: *Context, options: struct {
-        use_file_scheme: bool = false,
         source: []const u8,
         mode: std.zig.Ast.Mode = .zig,
     }) !zls.Uri {
-        const fmt = switch (builtin.os.tag) {
-            .windows => "file:///c:/Untitled-{d}.{t}",
-            else => "file:///Untitled-{d}.{t}",
-        };
-
         const arena = self.arena.allocator();
-        const path = if (options.use_file_scheme)
-            try std.fmt.allocPrint(arena, fmt, .{ self.file_id, options.mode })
-        else
-            try std.fmt.allocPrint(arena, "untitled:///Untitled-{d}.{t}", .{ self.file_id, options.mode });
+        const path = try std.fmt.allocPrint(arena, "untitled:///Untitled-{d}.{t}", .{ self.file_id, options.mode });
         const uri: zls.Uri = try .parse(arena, path);
 
         const params: types.TextDocument.DidOpenParams = .{
diff --git a/tests/lsp_features/references.zig b/tests/lsp_features/references.zig
index 5f6e904db..e4bbadb57 100644
--- a/tests/lsp_features/references.zig
+++ b/tests/lsp_features/references.zig
@@ -345,10 +345,7 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool
         var phr = try helper.collectReplacePlaceholders(allocator, source, placeholder_name);
         defer phr.deinit(allocator);
 
-        const uri = try ctx.addDocument(.{
-            .use_file_scheme = sources.len > 1, // use 'file:/' scheme when testing with multiple files so that they can import each other
-            .source = phr.new_source,
-        });
+        const uri = try ctx.addDocument(.{ .source = phr.new_source });
files.putAssumeCapacityNoClobber(uri.raw, .{ .source = source, .new_source = phr.new_source }); phr.new_source = ""; // `files` takes ownership of `new_source` from `phr` From 3a7f0d4ea7bfe0cd5b8489994ea37e291c419278 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Tue, 10 Feb 2026 14:57:28 +0100 Subject: [PATCH 4/9] optimize `offsets.tokenToLoc` on builtin tokens --- src/offsets.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/offsets.zig b/src/offsets.zig index c9b54affa..8c3b3f976 100644 --- a/src/offsets.zig +++ b/src/offsets.zig @@ -397,7 +397,7 @@ pub fn tokenToLoc(tree: *const Ast, token_index: Ast.TokenIndex) Loc { const tag = tree.tokenTag(token_index); // Many tokens can be determined entirely by their tag. - if (tag == .identifier) { + if (tag == .identifier or tag == .builtin) { // fast path for identifiers return identifierIndexToLoc(tree.source, start, .full); } else if (tag.lexeme()) |lexeme| { From f911d6cd43fd45954797950d06519829477be02c Mon Sep 17 00:00:00 2001 From: Techatrix Date: Tue, 10 Feb 2026 12:25:16 +0100 Subject: [PATCH 5/9] remove global error and enum set completions This logic is mostly guesswork. Perhaps we can bring it back after fixing cross file references. --- src/DocumentScope.zig | 74 +----------------- src/features/completions.zig | 121 ++---------------------------- tests/lsp_features/completion.zig | 102 ------------------------- 3 files changed, 8 insertions(+), 289 deletions(-) diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index b927a8cff..30074163c 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -13,36 +13,6 @@ declarations: std.MultiArrayList(Declaration), /// used for looking up a child declaration in a given scope declaration_lookup_map: DeclarationLookupMap, extra: std.ArrayList(u32), -/// All identifier token that are in error sets. -/// When there are multiple error sets that contain the same error, only one of them is stored. -/// A token that has a doc comment takes priority. -/// This means that if there a multiple error sets with the same name, only one of them is included. -global_error_set: IdentifierSet, -/// All identifier token that are in enums. -/// When there are multiple enums that contain the field name, only one of them is stored. -/// A token that has a doc comment takes priority. -/// This means that if there a multiple enums with the same name, only one of them is included. 
-global_enum_set: IdentifierSet, - -/// Stores a set of identifier tokens with unique names -pub const IdentifierSet = std.ArrayHashMapUnmanaged(Ast.TokenIndex, void, IdentifierTokenContext, true); - -pub const IdentifierTokenContext = struct { - tree: *const Ast, - - pub fn eql(self: @This(), a: Ast.TokenIndex, b: Ast.TokenIndex, b_index: usize) bool { - _ = b_index; - if (a == b) return true; - const a_name = offsets.identifierTokenToNameSlice(self.tree, a); - const b_name = offsets.identifierTokenToNameSlice(self.tree, b); - return std.mem.eql(u8, a_name, b_name); - } - - pub fn hash(self: @This(), token: Ast.TokenIndex) u32 { - const name = offsets.identifierTokenToNameSlice(self.tree, token); - return std.array_hash_map.hashString(name); - } -}; /// Every `index` inside this `ArrayhashMap` is equivalent to a `Declaration.Index` /// This means that every declaration is only the child of a single scope @@ -524,8 +494,6 @@ pub fn init(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!D .declarations = .empty, .declaration_lookup_map = .empty, .extra = .empty, - .global_error_set = .empty, - .global_enum_set = .empty, }; errdefer document_scope.deinit(allocator); @@ -557,9 +525,6 @@ pub fn deinit(scope: *DocumentScope, allocator: std.mem.Allocator) void { scope.declarations.deinit(allocator); scope.declaration_lookup_map.deinit(allocator); scope.extra.deinit(allocator); - - scope.global_enum_set.deinit(allocator); - scope.global_error_set.deinit(allocator); } fn locToSmallLoc(loc: offsets.Loc) Scope.SmallLoc { @@ -807,17 +772,6 @@ noinline fn walkContainerDecl( var buf: [2]Ast.Node.Index = undefined; const container_decl = tree.fullContainerDecl(&buf, node_idx).?; - const is_enum_or_tagged_union, const is_struct = blk: { - if (node_idx == .root) break :blk .{ false, true }; - break :blk switch (tree.tokenTag(container_decl.ast.main_token)) { - .keyword_enum => .{ true, false }, - .keyword_union => .{ container_decl.ast.enum_token != null or container_decl.ast.arg != .none, false }, - .keyword_struct => .{ false, true }, - .keyword_opaque => .{ false, false }, - else => unreachable, - }; - }; - const scope = try context.startScope( .container, .{ .ast_node = node_idx }, @@ -837,6 +791,8 @@ noinline fn walkContainerDecl( .container_field_align, => { var container_field = tree.fullContainerField(decl).?; + + const is_struct = node_idx == .root or tree.tokenTag(container_decl.ast.main_token) == .keyword_struct; if (is_struct and container_field.ast.tuple_like) continue; container_field.convertToNonTupleLike(tree); @@ -844,23 +800,6 @@ noinline fn walkContainerDecl( const main_token = container_field.ast.main_token; if (tree.tokenTag(main_token) != .identifier) continue; try scope.pushDeclaration(main_token, .{ .ast_node = decl }, .field); - - if (is_enum_or_tagged_union) { - const name = offsets.identifierTokenToNameSlice(tree, main_token); - if (std.mem.eql(u8, name, "_")) continue; - - const gop = try context.doc_scope.global_enum_set.getOrPutContext( - context.allocator, - main_token, - .{ .tree = tree }, - ); - if (!gop.found_existing) { - gop.key_ptr.* = main_token; - } else if (gop.found_existing and tree.tokenTag(main_token - 1) == .doc_comment) { - // a token with a doc comment takes priority. 
- gop.key_ptr.* = main_token; - } - } }, .fn_proto, .fn_proto_multi, @@ -905,15 +844,6 @@ noinline fn walkErrorSetNode( const identifier_token: Ast.TokenIndex = @intCast(tok_i); try scope.pushDeclaration(identifier_token, .{ .error_token = identifier_token }, .other); - const gop = try context.doc_scope.global_error_set.getOrPutContext( - context.allocator, - identifier_token, - .{ .tree = tree }, - ); - if (!gop.found_existing or tree.tokenTag(identifier_token - 1) == .doc_comment) { - // a token with a doc comment takes priority. - gop.key_ptr.* = identifier_token; - } } try scope.finalize(); diff --git a/src/features/completions.zig b/src/features/completions.zig index 98ec78b43..2eadc383b 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -722,25 +722,13 @@ fn completeDot(builder: *Builder, loc: offsets.Loc) Analyser.Error!void { const dot_token_index = offsets.sourceIndexToTokenIndex(tree, loc.start).pickPreferred(&.{.period}, tree) orelse return; if (dot_token_index < 2) return; - blk: { - const nodes = try ast.nodesOverlappingIndexIncludingParseErrors(builder.arena, tree, loc.start); - const dot_context = getEnumLiteralContext(tree, dot_token_index, nodes) orelse break :blk; - const used_members_set = try collectUsedMembersSet(builder, dot_context.likely, dot_token_index); - const containers = try collectContainerNodes(builder, builder.orig_handle, dot_context); - for (containers) |container| { - try collectContainerFields(builder, dot_context.likely, container, used_members_set); - } + const nodes = try ast.nodesOverlappingIndexIncludingParseErrors(builder.arena, tree, loc.start); + const dot_context = getEnumLiteralContext(tree, dot_token_index, nodes) orelse return; + const used_members_set = try collectUsedMembersSet(builder, dot_context.likely, dot_token_index); + const containers = try collectContainerNodes(builder, builder.orig_handle, dot_context); + for (containers) |container| { + try collectContainerFields(builder, dot_context.likely, container, used_members_set); } - - if (builder.completions.items.len != 0) return; - - // Prevent compl for float numbers, eg `1.` - // Ideally this would also `or token_tags[dot_token_index - 1] != .equal`, - // which would mean the only possibility left would be `var enum_val = .`. 
- if (tree.tokenTag(dot_token_index - 1) == .number_literal or tree.tokenTag(dot_token_index - 1) != .equal) return; - - // `var enum_val = .` or the get*Context logic failed because of syntax errors (parser didn't create the necessary node(s)) - try globalSetCompletions(builder, .enum_set); } /// Asserts that `pos_context` is one of the following: @@ -964,7 +952,6 @@ pub fn completionAtIndex( .builtin => try completeBuiltin(&builder), .var_access, .empty => try completeGlobal(&builder), .field_access => |loc| try completeFieldAccess(&builder, loc), - .error_access => try globalSetCompletions(&builder, .error_set), .enum_literal => |loc| try completeDot(&builder, loc), .label_access, .label_decl => try completeLabel(&builder), .import_string_literal, @@ -1012,102 +999,6 @@ pub fn completionAtIndex( return .{ .isIncomplete = false, .items = completions }; } -// <---------------------------------------------------------------------------> -// global error set / enum field set -// <---------------------------------------------------------------------------> - -const CompletionSet = std.ArrayHashMapUnmanaged(types.completion.Item, void, CompletionContext, false); - -const CompletionContext = struct { - pub fn hash(self: @This(), item: types.completion.Item) u32 { - _ = self; - return std.array_hash_map.hashString(item.label); - } - - pub fn eql(self: @This(), a: types.completion.Item, b: types.completion.Item, b_index: usize) bool { - _ = self; - _ = b_index; - return std.mem.eql(u8, a.label, b.label); - } -}; - -const CompletionNameAdapter = struct { - pub fn hash(ctx: @This(), name: []const u8) u32 { - _ = ctx; - return std.array_hash_map.hashString(name); - } - - pub fn eql(ctx: @This(), a: []const u8, b: types.completion.Item, b_map_index: usize) bool { - _ = ctx; - _ = b_map_index; - return std.mem.eql(u8, a, b.label); - } -}; - -/// Every `DocumentScope` store a set of all error names and a set of all enum field names. -/// This function collects all of these sets from all dependencies and returns them as completions. 
-fn globalSetCompletions(builder: *Builder, kind: enum { error_set, enum_set }) Analyser.Error!void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - const store = &builder.server.document_store; - - var dependencies: std.ArrayList(Uri) = .empty; - try dependencies.append(builder.arena, builder.orig_handle.uri); - try store.collectDependencies(builder.arena, builder.orig_handle, &dependencies); - - // TODO Better solution for deciding what tags to include - var result_set: CompletionSet = .empty; - - for (dependencies.items) |uri| { - // not every dependency is loaded which results in incomplete completion - const dependency_handle = store.getHandle(uri) orelse continue; - const document_scope = try dependency_handle.getDocumentScope(); - const curr_set: DocumentScope.IdentifierSet = switch (kind) { - .error_set => @field(document_scope, "global_error_set"), - .enum_set => @field(document_scope, "global_enum_set"), - }; - try result_set.ensureUnusedCapacity(builder.arena, curr_set.count()); - for (curr_set.keys()) |identifier_token| { - const name = offsets.identifierTokenToNameSlice(&dependency_handle.tree, identifier_token); - - const gop = result_set.getOrPutAssumeCapacityAdapted( - name, - CompletionNameAdapter{}, - ); - - if (!gop.found_existing) { - gop.key_ptr.* = .{ - .label = name, - .detail = switch (kind) { - .error_set => try std.fmt.allocPrint(builder.arena, "error.{f}", .{std.zig.fmtId(name)}), - .enum_set => null, - }, - .kind = switch (kind) { - .error_set => .Constant, - .enum_set => .EnumMember, - }, - .documentation = null, // will be set below - }; - } - - if (gop.key_ptr.documentation == null) { - if (try Analyser.getDocCommentsBeforeToken(builder.arena, &dependency_handle.tree, identifier_token)) |documentation| { - gop.key_ptr.documentation = .{ - .markup_content = .{ - // TODO check if client supports markdown - .kind = .markdown, - .value = documentation, - }, - }; - } - } - } - } - - try builder.completions.appendSlice(builder.arena, result_set.keys()); -} - // <---------------------------------------------------------------------------> // completions/enum_literal.zig staging area // <---------------------------------------------------------------------------> diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig index 78d989e3d..c1cdea388 100644 --- a/tests/lsp_features/completion.zig +++ b/tests/lsp_features/completion.zig @@ -1742,44 +1742,6 @@ test "tagged union" { }); } -test "global enum set" { - try testCompletion( - \\const SomeError = error{ e }; - \\const E1 = enum { - \\ foo, - \\ bar, - \\}; - \\const E2 = enum { - \\ baz, - \\ ///hello - \\ qux, - \\}; - \\const baz = . - , &.{ - .{ .label = "foo", .kind = .EnumMember }, - .{ .label = "bar", .kind = .EnumMember }, - .{ .label = "baz", .kind = .EnumMember }, - .{ .label = "qux", .kind = .EnumMember, .documentation = "hello" }, - }); - try testCompletion( - \\const SomeError = error{ e }; - \\const Enum1 = enum { - \\ ///hello world - \\ foo, - \\ bar, - \\}; - \\const Enum2 = enum { - \\ foo, - \\ ///hallo welt - \\ bar, - \\}; - \\const baz = . 
- , &.{ - .{ .label = "foo", .kind = .EnumMember, .documentation = "hello world" }, - .{ .label = "bar", .kind = .EnumMember, .documentation = "hallo welt" }, - }); -} - test "switch cases" { // Because current logic is to list all enums if all else fails, // the following tests include an extra enum to ensure that we're not just 'getting lucky' @@ -1991,71 +1953,7 @@ test "error set" { }); } -test "global error set" { - try testCompletion( - \\const SomeEnum = enum { e }; - \\const Error1 = error { - \\ foo, - \\ bar, - \\}; - \\const Error2 = error { - \\ baz, - \\ ///hello - \\ qux, - \\}; - \\const baz = error. - , &.{ - .{ .label = "foo", .kind = .Constant, .detail = "error.foo" }, - .{ .label = "bar", .kind = .Constant, .detail = "error.bar" }, - .{ .label = "baz", .kind = .Constant, .detail = "error.baz" }, - .{ .label = "qux", .kind = .Constant, .detail = "error.qux", .documentation = "hello" }, - }); - try testCompletion( - \\const SomeEnum = enum { e }; - \\const Error1 = error { - \\ ///hello world - \\ foo, - \\ bar, - \\}; - \\const Error2 = error { - \\ foo, - \\ ///hallo welt - \\ bar, - \\}; - \\const baz = error. - , &.{ - .{ .label = "foo", .kind = .Constant, .detail = "error.foo", .documentation = "hello world" }, - .{ .label = "bar", .kind = .Constant, .detail = "error.bar", .documentation = "hallo welt" }, - }); - try testCompletion( - \\const Error = error { - \\ ///hello world - \\ @"some name", - \\}; - \\const baz = error. - , &.{ - .{ .label = "some name", .kind = .Constant, .detail = "error.@\"some name\"", .documentation = "hello world" }, - }); -} - test "merged error sets" { - try testCompletion( - \\const FirstSet = error{ - \\ X, - \\ Y, - \\}; - \\const SecondSet = error{ - \\ Foo, - \\ Bar, - \\} || FirstSet; - \\const e = error. - , &.{ - .{ .label = "X", .kind = .Constant, .detail = "error.X" }, - .{ .label = "Y", .kind = .Constant, .detail = "error.Y" }, - .{ .label = "Foo", .kind = .Constant, .detail = "error.Foo" }, - .{ .label = "Bar", .kind = .Constant, .detail = "error.Bar" }, - }); - try testCompletion( \\const FirstSet = error{ \\ x, From 026de3178a3bcc2b8c2dd13a319977db28e8e193 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Tue, 10 Feb 2026 14:56:48 +0100 Subject: [PATCH 6/9] refactor import/cImport logic Please Andrew, remove cImport from the language. I beg you. 
---
 src/DocumentStore.zig | 193 +++++++++++++++++-------------------
 src/analysis.zig      |  52 ------------
 2 files changed, 105 insertions(+), 140 deletions(-)

diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig
index 51c941f70..f02a72191 100644
--- a/src/DocumentStore.zig
+++ b/src/DocumentStore.zig
@@ -25,7 +25,7 @@ mutex: std.Io.Mutex = .init,
 wait_group: if (supports_build_system) std.Io.Group else void = if (supports_build_system) .init else {},
 handles: Uri.ArrayHashMap(*Handle.Future) = .empty,
 build_files: if (supports_build_system) Uri.ArrayHashMap(*BuildFile) else void = if (supports_build_system) .empty else {},
-cimports: if (supports_build_system) std.AutoArrayHashMapUnmanaged(Hash, translate_c.Result) else void = if (supports_build_system) .empty else {},
+cimports: if (supports_build_system) std.AutoArrayHashMapUnmanaged(CImportHash, translate_c.Result) else void = if (supports_build_system) .empty else {},
 diagnostics_collection: *DiagnosticsCollection,
 builds_in_progress: std.atomic.Value(i32) = .init(0),
 transport: ?*lsp.Transport = null,
@@ -35,19 +35,8 @@ lsp_capabilities: struct {
     supports_inlay_hints_refresh: bool = false,
 } = .{},
 
-pub const Hasher = std.crypto.auth.siphash.SipHash128(1, 3);
-pub const Hash = [Hasher.mac_length]u8;
-
 pub const supports_build_system = std.process.can_spawn;
 
-pub fn computeHash(bytes: []const u8) Hash {
-    var hasher: Hasher = .init(&@splat(0));
-    hasher.update(bytes);
-    var hash: Hash = undefined;
-    hasher.final(&hash);
-    return hash;
-}
-
 pub const Config = struct {
     environ_map: *const std.process.Environ.Map,
     zig_exe_path: ?[]const u8,
@@ -154,9 +143,8 @@ pub const BuildFile = struct {
 
             const handle = try store.getOrLoadHandle(source_uri) orelse continue;
 
-            const import_uris = (try handle.import_uris.get(handle)).*;
-            try found_uris.ensureUnusedCapacity(arena, import_uris.len);
-            for (import_uris) |import_uri| found_uris.putAssumeCapacity(try import_uri.dupe(arena), {});
+            try found_uris.ensureUnusedCapacity(arena, handle.file_imports.len);
+            for (handle.file_imports) |import_uri| found_uris.putAssumeCapacity(try import_uri.dupe(arena), {});
         }
     }
 
@@ -175,14 +163,15 @@ pub const BuildFile = struct {
 pub const Handle = struct {
     uri: Uri,
     tree: Ast,
-    /// Contains one entry for every cimport in the document
+    /// List of every file that has been `@import`ed. Does not include imported modules.
+    file_imports: []const Uri,
+    /// Contains one entry for every `@cImport` in the document
     cimports: std.MultiArrayList(CImportHandle),
     /// `true` if the document has been directly opened by the client i.e. with `textDocument/didOpen`
     /// `false` indicates the document only exists because it is a dependency of another document
    /// or has been closed with `textDocument/didClose`.
lsp_synced: bool,
     document_scope: Lazy(DocumentScope, DocumentStoreContext) = .unset,
-    import_uris: Lazy([]const Uri, ImportUrisContext) = .unset,
 
     /// private field
     impl: struct {
@@ -428,8 +417,27 @@
         var new_tree = try parseTree(allocator, text, mode);
         errdefer new_tree.deinit(allocator);
 
-        var new_cimports = try collectCIncludes(allocator, &new_tree);
-        errdefer new_cimports.deinit(allocator);
+        var new_file_imports: std.ArrayList(Uri) = .empty;
+        errdefer new_file_imports.deinit(allocator);
+
+        var new_cimports: std.MultiArrayList(CImportHandle) = .empty;
+        errdefer {
+            for (new_cimports.items(.source)) |source| {
+                allocator.free(source);
+            }
+            new_cimports.deinit(allocator);
+        }
+
+        try collectImports(
+            allocator,
+            handle.uri,
+            &new_tree,
+            &new_file_imports,
+            &new_cimports,
+        );
+
+        const file_imports = try new_file_imports.toOwnedSlice(allocator);
+        errdefer allocator.free(file_imports);
 
         errdefer comptime unreachable;
 
@@ -440,13 +448,13 @@
         old_handle.cimports = handle.cimports;
 
         handle.tree = new_tree;
+        old_handle.file_imports = handle.file_imports;
+        handle.file_imports = file_imports;
         handle.cimports = new_cimports;
         handle.impl.has_tree_and_source = true;
 
         old_handle.document_scope = handle.document_scope;
         handle.document_scope = .unset;
-        old_handle.import_uris = handle.import_uris;
-        handle.import_uris = .unset;
     }
 
     fn parseTree(allocator: std.mem.Allocator, new_text: [:0]const u8, mode: Ast.Mode) error{OutOfMemory}!Ast {
@@ -468,10 +476,76 @@
         return tree;
     }
 
+    fn collectImports(
+        allocator: std.mem.Allocator,
+        uri: Uri,
+        tree: *const Ast,
+        file_imports: *std.ArrayList(Uri),
+        cimports: *std.MultiArrayList(CImportHandle),
+    ) error{OutOfMemory}!void {
+        const tracy_zone = tracy.trace(@src());
+        defer tracy_zone.end();
+
+        const parsed_uri = std.Uri.parse(uri.raw) catch unreachable; // The Uri is guaranteed to be valid
+
+        const node_tags = tree.nodes.items(.tag);
+        for (node_tags, 0..) |tag, i| {
+            const node: Ast.Node.Index = @enumFromInt(i);
+
+            switch (tag) {
+                .builtin_call,
+                .builtin_call_comma,
+                .builtin_call_two,
+                .builtin_call_two_comma,
+                => {},
+                else => continue,
+            }
+            const name = offsets.tokenToSlice(tree, tree.nodeMainToken(node));
+
+            if (std.mem.eql(u8, name, "@import")) {
+                try file_imports.ensureUnusedCapacity(allocator, 1);
+
+                var buffer: [2]Ast.Node.Index = undefined;
+                const params = tree.builtinCallParams(&buffer, node).?;
+                if (params.len < 1) continue;
+                if (tree.nodeTag(params[0]) != .string_literal) continue;
+
+                var import_string = offsets.tokenToSlice(tree, tree.nodeMainToken(params[0]));
+                import_string = import_string[1 .. import_string.len - 1];
+
+                if (!std.mem.endsWith(u8, import_string, ".zig")) continue;
+
+                const import_uri = try Uri.resolveImport(allocator, uri, parsed_uri, import_string);
+                file_imports.appendAssumeCapacity(import_uri);
+                continue;
+            }
+
+            if (std.mem.eql(u8, name, "@cImport")) {
+                try cimports.ensureUnusedCapacity(allocator, 1);
+
+                const c_source = translate_c.convertCInclude(allocator, tree, node) catch |err| switch (err) {
+                    error.Unsupported => continue,
+                    error.OutOfMemory => return error.OutOfMemory,
+                };
+
+                var hasher: CImportHasher = .init(&@splat(0));
+                hasher.update(c_source);
+
+                cimports.appendAssumeCapacity(.{
+                    .node = node,
+                    .hash = hasher.finalResult(),
+                    .source = c_source,
+                });
+                continue;
+            }
+        }
+    }
+
    /// A handle that can only be deallocated. Keep in sync with `deinit`.
        const dead: Handle = .{
            .uri = undefined,
            .tree = undefined,
+           .file_imports = &.{},
            .cimports = .empty,
            .lsp_synced = undefined,
            .impl = .{
@@ -491,7 +565,8 @@ pub const Handle = struct {
            self.tree.deinit(allocator);
        }
        self.document_scope.deinit(allocator);
-       self.import_uris.deinit(allocator);
+       for (self.file_imports) |uri| uri.deinit(allocator);
+       allocator.free(self.file_imports);
        for (self.cimports.items(.source)) |source| allocator.free(source);
        self.cimports.deinit(allocator);
 
@@ -568,32 +643,6 @@ pub const Handle = struct {
            document_scope.deinit(allocator);
        }
    };
-
-   const ImportUrisContext = struct {
-       fn create(handle: *Handle, allocator: std.mem.Allocator) error{OutOfMemory}![]const Uri {
-           var imports = try analysis.collectImports(allocator, &handle.tree);
-           defer imports.deinit(allocator);
-
-           const parsed_uri = std.Uri.parse(handle.uri.raw) catch unreachable; // The Uri is guranteed to be valid
-
-           var uris: std.ArrayList(Uri) = try .initCapacity(allocator, imports.items.len);
-           errdefer {
-               for (uris.items) |uri| uri.deinit(allocator);
-               uris.deinit(allocator);
-           }
-
-           for (imports.items) |import_str| {
-               if (!std.mem.endsWith(u8, import_str, ".zig")) continue;
-               uris.appendAssumeCapacity(try Uri.resolveImport(allocator, handle.uri, parsed_uri, import_str));
-           }
-
-           return try uris.toOwnedSlice(allocator);
-       }
-       fn deinit(import_uris: *[]const Uri, allocator: std.mem.Allocator) void {
-           for (import_uris.*) |uri| uri.deinit(allocator);
-           allocator.free(import_uris.*);
-       }
-   };
 };
 
 pub const HandleIterator = struct {
@@ -1437,6 +1486,7 @@ fn createAndStoreDocument(
        .handle = .{
            .uri = gop.key_ptr.*,
            .tree = undefined,
+           .file_imports = &.{},
            .cimports = .empty,
            .lsp_synced = options.lsp_synced,
            .impl = .{
@@ -1474,49 +1524,18 @@ fn createAndStoreDocument(
    return &handle_future.handle;
 }
 
+pub const CImportHasher = std.crypto.auth.siphash.SipHash128(1, 3);
+pub const CImportHash = [CImportHasher.mac_length]u8;
+
 pub const CImportHandle = struct {
    /// the `@cImport` node
    node: Ast.Node.Index,
    /// hash of c source file
-   hash: Hash,
+   hash: CImportHash,
    /// c source file
    source: []const u8,
 };
 
-/// Collects all `@cImport` nodes and converts them into c source code if possible
-/// Caller owns returned memory.
-fn collectCIncludes(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!std.MultiArrayList(CImportHandle) {
-   const tracy_zone = tracy.trace(@src());
-   defer tracy_zone.end();
-
-   const cimport_nodes = try analysis.collectCImportNodes(allocator, tree);
-   defer allocator.free(cimport_nodes);
-
-   var sources: std.MultiArrayList(CImportHandle) = .empty;
-   try sources.ensureTotalCapacity(allocator, cimport_nodes.len);
-   errdefer {
-       for (sources.items(.source)) |source| {
-           allocator.free(source);
-       }
-       sources.deinit(allocator);
-   }
-
-   for (cimport_nodes) |node| {
-       const c_source = translate_c.convertCInclude(allocator, tree, node) catch |err| switch (err) {
-           error.Unsupported => continue,
-           error.OutOfMemory => return error.OutOfMemory,
-       };
-
-       sources.appendAssumeCapacity(.{
-           .node = node,
-           .hash = computeHash(c_source),
-           .source = c_source,
-       });
-   }
-
-   return sources;
-}
-
 /// collects every file uri the given handle depends on
 /// includes imports, cimports & packages
 /// **Thread safe** takes a shared lock
@@ -1529,10 +1548,8 @@ pub fn collectDependencies(
    const tracy_zone = tracy.trace(@src());
    defer tracy_zone.end();
 
-   const import_uris = (try handle.import_uris.get(handle)).*;
-
-   try dependencies.ensureUnusedCapacity(allocator, import_uris.len + handle.cimports.len);
-   for (import_uris) |uri| {
+   try dependencies.ensureUnusedCapacity(allocator, handle.file_imports.len + handle.cimports.len);
+   for (handle.file_imports) |uri| {
        dependencies.appendAssumeCapacity(try uri.dupe(allocator));
    }
 
@@ -1682,7 +1699,7 @@ pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Inde
    // TODO regenerate cimports if the header files gets modified
    const index = std.mem.findScalar(Ast.Node.Index, handle.cimports.items(.node), node) orelse return null;
 
-   const hash: Hash = handle.cimports.items(.hash)[index];
+   const hash: CImportHash = handle.cimports.items(.hash)[index];
    const source = handle.cimports.items(.source)[index];
 
    {
diff --git a/src/analysis.zig b/src/analysis.zig
index eb0446eda..24517fca1 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -4482,58 +4482,6 @@ pub fn instanceStdBuiltinType(analyser: *Analyser, type_name: []const u8) Error!
    return try result_ty.instanceTypeVal(analyser);
 }
 
-/// Collects all `@import`'s we can find into a slice of import paths (without quotes).
-pub fn collectImports(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!std.ArrayList([]const u8) {
-   const tracy_zone = tracy.trace(@src());
-   defer tracy_zone.end();
-
-   var imports: std.ArrayList([]const u8) = .empty;
-   errdefer imports.deinit(allocator);
-
-   for (0..tree.tokens.len) |i| {
-       if (tree.tokenTag(@intCast(i)) != .builtin)
-           continue;
-       const name = offsets.identifierTokenToNameSlice(tree, @intCast(i));
-       if (!std.mem.eql(u8, name, "import")) continue;
-       if (!std.mem.startsWith(std.zig.Token.Tag, tree.tokens.items(.tag)[i + 1 ..], &.{ .l_paren, .string_literal, .r_paren })) continue;
-
-       const str = tree.tokenSlice(@intCast(i + 2));
-       try imports.append(allocator, str[1 .. str.len - 1]);
-   }
-
-   return imports;
-}
-
-/// Collects all `@cImport` nodes
-/// Caller owns returned memory.
-pub fn collectCImportNodes(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}![]Ast.Node.Index {
-   const tracy_zone = tracy.trace(@src());
-   defer tracy_zone.end();
-
-   var import_nodes: std.ArrayList(Ast.Node.Index) = .empty;
-   errdefer import_nodes.deinit(allocator);
-
-   const node_tags = tree.nodes.items(.tag);
-   for (node_tags, 0..) |tag, i| {
-       const node: Ast.Node.Index = @enumFromInt(i);
-
-       switch (tag) {
-           .builtin_call,
-           .builtin_call_comma,
-           .builtin_call_two,
-           .builtin_call_two_comma,
-           => {},
-           else => continue,
-       }
-
-       if (!std.mem.eql(u8, Ast.tokenSlice(tree.*, tree.nodeMainToken(node)), "@cImport")) continue;
-
-       try import_nodes.append(allocator, node);
-   }
-
-   return import_nodes.toOwnedSlice(allocator);
-}
-
 pub const NodeWithUri = struct {
    node: Ast.Node.Index,
    uri: Uri,

From c503f328d5775472cf86e175f430d9d26e23ed73 Mon Sep 17 00:00:00 2001
From: Techatrix
Date: Sun, 15 Feb 2026 16:20:13 +0100
Subject: [PATCH 7/9] constrain cross-file symbol references to the file's
 build system module

When references are requested on a file that belongs to a build system
module, the search is constrained to that module. This does not yet
account for references across modules (or projects); that may be
implemented in the future.

When the file is not part of a build system module, the search covers
all files that are loaded by ZLS. With workspace symbols (#2339) all
files in workspace folders will be loaded. Files that never
(transitively) import the file containing the target symbol are
filtered out, so this isn't as slow as it may sound.
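The core of that fallback path is a reverse walk over the import graph.
The sketch below is illustrative only -- it uses plain string keys and a
flat edge list instead of ZLS's `Uri` keys and `Handle.file_imports`,
and assumes current std container APIs -- but the worklist over inverted
import edges is the same idea:

```zig
const std = @import("std");

const File = struct { name: []const u8, imports: []const []const u8 };

/// Returns every file that could reference a symbol declared in `target`:
/// the target file itself plus all files that (transitively) import it.
fn candidateFiles(
    arena: std.mem.Allocator,
    files: []const File,
    target: []const u8,
) !std.StringArrayHashMapUnmanaged(void) {
    // Invert the import edges: file -> files that import it (its dependants).
    var dependants: std.StringArrayHashMapUnmanaged(std.ArrayList([]const u8)) = .empty;
    for (files) |file| {
        for (file.imports) |imported| {
            const gop = try dependants.getOrPutValue(arena, imported, .empty);
            try gop.value_ptr.append(arena, file.name);
        }
    }

    // Breadth-first walk over the reverse edges; the array hash map's
    // insertion order doubles as the worklist.
    var found: std.StringArrayHashMapUnmanaged(void) = .empty;
    try found.put(arena, target, {});
    var i: usize = 0;
    while (i < found.count()) : (i += 1) {
        const deps = dependants.get(found.keys()[i]) orelse continue;
        for (deps.items) |dependant| try found.put(arena, dependant, {});
    }
    return found;
}

test candidateFiles {
    var arena_state: std.heap.ArenaAllocator = .init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    const found = try candidateFiles(arena, &.{
        .{ .name = "b.zig", .imports = &.{"a.zig"} },
        .{ .name = "c.zig", .imports = &.{"b.zig"} },
        .{ .name = "d.zig", .imports = &.{"unrelated.zig"} },
    }, "a.zig");

    // d.zig never (transitively) imports a.zig, so it is filtered out.
    try std.testing.expectEqual(@as(usize, 3), found.count());
}
```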
---
 src/DocumentStore.zig             |  50 +---------
 src/analysis.zig                  |   1 -
 src/features/references.zig       | 139 ++++++++++++++++++------------
 tests/lsp_features/references.zig |  30 +++++--
 4 files changed, 106 insertions(+), 114 deletions(-)

diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig
index f02a72191..e7b5b01d2 100644
--- a/src/DocumentStore.zig
+++ b/src/DocumentStore.zig
@@ -87,10 +87,9 @@ pub const BuildFile = struct {
    }
 
    /// Returns whether the `Uri` is a dependency of the given `BuildFile`.
-   /// May return `null` to indicate an inconclusive result because
+   /// May return `.unknown` to indicate an inconclusive result because
    /// the required build config has not been resolved yet.
    ///
-   /// invalidates any pointers into `build_files`
    /// **Thread safe** takes an exclusive lock
    fn isAssociatedWith(
        build_file: *BuildFile,
@@ -1536,53 +1535,6 @@ pub const CImportHandle = struct {
    source: []const u8,
 };
 
-/// collects every file uri the given handle depends on
-/// includes imports, cimports & packages
-/// **Thread safe** takes a shared lock
-pub fn collectDependencies(
-   store: *DocumentStore,
-   allocator: std.mem.Allocator,
-   handle: *Handle,
-   dependencies: *std.ArrayList(Uri),
-) error{ Canceled, OutOfMemory }!void {
-   const tracy_zone = tracy.trace(@src());
-   defer tracy_zone.end();
-
-   try dependencies.ensureUnusedCapacity(allocator, handle.file_imports.len + handle.cimports.len);
-   for (handle.file_imports) |uri| {
-       dependencies.appendAssumeCapacity(try uri.dupe(allocator));
-   }
-
-   if (supports_build_system) {
-       try store.mutex.lock(store.io);
-       defer store.mutex.unlock(store.io);
-       for (handle.cimports.items(.hash)) |hash| {
-           const result = store.cimports.get(hash) orelse continue;
-           switch (result) {
-               .success => |uri| dependencies.appendAssumeCapacity(try uri.dupe(allocator)),
-               .failure => continue,
-           }
-       }
-   }
-
-   if (supports_build_system) no_build_file: {
-       const build_file = switch (try handle.getAssociatedBuildFile(store)) {
-           .none, .unresolved => break :no_build_file,
-           .resolved => |resolved| resolved.build_file,
-       };
-
-       const build_config = build_file.tryLockConfig(store.io) orelse break :no_build_file;
-       defer build_file.unlockConfig(store.io);
-
-       const module_paths = build_config.modules.map.keys();
-
-       try dependencies.ensureUnusedCapacity(allocator, module_paths.len);
-       for (module_paths) |module_path| {
-           dependencies.appendAssumeCapacity(try .fromPath(allocator, module_path));
-       }
-   }
-}
-
 /// returns `true` if all include paths could be collected
 /// may return `false` because include paths from a build.zig may not have been resolved already
 /// **Thread safe** takes a shared lock
diff --git a/src/analysis.zig b/src/analysis.zig
index 24517fca1..38ec0e6ed 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -1669,7 +1669,6 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E
        analyser,
        .{ .decl = func_decl, .handle = decl_handle.handle, .container_type = decl_handle.container_type },
        false,
-       false,
    );
 
    var possible: std.ArrayList(Type.TypeWithDescriptor) = .empty;
diff --git a/src/features/references.zig b/src/features/references.zig
index e8fed5e89..6b524346c 100644
--- a/src/features/references.zig
+++ b/src/features/references.zig
@@ -216,50 +216,6 @@ const Builder = struct {
    }
 };
 
-fn gatherWorkspaceReferences(
-   store: *DocumentStore,
-   builder: anytype,
-   root_handle: *DocumentStore.Handle,
-   skip_std_references: bool,
-   include_decl: bool,
-) Analyser.Error!void {
-   const allocator = store.allocator;
-   var dependencies: Uri.ArrayHashMap(void) = .empty;
-   defer {
-       for (dependencies.keys()) |uri| {
-           uri.deinit(allocator);
-       }
-       dependencies.deinit(allocator);
-   }
-
-   var it: DocumentStore.HandleIterator = .{ .store = store };
-   while (it.next()) |handle| {
-       if (skip_std_references and DocumentStore.isInStd(handle.uri)) {
-           if (!include_decl or !handle.uri.eql(root_handle.uri))
-               continue;
-       }
-
-       var handle_dependencies: std.ArrayList(Uri) = .empty;
-       defer handle_dependencies.deinit(allocator);
-       try store.collectDependencies(allocator, handle, &handle_dependencies);
-
-       try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
-       for (handle_dependencies.items) |uri| {
-           const gop = dependencies.getOrPutAssumeCapacity(uri);
-           if (gop.found_existing) {
-               uri.deinit(allocator);
-           }
-       }
-   }
-
-   for (dependencies.keys()) |uri| {
-       if (uri.eql(root_handle.uri)) continue;
-       const handle = try store.getOrLoadHandle(uri) orelse continue;
-
-       try builder.collectReferences(handle, .root);
-   }
-}
-
 fn symbolReferences(
    analyser: *Analyser,
    request: GeneralReferencesRequest,
@@ -267,8 +223,6 @@ fn symbolReferences(
    encoding: offsets.Encoding,
    /// add `target_symbol` as a references
    include_decl: bool,
-   /// exclude references from the std library
-   skip_std_references: bool,
    /// The file on which the request was initiated.
    current_handle: *DocumentStore.Handle,
 ) Analyser.Error!std.ArrayList(types.Location) {
@@ -326,18 +280,88 @@ fn symbolReferences(
    const workspace = local_node == null and request != .highlight and target_symbol.isPublic();
 
    if (workspace) {
-       try gatherWorkspaceReferences(
+       var uris = try gatherWorkspaceReferenceCandidates(
            analyser.store,
-           &builder,
+           analyser.arena,
            current_handle,
-           skip_std_references,
-           include_decl,
+           target_symbol.handle,
        );
+       for (uris.keys()) |uri| {
+           if (uri.eql(current_handle.uri)) continue;
+           const dependency_handle = try analyser.store.getOrLoadHandle(uri) orelse continue;
+           try builder.collectReferences(dependency_handle, .root);
+       }
    }
 
    return builder.locations;
 }
 
+fn gatherWorkspaceReferenceCandidates(
+   store: *DocumentStore,
+   arena: std.mem.Allocator,
+   /// The file on which the request was initiated.
+   root_handle: *DocumentStore.Handle,
+   /// The file which contains the symbol that is being searched for.
+   target_handle: *DocumentStore.Handle,
+) Analyser.Error!Uri.ArrayHashMap(void) {
+   if (DocumentStore.supports_build_system) no_build_file: {
+       const resolved = switch (try root_handle.getAssociatedBuildFile(store)) {
+           .unresolved => return .empty, // this should await instead
+           .none => break :no_build_file,
+           .resolved => |resolved| resolved,
+       };
+
+       const root_module_root_uri: Uri = try .fromPath(arena, resolved.root_source_file);
+
+       var found_uris: Uri.ArrayHashMap(void) = .empty;
+       try found_uris.put(arena, root_module_root_uri, {});
+
+       if (!root_handle.uri.eql(target_handle.uri)) {
+           switch (try target_handle.getAssociatedBuildFile(store)) {
+               .unresolved, .none => {},
+               .resolved => |resolved2| {
+                   const target_module_root_uri: Uri = try .fromPath(arena, resolved2.root_source_file);
+                   // also search through the module in which the symbol has been defined
+                   try found_uris.put(arena, target_module_root_uri, {});
+               },
+           }
+       }
+
+       var i: usize = 0;
+       while (i < found_uris.count()) : (i += 1) {
+           const uri = found_uris.keys()[i];
+           const handle = try store.getOrLoadHandle(uri) orelse continue;
+
+           try found_uris.ensureUnusedCapacity(arena, handle.file_imports.len);
+           for (handle.file_imports) |import_uri| found_uris.putAssumeCapacity(import_uri, {});
+       }
+       return found_uris;
+   }
+
+   var per_file_dependants: Uri.ArrayHashMap(std.ArrayList(Uri)) = .empty;
+
+   var it: DocumentStore.HandleIterator = .{ .store = store };
+   while (it.next()) |handle| {
+       for (handle.file_imports) |import_uri| {
+           const gop = try per_file_dependants.getOrPutValue(arena, import_uri, .empty);
+           try gop.value_ptr.append(arena, handle.uri);
+       }
+   }
+
+   var found_uris: Uri.ArrayHashMap(void) = .empty;
+   try found_uris.put(arena, target_handle.uri, {});
+
+   var i: usize = 0;
+   while (i < found_uris.count()) : (i += 1) {
+       const uri = found_uris.keys()[i];
+       const dependants: std.ArrayList(Uri) = per_file_dependants.get(uri) orelse .empty;
+       try found_uris.ensureUnusedCapacity(arena, dependants.items.len);
+       for (dependants.items) |dependant_uri| found_uris.putAssumeCapacity(dependant_uri, {});
+   }
+
+   return found_uris;
+}
+
 fn controlFlowReferences(
    allocator: std.mem.Allocator,
    token_handle: Analyser.TokenWithHandle,
@@ -568,8 +592,6 @@ const CallBuilder = struct {
 pub fn callsiteReferences(
    analyser: *Analyser,
    decl_handle: Analyser.DeclWithHandle,
-   /// exclude references from the std library
-   skip_std_references: bool,
    /// search other files for references
    workspace: bool,
 ) Analyser.Error!std.ArrayList(Analyser.NodeWithHandle) {
@@ -586,13 +608,17 @@ pub fn callsiteReferences(
    try builder.collectReferences(decl_handle.handle, .root);
 
    if (workspace) {
-       try gatherWorkspaceReferences(
+       var uris = try gatherWorkspaceReferenceCandidates(
            analyser.store,
-           &builder,
+           analyser.arena,
+           decl_handle.handle,
            decl_handle.handle,
-           skip_std_references,
-           false,
        );
+       for (uris.keys()) |uri| {
+           if (uri.eql(decl_handle.handle.uri)) continue;
+           const dependency_handle = try analyser.store.getOrLoadHandle(uri) orelse continue;
+           try builder.collectReferences(dependency_handle, .root);
+       }
    }
 
    return builder.callsites;
@@ -693,7 +719,6 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen
            target_decl,
            server.offset_encoding,
            include_decl,
-           server.config_manager.config.skip_std_references,
            handle,
        ),
    };
diff --git a/tests/lsp_features/references.zig b/tests/lsp_features/references.zig
index e4bbadb57..adc0ce2bc 100644
--- a/tests/lsp_features/references.zig
+++ b/tests/lsp_features/references.zig
@@ -303,15 +303,30 @@ test "switch case capture - union tag" {
 
 test "cross-file reference" {
    try testMultiFileSymbolReferences(&.{
-       // TODO not putting a reference here is a hack to workaround cross-file references being broken https://github.com/zigtools/zls/issues/1071
-       // for now this only tests the ability to find references within a file to a decl from another file
-       \\pub const placeholder = struct {};
+       // Untitled-0.zig
+       \\pub const <0> = struct {};
        ,
+       // Untitled-1.zig
        \\const file = @import("Untitled-0.zig");
        \\const first = file.<0>;
        \\const second = file.<0>;
        ,
-   }, false);
+   }, true);
+}
+
+test "cross-file - transitive import" {
+   try testMultiFileSymbolReferences(&.{
+       // Untitled-0.zig
+       \\pub const <0> = struct {};
+       ,
+       // Untitled-1.zig
+       \\pub const file = @import("Untitled-0.zig");
+       ,
+       // Untitled-2.zig
+       \\const file = @import("Untitled-1.zig").file;
+       \\const foo: file.<0> = undefined;
+       ,
+   }, true);
 }
 
 fn testSymbolReferences(source: []const u8) !void {
@@ -392,11 +407,12 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool
    defer visited.deinit(allocator);
 
    for (actual_locations) |response_location| {
-       const actual_loc = offsets.rangeToLoc(file.new_source, response_location.range, ctx.server.offset_encoding);
        const actual_file_index = files.getIndex(response_location.uri) orelse {
            std.debug.print("received location to unknown file `{s}` as the result\n", .{response_location.uri});
            return error.InvalidReference;
        };
+       const actual_file_source = files.values()[actual_file_index].new_source;
+       const actual_loc = offsets.rangeToLoc(actual_file_source, response_location.range, ctx.server.offset_encoding);
 
        const index = found_index: {
            for (locs.items(.new), locs.items(.file_index), 0..) |expected_loc, expected_file_index, idx| {
@@ -405,12 +421,12 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool
                if (expected_loc.end != actual_loc.end) continue;
                break :found_index idx;
            }
-           try error_builder.msgAtLoc("server returned unexpected reference!", file_uri, actual_loc, .err, .{});
+           try error_builder.msgAtLoc("server returned unexpected reference!", response_location.uri, actual_loc, .err, .{});
            return error.UnexpectedReference;
        };
 
        if (visited.isSet(index)) {
-           try error_builder.msgAtLoc("server returned duplicate reference!", file_uri, actual_loc, .err, .{});
+           try error_builder.msgAtLoc("server returned duplicate reference!", response_location.uri, actual_loc, .err, .{});
            return error.DuplicateReference;
        } else {
            visited.set(index);

From 70a71065a932136c4389b0a35200c3fdc1b65c32 Mon Sep 17 00:00:00 2001
From: Techatrix
Date: Mon, 16 Feb 2026 19:11:32 +0100
Subject: [PATCH 8/9] make Analyser.resolveVarDeclAlias non-recursive
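
The shape of the rewrite, as a rough standalone sketch (the `Node` type
and `alias_of` field are made up for illustration; in ZLS the loop state
is a `ResolveOptions` plus the `node_trail` cycle guard):

```zig
const std = @import("std");

const Node = struct {
    id: u32,
    alias_of: ?*const Node = null,
};

/// Iterative form of alias chasing: `current` plays the role of the recursive
/// call's argument, `result` the role of its return value, and the visited
/// set guards against alias cycles (a -> b -> a).
fn resolveAlias(gpa: std.mem.Allocator, start: *const Node) !?*const Node {
    var visited: std.AutoHashMapUnmanaged(u32, void) = .empty;
    defer visited.deinit(gpa);

    var current = start;
    var result: ?*const Node = null;
    while (current.alias_of) |next| {
        const gop = try visited.getOrPut(gpa, next.id);
        if (gop.found_existing) return null; // cycle detected: give up
        result = next;
        current = next;
    }
    return result;
}

test resolveAlias {
    const c: Node = .{ .id = 2 };
    const b: Node = .{ .id = 1, .alias_of = &c };
    const a: Node = .{ .id = 0, .alias_of = &b };
    try std.testing.expectEqual(@as(u32, 2), (try resolveAlias(std.testing.allocator, &a)).?.id);
}
```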
---
 src/analysis.zig       | 157 +++++++++++++++++++----------------------
 src/features/hover.zig |   6 +-
 2 files changed, 75 insertions(+), 88 deletions(-)

diff --git a/src/analysis.zig b/src/analysis.zig
index 38ec0e6ed..e0302b501 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -692,91 +692,86 @@ pub fn isSnakeCase(name: []const u8) bool {
 /// const decl = @import("decl-file.zig").decl;
 /// const other = decl.middle.other;
 ///```
-pub fn resolveVarDeclAlias(analyser: *Analyser, options: ResolveOptions) Error!?DeclWithHandle {
+pub fn resolveVarDeclAlias(analyser: *Analyser, decl: DeclWithHandle) Error!?DeclWithHandle {
    const tracy_zone = tracy.trace(@src());
    defer tracy_zone.end();
 
+   const initial_node = switch (decl.decl) {
+       .ast_node => |node| node,
+       else => return null,
+   };
+
    var node_trail: NodeSet = .empty;
    defer node_trail.deinit(analyser.gpa);
-   return try analyser.resolveVarDeclAliasInternal(options, &node_trail);
-}
 
-fn resolveVarDeclAliasInternal(analyser: *Analyser, options: ResolveOptions, node_trail: *NodeSet) Error!?DeclWithHandle {
-   const node_handle = options.node_handle;
-   const node_with_uri: NodeWithUri = .{
-       .node = node_handle.node,
-       .uri = node_handle.handle.uri,
+   var current: ResolveOptions = .{
+       .node_handle = .of(initial_node, decl.handle),
+       .container_type = decl.container_type,
    };
+   var result: ?DeclWithHandle = null;
+   while (true) {
+       const node = current.node_handle.node;
+       const handle = current.node_handle.handle;
+       const tree = &handle.tree;
+
+       const resolved: DeclWithHandle = switch (tree.nodeTag(node)) {
+           .identifier => blk: {
+               const name_token = ast.identifierTokenFromIdentifierNode(tree, node) orelse break :blk null;
+               const name = offsets.identifierTokenToNameSlice(tree, name_token);
+               if (current.container_type) |ty| {
+                   break :blk try ty.lookupSymbol(analyser, name);
+               }
+               break :blk try analyser.lookupSymbolGlobal(
+                   handle,
+                   name,
+                   tree.tokenStart(name_token),
+               );
+           },
+           .field_access => blk: {
+               const lhs, const field_name = tree.nodeData(node).node_and_token;
+               const resolved = (try analyser.resolveTypeOfNode(.{
+                   .node_handle = .of(lhs, handle),
+                   .container_type = current.container_type,
+               })) orelse break :blk null;
+               if (!resolved.is_type_val)
+                   break :blk null;
+
+               const symbol_name = offsets.identifierTokenToNameSlice(tree, field_name);
+
+               break :blk try resolved.lookupSymbol(analyser, symbol_name);
+           },
+           .global_var_decl,
+           .local_var_decl,
+           .aligned_var_decl,
+           .simple_var_decl,
+           => {
+               const var_decl = tree.fullVarDecl(node).?;
 
-   const gop = try node_trail.getOrPut(analyser.gpa, node_with_uri);
-   if (gop.found_existing) return null;
-
-   const handle = node_handle.handle;
-   const tree = &handle.tree;
-
-   const resolved = switch (tree.nodeTag(node_handle.node)) {
-       .identifier => blk: {
-           const name_token = ast.identifierTokenFromIdentifierNode(tree, node_handle.node) orelse break :blk null;
-           const name = offsets.identifierTokenToNameSlice(tree, name_token);
-           if (options.container_type) |ty| {
-               break :blk try ty.lookupSymbol(analyser, name);
-           }
-           break :blk try analyser.lookupSymbolGlobal(
-               handle,
-               name,
-               tree.tokenStart(name_token),
-           );
-       },
-       .field_access => blk: {
-           const lhs, const field_name = tree.nodeData(node_handle.node).node_and_token;
-           const resolved = (try analyser.resolveTypeOfNode(.{
-               .node_handle = .of(lhs, handle),
-               .container_type = options.container_type,
-           })) orelse return null;
-           if (!resolved.is_type_val)
-               return null;
-
-           const symbol_name = offsets.identifierTokenToNameSlice(tree, field_name);
-
-           break :blk try resolved.lookupSymbol(analyser, symbol_name);
-       },
-       .global_var_decl,
-       .local_var_decl,
-       .aligned_var_decl,
-       .simple_var_decl,
-       => {
-           const var_decl = tree.fullVarDecl(node_handle.node).?;
+               const base_exp = var_decl.ast.init_node.unwrap() orelse return result;
+               if (tree.tokenTag(var_decl.ast.mut_token) != .keyword_const) return result;
 
-           const base_exp = var_decl.ast.init_node.unwrap() orelse return null;
-           if (tree.tokenTag(var_decl.ast.mut_token) != .keyword_const) return null;
+               const gop = try node_trail.getOrPut(analyser.gpa, .{ .node = base_exp, .uri = handle.uri });
+               if (gop.found_existing) return null;
 
-           return try analyser.resolveVarDeclAliasInternal(.{
-               .node_handle = .of(base_exp, handle),
-               .container_type = options.container_type,
-           }, node_trail);
-       },
-       else => return null,
-   } orelse return null;
+               current.node_handle.node = base_exp;
+               continue;
+           },
+           else => null,
+       } orelse return result;
 
-   const resolved_node = switch (resolved.decl) {
-       .ast_node => |node| node,
-       else => return resolved,
-   };
+       const resolved_node = switch (resolved.decl) {
+           .ast_node => |resolved_node| resolved_node,
+           else => return resolved,
+       };
 
-   if (node_trail.contains(.{
-       .node = resolved_node,
-       .uri = resolved.handle.uri,
-   })) {
-       return null;
-   }
+       const gop = try node_trail.getOrPut(analyser.gpa, .{ .node = resolved_node, .uri = resolved.handle.uri });
+       if (gop.found_existing) return null;
 
-   if (try analyser.resolveVarDeclAliasInternal(.{
-       .node_handle = .of(resolved_node, resolved.handle),
-       .container_type = options.container_type,
-   }, node_trail)) |result| {
-       return result;
-   } else {
-       return resolved;
+       current = .{
+           .node_handle = .of(resolved_node, resolved.handle),
+           .container_type = resolved.container_type,
+       };
+       result = resolved;
    }
 }
@@ -1442,7 +1437,11 @@ pub fn resolvePrimitive(analyser: *Analyser, identifier_name: []const u8) error{
 fn resolveStringLiteral(analyser: *Analyser, options: ResolveOptions) Error!?[]const u8 {
    var node_with_handle = options.node_handle;
 
-   if (try analyser.resolveVarDeclAlias(options)) |decl_with_handle| {
+   if (try analyser.resolveVarDeclAlias(.{
+       .decl = .{ .ast_node = options.node_handle.node },
+       .handle = options.node_handle.handle,
+       .container_type = options.container_type,
+   })) |decl_with_handle| {
        if (decl_with_handle.decl == .ast_node) {
            node_with_handle = .{
                .node = decl_with_handle.decl.ast_node,
@@ -5212,16 +5211,8 @@ pub const DeclWithHandle = struct {
 
    pub fn definitionToken(self: DeclWithHandle, analyser: *Analyser, resolve_alias: bool) Error!TokenWithHandle {
        if (resolve_alias) {
-           switch (self.decl) {
-               .ast_node => |node| {
-                   if (try analyser.resolveVarDeclAlias(.{
-                       .node_handle = .of(node, self.handle),
-                       .container_type = self.container_type,
-                   })) |result| {
-                       return result.definitionToken(analyser, resolve_alias);
-                   }
-               },
-               else => {},
+           if (try analyser.resolveVarDeclAlias(self)) |result| {
+               return result.definitionToken(analyser, resolve_alias);
            }
            if (try self.resolveType(analyser)) |resolved_type| {
                if (resolved_type.is_type_val) {
diff --git a/src/features/hover.zig b/src/features/hover.zig
index 0bf7a4477..e5c717b23 100644
--- a/src/features/hover.zig
+++ b/src/features/hover.zig
@@ -31,11 +31,7 @@ fn hoverSymbol(
        if (try decl_handle.docComments(arena)) |doc_string| {
            try doc_strings.append(arena, doc_string);
        }
-       if (decl_handle.decl != .ast_node) break;
-       decl_handle = try analyser.resolveVarDeclAlias(.{
-           .node_handle = .of(decl_handle.decl.ast_node, decl_handle.handle),
-           .container_type = decl_handle.container_type,
-       }) orelse break;
+       decl_handle = try analyser.resolveVarDeclAlias(decl_handle) orelse break;
        maybe_resolved_type = maybe_resolved_type orelse try decl_handle.resolveType(analyser);
    }
 

From d387dd5efc2a3469c08c1e48c2aee069e4605ecf Mon Sep 17 00:00:00 2001
From: Techatrix
Date: Mon, 16 Feb 2026 19:22:14 +0100
Subject: [PATCH 9/9] resolve references to aliased symbols

Only works for aliases that do not rename the symbol.

```zig
const A = foo.A; // works
const B = foo.A; // doesn't work because the symbol was renamed
```
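
This limitation falls out of the matching order in `Builder.referenceNode`:
the textual name filter runs before alias resolution, so a renamed alias
never reaches the `resolveVarDeclAlias` step. A toy model of that order
(plain strings and integer ids standing in for ZLS's declaration types):

```zig
const std = @import("std");

/// Toy model of the candidate check: compare names first, then compare the
/// alias-resolved declaration (declarations are just integer ids here).
fn isReference(candidate_name: []const u8, resolved_decl: u32, target_name: []const u8, target_decl: u32) bool {
    // 1. cheap textual filter: `const B = foo.A;` is rejected right here
    if (!std.mem.eql(u8, candidate_name, target_name)) return false;
    // 2. only then compare the alias-resolved declaration against the target
    return resolved_decl == target_decl;
}

test isReference {
    // `const A = foo.A;` - same name and resolves to the target: found
    try std.testing.expect(isReference("A", 0, "A", 0));
    // `const B = foo.A;` - resolves to the target but was renamed: missed
    try std.testing.expect(!isReference("B", 0, "A", 0));
}
```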
---
 src/features/references.zig       | 67 +++++++++++++++++++++----------
 tests/lsp_features/references.zig | 25 +++++++++++-
 2 files changed, 68 insertions(+), 24 deletions(-)

diff --git a/src/features/references.zig b/src/features/references.zig
index 6b524346c..af80c440e 100644
--- a/src/features/references.zig
+++ b/src/features/references.zig
@@ -108,7 +108,7 @@ const Builder = struct {
            builder.target_symbol.nameToken(),
        );
 
-       switch (tree.nodeTag(node)) {
+       var candidate: Analyser.DeclWithHandle, const name_token = candidate: switch (tree.nodeTag(node)) {
            .identifier,
            .test_decl,
            => |tag| {
@@ -124,15 +124,12 @@ const Builder = struct {
                const name = offsets.identifierTokenToNameSlice(tree, name_token);
                if (!std.mem.eql(u8, name, target_symbol_name)) return;
 
-               const child = try builder.analyser.lookupSymbolGlobal(
+               const candidate = try builder.analyser.lookupSymbolGlobal(
                    handle,
                    name,
                    tree.tokenStart(name_token),
                ) orelse return;
-
-               if (builder.target_symbol.eql(child)) {
-                   try builder.add(handle, name_token);
-               }
+               break :candidate .{ candidate, name_token };
            },
            .field_access => {
                if (builder.local_only_decl) return;
@@ -143,11 +140,8 @@ const Builder = struct {
                const lhs = try builder.analyser.resolveTypeOfNode(.of(lhs_node, handle)) orelse return;
                const deref_lhs = try builder.analyser.resolveDerefType(lhs) orelse lhs;
 
-               const child = try deref_lhs.lookupSymbol(builder.analyser, name) orelse return;
-
-               if (builder.target_symbol.eql(child)) {
-                   try builder.add(handle, field_token);
-               }
+               const candidate = try deref_lhs.lookupSymbol(builder.analyser, name) orelse return;
+               break :candidate .{ candidate, field_token };
            },
            .struct_init_one,
            .struct_init_one_comma,
@@ -185,33 +179,60 @@ const Builder = struct {
                    else => unreachable,
                };
 
-               const lookup = try builder.analyser.lookupSymbolFieldInit(
+               const candidate = try builder.analyser.lookupSymbolFieldInit(
                    handle,
                    name,
                    nodes[0],
                    nodes[1..],
                ) orelse return;
-               if (builder.target_symbol.eql(lookup)) {
-                   try builder.add(handle, name_token);
-               }
 
                // if we get here then we know that the name of the field matched
                // and duplicate fields are invalid so just return early
-               return;
+               break :candidate .{ candidate, name_token };
            }
+           return;
        },
        .enum_literal => {
            if (builder.local_only_decl) return;
            const name_token = tree.nodeMainToken(node);
            const name = offsets.identifierTokenToNameSlice(&handle.tree, name_token);
            if (!std.mem.eql(u8, name, target_symbol_name)) return;
-               const lookup = try builder.analyser.getSymbolEnumLiteral(handle, tree.tokenStart(name_token), name) orelse return;
+               const candidate = try builder.analyser.getSymbolEnumLiteral(handle, tree.tokenStart(name_token), name) orelse return;
+               break :candidate .{ candidate, name_token };
+           },
+           .global_var_decl,
+           .local_var_decl,
+           .aligned_var_decl,
+           .simple_var_decl,
+           => {
+               if (builder.local_only_decl) return;
+               const var_decl = tree.fullVarDecl(node).?;
 
-               if (builder.target_symbol.eql(lookup)) {
-                   try builder.add(handle, name_token);
-               }
+               const alias_name_token = var_decl.ast.mut_token + 1;
+               const alias_name = offsets.identifierTokenToNameSlice(&handle.tree, alias_name_token);
+               if (!std.mem.eql(u8, alias_name, target_symbol_name)) return;
+
+               const init_node = var_decl.ast.init_node.unwrap() orelse return;
+               if (tree.tokenTag(var_decl.ast.mut_token) != .keyword_const) return;
+
+               if (tree.nodeTag(init_node) != .field_access) return;
+               const lhs_node, const field_token = tree.nodeData(init_node).node_and_token;
+               const field_name = offsets.identifierTokenToNameSlice(tree, field_token);
+               if (!std.mem.eql(u8, field_name, target_symbol_name)) return;
+
+               const lhs = try builder.analyser.resolveTypeOfNode(.of(lhs_node, handle)) orelse return;
+               const deref_lhs = try builder.analyser.resolveDerefType(lhs) orelse lhs;
+
+               const candidate = try deref_lhs.lookupSymbol(builder.analyser, field_name) orelse return;
+               break :candidate .{ candidate, alias_name_token };
            },
-           else => {},
+           else => return,
+       };
+
+       candidate = try builder.analyser.resolveVarDeclAlias(candidate) orelse candidate;
+
+       if (builder.target_symbol.eql(candidate)) {
+           try builder.add(handle, name_token);
        }
    }
 };
@@ -688,7 +709,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen
    const name_loc = offsets.identifierLocFromIndex(&handle.tree, source_index) orelse return null;
    const name = offsets.locToSlice(handle.tree.source, name_loc);
 
-   const target_decl = switch (pos_context) {
+   var target_decl = switch (pos_context) {
        .var_access, .test_doctest_name => try analyser.lookupSymbolGlobal(handle, name, source_index),
        .field_access => |loc| z: {
            const held_loc = offsets.locMerge(loc, name_loc);
@@ -705,6 +726,8 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen
        else => null,
    } orelse return null;
 
+   target_decl = try analyser.resolveVarDeclAlias(target_decl) orelse target_decl;
+
    break :locs switch (target_decl.decl) {
        .label => |payload| try labelReferences(
            arena,
diff --git a/tests/lsp_features/references.zig b/tests/lsp_features/references.zig
index adc0ce2bc..dfbc45e62 100644
--- a/tests/lsp_features/references.zig
+++ b/tests/lsp_features/references.zig
@@ -308,8 +308,12 @@ test "cross-file reference" {
        ,
        // Untitled-1.zig
        \\const file = @import("Untitled-0.zig");
-       \\const first = file.<0>;
-       \\const second = file.<0>;
+       \\const <0> = file.<0>;
+       \\const renamed = file.<0>;
+       \\comptime {
+       \\    _ = <0>;
+       \\    _ = renamed;
+       \\}
        ,
    }, true);
 }
@@ -329,6 +333,23 @@ test "cross-file - transitive import" {
    }, true);
 }
 
+test "cross-file - alias" {
+   try testMultiFileSymbolReferences(&.{
+       // Untitled-0.zig
+       \\pub const <0> = struct {
+       \\    fn foo(_: <0>) void {}
+       \\    var bar: <0> = undefined;
+       \\};
+       ,
+       // Untitled-1.zig
+       \\const <0> = @import("Untitled-0.zig").<0>;
+       \\comptime {
+       \\    _ = <0>;
+       \\}
+       ,
+   }, true);
+}
+
 fn testSymbolReferences(source: []const u8) !void {
    return testMultiFileSymbolReferences(&.{source}, true);
 }