diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig index b927a8cff..30074163c 100644 --- a/src/DocumentScope.zig +++ b/src/DocumentScope.zig @@ -13,36 +13,6 @@ declarations: std.MultiArrayList(Declaration), /// used for looking up a child declaration in a given scope declaration_lookup_map: DeclarationLookupMap, extra: std.ArrayList(u32), -/// All identifier token that are in error sets. -/// When there are multiple error sets that contain the same error, only one of them is stored. -/// A token that has a doc comment takes priority. -/// This means that if there a multiple error sets with the same name, only one of them is included. -global_error_set: IdentifierSet, -/// All identifier token that are in enums. -/// When there are multiple enums that contain the field name, only one of them is stored. -/// A token that has a doc comment takes priority. -/// This means that if there a multiple enums with the same name, only one of them is included. -global_enum_set: IdentifierSet, - -/// Stores a set of identifier tokens with unique names -pub const IdentifierSet = std.ArrayHashMapUnmanaged(Ast.TokenIndex, void, IdentifierTokenContext, true); - -pub const IdentifierTokenContext = struct { - tree: *const Ast, - - pub fn eql(self: @This(), a: Ast.TokenIndex, b: Ast.TokenIndex, b_index: usize) bool { - _ = b_index; - if (a == b) return true; - const a_name = offsets.identifierTokenToNameSlice(self.tree, a); - const b_name = offsets.identifierTokenToNameSlice(self.tree, b); - return std.mem.eql(u8, a_name, b_name); - } - - pub fn hash(self: @This(), token: Ast.TokenIndex) u32 { - const name = offsets.identifierTokenToNameSlice(self.tree, token); - return std.array_hash_map.hashString(name); - } -}; /// Every `index` inside this `ArrayhashMap` is equivalent to a `Declaration.Index` /// This means that every declaration is only the child of a single scope @@ -524,8 +494,6 @@ pub fn init(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!D .declarations = .empty, .declaration_lookup_map = .empty, .extra = .empty, - .global_error_set = .empty, - .global_enum_set = .empty, }; errdefer document_scope.deinit(allocator); @@ -557,9 +525,6 @@ pub fn deinit(scope: *DocumentScope, allocator: std.mem.Allocator) void { scope.declarations.deinit(allocator); scope.declaration_lookup_map.deinit(allocator); scope.extra.deinit(allocator); - - scope.global_enum_set.deinit(allocator); - scope.global_error_set.deinit(allocator); } fn locToSmallLoc(loc: offsets.Loc) Scope.SmallLoc { @@ -807,17 +772,6 @@ noinline fn walkContainerDecl( var buf: [2]Ast.Node.Index = undefined; const container_decl = tree.fullContainerDecl(&buf, node_idx).?; - const is_enum_or_tagged_union, const is_struct = blk: { - if (node_idx == .root) break :blk .{ false, true }; - break :blk switch (tree.tokenTag(container_decl.ast.main_token)) { - .keyword_enum => .{ true, false }, - .keyword_union => .{ container_decl.ast.enum_token != null or container_decl.ast.arg != .none, false }, - .keyword_struct => .{ false, true }, - .keyword_opaque => .{ false, false }, - else => unreachable, - }; - }; - const scope = try context.startScope( .container, .{ .ast_node = node_idx }, @@ -837,6 +791,8 @@ noinline fn walkContainerDecl( .container_field_align, => { var container_field = tree.fullContainerField(decl).?; + + const is_struct = node_idx == .root or tree.tokenTag(container_decl.ast.main_token) == .keyword_struct; if (is_struct and container_field.ast.tuple_like) continue; container_field.convertToNonTupleLike(tree); @@ -844,23 
+800,6 @@ noinline fn walkContainerDecl( const main_token = container_field.ast.main_token; if (tree.tokenTag(main_token) != .identifier) continue; try scope.pushDeclaration(main_token, .{ .ast_node = decl }, .field); - - if (is_enum_or_tagged_union) { - const name = offsets.identifierTokenToNameSlice(tree, main_token); - if (std.mem.eql(u8, name, "_")) continue; - - const gop = try context.doc_scope.global_enum_set.getOrPutContext( - context.allocator, - main_token, - .{ .tree = tree }, - ); - if (!gop.found_existing) { - gop.key_ptr.* = main_token; - } else if (gop.found_existing and tree.tokenTag(main_token - 1) == .doc_comment) { - // a token with a doc comment takes priority. - gop.key_ptr.* = main_token; - } - } }, .fn_proto, .fn_proto_multi, @@ -905,15 +844,6 @@ noinline fn walkErrorSetNode( const identifier_token: Ast.TokenIndex = @intCast(tok_i); try scope.pushDeclaration(identifier_token, .{ .error_token = identifier_token }, .other); - const gop = try context.doc_scope.global_error_set.getOrPutContext( - context.allocator, - identifier_token, - .{ .tree = tree }, - ); - if (!gop.found_existing or tree.tokenTag(identifier_token - 1) == .doc_comment) { - // a token with a doc comment takes priority. - gop.key_ptr.* = identifier_token; - } } try scope.finalize(); diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index 37fa7d05f..e7b5b01d2 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -25,7 +25,7 @@ mutex: std.Io.Mutex = .init, wait_group: if (supports_build_system) std.Io.Group else void = if (supports_build_system) .init else {}, handles: Uri.ArrayHashMap(*Handle.Future) = .empty, build_files: if (supports_build_system) Uri.ArrayHashMap(*BuildFile) else void = if (supports_build_system) .empty else {}, -cimports: if (supports_build_system) std.AutoArrayHashMapUnmanaged(Hash, translate_c.Result) else void = if (supports_build_system) .empty else {}, +cimports: if (supports_build_system) std.AutoArrayHashMapUnmanaged(CImportHash, translate_c.Result) else void = if (supports_build_system) .empty else {}, diagnostics_collection: *DiagnosticsCollection, builds_in_progress: std.atomic.Value(i32) = .init(0), transport: ?*lsp.Transport = null, @@ -35,19 +35,8 @@ lsp_capabilities: struct { supports_inlay_hints_refresh: bool = false, } = .{}, -pub const Hasher = std.crypto.auth.siphash.SipHash128(1, 3); -pub const Hash = [Hasher.mac_length]u8; - pub const supports_build_system = std.process.can_spawn; -pub fn computeHash(bytes: []const u8) Hash { - var hasher: Hasher = .init(&@splat(0)); - hasher.update(bytes); - var hash: Hash = undefined; - hasher.final(&hash); - return hash; -} - pub const Config = struct { environ_map: *const std.process.Environ.Map, zig_exe_path: ?[]const u8, @@ -98,10 +87,9 @@ pub const BuildFile = struct { } /// Returns whether the `Uri` is a dependency of the given `BuildFile`. - /// May return `null` to indicate an inconclusive result because + /// May return `.unknown` to indicate an inconclusive result because /// the required build config has not been resolved yet. 
 ///
-    /// invalidates any pointers into `build_files`
     /// **Thread safe** takes an exclusive lock
     fn isAssociatedWith(
         build_file: *BuildFile,
@@ -154,9 +142,8 @@ pub const BuildFile = struct {
 
                 const handle = try store.getOrLoadHandle(source_uri) orelse continue;
 
-                const import_uris = (try handle.import_uris.get(handle)).*;
-                try found_uris.ensureUnusedCapacity(arena, import_uris.len);
-                for (import_uris) |import_uri| found_uris.putAssumeCapacity(try import_uri.dupe(arena), {});
+                try found_uris.ensureUnusedCapacity(arena, handle.file_imports.len);
+                for (handle.file_imports) |import_uri| found_uris.putAssumeCapacity(try import_uri.dupe(arena), {});
             }
         }
 
@@ -175,14 +162,15 @@ pub const BuildFile = struct {
 pub const Handle = struct {
     uri: Uri,
     tree: Ast,
-    /// Contains one entry for every cimport in the document
+    /// List of every file that has been `@import`ed. Does not include imported modules.
+    file_imports: []const Uri,
+    /// Contains one entry for every `@cImport` in the document
     cimports: std.MultiArrayList(CImportHandle),
     /// `true` if the document has been directly opened by the client i.e. with `textDocument/didOpen`
     /// `false` indicates the document only exists because it is a dependency of another document
     /// or has been closed with `textDocument/didClose`.
     lsp_synced: bool,
     document_scope: Lazy(DocumentScope, DocumentStoreContext) = .unset,
-    import_uris: Lazy([]const Uri, ImportUrisContext) = .unset,
 
     /// private field
     impl: struct {
@@ -428,8 +416,27 @@ pub const Handle = struct {
         var new_tree = try parseTree(allocator, text, mode);
         errdefer new_tree.deinit(allocator);
 
-        var new_cimports = try collectCIncludes(allocator, &new_tree);
-        errdefer new_cimports.deinit(allocator);
+        var new_file_imports: std.ArrayList(Uri) = .empty;
+        errdefer new_file_imports.deinit(allocator);
+
+        var new_cimports: std.MultiArrayList(CImportHandle) = .empty;
+        errdefer {
+            for (new_cimports.items(.source)) |source| {
+                allocator.free(source);
+            }
+            new_cimports.deinit(allocator);
+        }
+
+        try collectImports(
+            allocator,
+            handle.uri,
+            &new_tree,
+            &new_file_imports,
+            &new_cimports,
+        );
+
+        const file_imports = try new_file_imports.toOwnedSlice(allocator);
+        errdefer {
+            for (file_imports) |import_uri| import_uri.deinit(allocator);
+            allocator.free(file_imports);
+        }
 
         errdefer comptime unreachable;
 
@@ -440,13 +447,13 @@
         old_handle.cimports = handle.cimports;
 
         handle.tree = new_tree;
+        old_handle.file_imports = handle.file_imports;
+        handle.file_imports = file_imports;
         handle.cimports = new_cimports;
         handle.impl.has_tree_and_source = true;
 
         old_handle.document_scope = handle.document_scope;
         handle.document_scope = .unset;
-
-        old_handle.import_uris = handle.import_uris;
-        handle.import_uris = .unset;
     }
 
     fn parseTree(allocator: std.mem.Allocator, new_text: [:0]const u8, mode: Ast.Mode) error{OutOfMemory}!Ast {
@@ -468,10 +475,76 @@ pub const Handle = struct {
         return tree;
    }
 
+    fn collectImports(
+        allocator: std.mem.Allocator,
+        uri: Uri,
+        tree: *const Ast,
+        file_imports: *std.ArrayList(Uri),
+        cimports: *std.MultiArrayList(CImportHandle),
+    ) error{OutOfMemory}!void {
+        const tracy_zone = tracy.trace(@src());
+        defer tracy_zone.end();
+
+        const parsed_uri = std.Uri.parse(uri.raw) catch unreachable; // The Uri is guaranteed to be valid
+
+        const node_tags = tree.nodes.items(.tag);
+        for (node_tags, 0..) 
|tag, i| { + const node: Ast.Node.Index = @enumFromInt(i); + + switch (tag) { + .builtin_call, + .builtin_call_comma, + .builtin_call_two, + .builtin_call_two_comma, + => {}, + else => continue, + } + const name = offsets.tokenToSlice(tree, tree.nodeMainToken(node)); + + if (std.mem.eql(u8, name, "@import")) { + try file_imports.ensureUnusedCapacity(allocator, 1); + + var buffer: [2]Ast.Node.Index = undefined; + const params = tree.builtinCallParams(&buffer, node).?; + if (params.len < 1) continue; + if (tree.nodeTag(params[0]) != .string_literal) continue; + + var import_string = offsets.tokenToSlice(tree, tree.nodeMainToken(params[0])); + import_string = import_string[1 .. import_string.len - 1]; + + if (!std.mem.endsWith(u8, import_string, ".zig")) continue; + + const import_uri = try Uri.resolveImport(allocator, uri, parsed_uri, import_string); + file_imports.appendAssumeCapacity(import_uri); + continue; + } + + if (std.mem.eql(u8, name, "@cImport")) { + try cimports.ensureUnusedCapacity(allocator, 1); + + const c_source = translate_c.convertCInclude(allocator, tree, node) catch |err| switch (err) { + error.Unsupported => continue, + error.OutOfMemory => return error.OutOfMemory, + }; + + var hasher: CImportHasher = .init(&@splat(0)); + hasher.update(c_source); + + cimports.appendAssumeCapacity(.{ + .node = node, + .hash = hasher.finalResult(), + .source = c_source, + }); + continue; + } + } + } + /// A handle that can only be deallocated. Keep in sync with `deinit`. const dead: Handle = .{ .uri = undefined, .tree = undefined, + .file_imports = &.{}, .cimports = .empty, .lsp_synced = undefined, .impl = .{ @@ -491,7 +564,8 @@ pub const Handle = struct { self.tree.deinit(allocator); } self.document_scope.deinit(allocator); - self.import_uris.deinit(allocator); + for (self.file_imports) |uri| uri.deinit(allocator); + allocator.free(self.file_imports); for (self.cimports.items(.source)) |source| allocator.free(source); self.cimports.deinit(allocator); @@ -568,36 +642,6 @@ pub const Handle = struct { document_scope.deinit(allocator); } }; - - const ImportUrisContext = struct { - fn create(handle: *Handle, allocator: std.mem.Allocator) error{OutOfMemory}![]const Uri { - var imports = try analysis.collectImports(allocator, &handle.tree); - defer imports.deinit(allocator); - - const base_path = handle.uri.toFsPath(allocator) catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - error.UnsupportedScheme => return &.{}, - }; - defer allocator.free(base_path); - - var uris: std.ArrayList(Uri) = try .initCapacity(allocator, imports.items.len); - errdefer { - for (uris.items) |uri| uri.deinit(allocator); - uris.deinit(allocator); - } - - for (imports.items) |import_str| { - if (!std.mem.endsWith(u8, import_str, ".zig")) continue; - uris.appendAssumeCapacity(try resolveFileImportString(allocator, base_path, import_str) orelse continue); - } - - return try uris.toOwnedSlice(allocator); - } - fn deinit(import_uris: *[]const Uri, allocator: std.mem.Allocator) void { - for (import_uris.*) |uri| uri.deinit(allocator); - allocator.free(import_uris.*); - } - }; }; pub const HandleIterator = struct { @@ -1441,6 +1485,7 @@ fn createAndStoreDocument( .handle = .{ .uri = gop.key_ptr.*, .tree = undefined, + .file_imports = &.{}, .cimports = .empty, .lsp_synced = options.lsp_synced, .impl = .{ @@ -1478,98 +1523,18 @@ fn createAndStoreDocument( return &handle_future.handle; } +pub const CImportHasher = std.crypto.auth.siphash.SipHash128(1, 3); +pub const CImportHash = 
[CImportHasher.mac_length]u8; + pub const CImportHandle = struct { /// the `@cImport` node node: Ast.Node.Index, /// hash of c source file - hash: Hash, + hash: CImportHash, /// c source file source: []const u8, }; -/// Collects all `@cImport` nodes and converts them into c source code if possible -/// Caller owns returned memory. -fn collectCIncludes(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!std.MultiArrayList(CImportHandle) { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - const cimport_nodes = try analysis.collectCImportNodes(allocator, tree); - defer allocator.free(cimport_nodes); - - var sources: std.MultiArrayList(CImportHandle) = .empty; - try sources.ensureTotalCapacity(allocator, cimport_nodes.len); - errdefer { - for (sources.items(.source)) |source| { - allocator.free(source); - } - sources.deinit(allocator); - } - - for (cimport_nodes) |node| { - const c_source = translate_c.convertCInclude(allocator, tree, node) catch |err| switch (err) { - error.Unsupported => continue, - error.OutOfMemory => return error.OutOfMemory, - }; - - sources.appendAssumeCapacity(.{ - .node = node, - .hash = computeHash(c_source), - .source = c_source, - }); - } - - return sources; -} - -/// collects every file uri the given handle depends on -/// includes imports, cimports & packages -/// **Thread safe** takes a shared lock -pub fn collectDependencies( - store: *DocumentStore, - allocator: std.mem.Allocator, - handle: *Handle, - dependencies: *std.ArrayList(Uri), -) error{ Canceled, OutOfMemory }!void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - const import_uris = (try handle.import_uris.get(handle)).*; - - try dependencies.ensureUnusedCapacity(allocator, import_uris.len + handle.cimports.len); - for (import_uris) |uri| { - dependencies.appendAssumeCapacity(try uri.dupe(allocator)); - } - - if (supports_build_system) { - try store.mutex.lock(store.io); - defer store.mutex.unlock(store.io); - for (handle.cimports.items(.hash)) |hash| { - const result = store.cimports.get(hash) orelse continue; - switch (result) { - .success => |uri| dependencies.appendAssumeCapacity(try uri.dupe(allocator)), - .failure => continue, - } - } - } - - if (supports_build_system) no_build_file: { - const build_file = switch (try handle.getAssociatedBuildFile(store)) { - .none, .unresolved => break :no_build_file, - .resolved => |resolved| resolved.build_file, - }; - - const build_config = build_file.tryLockConfig(store.io) orelse break :no_build_file; - defer build_file.unlockConfig(store.io); - - const module_paths = build_config.modules.map.keys(); - - try dependencies.ensureUnusedCapacity(allocator, module_paths.len); - for (module_paths) |module_path| { - dependencies.appendAssumeCapacity(try .fromPath(allocator, module_path)); - } - } -} - /// returns `true` if all include paths could be collected /// may return `false` because include paths from a build.zig may not have been resolved already /// **Thread safe** takes a shared lock @@ -1686,7 +1651,7 @@ pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Inde // TODO regenerate cimports if the header files gets modified const index = std.mem.findScalar(Ast.Node.Index, handle.cimports.items(.node), node) orelse return null; - const hash: Hash = handle.cimports.items(.hash)[index]; + const hash: CImportHash = handle.cimports.items(.hash)[index]; const source = handle.cimports.items(.source)[index]; { @@ -1873,13 +1838,8 @@ pub fn uriFromImportStr( defer 
tracy_zone.end();
 
     if (std.mem.endsWith(u8, import_str, ".zig") or std.mem.endsWith(u8, import_str, ".zon")) {
-        const base_path = handle.uri.toFsPath(allocator) catch |err| switch (err) {
-            error.OutOfMemory => return error.OutOfMemory,
-            error.UnsupportedScheme => return .none,
-        };
-        defer allocator.free(base_path);
-        const uri = try resolveFileImportString(allocator, base_path, import_str) orelse return .none;
-        return .{ .one = uri };
+        const parsed_uri = std.Uri.parse(handle.uri.raw) catch unreachable; // The Uri is guaranteed to be valid
+        return .{ .one = try Uri.resolveImport(allocator, handle.uri, parsed_uri, import_str) };
     }
 
     if (std.mem.eql(u8, import_str, "std")) {
@@ -1949,10 +1909,3 @@ pub fn uriFromImportStr(
         },
     }
 }
-
-fn resolveFileImportString(allocator: std.mem.Allocator, base_path: []const u8, import_str: []const u8) error{OutOfMemory}!?Uri {
-    const joined_path = try std.fs.path.resolve(allocator, &.{ base_path, "..", import_str });
-    defer allocator.free(joined_path);
-
-    return try .fromPath(allocator, joined_path);
-}
diff --git a/src/Uri.zig b/src/Uri.zig
index 3970e8ad2..26f6fa994 100644
--- a/src/Uri.zig
+++ b/src/Uri.zig
@@ -8,6 +8,7 @@ const Uri = @This();
 /// - consistent casing of the Windows drive letter
 /// - consistent path seperator on Windows (convert '\\' to '/')
 /// - always add an authority component even if unnecessary
+/// - remove query and fragment component
 raw: []const u8,
 
 pub fn parse(allocator: std.mem.Allocator, text: []const u8) (std.Uri.ParseError || error{OutOfMemory})!Uri {
@@ -35,7 +36,7 @@ fn parseWithOs(
         }
         capacity += host.percent_encoded.len;
     }
-    if (uri.port != null) capacity += comptime ":".len + std.math.log10_int(@as(usize, std.math.maxInt(u16))); // TODO check this
+    if (uri.port != null) capacity += comptime ":".len + std.math.log10_int(@as(usize, std.math.maxInt(u16)));
     if (!std.mem.startsWith(u8, uri.path.percent_encoded, "/")) {
         capacity += "/".len;
     }
@@ -115,9 +116,9 @@ test "parse - always add authority component (posix)" {
 }
 
 test "parse - normalize percent encoding (posix)" {
-    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///foo%5cmain%2ezig", false);
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///f%Aao%5cmain%2ezig", false);
     defer uri.deinit(std.testing.allocator);
-    try std.testing.expectEqualStrings("file:///foo%5Cmain.zig", uri.raw);
+    try std.testing.expectEqualStrings("file:///f%AAo%5Cmain.zig", uri.raw);
 }
 
 test "parse - convert percent encoded '\\' to '/' (windows)" {
@@ -419,6 +420,60 @@ test "toFsPath - UNC (windows)" {
     try std.testing.expectEqualStrings(uri.raw, round_trip_uri.raw);
 }
 
+pub fn resolveImport(
+    allocator: std.mem.Allocator,
+    uri: Uri,
+    parsed_uri: std.Uri,
+    sub_path: []const u8,
+) error{OutOfMemory}!Uri {
+    var result: std.ArrayList(u8) = try .initCapacity(allocator, uri.raw.len + sub_path.len);
+    {
+        errdefer comptime unreachable;
+        result.printAssumeCapacity("{s}:", .{parsed_uri.scheme});
+        result.appendSliceAssumeCapacity("//");
+        if (parsed_uri.host) |host| {
+            if (parsed_uri.user) |user| {
+                result.appendSliceAssumeCapacity(user.percent_encoded);
+                if (parsed_uri.password) |password| {
+                    result.appendAssumeCapacity(':');
+                    result.appendSliceAssumeCapacity(password.percent_encoded);
+                }
+                result.appendAssumeCapacity('@');
+            }
+            result.appendSliceAssumeCapacity(host.percent_encoded);
+            if (parsed_uri.port) |port| result.printAssumeCapacity(":{d}", .{port});
+        }
+    }
+    var aw: std.Io.Writer.Allocating = .fromArrayList(allocator, &result);
+    defer 
aw.deinit();
+
+    const percent_encoded_path = parsed_uri.path.percent_encoded;
+
+    const joined_path = try std.fs.path.resolvePosix(allocator, &.{ percent_encoded_path, "..", sub_path });
+    defer allocator.free(joined_path);
+
+    std.Uri.Component.percentEncode(&aw.writer, joined_path, isPathChar) catch unreachable;
+
+    return .{ .raw = try aw.toOwnedSlice() };
+}
+
+test "resolve" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///dir/main.zig", false);
+    defer uri.deinit(std.testing.allocator);
+
+    const parsed_uri = std.Uri.parse(uri.raw) catch unreachable;
+
+    const resolved_uri = try resolveImport(std.testing.allocator, uri, parsed_uri, "foo bar.zig");
+    defer resolved_uri.deinit(std.testing.allocator);
+
+    try std.testing.expectEqualStrings("file:///dir/foo%20bar.zig", resolved_uri.raw);
+
+    const round_trip_uri: Uri = try .parseWithOs(std.testing.allocator, resolved_uri.raw, false);
+    defer round_trip_uri.deinit(std.testing.allocator);
+
+    try std.testing.expectEqualStrings(round_trip_uri.raw, resolved_uri.raw);
+}
+
 fn normalizePercentEncoded(
     result: *std.ArrayList(u8),
     percent_encoded: []const u8,
@@ -435,21 +490,21 @@ fn normalizePercentEncoded(
             const lower_value = std.fmt.charToDigit(lower_hex, 16) catch continue;
             const percent_encoded_char = upper_value * 16 + lower_value;
 
-            if (!isValidChar(percent_encoded_char)) {
-                if (std.ascii.isUpper(upper_hex) or std.ascii.isUpper(lower_hex)) continue;
-
+            if (isValidChar(percent_encoded_char)) {
+                // a character has been unnecessarily escaped
+                result.appendSliceAssumeCapacity(percent_encoded[start..percent]);
+                result.appendAssumeCapacity(percent_encoded_char);
+                start = percent + 3;
+            } else if (std.ascii.isLower(upper_hex) or std.ascii.isLower(lower_hex)) {
                 // convert percent encoded character to upper case
                 result.appendSliceAssumeCapacity(percent_encoded[start..percent]);
                 result.appendAssumeCapacity('%');
                 result.appendAssumeCapacity(std.ascii.toUpper(upper_hex));
                 result.appendAssumeCapacity(std.ascii.toUpper(lower_hex));
+                start = percent + 3;
             } else {
-                // a character has been unnecessarily escaped
-                result.appendSliceAssumeCapacity(percent_encoded[start..percent]);
-                result.appendAssumeCapacity(percent_encoded_char);
+                // skip properly percent encoded character
             }
-
-            start = percent + 3;
             index = percent + 3;
         }
         result.appendSliceAssumeCapacity(percent_encoded[start..]);
diff --git a/src/analysis.zig b/src/analysis.zig
index cb9708c7e..e0302b501 100644
--- a/src/analysis.zig
+++ b/src/analysis.zig
@@ -692,91 +692,86 @@ pub fn isSnakeCase(name: []const u8) bool {
 /// const decl = @import("decl-file.zig").decl;
 /// const other = decl.middle.other;
 ///```
-pub fn resolveVarDeclAlias(analyser: *Analyser, options: ResolveOptions) Error!?DeclWithHandle {
+pub fn resolveVarDeclAlias(analyser: *Analyser, decl: DeclWithHandle) Error!?DeclWithHandle {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
+    const initial_node = switch (decl.decl) {
+        .ast_node => |node| node,
+        else => return null,
+    };
+
     var node_trail: NodeSet = .empty;
     defer node_trail.deinit(analyser.gpa);
-    return try analyser.resolveVarDeclAliasInternal(options, &node_trail);
-}
 
-fn resolveVarDeclAliasInternal(analyser: *Analyser, options: ResolveOptions, node_trail: *NodeSet) Error!?DeclWithHandle {
-    const node_handle = options.node_handle;
-    const node_with_uri: NodeWithUri = .{
-        .node = node_handle.node,
-        .uri = node_handle.handle.uri,
+    var current: ResolveOptions = .{
+        .node_handle = .of(initial_node, decl.handle),
+        .container_type = 
decl.container_type, }; + var result: ?DeclWithHandle = null; + while (true) { + const node = current.node_handle.node; + const handle = current.node_handle.handle; + const tree = &handle.tree; + + const resolved: DeclWithHandle = switch (tree.nodeTag(node)) { + .identifier => blk: { + const name_token = ast.identifierTokenFromIdentifierNode(tree, node) orelse break :blk null; + const name = offsets.identifierTokenToNameSlice(tree, name_token); + if (current.container_type) |ty| { + break :blk try ty.lookupSymbol(analyser, name); + } + break :blk try analyser.lookupSymbolGlobal( + handle, + name, + tree.tokenStart(name_token), + ); + }, + .field_access => blk: { + const lhs, const field_name = tree.nodeData(node).node_and_token; + const resolved = (try analyser.resolveTypeOfNode(.{ + .node_handle = .of(lhs, handle), + .container_type = current.container_type, + })) orelse break :blk null; + if (!resolved.is_type_val) + break :blk null; + + const symbol_name = offsets.identifierTokenToNameSlice(tree, field_name); + + break :blk try resolved.lookupSymbol(analyser, symbol_name); + }, + .global_var_decl, + .local_var_decl, + .aligned_var_decl, + .simple_var_decl, + => { + const var_decl = tree.fullVarDecl(node).?; - const gop = try node_trail.getOrPut(analyser.gpa, node_with_uri); - if (gop.found_existing) return null; - - const handle = node_handle.handle; - const tree = &handle.tree; - - const resolved = switch (tree.nodeTag(node_handle.node)) { - .identifier => blk: { - const name_token = ast.identifierTokenFromIdentifierNode(tree, node_handle.node) orelse break :blk null; - const name = offsets.identifierTokenToNameSlice(tree, name_token); - if (options.container_type) |ty| { - break :blk try ty.lookupSymbol(analyser, name); - } - break :blk try analyser.lookupSymbolGlobal( - handle, - name, - tree.tokenStart(name_token), - ); - }, - .field_access => blk: { - const lhs, const field_name = tree.nodeData(node_handle.node).node_and_token; - const resolved = (try analyser.resolveTypeOfNode(.{ - .node_handle = .of(lhs, handle), - .container_type = options.container_type, - })) orelse return null; - if (!resolved.is_type_val) - return null; - - const symbol_name = offsets.identifierTokenToNameSlice(tree, field_name); + const base_exp = var_decl.ast.init_node.unwrap() orelse return result; + if (tree.tokenTag(var_decl.ast.mut_token) != .keyword_const) return result; - break :blk try resolved.lookupSymbol(analyser, symbol_name); - }, - .global_var_decl, - .local_var_decl, - .aligned_var_decl, - .simple_var_decl, - => { - const var_decl = tree.fullVarDecl(node_handle.node).?; + const gop = try node_trail.getOrPut(analyser.gpa, .{ .node = base_exp, .uri = handle.uri }); + if (gop.found_existing) return null; - const base_exp = var_decl.ast.init_node.unwrap() orelse return null; - if (tree.tokenTag(var_decl.ast.mut_token) != .keyword_const) return null; - - return try analyser.resolveVarDeclAliasInternal(.{ - .node_handle = .of(base_exp, handle), - .container_type = options.container_type, - }, node_trail); - }, - else => return null, - } orelse return null; + current.node_handle.node = base_exp; + continue; + }, + else => null, + } orelse return result; - const resolved_node = switch (resolved.decl) { - .ast_node => |node| node, - else => return resolved, - }; + const resolved_node = switch (resolved.decl) { + .ast_node => |resolved_node| resolved_node, + else => return resolved, + }; - if (node_trail.contains(.{ - .node = resolved_node, - .uri = resolved.handle.uri, - })) { - return null; - } + 
const gop = try node_trail.getOrPut(analyser.gpa, .{ .node = resolved_node, .uri = resolved.handle.uri }); + if (gop.found_existing) return null; - if (try analyser.resolveVarDeclAliasInternal(.{ - .node_handle = .of(resolved_node, resolved.handle), - .container_type = options.container_type, - }, node_trail)) |result| { - return result; - } else { - return resolved; + current = .{ + .node_handle = .of(resolved_node, resolved.handle), + .container_type = resolved.container_type, + }; + result = resolved; } } @@ -1442,7 +1437,11 @@ pub fn resolvePrimitive(analyser: *Analyser, identifier_name: []const u8) error{ fn resolveStringLiteral(analyser: *Analyser, options: ResolveOptions) Error!?[]const u8 { var node_with_handle = options.node_handle; - if (try analyser.resolveVarDeclAlias(options)) |decl_with_handle| { + if (try analyser.resolveVarDeclAlias(.{ + .decl = .{ .ast_node = options.node_handle.node }, + .handle = options.node_handle.handle, + .container_type = options.container_type, + })) |decl_with_handle| { if (decl_with_handle.decl == .ast_node) { node_with_handle = .{ .node = decl_with_handle.decl.ast_node, @@ -1666,21 +1665,16 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E } const refs = try references.callsiteReferences( - analyser.arena, analyser, .{ .decl = func_decl, .handle = decl_handle.handle, .container_type = decl_handle.container_type }, false, - false, ); - // TODO: Set `workspace` to true; current problems - // - we gather dependencies, not dependents - var possible: std.ArrayList(Type.TypeWithDescriptor) = .empty; for (refs.items) |ref| { var call_buf: [1]Ast.Node.Index = undefined; - const call = tree.fullCall(&call_buf, ref.call_node).?; + const call = tree.fullCall(&call_buf, ref.node).?; const real_param_idx = if (func_params_len != 0 and pay.param_index != 0 and call.ast.params.len == func_params_len - 1) pay.param_index - 1 @@ -1695,15 +1689,13 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E defer analyser.collect_callsite_references = old_collect_callsite_references; analyser.collect_callsite_references = false; - const handle = try analyser.store.getOrLoadHandle(ref.uri) orelse continue; - break :resolve_ty try analyser.resolveTypeOfNode(.of( // TODO?: this is a """heuristic based approach""" // perhaps it would be better to use proper self detection // maybe it'd be a perf issue and this is fine? // you figure it out future contributor <3 call.ast.params[real_param_idx], - handle, + ref.handle, )) orelse continue; }; @@ -1713,7 +1705,7 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) E const loc = offsets.tokenToPosition(tree, tree.nodeMainToken(call.ast.params[real_param_idx]), .@"utf-8"); try possible.append(analyser.arena, .{ .type = ty, - .descriptor = try std.fmt.allocPrint(analyser.arena, "{s}:{d}:{d}", .{ ref.uri.raw, loc.line + 1, loc.character + 1 }), + .descriptor = try std.fmt.allocPrint(analyser.arena, "{s}:{d}:{d}", .{ ref.handle.uri.raw, loc.line + 1, loc.character + 1 }), }); } @@ -4488,58 +4480,6 @@ pub fn instanceStdBuiltinType(analyser: *Analyser, type_name: []const u8) Error! return try result_ty.instanceTypeVal(analyser); } -/// Collects all `@import`'s we can find into a slice of import paths (without quotes). 
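The token-based scan being removed here is superseded by the node-based `DocumentStore.Handle.collectImports` added above, which resolves file imports to `Uri`s eagerly instead of returning raw import strings. As a rough illustration of what the new pass records for a document (a hedged sketch; the file names are made up), given:

```zig
const std = @import("std"); // module import: not recorded in `file_imports`
const foo = @import("foo.zig"); // recorded, resolved against the importing file's Uri
const sub = @import("dir/sub.zig"); // recorded as <importer's directory>/dir/sub.zig
const c = @cImport(@cInclude("stdio.h")); // recorded in `cimports`, keyed by a SipHash of the translated C source
```

only the two relative `.zig` imports land in `file_imports`; `"std"` and other module names are still resolved lazily through `uriFromImportStr`.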
-pub fn collectImports(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!std.ArrayList([]const u8) { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - var imports: std.ArrayList([]const u8) = .empty; - errdefer imports.deinit(allocator); - - for (0..tree.tokens.len) |i| { - if (tree.tokenTag(@intCast(i)) != .builtin) - continue; - const name = offsets.identifierTokenToNameSlice(tree, @intCast(i)); - if (!std.mem.eql(u8, name, "import")) continue; - if (!std.mem.startsWith(std.zig.Token.Tag, tree.tokens.items(.tag)[i + 1 ..], &.{ .l_paren, .string_literal, .r_paren })) continue; - - const str = tree.tokenSlice(@intCast(i + 2)); - try imports.append(allocator, str[1 .. str.len - 1]); - } - - return imports; -} - -/// Collects all `@cImport` nodes -/// Caller owns returned memory. -pub fn collectCImportNodes(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}![]Ast.Node.Index { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - var import_nodes: std.ArrayList(Ast.Node.Index) = .empty; - errdefer import_nodes.deinit(allocator); - - const node_tags = tree.nodes.items(.tag); - for (node_tags, 0..) |tag, i| { - const node: Ast.Node.Index = @enumFromInt(i); - - switch (tag) { - .builtin_call, - .builtin_call_comma, - .builtin_call_two, - .builtin_call_two_comma, - => {}, - else => continue, - } - - if (!std.mem.eql(u8, Ast.tokenSlice(tree.*, tree.nodeMainToken(node)), "@cImport")) continue; - - try import_nodes.append(allocator, node); - } - - return import_nodes.toOwnedSlice(allocator); -} - pub const NodeWithUri = struct { node: Ast.Node.Index, uri: Uri, @@ -5271,16 +5211,8 @@ pub const DeclWithHandle = struct { pub fn definitionToken(self: DeclWithHandle, analyser: *Analyser, resolve_alias: bool) Error!TokenWithHandle { if (resolve_alias) { - switch (self.decl) { - .ast_node => |node| { - if (try analyser.resolveVarDeclAlias(.{ - .node_handle = .of(node, self.handle), - .container_type = self.container_type, - })) |result| { - return result.definitionToken(analyser, resolve_alias); - } - }, - else => {}, + if (try analyser.resolveVarDeclAlias(self)) |result| { + return result.definitionToken(analyser, resolve_alias); } if (try self.resolveType(analyser)) |resolved_type| { if (resolved_type.is_type_val) { diff --git a/src/features/completions.zig b/src/features/completions.zig index 98ec78b43..2eadc383b 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -722,25 +722,13 @@ fn completeDot(builder: *Builder, loc: offsets.Loc) Analyser.Error!void { const dot_token_index = offsets.sourceIndexToTokenIndex(tree, loc.start).pickPreferred(&.{.period}, tree) orelse return; if (dot_token_index < 2) return; - blk: { - const nodes = try ast.nodesOverlappingIndexIncludingParseErrors(builder.arena, tree, loc.start); - const dot_context = getEnumLiteralContext(tree, dot_token_index, nodes) orelse break :blk; - const used_members_set = try collectUsedMembersSet(builder, dot_context.likely, dot_token_index); - const containers = try collectContainerNodes(builder, builder.orig_handle, dot_context); - for (containers) |container| { - try collectContainerFields(builder, dot_context.likely, container, used_members_set); - } + const nodes = try ast.nodesOverlappingIndexIncludingParseErrors(builder.arena, tree, loc.start); + const dot_context = getEnumLiteralContext(tree, dot_token_index, nodes) orelse return; + const used_members_set = try collectUsedMembersSet(builder, dot_context.likely, dot_token_index); + const 
containers = try collectContainerNodes(builder, builder.orig_handle, dot_context); + for (containers) |container| { + try collectContainerFields(builder, dot_context.likely, container, used_members_set); } - - if (builder.completions.items.len != 0) return; - - // Prevent compl for float numbers, eg `1.` - // Ideally this would also `or token_tags[dot_token_index - 1] != .equal`, - // which would mean the only possibility left would be `var enum_val = .`. - if (tree.tokenTag(dot_token_index - 1) == .number_literal or tree.tokenTag(dot_token_index - 1) != .equal) return; - - // `var enum_val = .` or the get*Context logic failed because of syntax errors (parser didn't create the necessary node(s)) - try globalSetCompletions(builder, .enum_set); } /// Asserts that `pos_context` is one of the following: @@ -964,7 +952,6 @@ pub fn completionAtIndex( .builtin => try completeBuiltin(&builder), .var_access, .empty => try completeGlobal(&builder), .field_access => |loc| try completeFieldAccess(&builder, loc), - .error_access => try globalSetCompletions(&builder, .error_set), .enum_literal => |loc| try completeDot(&builder, loc), .label_access, .label_decl => try completeLabel(&builder), .import_string_literal, @@ -1012,102 +999,6 @@ pub fn completionAtIndex( return .{ .isIncomplete = false, .items = completions }; } -// <---------------------------------------------------------------------------> -// global error set / enum field set -// <---------------------------------------------------------------------------> - -const CompletionSet = std.ArrayHashMapUnmanaged(types.completion.Item, void, CompletionContext, false); - -const CompletionContext = struct { - pub fn hash(self: @This(), item: types.completion.Item) u32 { - _ = self; - return std.array_hash_map.hashString(item.label); - } - - pub fn eql(self: @This(), a: types.completion.Item, b: types.completion.Item, b_index: usize) bool { - _ = self; - _ = b_index; - return std.mem.eql(u8, a.label, b.label); - } -}; - -const CompletionNameAdapter = struct { - pub fn hash(ctx: @This(), name: []const u8) u32 { - _ = ctx; - return std.array_hash_map.hashString(name); - } - - pub fn eql(ctx: @This(), a: []const u8, b: types.completion.Item, b_map_index: usize) bool { - _ = ctx; - _ = b_map_index; - return std.mem.eql(u8, a, b.label); - } -}; - -/// Every `DocumentScope` store a set of all error names and a set of all enum field names. -/// This function collects all of these sets from all dependencies and returns them as completions. 
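With this fallback gone, enum-literal completion after a `.` relies entirely on `collectContainerNodes` resolving the expected type, instead of offering a project-wide union of enum members and error names. A hypothetical snippet showing the behavioral difference:

```zig
const E1 = enum { foo, bar };
const E2 = enum { baz, qux };

// While typing `const value: E1 = .`, the removed fallback could also
// suggest baz/qux from E2 via the global enum set whenever type
// resolution failed; dot completion now only offers members of the
// type that actually resolves here, i.e. foo and bar.
const value: E1 = .foo;
```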
-fn globalSetCompletions(builder: *Builder, kind: enum { error_set, enum_set }) Analyser.Error!void { - const tracy_zone = tracy.trace(@src()); - defer tracy_zone.end(); - - const store = &builder.server.document_store; - - var dependencies: std.ArrayList(Uri) = .empty; - try dependencies.append(builder.arena, builder.orig_handle.uri); - try store.collectDependencies(builder.arena, builder.orig_handle, &dependencies); - - // TODO Better solution for deciding what tags to include - var result_set: CompletionSet = .empty; - - for (dependencies.items) |uri| { - // not every dependency is loaded which results in incomplete completion - const dependency_handle = store.getHandle(uri) orelse continue; - const document_scope = try dependency_handle.getDocumentScope(); - const curr_set: DocumentScope.IdentifierSet = switch (kind) { - .error_set => @field(document_scope, "global_error_set"), - .enum_set => @field(document_scope, "global_enum_set"), - }; - try result_set.ensureUnusedCapacity(builder.arena, curr_set.count()); - for (curr_set.keys()) |identifier_token| { - const name = offsets.identifierTokenToNameSlice(&dependency_handle.tree, identifier_token); - - const gop = result_set.getOrPutAssumeCapacityAdapted( - name, - CompletionNameAdapter{}, - ); - - if (!gop.found_existing) { - gop.key_ptr.* = .{ - .label = name, - .detail = switch (kind) { - .error_set => try std.fmt.allocPrint(builder.arena, "error.{f}", .{std.zig.fmtId(name)}), - .enum_set => null, - }, - .kind = switch (kind) { - .error_set => .Constant, - .enum_set => .EnumMember, - }, - .documentation = null, // will be set below - }; - } - - if (gop.key_ptr.documentation == null) { - if (try Analyser.getDocCommentsBeforeToken(builder.arena, &dependency_handle.tree, identifier_token)) |documentation| { - gop.key_ptr.documentation = .{ - .markup_content = .{ - // TODO check if client supports markdown - .kind = .markdown, - .value = documentation, - }, - }; - } - } - } - } - - try builder.completions.appendSlice(builder.arena, result_set.keys()); -} - // <---------------------------------------------------------------------------> // completions/enum_literal.zig staging area // <---------------------------------------------------------------------------> diff --git a/src/features/hover.zig b/src/features/hover.zig index 0bf7a4477..e5c717b23 100644 --- a/src/features/hover.zig +++ b/src/features/hover.zig @@ -31,11 +31,7 @@ fn hoverSymbol( if (try decl_handle.docComments(arena)) |doc_string| { try doc_strings.append(arena, doc_string); } - if (decl_handle.decl != .ast_node) break; - decl_handle = try analyser.resolveVarDeclAlias(.{ - .node_handle = .of(decl_handle.decl.ast_node, decl_handle.handle), - .container_type = decl_handle.container_type, - }) orelse break; + decl_handle = try analyser.resolveVarDeclAlias(decl_handle) orelse break; maybe_resolved_type = maybe_resolved_type orelse try decl_handle.resolveType(analyser); } diff --git a/src/features/references.zig b/src/features/references.zig index 232df4011..af80c440e 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -15,21 +15,20 @@ const tracy = @import("tracy"); fn labelReferences( allocator: std.mem.Allocator, - decl: Analyser.DeclWithHandle, + handle: *DocumentStore.Handle, + decl: @FieldType(Analyser.Declaration, "label"), encoding: offsets.Encoding, include_decl: bool, ) error{OutOfMemory}!std.ArrayList(types.Location) { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - std.debug.assert(decl.decl == .label); // use 
`symbolReferences` instead - const handle = decl.handle; const tree = &handle.tree; // Find while / for / block from label -> iterate over children nodes, find break and continues, change their labels if they match. // This case can be implemented just by scanning tokens. - const first_tok = decl.decl.label.identifier; - const last_tok = ast.lastToken(tree, decl.decl.label.block); + const first_tok = decl.identifier; + const last_tok = ast.lastToken(tree, decl.block); var locations: std.ArrayList(types.Location) = .empty; errdefer locations.deinit(allocator); @@ -38,7 +37,7 @@ fn labelReferences( // The first token is always going to be the label try locations.append(allocator, .{ .uri = handle.uri.raw, - .range = offsets.tokenToRange(&handle.tree, first_tok, encoding), + .range = offsets.tokenToRange(tree, first_tok, encoding), }); } @@ -54,7 +53,7 @@ fn labelReferences( try locations.append(allocator, .{ .uri = handle.uri.raw, - .range = offsets.tokenToRange(&handle.tree, curr_tok + 2, encoding), + .range = offsets.tokenToRange(tree, curr_tok + 2, encoding), }); } @@ -62,59 +61,54 @@ fn labelReferences( } const Builder = struct { - allocator: std.mem.Allocator, locations: std.ArrayList(types.Location) = .empty, /// this is the declaration we are searching for - decl_handle: Analyser.DeclWithHandle, + target_symbol: Analyser.DeclWithHandle, /// the decl is local to a function, block, etc local_only_decl: bool, - /// Whether the `decl_handle` has been added - did_add_decl_handle: bool = false, + /// Whether the `target_symbol` has been added + did_add_target_symbol: bool = false, analyser: *Analyser, encoding: offsets.Encoding, - const Context = struct { - builder: *Builder, - handle: *DocumentStore.Handle, - }; - - fn deinit(self: *Builder) void { - self.locations.deinit(self.allocator); - } - fn add(self: *Builder, handle: *DocumentStore.Handle, token_index: Ast.TokenIndex) error{OutOfMemory}!void { - if (self.decl_handle.handle == handle and - self.decl_handle.nameToken() == token_index) + if (self.target_symbol.handle == handle and + self.target_symbol.nameToken() == token_index) { - if (self.did_add_decl_handle) return; - self.did_add_decl_handle = true; + if (self.did_add_target_symbol) return; + self.did_add_target_symbol = true; } - try self.locations.append(self.allocator, .{ + try self.locations.append(self.analyser.arena, .{ .uri = handle.uri.raw, .range = offsets.tokenToRange(&handle.tree, token_index, self.encoding), }); } fn collectReferences(self: *Builder, handle: *DocumentStore.Handle, node: Ast.Node.Index) Analyser.Error!void { - const context = Context{ - .builder = self, - .handle = handle, - }; - try referenceNode(&context, &handle.tree, node); - var walker: ast.Walker = try .init(self.allocator, &handle.tree, node); - defer walker.deinit(self.allocator); - while (try walker.nextIgnoreClose(self.allocator, &handle.tree)) |child| try referenceNode(&context, &handle.tree, child); + const tracy_zone = tracy.trace(@src()); + defer tracy_zone.end(); + + const arena = self.analyser.arena; + try referenceNode(self, handle, node); + var walker: ast.Walker = try .init(arena, &handle.tree, node); + defer walker.deinit(arena); + while (try walker.nextIgnoreClose(arena, &handle.tree)) |child| { + try referenceNode(self, handle, child); + } } - fn referenceNode(self: *const Context, tree: *const Ast, node: Ast.Node.Index) Analyser.Error!void { - const builder = self.builder; - const handle = self.handle; - const decl_name = offsets.identifierTokenToNameSlice( - 
&builder.decl_handle.handle.tree, - builder.decl_handle.nameToken(), + fn referenceNode( + builder: *Builder, + handle: *DocumentStore.Handle, + node: Ast.Node.Index, + ) Analyser.Error!void { + const tree = &handle.tree; + const target_symbol_name = offsets.identifierTokenToNameSlice( + &builder.target_symbol.handle.tree, + builder.target_symbol.nameToken(), ); - switch (tree.nodeTag(node)) { + var candidate: Analyser.DeclWithHandle, const name_token = candidate: switch (tree.nodeTag(node)) { .identifier, .test_decl, => |tag| { @@ -128,32 +122,26 @@ const Builder = struct { else => unreachable, }; const name = offsets.identifierTokenToNameSlice(tree, name_token); - if (!std.mem.eql(u8, name, decl_name)) return; + if (!std.mem.eql(u8, name, target_symbol_name)) return; - const child = try builder.analyser.lookupSymbolGlobal( + const candidate = try builder.analyser.lookupSymbolGlobal( handle, name, tree.tokenStart(name_token), ) orelse return; - - if (builder.decl_handle.eql(child)) { - try builder.add(handle, name_token); - } + break :candidate .{ candidate, name_token }; }, .field_access => { if (builder.local_only_decl) return; const lhs_node, const field_token = tree.nodeData(node).node_and_token; const name = offsets.identifierTokenToNameSlice(tree, field_token); - if (!std.mem.eql(u8, name, decl_name)) return; + if (!std.mem.eql(u8, name, target_symbol_name)) return; const lhs = try builder.analyser.resolveTypeOfNode(.of(lhs_node, handle)) orelse return; const deref_lhs = try builder.analyser.resolveDerefType(lhs) orelse lhs; - const child = try deref_lhs.lookupSymbol(builder.analyser, name) orelse return; - - if (builder.decl_handle.eql(child)) { - try builder.add(handle, field_token); - } + const candidate = try deref_lhs.lookupSymbol(builder.analyser, name) orelse return; + break :candidate .{ candidate, field_token }; }, .struct_init_one, .struct_init_one_comma, @@ -170,7 +158,7 @@ const Builder = struct { for (struct_init.ast.fields) |value_node| { // the node of `value` in `.name = value` const name_token = tree.firstToken(value_node) - 2; // math our way two token indexes back to get the `name` const name = offsets.identifierTokenToNameSlice(tree, name_token); - if (!std.mem.eql(u8, name, decl_name)) continue; + if (!std.mem.eql(u8, name, target_symbol_name)) continue; const nodes = switch (tree.nodeTag(node)) { .struct_init_dot, @@ -178,7 +166,7 @@ const Builder = struct { .struct_init_dot_two, .struct_init_dot_two_comma, => try ast.nodesOverlappingIndex( - builder.allocator, + builder.analyser.arena, tree, tree.tokenStart(name_token), ), @@ -191,104 +179,81 @@ const Builder = struct { else => unreachable, }; - const lookup = try builder.analyser.lookupSymbolFieldInit( + const candidate = try builder.analyser.lookupSymbolFieldInit( handle, name, nodes[0], nodes[1..], ) orelse return; - if (builder.decl_handle.eql(lookup)) { - try builder.add(handle, name_token); - } // if we get here then we know that the name of the field matched // and duplicate fields are invalid so just return early - return; + break :candidate .{ candidate, name_token }; } + return; }, .enum_literal => { if (builder.local_only_decl) return; const name_token = tree.nodeMainToken(node); const name = offsets.identifierTokenToNameSlice(&handle.tree, name_token); - if (!std.mem.eql(u8, name, decl_name)) return; - const lookup = try builder.analyser.getSymbolEnumLiteral(handle, tree.tokenStart(name_token), name) orelse return; - - if (builder.decl_handle.eql(lookup)) { - try builder.add(handle, name_token); - 
}
+                if (!std.mem.eql(u8, name, target_symbol_name)) return;
+                const candidate = try builder.analyser.getSymbolEnumLiteral(handle, tree.tokenStart(name_token), name) orelse return;
+                break :candidate .{ candidate, name_token };
             },
-            else => {},
-        }
-    }
-};
+            .global_var_decl,
+            .local_var_decl,
+            .aligned_var_decl,
+            .simple_var_decl,
+            => {
+                if (builder.local_only_decl) return;
+                const var_decl = tree.fullVarDecl(node).?;
 
-fn gatherReferences(
-    allocator: std.mem.Allocator,
-    analyser: *Analyser,
-    curr_handle: *DocumentStore.Handle,
-    skip_std_references: bool,
-    include_decl: bool,
-    builder: anytype,
-    handle_behavior: enum { get, get_or_load },
-) Analyser.Error!void {
-    var dependencies: Uri.ArrayHashMap(void) = .empty;
-    defer {
-        for (dependencies.keys()) |uri| {
-            uri.deinit(allocator);
-        }
-        dependencies.deinit(allocator);
-    }
+                const alias_name_token = var_decl.ast.mut_token + 1;
+                const alias_name = offsets.identifierTokenToNameSlice(&handle.tree, alias_name_token);
+                if (!std.mem.eql(u8, alias_name, target_symbol_name)) return;
 
-    var it: DocumentStore.HandleIterator = .{ .store = analyser.store };
-    while (it.next()) |handle| {
-        if (skip_std_references and DocumentStore.isInStd(handle.uri)) {
-            if (!include_decl or !handle.uri.eql(curr_handle.uri))
-                continue;
-        }
+                const init_node = var_decl.ast.init_node.unwrap() orelse return;
+                if (tree.tokenTag(var_decl.ast.mut_token) != .keyword_const) return;
 
-        var handle_dependencies: std.ArrayList(Uri) = .empty;
-        defer handle_dependencies.deinit(allocator);
-        try analyser.store.collectDependencies(allocator, handle, &handle_dependencies);
+        if (tree.nodeTag(init_node) != .field_access) return;
+                const lhs_node, const field_token = tree.nodeData(init_node).node_and_token;
+                const field_name = offsets.identifierTokenToNameSlice(tree, field_token);
+                if (!std.mem.eql(u8, field_name, target_symbol_name)) return;
 
-        try dependencies.ensureUnusedCapacity(allocator, handle_dependencies.items.len);
-        for (handle_dependencies.items) |uri| {
-            const gop = dependencies.getOrPutAssumeCapacity(uri);
-            if (gop.found_existing) {
-                uri.deinit(allocator);
-            }
-        }
-    }
+                const lhs = try builder.analyser.resolveTypeOfNode(.of(lhs_node, handle)) orelse return;
+                const deref_lhs = try builder.analyser.resolveDerefType(lhs) orelse lhs;
 
-    for (dependencies.keys()) |uri| {
-        if (uri.eql(curr_handle.uri)) continue;
-        const handle = switch (handle_behavior) {
-            .get => analyser.store.getHandle(uri),
-            .get_or_load => try analyser.store.getOrLoadHandle(uri),
-        } orelse continue;
+                const candidate = try deref_lhs.lookupSymbol(builder.analyser, field_name) orelse return;
+                break :candidate .{ candidate, alias_name_token };
+            },
+            else => return,
+        };
+
+        candidate = try builder.analyser.resolveVarDeclAlias(candidate) orelse candidate;
 
-        try builder.collectReferences(handle, .root);
+        if (builder.target_symbol.eql(candidate)) {
+            try builder.add(handle, name_token);
+        }
     }
-}
+};
 
 fn symbolReferences(
-    allocator: std.mem.Allocator,
     analyser: *Analyser,
     request: GeneralReferencesRequest,
-    decl_handle: Analyser.DeclWithHandle,
+    target_symbol: Analyser.DeclWithHandle,
     encoding: offsets.Encoding,
-    /// add `decl_handle` as a references
+    /// add `target_symbol` as a reference
     include_decl: bool,
-    /// exclude references from the std library
-    skip_std_references: bool,
-    curr_handle: *DocumentStore.Handle,
+    /// The file on which the request was initiated.
+    current_handle: *DocumentStore.Handle,
 ) Analyser.Error!std.ArrayList(types.Location) {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    std.debug.assert(decl_handle.decl != .label); // use `labelReferences` instead
+    std.debug.assert(target_symbol.decl != .label); // use `labelReferences` instead
 
-    const doc_scope = try decl_handle.handle.getDocumentScope();
-    const source_index = decl_handle.handle.tree.tokenStart(decl_handle.nameToken());
+    const doc_scope = try target_symbol.handle.getDocumentScope();
+    const source_index = target_symbol.handle.tree.tokenStart(target_symbol.nameToken());
     const scope_index = Analyser.innermostScopeAtIndexWithTag(doc_scope, source_index, .init(.{
         .block = true,
         .container = true,
@@ -299,7 +264,7 @@ fn symbolReferences(
 
     // If `local_node != null`, references to the declaration can only be
     // found inside of the given ast node.
-    const local_node: ?Ast.Node.Index = switch (decl_handle.decl) {
+    const local_node: ?Ast.Node.Index = switch (target_symbol.decl) {
         .ast_node => switch (doc_scope.getScopeTag(scope_index)) {
             .block => scope_node,
            .container => null,
@@ -320,34 +285,104 @@
     };
 
     var builder: Builder = .{
-        .allocator = allocator,
         .analyser = analyser,
-        .decl_handle = decl_handle,
+        .target_symbol = target_symbol,
         .local_only_decl = local_node != null,
         .encoding = encoding,
     };
-    errdefer builder.deinit();
 
-    if (include_decl) try builder.add(decl_handle.handle, decl_handle.nameToken());
+    blk: {
+        if (!include_decl) break :blk;
+        if (request == .highlight and !target_symbol.handle.uri.eql(current_handle.uri)) break :blk;
+        try builder.add(target_symbol.handle, target_symbol.nameToken());
+    }
 
-    try builder.collectReferences(curr_handle, local_node orelse .root);
+    try builder.collectReferences(current_handle, local_node orelse .root);
 
-    const workspace = local_node == null and request != .highlight and decl_handle.isPublic();
+    const workspace = local_node == null and request != .highlight and target_symbol.isPublic();
 
     if (workspace) {
-        try gatherReferences(
-            allocator,
-            analyser,
-            curr_handle,
-            skip_std_references,
-            include_decl,
-            &builder,
-            .get,
+        const uris = try gatherWorkspaceReferenceCandidates(
+            analyser.store,
+            analyser.arena,
+            current_handle,
+            target_symbol.handle,
         );
+        for (uris.keys()) |uri| {
+            if (uri.eql(current_handle.uri)) continue;
+            const dependency_handle = try analyser.store.getOrLoadHandle(uri) orelse continue;
+            try builder.collectReferences(dependency_handle, .root);
+        }
     }
 
     return builder.locations;
 }
 
+fn gatherWorkspaceReferenceCandidates(
+    store: *DocumentStore,
+    arena: std.mem.Allocator,
+    /// The file on which the request was initiated.
+    root_handle: *DocumentStore.Handle,
+    /// The file which contains the symbol that is being searched for.
+    target_handle: *DocumentStore.Handle,
+) Analyser.Error!Uri.ArrayHashMap(void) {
+    if (DocumentStore.supports_build_system) no_build_file: {
+        const resolved = switch (try root_handle.getAssociatedBuildFile(store)) {
+            .unresolved => return .empty, // TODO: this should await build file resolution instead
+            .none => break :no_build_file,
+            .resolved => |resolved| resolved,
+        };
+
+        const root_module_root_uri: Uri = try .fromPath(arena, resolved.root_source_file);
+
+        var found_uris: Uri.ArrayHashMap(void) = .empty;
+        try found_uris.put(arena, root_module_root_uri, {});
+
+        if (!root_handle.uri.eql(target_handle.uri)) {
+            switch (try target_handle.getAssociatedBuildFile(store)) {
+                .unresolved, .none => {},
+                .resolved => |resolved2| {
+                    const target_module_root_uri: Uri = try .fromPath(arena, resolved2.root_source_file);
+                    // Also search the module in which the symbol is defined.
+                    try found_uris.put(arena, target_module_root_uri, {});
+                },
+            }
+        }
+
+        var i: usize = 0;
+        while (i < found_uris.count()) : (i += 1) {
+            const uri = found_uris.keys()[i];
+            const handle = try store.getOrLoadHandle(uri) orelse continue;
+
+            try found_uris.ensureUnusedCapacity(arena, handle.file_imports.len);
+            for (handle.file_imports) |import_uri| found_uris.putAssumeCapacity(import_uri, {});
+        }
+        return found_uris;
+    }
+
+    var per_file_dependants: Uri.ArrayHashMap(std.ArrayList(Uri)) = .empty;
+
+    var it: DocumentStore.HandleIterator = .{ .store = store };
+    while (it.next()) |handle| {
+        for (handle.file_imports) |import_uri| {
+            const gop = try per_file_dependants.getOrPutValue(arena, import_uri, .empty);
+            try gop.value_ptr.append(arena, handle.uri);
+        }
+    }
+
+    var found_uris: Uri.ArrayHashMap(void) = .empty;
+    try found_uris.put(arena, target_handle.uri, {});
+
+    var i: usize = 0;
+    while (i < found_uris.count()) : (i += 1) {
+        const uri = found_uris.keys()[i];
+        const dependants: std.ArrayList(Uri) = per_file_dependants.get(uri) orelse .empty;
+        try found_uris.ensureUnusedCapacity(arena, dependants.items.len);
+        for (dependants.items) |dependant_uri| found_uris.putAssumeCapacity(dependant_uri, {});
+    }
+
+    return found_uris;
+}
+
 fn controlFlowReferences(
     allocator: std.mem.Allocator,
     token_handle: Analyser.TokenWithHandle,
@@ -499,48 +534,37 @@ fn controlFlowReferences(
     return locations;
 }

-pub const Callsite = struct {
-    uri: Uri,
-    call_node: Ast.Node.Index,
-};
-
 const CallBuilder = struct {
-    allocator: std.mem.Allocator,
-    callsites: std.ArrayList(Callsite) = .empty,
+    callsites: std.ArrayList(Analyser.NodeWithHandle) = .empty,
     /// this is the declaration we are searching for
-    decl_handle: Analyser.DeclWithHandle,
+    target_decl: Analyser.DeclWithHandle,
     analyser: *Analyser,

-    const Context = struct {
-        builder: *CallBuilder,
-        handle: *DocumentStore.Handle,
-    };
-
-    fn deinit(self: *CallBuilder) void {
-        self.callsites.deinit(self.allocator);
-    }
-
     fn add(self: *CallBuilder, handle: *DocumentStore.Handle, call_node: Ast.Node.Index) error{OutOfMemory}!void {
-        try self.callsites.append(self.allocator, .{
-            .uri = handle.uri,
-            .call_node = call_node,
+        try self.callsites.append(self.analyser.arena, .{
+            .handle = handle,
+            .node = call_node,
         });
     }

     fn collectReferences(self: *CallBuilder, handle: *DocumentStore.Handle, node: Ast.Node.Index) Analyser.Error!void {
-        const context = Context{
-            .builder = self,
-            .handle = handle,
-        };
-        var walker: ast.Walker = try .init(self.allocator, &handle.tree, node);
-        defer walker.deinit(self.allocator);
-        while (try walker.nextIgnoreClose(self.allocator, &handle.tree)) |child| try referenceNode(&context, &handle.tree, child);
+        const tracy_zone = tracy.trace(@src());
+        defer tracy_zone.end();
+
+        const arena = self.analyser.arena;
+        var walker: ast.Walker = try .init(arena, &handle.tree, node);
+        defer walker.deinit(arena);
+        while (try walker.nextIgnoreClose(arena, &handle.tree)) |child| {
+            try referenceNode(self, handle, child);
+        }
     }

-    fn referenceNode(self: *const Context, tree: *const Ast, node: Ast.Node.Index) Analyser.Error!void {
-        const builder = self.builder;
-        const handle = self.handle;
-
+    fn referenceNode(
+        builder: *CallBuilder,
+        handle: *DocumentStore.Handle,
+        node: Ast.Node.Index,
+    ) Analyser.Error!void {
+        const tree = &handle.tree;
         switch (tree.nodeTag(node)) {
             .call,
             .call_comma,
@@ -562,7 +586,7 @@ const CallBuilder = struct {
                 tree.tokenStart(identifier_token),
             )) orelse return;

-            if (builder.decl_handle.eql(child)) {
+            if (builder.target_decl.eql(child)) {
                 try builder.add(handle, node);
             }
         },
@@ -574,7 +598,7 @@ const CallBuilder = struct {
             const symbol = offsets.tokenToSlice(tree, field_name);
             const child = (try deref_lhs.lookupSymbol(builder.analyser, symbol)) orelse return;

-            if (builder.decl_handle.eql(child)) {
+            if (builder.target_decl.eql(child)) {
                 try builder.add(handle, node);
             }
         },
@@ -587,33 +611,36 @@
 };

 pub fn callsiteReferences(
-    allocator: std.mem.Allocator,
     analyser: *Analyser,
     decl_handle: Analyser.DeclWithHandle,
-    /// exclude references from the std library
-    skip_std_references: bool,
     /// search other files for references
     workspace: bool,
-) Analyser.Error!std.ArrayList(Callsite) {
+) Analyser.Error!std.ArrayList(Analyser.NodeWithHandle) {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();

     std.debug.assert(decl_handle.decl == .ast_node);

-    var builder = CallBuilder{
-        .allocator = allocator,
+    var builder: CallBuilder = .{
         .analyser = analyser,
-        .decl_handle = decl_handle,
+        .target_decl = decl_handle,
     };
-    errdefer builder.deinit();
-
-    const curr_handle = decl_handle.handle;
-    try builder.collectReferences(curr_handle, .root);
+    try builder.collectReferences(decl_handle.handle, .root);

-    if (!workspace) return builder.callsites;
-
-    try gatherReferences(allocator, analyser, curr_handle, skip_std_references, false, &builder, .get_or_load);
+    if (workspace) {
+        var uris = try gatherWorkspaceReferenceCandidates(
+            analyser.store,
+            analyser.arena,
+            decl_handle.handle,
+            decl_handle.handle,
+        );
+        for (uris.keys()) |uri| {
+            if (uri.eql(decl_handle.handle.uri)) continue;
+            const dependency_handle = try analyser.store.getOrLoadHandle(uri) orelse continue;
+            try builder.collectReferences(dependency_handle, .root);
+        }
+    }

     return builder.callsites;
 }
@@ -682,7 +709,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen
     const name_loc = offsets.identifierLocFromIndex(&handle.tree, source_index) orelse return null;
     const name = offsets.locToSlice(handle.tree.source, name_loc);

-    const decl = switch (pos_context) {
+    var target_decl = switch (pos_context) {
         .var_access, .test_doctest_name => try analyser.lookupSymbolGlobal(handle, name, source_index),
         .field_access => |loc| z: {
             const held_loc = offsets.locMerge(loc, name_loc);
@@ -699,16 +726,22 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen
         else => null,
     } orelse return null;

-    break :locs switch (decl.decl) {
-        .label => try labelReferences(arena, decl, server.offset_encoding, include_decl),
-        else => try symbolReferences(
+    target_decl = try analyser.resolveVarDeclAlias(target_decl) orelse target_decl;
+
+    break :locs switch (target_decl.decl) {
+        .label => |payload| try labelReferences(
             arena,
+            target_decl.handle,
+            payload,
+            server.offset_encoding,
+            include_decl,
+        ),
+        else => try symbolReferences(
             &analyser,
             request,
-            decl,
+            target_decl,
             server.offset_encoding,
             include_decl,
-            server.config_manager.config.skip_std_references,
             handle,
         ),
     };
@@ -742,11 +775,7 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen
         .highlight => {
            var highlights: std.ArrayList(types.DocumentHighlight) = try .initCapacity(arena, locations.items.len);
            for (locations.items) |loc| {
-                const loc_uri = Uri.parse(arena, loc.uri) catch |err| switch (err) {
-                    error.OutOfMemory => return error.OutOfMemory,
-                    else => return error.InvalidParams,
-                };
-                if (!loc_uri.eql(handle.uri)) continue;
+                std.debug.assert(std.mem.eql(u8, handle.uri.raw, loc.uri));
                 highlights.appendAssumeCapacity(.{
                     .range = loc.range,
                     .kind = .Text,
diff --git a/src/offsets.zig b/src/offsets.zig
index c9b54affa..8c3b3f976 100644
--- a/src/offsets.zig
+++ b/src/offsets.zig
@@ -397,7 +397,7 @@ pub fn tokenToLoc(tree: *const Ast, token_index: Ast.TokenIndex) Loc {
     const tag = tree.tokenTag(token_index);

     // Many tokens can be determined entirely by their tag.
-    if (tag == .identifier) {
+    if (tag == .identifier or tag == .builtin) {
         // fast path for identifiers
         return identifierIndexToLoc(tree.source, start, .full);
     } else if (tag.lexeme()) |lexeme| {
diff --git a/tests/context.zig b/tests/context.zig
index 53d406669..55c493460 100644
--- a/tests/context.zig
+++ b/tests/context.zig
@@ -82,20 +82,11 @@ pub const Context = struct {

     // helper
     pub fn addDocument(self: *Context, options: struct {
-        use_file_scheme: bool = false,
         source: []const u8,
         mode: std.zig.Ast.Mode = .zig,
     }) !zls.Uri {
-        const fmt = switch (builtin.os.tag) {
-            .windows => "file:///c:/Untitled-{d}.{t}",
-            else => "file:///Untitled-{d}.{t}",
-        };
-
         const arena = self.arena.allocator();
-        const path = if (options.use_file_scheme)
-            try std.fmt.allocPrint(arena, fmt, .{ self.file_id, options.mode })
-        else
-            try std.fmt.allocPrint(arena, "untitled:///Untitled-{d}.{t}", .{ self.file_id, options.mode });
+        const path = try std.fmt.allocPrint(arena, "untitled:///Untitled-{d}.{t}", .{ self.file_id, options.mode });
         const uri: zls.Uri = try .parse(arena, path);

         const params: types.TextDocument.DidOpenParams = .{
diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig
index 78d989e3d..c1cdea388 100644
--- a/tests/lsp_features/completion.zig
+++ b/tests/lsp_features/completion.zig
@@ -1742,44 +1742,6 @@ test "tagged union" {
     });
 }

-test "global enum set" {
-    try testCompletion(
-        \\const SomeError = error{ e };
-        \\const E1 = enum {
-        \\    foo,
-        \\    bar,
-        \\};
-        \\const E2 = enum {
-        \\    baz,
-        \\    ///hello
-        \\    qux,
-        \\};
-        \\const baz = .
-    , &.{
-        .{ .label = "foo", .kind = .EnumMember },
-        .{ .label = "bar", .kind = .EnumMember },
-        .{ .label = "baz", .kind = .EnumMember },
-        .{ .label = "qux", .kind = .EnumMember, .documentation = "hello" },
-    });
-    try testCompletion(
-        \\const SomeError = error{ e };
-        \\const Enum1 = enum {
-        \\    ///hello world
-        \\    foo,
-        \\    bar,
-        \\};
-        \\const Enum2 = enum {
-        \\    foo,
-        \\    ///hallo welt
-        \\    bar,
-        \\};
-        \\const baz = .
-    , &.{
-        .{ .label = "foo", .kind = .EnumMember, .documentation = "hello world" },
-        .{ .label = "bar", .kind = .EnumMember, .documentation = "hallo welt" },
-    });
-}
-
 test "switch cases" {
     // Because current logic is to list all enums if all else fails,
     // the following tests include an extra enum to ensure that we're not just 'getting lucky'
@@ -1991,71 +1953,7 @@ test "error set" {
     });
 }

-test "global error set" {
-    try testCompletion(
-        \\const SomeEnum = enum { e };
-        \\const Error1 = error {
-        \\    foo,
-        \\    bar,
-        \\};
-        \\const Error2 = error {
-        \\    baz,
-        \\    ///hello
-        \\    qux,
-        \\};
-        \\const baz = error.
-    , &.{
-        .{ .label = "foo", .kind = .Constant, .detail = "error.foo" },
-        .{ .label = "bar", .kind = .Constant, .detail = "error.bar" },
-        .{ .label = "baz", .kind = .Constant, .detail = "error.baz" },
-        .{ .label = "qux", .kind = .Constant, .detail = "error.qux", .documentation = "hello" },
-    });
-    try testCompletion(
-        \\const SomeEnum = enum { e };
-        \\const Error1 = error {
-        \\    ///hello world
-        \\    foo,
-        \\    bar,
-        \\};
-        \\const Error2 = error {
-        \\    foo,
-        \\    ///hallo welt
-        \\    bar,
-        \\};
-        \\const baz = error.
-    , &.{
-        .{ .label = "foo", .kind = .Constant, .detail = "error.foo", .documentation = "hello world" },
-        .{ .label = "bar", .kind = .Constant, .detail = "error.bar", .documentation = "hallo welt" },
-    });
-    try testCompletion(
-        \\const Error = error {
-        \\    ///hello world
-        \\    @"some name",
-        \\};
-        \\const baz = error.
-    , &.{
-        .{ .label = "some name", .kind = .Constant, .detail = "error.@\"some name\"", .documentation = "hello world" },
-    });
-}
-
 test "merged error sets" {
-    try testCompletion(
-        \\const FirstSet = error{
-        \\    X,
-        \\    Y,
-        \\};
-        \\const SecondSet = error{
-        \\    Foo,
-        \\    Bar,
-        \\} || FirstSet;
-        \\const e = error.
-    , &.{
-        .{ .label = "X", .kind = .Constant, .detail = "error.X" },
-        .{ .label = "Y", .kind = .Constant, .detail = "error.Y" },
-        .{ .label = "Foo", .kind = .Constant, .detail = "error.Foo" },
-        .{ .label = "Bar", .kind = .Constant, .detail = "error.Bar" },
-    });
-
     try testCompletion(
         \\const FirstSet = error{
         \\    x,
diff --git a/tests/lsp_features/references.zig b/tests/lsp_features/references.zig
index 5f6e904db..dfbc45e62 100644
--- a/tests/lsp_features/references.zig
+++ b/tests/lsp_features/references.zig
@@ -303,15 +303,51 @@ test "switch case capture - union tag" {

 test "cross-file reference" {
     try testMultiFileSymbolReferences(&.{
-        // TODO not putting a reference here is a hack to workaround cross-file references being broken https://github.com/zigtools/zls/issues/1071
-        // for now this only tests the ability to find references within a file to a decl from another file
-        \\pub const placeholder = struct {};
+        // Untitled-0.zig
+        \\pub const <0> = struct {};
     ,
+        // Untitled-1.zig
         \\const file = @import("Untitled-0.zig");
-        \\const first = file.<0>;
-        \\const second = file.<0>;
+        \\const <0> = file.<0>;
+        \\const renamed = file.<0>;
+        \\comptime {
+        \\    _ = <0>;
+        \\    _ = renamed;
+        \\}
+    ,
+    }, true);
+}
+
+test "cross-file - transitive import" {
+    try testMultiFileSymbolReferences(&.{
+        // Untitled-0.zig
+        \\pub const <0> = struct {};
+    ,
+        // Untitled-1.zig
+        \\pub const file = @import("Untitled-0.zig");
+    ,
+        // Untitled-2.zig
+        \\const file = @import("Untitled-1.zig").file;
+        \\const foo: file.<0> = undefined;
+    ,
+    }, true);
+}
+
+test "cross-file - alias" {
+    try testMultiFileSymbolReferences(&.{
+        // Untitled-0.zig
+        \\pub const <0> = struct {
+        \\    fn foo(_: <0>) void {}
+        \\    var bar: <0> = undefined;
+        \\};
+    ,
+        // Untitled-1.zig
+        \\const <0> = @import("Untitled-0.zig").<0>;
+        \\comptime {
+        \\    _ = <0>;
+        \\}
     ,
-    }, false);
+    }, true);
 }

 fn testSymbolReferences(source: []const u8) !void {
@@ -345,10 +381,7 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool
         var phr = try helper.collectReplacePlaceholders(allocator, source, placeholder_name);
         defer phr.deinit(allocator);

-        const uri = try ctx.addDocument(.{
-            .use_file_scheme = sources.len > 1, // use 'file:/' scheme when testing with multiple files so that they can import each other
-            .source = phr.new_source,
-        });
+        const uri = try ctx.addDocument(.{ .source = phr.new_source });

         files.putAssumeCapacityNoClobber(uri.raw, .{ .source = source, .new_source = phr.new_source });
         phr.new_source = ""; // `files` takes ownership of `new_source` from `phr`
@@ -395,11 +428,12 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool
     defer visited.deinit(allocator);

     for (actual_locations) |response_location| {
-        const actual_loc = offsets.rangeToLoc(file.new_source, response_location.range, ctx.server.offset_encoding);
         const actual_file_index = files.getIndex(response_location.uri) orelse {
             std.debug.print("received location to unknown file `{s}` as the result\n", .{response_location.uri});
             return error.InvalidReference;
         };
+        const actual_file_source = files.values()[actual_file_index].new_source;
+        const actual_loc = offsets.rangeToLoc(actual_file_source, response_location.range, ctx.server.offset_encoding);

         const index = found_index: {
             for (locs.items(.new), locs.items(.file_index), 0..) |expected_loc, expected_file_index, idx| {
@@ -408,12 +442,12 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool
                 if (expected_loc.end != actual_loc.end) continue;
                 break :found_index idx;
             }
-            try error_builder.msgAtLoc("server returned unexpected reference!", file_uri, actual_loc, .err, .{});
+            try error_builder.msgAtLoc("server returned unexpected reference!", response_location.uri, actual_loc, .err, .{});
             return error.UnexpectedReference;
         };

         if (visited.isSet(index)) {
-            try error_builder.msgAtLoc("server returned duplicate reference!", file_uri, actual_loc, .err, .{});
+            try error_builder.msgAtLoc("server returned duplicate reference!", response_location.uri, actual_loc, .err, .{});
             return error.DuplicateReference;
         } else {
             visited.set(index);
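
---

Note on `gatherWorkspaceReferenceCandidates`: both branches compute a transitive
closure with the same worklist trick — seed an `ArrayHashMap`, then scan it by
index while appending to it, which works because an `ArrayHashMap` keeps its
keys in insertion order and `put` deduplicates (so import cycles terminate).
A standalone sketch of that idea, using simplified, hypothetical types
(`ReverseImports` and plain string URIs stand in for the real
`DocumentStore`/`Uri` machinery):

```zig
const std = @import("std");

/// Maps a file URI to the URIs of the files that import it
/// (hypothetical stand-in for the reverse map built from `handle.file_imports`).
const ReverseImports = std.StringArrayHashMapUnmanaged([]const []const u8);

/// Returns `target` plus every file that transitively imports it.
fn importClosure(
    arena: std.mem.Allocator,
    reverse_imports: ReverseImports,
    target: []const u8,
) !std.StringArrayHashMapUnmanaged(void) {
    var found: std.StringArrayHashMapUnmanaged(void) = .empty;
    try found.put(arena, target, {});

    // Keys are stored in insertion order, so indexing while appending visits
    // every newly discovered file exactly once; `put` ignores duplicates,
    // so cyclic imports cannot loop forever.
    var i: usize = 0;
    while (i < found.count()) : (i += 1) {
        const dependants = reverse_imports.get(found.keys()[i]) orelse continue;
        for (dependants) |dependant| try found.put(arena, dependant, {});
    }
    return found;
}

test importClosure {
    var arena_state: std.heap.ArenaAllocator = .init(std.testing.allocator);
    defer arena_state.deinit();
    const arena = arena_state.allocator();

    // c.zig imports b.zig, and b.zig imports a.zig.
    var reverse: ReverseImports = .empty;
    try reverse.put(arena, "a.zig", &.{"b.zig"});
    try reverse.put(arena, "b.zig", &.{"c.zig"});

    const closure = try importClosure(arena, reverse, "a.zig");
    try std.testing.expectEqual(3, closure.count());
}
```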
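The closure loops in the patch also use a batching pattern worth naming:
reserve capacity once per discovered file with `ensureUnusedCapacity`, then
insert with the infallible `putAssumeCapacity`. A minimal self-contained
illustration (hypothetical data, standard containers only):

```zig
const std = @import("std");

test "reserve once, then insert infallibly" {
    const gpa = std.testing.allocator;

    var set: std.AutoArrayHashMapUnmanaged(u32, void) = .empty;
    defer set.deinit(gpa);

    const batch: []const u32 = &.{ 1, 2, 2, 3 };

    // One fallible allocation up front covers the worst case (all unique)...
    try set.ensureUnusedCapacity(gpa, batch.len);
    // ...so the loop body cannot fail; re-inserting an existing key is a no-op
    // for a `void` value, which deduplicates the batch for free.
    for (batch) |item| set.putAssumeCapacity(item, {});

    try std.testing.expectEqual(3, set.count());
}
```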