Skip to content

Commit 551380a

Browse files
committed
Fix the merger for the case where a file is merged twice
This happens in projects where an #include is normally guarded by #ifdef. File A conditionally includes either B or C based on an #ifdef. Both B and C include D, so D gets merged twice. Previously, the offset up to which a file view had been created was stored globally, keyed only by file path. This meant the stale offset would be reused on a fresh include of the same file in the situation above. Solved by storing offsets keyed by the pair of the NodeIndex of the parent (including) file and the NodeIndex of the file being included.
1 parent 9a770f6 commit 551380a

File tree

1 file changed

+24
-10
lines changed

1 file changed

+24
-10
lines changed

server/src/merge_views.rs

Lines changed: 24 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,18 @@ use petgraph::stable_graph::NodeIndex;
88

99
use crate::graph::CachedStableGraph;
1010

11+
/// FilialTuple represents a tuple with a parent at index 0
12+
/// and a child at index 1. Parent can be nullable in the case of
13+
/// the child being a top level node in the tree.
14+
#[derive(PartialEq, Eq, Hash)]
15+
struct FilialTuple(Option<NodeIndex>, NodeIndex);
16+
17+
impl From<(Option<&NodeIndex>, NodeIndex)> for FilialTuple {
18+
fn from(tuple: (Option<&NodeIndex>, NodeIndex)) -> Self {
19+
FilialTuple(tuple.0.and_then(|n| Some(*n)), tuple.1)
20+
}
21+
}
22+
1123
pub fn generate_merge_list<'a>(
1224
nodes: &'a [(NodeIndex, Option<NodeIndex>)],
1325
sources: &'a HashMap<PathBuf, String>,
@@ -20,22 +32,23 @@ pub fn generate_merge_list<'a>(
2032

2133
line_directives.reserve(nodes.len() * 2);
2234

23-
let mut last_offset_set: HashMap<PathBuf, usize> = HashMap::new();
35+
let mut last_offset_set: HashMap<FilialTuple, usize> = HashMap::new();
2436

2537
let mut nodes_iter = nodes.iter().peekable();
2638

39+
// invariant: nodes_iter always has _at least_ one element. Can't save a not-file :B
2740
let first = nodes_iter.next().unwrap().0;
28-
let first_path = graph.get_node(first).clone();
41+
let first_path = graph.get_node(first);
2942

30-
last_offset_set.insert(first_path.clone(), 0);
43+
last_offset_set.insert(FilialTuple(None, first), 0);
3144

3245
// stack to keep track of the depth first traversal
3346
let mut stack = VecDeque::<NodeIndex>::new();
3447

3548
create_merge_views(&mut nodes_iter, &mut merge_list, &mut last_offset_set, graph, sources, &mut line_directives, &mut stack);
3649

3750
// now we add a view of the remainder of the root file
38-
let offset = *last_offset_set.get(&first_path).unwrap();
51+
let offset = *last_offset_set.get(&FilialTuple(None, first)).unwrap();
3952

4053
let len = sources.get(&first_path).unwrap().len();
4154
merge_list.push_back(&sources.get(&first_path).unwrap()[min(offset, len) ..]);
@@ -55,7 +68,7 @@ pub fn generate_merge_list<'a>(
5568
fn create_merge_views<'a>(
5669
nodes: &mut Peekable<Iter<(NodeIndex, Option<NodeIndex>)>>,
5770
merge_list: &mut LinkedList<&'a str>,
58-
last_offset_set: &mut HashMap<PathBuf, usize>,
71+
last_offset_set: &mut HashMap<FilialTuple, usize>,
5972
graph: &'a CachedStableGraph,
6073
sources: &'a HashMap<PathBuf, String>,
6174
line_directives: &mut Vec<String>,
@@ -68,6 +81,7 @@ fn create_merge_views<'a>(
6881
None => return,
6982
};
7083

84+
// invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function
7185
let parent = n.1.unwrap();
7286
let child = n.0;
7387
let edge = graph.get_edge_meta(parent, child);
@@ -77,7 +91,7 @@ fn create_merge_views<'a>(
7791
let parent_source = sources.get(&parent_path).unwrap();
7892
let (char_for_line, char_following_line) = char_offset_for_line(edge.line, parent_source);
7993

80-
let offset = *last_offset_set.insert(parent_path.clone(), char_following_line).get_or_insert(0);
94+
let offset = *last_offset_set.insert((stack.back(), parent).into(), char_following_line).get_or_insert(0);
8195
merge_list.push_back(&parent_source[offset..char_for_line]);
8296
add_opening_line_directive(&child_path, merge_list, line_directives);
8397

@@ -95,7 +109,7 @@ fn create_merge_views<'a>(
95109
}
96110
};
97111
merge_list.push_back(&child_source[..offset]);
98-
last_offset_set.insert(child_path.clone(), 0);
112+
last_offset_set.insert(FilialTuple(Some(parent), child), 0);
99113
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
100114
add_closing_line_directive(edge.line+2, &parent_path, merge_list, line_directives);
101115
// if the next pair's parent is not the current pair's parent, we need to bubble up
@@ -109,7 +123,7 @@ fn create_merge_views<'a>(
109123
create_merge_views(nodes, merge_list, last_offset_set, graph, sources, line_directives, stack);
110124
stack.pop_back();
111125

112-
let offset = *last_offset_set.get(&child_path).unwrap();
126+
let offset = *last_offset_set.get(&FilialTuple(Some(parent), child)).unwrap();
113127
let child_source = sources.get(&child_path).unwrap();
114128
// this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
115129
let end_offset = {
@@ -121,7 +135,7 @@ fn create_merge_views<'a>(
121135
if offset < child_source.len()-end_offset {
122136
// if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
123137
merge_list.push_back(&child_source[offset../* std::cmp::max( */child_source.len()-end_offset/* , offset) */]);
124-
last_offset_set.insert(child_path.clone(), 0);
138+
last_offset_set.insert(FilialTuple(Some(parent), child), 0);
125139
}
126140

127141
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
@@ -142,7 +156,7 @@ fn create_merge_views<'a>(
142156
}
143157
};
144158
merge_list.push_back(&child_source[..offset]);
145-
last_offset_set.insert(child_path.clone(), 0);
159+
last_offset_set.insert(FilialTuple(Some(parent), child), 0);
146160
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
147161
add_closing_line_directive(edge.line+2, &parent_path, merge_list, line_directives);
148162
}

0 commit comments

Comments
 (0)