
Commit 18f1a3c

Some final touches
1 parent 81410ab commit 18f1a3c

10 files changed (+86, -67 lines)


crates/hir-expand/src/lib.rs (+1, -1)

@@ -663,7 +663,7 @@ impl ExpansionInfo {
         range: TextRange,
     ) -> Option<(FileRange, SyntaxContextId)> {
         debug_assert!(self.expanded.value.text_range().contains_range(range));
-        let mut spans = self.exp_map.spans_for_node_range(range);
+        let mut spans = self.exp_map.spans_for_range(range);
         let SpanData { range, anchor, ctx } = spans.next()?;
         let mut start = range.start();
         let mut end = range.end();
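
The renamed `spans_for_range` yields one span per mapped offset in the queried range; the surrounding method then folds those spans into a single range relative to their anchor. A hypothetical helper (not part of the commit) sketching that folding, under the assumption that all yielded spans must share the same anchor:

    // Sketch only: merge the spans yielded for a range in expanded code into one range
    // relative to their common anchor, giving up if the anchors differ.
    fn merge_spans<A: Copy + PartialEq, C: Copy>(
        mut spans: impl Iterator<Item = tt::SpanData<A, C>>,
    ) -> Option<(A, syntax::TextRange, C)> {
        let first = spans.next()?;
        let (mut start, mut end) = (first.range.start(), first.range.end());
        for s in spans {
            if s.anchor != first.anchor {
                return None; // spans coming from different anchors cannot be merged
            }
            start = start.min(s.range.start());
            end = end.max(s.range.end());
        }
        Some((first.anchor, syntax::TextRange::new(start, end), first.ctx))
    }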

crates/hir-expand/src/quote.rs (+15, -10)

@@ -215,10 +215,18 @@ impl_to_to_tokentrees! {
 #[cfg(test)]
 mod tests {
     use crate::tt;
-    use ::tt::Span;
+    use base_db::{
+        span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+        FileId,
+    };
     use expect_test::expect;
+    use syntax::{TextRange, TextSize};
 
-    const DUMMY: tt::SpanData = tt::SpanData::DUMMY;
+    const DUMMY: tt::SpanData = tt::SpanData {
+        range: TextRange::empty(TextSize::new(0)),
+        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+        ctx: SyntaxContextId::ROOT,
+    };
 
     #[test]
     fn test_quote_delimiters() {
@@ -242,10 +250,7 @@ mod tests {
     }
 
     fn mk_ident(name: &str) -> crate::tt::Ident {
-        crate::tt::Ident {
-            text: name.into(),
-            span: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
-        }
+        crate::tt::Ident { text: name.into(), span: DUMMY }
     }
 
     #[test]
@@ -256,8 +261,8 @@ mod tests {
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:?}");
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }
-            IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
+            SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }
+            IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
     }
 
     #[test]
@@ -290,8 +295,8 @@ mod tests {
         let list = crate::tt::Subtree {
             delimiter: crate::tt::Delimiter {
                 kind: crate::tt::DelimiterKind::Brace,
-                open: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
-                close: <crate::tt::SpanData as crate::tt::Span>::DUMMY,
+                open: DUMMY,
+                close: DUMMY,
             },
             token_trees: fields.collect(),
         };
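
The `FileId(4294967295)` values in the updated expect test come from anchoring the dummy span to `FileId::BOGUS`, which the test output shows to be the all-ones sentinel id rather than `FileId(0)`; the number is simply `u32::MAX`:

    // 4294967295 == u32::MAX, the sentinel the Debug output now prints for the bogus file id.
    assert_eq!(u32::MAX, 4_294_967_295);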

crates/hir-expand/src/span.rs (+1, -2)

@@ -4,13 +4,12 @@ use base_db::{
     span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
     FileId,
 };
-use mbe::TokenMap;
 use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
 use triomphe::Arc;
 
 use crate::db::ExpandDatabase;
 
-pub type ExpansionSpanMap = TokenMap<SpanData>;
+pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
 
 /// Spanmap for a macro file or a real file
 #[derive(Clone, Debug, PartialEq, Eq)]

crates/mbe/src/benchmark.rs (+7, -22)

@@ -6,11 +6,10 @@ use syntax::{
     AstNode, SmolStr,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::Span;
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap,
+    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
 };
 
 #[test]
@@ -97,8 +96,8 @@ fn invocation_fixtures(
         loop {
             let mut subtree = tt::Subtree {
                 delimiter: tt::Delimiter {
-                    open: DummyTestSpanData::DUMMY,
-                    close: DummyTestSpanData::DUMMY,
+                    open: DUMMY,
+                    close: DUMMY,
                     kind: tt::DelimiterKind::Invisible,
                 },
                 token_trees: vec![],
@@ -211,34 +210,20 @@ fn invocation_fixtures(
        *seed
    }
    fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
-       tt::Leaf::Ident(tt::Ident { span: DummyTestSpanData::DUMMY, text: SmolStr::new(ident) })
-           .into()
+       tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
    }
    fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
-       tt::Leaf::Punct(tt::Punct {
-           span: DummyTestSpanData::DUMMY,
-           char,
-           spacing: tt::Spacing::Alone,
-       })
-       .into()
+       tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
    }
    fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
-       tt::Leaf::Literal(tt::Literal {
-           span: DummyTestSpanData::DUMMY,
-           text: SmolStr::new(lit),
-       })
-       .into()
+       tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
    }
    fn make_subtree(
        kind: tt::DelimiterKind,
        token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
    ) -> tt::TokenTree<DummyTestSpanData> {
        tt::Subtree {
-           delimiter: tt::Delimiter {
-               open: DummyTestSpanData::DUMMY,
-               close: DummyTestSpanData::DUMMY,
-               kind,
-           },
+           delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
            token_trees: token_trees.unwrap_or_default(),
        }
        .into()
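
With the shared `DUMMY` constant, any test-only token tree for the dummy span type can be written as a short expression. An illustrative helper (not from this commit) in the same style as the fixture builders above:

    // Illustrative sketch only: an empty, invisibly-delimited subtree with dummy spans.
    fn empty_invisible_subtree() -> tt::Subtree<DummyTestSpanData> {
        tt::Subtree {
            delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind: tt::DelimiterKind::Invisible },
            token_trees: Vec::new(),
        }
    }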

crates/mbe/src/expander/transcriber.rs (+13, -4)

@@ -79,8 +79,8 @@ impl<S: Span> Bindings<S> {
             }
             MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
                 delimiter: tt::Delimiter {
-                    open: S::DUMMY,
-                    close: S::DUMMY,
+                    open: span,
+                    close: span,
                     kind: tt::DelimiterKind::Brace,
                 },
                 token_trees: vec![],
@@ -225,6 +225,7 @@ fn expand_subtree<S: Span>(
             arena.push(
                 tt::Leaf::Literal(tt::Literal {
                     text: index.to_string().into(),
+                    // FIXME
                     span: S::DUMMY,
                 })
                 .into(),
@@ -282,8 +283,12 @@ fn expand_subtree<S: Span>(
                 }
             };
             arena.push(
-                tt::Leaf::Literal(tt::Literal { text: c.to_string().into(), span: S::DUMMY })
-                    .into(),
+                tt::Leaf::Literal(tt::Literal {
+                    text: c.to_string().into(),
+                    // FIXME
+                    span: S::DUMMY,
+                })
+                .into(),
             );
         }
     }
@@ -337,7 +342,9 @@ fn expand_var<S: Span>(
         }
         Err(e) => ExpandResult {
             value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
+                // FIXME
                 open: S::DUMMY,
+                // FIXME
                 close: S::DUMMY,
             }))),
             err: Some(e),
@@ -479,6 +486,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt
             tt::Leaf::Punct(tt::Punct {
                 char: ':',
                 spacing: tt::Spacing::Joint,
+                // FIXME
                 span: S::DUMMY,
             })
             .into(),
@@ -487,6 +495,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt
             tt::Leaf::Punct(tt::Punct {
                 char: ':',
                 spacing: tt::Spacing::Alone,
+                // FIXME
                 span: S::DUMMY,
             })
             .into(),

crates/mbe/src/lib.rs (+1, -1)

@@ -38,7 +38,7 @@ pub use crate::{
         syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node,
         SpanMapper,
     },
-    token_map::TokenMap,
+    token_map::SpanMap,
 };
 
 pub use crate::syntax_bridge::dummy_test_span_utils::*;

crates/mbe/src/syntax_bridge.rs (+18, -11)

@@ -13,7 +13,7 @@ use tt::{
     Span, SpanData, SyntaxContext,
 };
 
-use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap};
 
 #[cfg(test)]
 mod tests;
@@ -22,7 +22,7 @@ pub trait SpanMapper<S: Span> {
     fn span_for(&self, range: TextRange) -> S;
 }
 
-impl<S: Span> SpanMapper<S> for TokenMap<S> {
+impl<S: Span> SpanMapper<S> for SpanMap<S> {
     fn span_for(&self, range: TextRange) -> S {
         self.span_at(range.start())
     }
@@ -34,10 +34,12 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
     }
 }
 
+/// Dummy things for testing where spans don't matter.
 pub(crate) mod dummy_test_span_utils {
     use super::*;
 
     pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+    pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY;
 
     #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
     pub struct DummyTestSpanAnchor;
@@ -62,9 +64,8 @@ pub(crate) mod dummy_test_span_utils {
     }
 }
 
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-/// FIXME: Flesh out the doc comment more thoroughly
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans.
 pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
@@ -79,6 +80,9 @@ where
     convert_tokens(&mut c)
 }
 
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
+/// be injected or hidden from the output.
 pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
@@ -107,10 +111,12 @@ where
 // * AssocItems(SmallVec<[ast::AssocItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
 
+/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
+/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
 pub fn token_tree_to_syntax_node<Anchor, Ctx>(
     tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
     entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap<SpanData<Anchor, Ctx>>)
+) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>)
 where
     SpanData<Anchor, Ctx>: Span,
     Anchor: Copy,
@@ -142,7 +148,8 @@ where
     tree_sink.finish()
 }
 
-/// Convert a string to a `TokenTree`
+/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
+/// anchor with the given context.
 pub fn parse_to_token_tree<Anchor, Ctx>(
     anchor: Anchor,
     ctx: Ctx,
@@ -161,7 +168,7 @@ where
     Some(convert_tokens(&mut conv))
 }
 
-/// Convert a string to a `TokenTree`
+/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
 pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
 where
     S: Span,
@@ -798,7 +805,7 @@ where
     cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
     text_pos: TextSize,
     inner: SyntaxTreeBuilder,
-    token_map: TokenMap<SpanData<Anchor, Ctx>>,
+    token_map: SpanMap<SpanData<Anchor, Ctx>>,
 }
 
 impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
@@ -811,11 +818,11 @@ where
             cursor,
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
-            token_map: TokenMap::empty(),
+            token_map: SpanMap::empty(),
         }
     }
 
-    fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap<SpanData<Anchor, Ctx>>) {
+    fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) {
         self.token_map.finish();
         (self.inner.finish(), self.token_map)
     }
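
A rough sketch of the round trip these new doc comments describe, written as crate-internal test-style code; it assumes `DummyTestSpanMap` is the unit-struct `SpanMapper` from `dummy_test_span_utils`, and the exact signatures may differ from the real call sites:

    // Sketch: syntax tree -> token tree (spans looked up through the span map), then back.
    use syntax::{ast, AstNode};

    fn round_trip(text: &str) {
        let source_file = ast::SourceFile::parse(text).ok().unwrap();
        // Every token gets its span from the provided mapper (dummy spans here).
        let tt = crate::syntax_node_to_token_tree(source_file.syntax(), crate::DummyTestSpanMap);
        // Re-parsing returns the node plus a SpanMap from the new offsets back to the
        // subtree's spans.
        let (parse, span_map) =
            crate::token_tree_to_syntax_node(&tt, parser::TopEntryPoint::SourceFile);
        let _ = (parse, span_map);
    }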

crates/mbe/src/token_map.rs (+14, -10)

@@ -8,30 +8,33 @@ use tt::Span;
 
 /// Maps absolute text ranges for the corresponding file to the relevant span data.
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
-// FIXME: Rename to SpanMap
-pub struct TokenMap<S: Span> {
-    // FIXME: This needs to be sorted by (FileId, AstId)
-    // Then we can do a binary search on the file id,
-    // then a bin search on the ast id?
+pub struct SpanMap<S: Span> {
     spans: Vec<(TextSize, S)>,
 }
 
-impl<S: Span> TokenMap<S> {
+impl<S: Span> SpanMap<S> {
+    /// Creates a new empty [`SpanMap`].
    pub fn empty() -> Self {
        Self { spans: Vec::new() }
    }
 
+    /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
+    /// in order.
    pub fn finish(&mut self) {
        assert!(self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0));
        self.spans.shrink_to_fit();
    }
 
+    /// Pushes a new span onto the [`SpanMap`].
    pub fn push(&mut self, offset: TextSize, span: S) {
+        debug_assert!(self.spans.last().map_or(true, |&(last_offset, _)| last_offset < offset));
        self.spans.push((offset, span));
    }
 
+    /// Returns all [`TextRange`]s that correspond to the given span.
+    ///
+    /// Note this does a linear search through the entire backing vector.
    pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
-        // FIXME: linear search
        self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
            if s != span {
                return None;
@@ -41,14 +44,15 @@ impl<S: Span> TokenMap<S> {
        })
    }
 
-    // FIXME: We need APIs for fetching the span of a token as well as for a whole node. The node
-    // one *is* fallible though.
+    /// Returns the span at the given position.
    pub fn span_at(&self, offset: TextSize) -> S {
        let entry = self.spans.partition_point(|&(it, _)| it <= offset);
        self.spans[entry].1
    }
 
-    pub fn spans_for_node_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ {
+    /// Returns the spans associated with the given range.
+    /// In other words, this will return all spans that correspond to all offsets within the given range.
+    pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ {
        let (start, end) = (range.start(), range.end());
        let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
        let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
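
The renamed `SpanMap` is an offset-sorted `Vec<(TextSize, S)>` queried by binary search (`partition_point`). A minimal usage sketch (not from the commit), written as if inside the `mbe` crate's tests and using the `DUMMY` span from `dummy_test_span_utils`; judging from `ranges_with_span`, each pushed offset records where the corresponding span's text ends:

    // Sketch: build, finalize, and query a SpanMap with dummy spans.
    use syntax::{TextRange, TextSize};

    fn demo() {
        let mut map = crate::SpanMap::empty();
        // Offsets must be pushed in strictly increasing order; `push` debug-asserts this
        // and `finish` re-validates it before shrinking the backing Vec.
        map.push(TextSize::new(5), crate::DUMMY);
        map.push(TextSize::new(9), crate::DUMMY);
        map.finish();

        // `span_at` binary-searches for the first entry whose offset lies past the query offset.
        let _span = map.span_at(TextSize::new(3));
        // `spans_for_range` yields the span of every offset covered by the range.
        let _all: Vec<_> = map
            .spans_for_range(TextRange::new(TextSize::new(0), TextSize::new(9)))
            .collect();
    }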
