Lex doc comments as attributes #74

Merged · 1 commit · Apr 4, 2018
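
In short: after this change the lexer expands `///` and `//!` doc comments into the `#[doc = "..."]` attribute tokens that rustc desugars them to, instead of lexing them as single literal tokens. A minimal sketch of the observable behavior, based on the updated `tricky_doc_comment` test below (assuming the proc-macro2 0.3-era API this PR targets):

```rust
extern crate proc_macro2;

fn main() {
    // An outer doc comment lexes as two top-level tokens:
    // the op `#` and a bracketed group `[doc = " doc"]`.
    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens: Vec<_> = stream.into_iter().collect();
    assert_eq!(tokens.len(), 2);

    // An inner doc comment inserts a `!` as well: `# ! [doc = " doc"]`.
    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    assert_eq!(stream.into_iter().count(), 3);
}
```
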
src/stable.rs (73 additions, 22 deletions)
@@ -591,24 +591,55 @@ impl fmt::Display for Literal {
     }
 }
 
-named!(token_stream -> ::TokenStream, map!(
-    many0!(token_tree),
-    |trees| ::TokenStream::_new(TokenStream { inner: trees })
-));
+fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
+    let mut trees = Vec::new();
+    loop {
+        let input_no_ws = skip_whitespace(input);
+        if input_no_ws.rest.len() == 0 {
+            break
+        }
+        if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+            input = a;
+            trees.extend(tokens);
+            continue
+        }
+
+        let (a, tt) = match token_tree(input_no_ws) {
+            Ok(p) => p,
+            Err(_) => break,
+        };
+        trees.push(tt);
+        input = a;
+    }
+    Ok((input, ::TokenStream::_new(TokenStream { inner: trees })))
+}
 
 #[cfg(not(procmacro2_semver_exempt))]
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
-    token_kind(input)
+fn spanned<'a, T>(
+    input: Cursor<'a>,
+    f: fn(Cursor<'a>) -> PResult<'a, T>,
+) -> PResult<'a, (T, ::Span)> {
+    let (a, b) = f(skip_whitespace(input))?;
+    Ok((a, ((b, ::Span::_new(Span { })))))
 }
 
 #[cfg(procmacro2_semver_exempt)]
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+fn spanned<'a, T>(
+    input: Cursor<'a>,
+    f: fn(Cursor<'a>) -> PResult<'a, T>,
+) -> PResult<'a, (T, ::Span)> {
     let input = skip_whitespace(input);
     let lo = input.off;
-    let (input, mut token) = token_kind(input)?;
-    let hi = input.off;
-    token.set_span(::Span::_new(Span { lo: lo, hi: hi }));
-    Ok((input, token))
+    let (a, b) = f(input)?;
+    let hi = a.off;
+    let span = ::Span::_new(Span { lo: lo, hi: hi });
+    Ok((a, (b, span)))
 }
 
+fn token_tree(input: Cursor) -> PResult<TokenTree> {
+    let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+    tt.set_span(span);
+    Ok((rest, tt))
+}
+
 named!(token_kind -> TokenTree, alt!(
@@ -721,8 +752,6 @@ named!(literal_nocapture -> (), alt!(
     float
     |
     int
-    |
-    doc_comment
 ));
 
 named!(string -> (), alt!(
@@ -1146,31 +1175,53 @@ fn op_char(input: Cursor) -> PResult<char> {
     }
 }
 
-named!(doc_comment -> (), alt!(
+fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+    let mut trees = Vec::new();
+    let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+    trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
+    if inner {
+        trees.push(Op::new('!', Spacing::Alone).into());
+    }
+    let mut stream = vec![
+        TokenTree::Term(::Term::new("doc", span)),
+        TokenTree::Op(Op::new('=', Spacing::Alone)),
+        TokenTree::Literal(::Literal::string(comment)),
+    ];
+    for tt in stream.iter_mut() {
+        tt.set_span(span);
+    }
+    trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
+    for tt in trees.iter_mut() {
+        tt.set_span(span);
+    }
+    Ok((rest, trees))
+}
+
+named!(doc_comment_contents -> (&str, bool), alt!(
     do_parse!(
         punct!("//!") >>
-        take_until_newline_or_eof!() >>
-        (())
+        s: take_until_newline_or_eof!() >>
+        ((s, true))
     )
     |
     do_parse!(
         option!(whitespace) >>
         peek!(tag!("/*!")) >>
-        block_comment >>
-        (())
+        s: block_comment >>
+        ((s, true))
     )
     |
     do_parse!(
         punct!("///") >>
         not!(tag!("/")) >>
-        take_until_newline_or_eof!() >>
-        (())
+        s: take_until_newline_or_eof!() >>
+        ((s, false))
     )
     |
     do_parse!(
         option!(whitespace) >>
         peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
-        block_comment >>
-        (())
+        s: block_comment >>
+        ((s, false))
     )
 ));
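
As a reference point, here is a standalone sketch (not part of the diff) of the token shape the new `doc_comment` parser produces, built with the same public constructors the parser itself uses; the helper name `doc_attribute` is hypothetical:

```rust
extern crate proc_macro2;

use proc_macro2::{Delimiter, Group, Literal, Op, Spacing, Span, Term, TokenStream, TokenTree};

// Hypothetical helper: build the `#[doc = "..."]` tokens that a `///`
// comment now lexes to (an `//!` comment would add a `!` op after `#`).
fn doc_attribute(comment: &str) -> TokenStream {
    // The contents of the bracketed group: `doc = "..."`.
    let inner: TokenStream = vec![
        TokenTree::Term(Term::new("doc", Span::call_site())),
        TokenTree::Op(Op::new('=', Spacing::Alone)),
        TokenTree::Literal(Literal::string(comment)),
    ].into_iter()
        .collect();
    // The attribute itself: `#` followed by `[ ... ]`.
    vec![
        TokenTree::Op(Op::new('#', Spacing::Alone)),
        TokenTree::Group(Group::new(Delimiter::Bracket, inner)),
    ].into_iter()
        .collect()
}
```
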
src/strnom.rs (1 addition, 35 deletions)
@@ -268,7 +268,7 @@ macro_rules! take_until_newline_or_eof {
         } else {
             match $i.find('\n') {
                 Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
-                None => Ok(($i.advance($i.len()), "")),
+                None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
             }
         }
     }};
@@ -389,37 +389,3 @@ macro_rules! map {
         map!($i, call!($f), $g)
     };
 }
-
-macro_rules! many0 {
-    ($i:expr, $f:expr) => {{
-        let ret;
-        let mut res = ::std::vec::Vec::new();
-        let mut input = $i;
-
-        loop {
-            if input.is_empty() {
-                ret = Ok((input, res));
-                break;
-            }
-
-            match $f(input) {
-                Err(LexError) => {
-                    ret = Ok((input, res));
-                    break;
-                }
-                Ok((i, o)) => {
-                    // loop trip must always consume (otherwise infinite loops)
-                    if i.len() == input.len() {
-                        ret = Err(LexError);
-                        break;
-                    }
-
-                    res.push(o);
-                    input = i;
-                }
-            }
-        }
-
-        ret
-    }};
-}
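
The one-line `take_until_newline_or_eof!` change above matters when a doc comment sits at the very end of the input with no trailing newline: the old arm returned `""`, discarding the text, while the new arm returns the rest of the input. A minimal model of the new behavior as a plain function (hypothetical; the real code is a macro operating on a `Cursor`):

```rust
// Returns (remaining input, captured text up to but excluding the newline).
fn take_until_newline_or_eof(input: &str) -> (&str, &str) {
    match input.find('\n') {
        Some(i) => (&input[i..], &input[..i]),
        // At EOF with no newline, capture everything; the old version
        // returned "" here, losing the doc comment's text.
        None => (&input[input.len()..], input),
    }
}

fn main() {
    assert_eq!(take_until_newline_or_eof(" doc"), ("", " doc"));
    assert_eq!(take_until_newline_or_eof(" a\nwut"), ("\nwut", " a"));
}
```
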
tests/test.rs (39 additions, 14 deletions)
@@ -29,12 +29,6 @@ fn roundtrip() {
     roundtrip("a");
     roundtrip("<<");
     roundtrip("<<=");
-    roundtrip(
-        "
-        /// a
-        wut
-        ",
-    );
     roundtrip(
         "
         1
@@ -115,12 +109,16 @@ testing 123
 testing 234
 }",
         &[
-            (1, 0, 1, 30),
-            (2, 0, 2, 7),
-            (2, 8, 2, 11),
-            (3, 0, 5, 1),
-            (4, 2, 4, 9),
-            (4, 10, 4, 13),
+            (1, 0, 1, 30),  // #
+            (1, 0, 1, 30),  // [ ... ]
+            (1, 0, 1, 30),  // doc
+            (1, 0, 1, 30),  // =
+            (1, 0, 1, 30),  // "This is..."
+            (2, 0, 2, 7),   // testing
+            (2, 8, 2, 11),  // 123
+            (3, 0, 5, 1),   // { ... }
+            (4, 2, 4, 9),   // testing
+            (4, 10, 4, 13), // 234
         ],
     );
 }
@@ -192,11 +190,38 @@ fn tricky_doc_comment() {
 
     let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
     let tokens = stream.into_iter().collect::<Vec<_>>();
-    assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
+    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
     match tokens[0] {
-        proc_macro2::TokenTree::Literal(_) => {}
+        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
         _ => panic!("wrong token {:?}", tokens[0]),
     }
+    let mut tokens = match tokens[1] {
+        proc_macro2::TokenTree::Group(ref tt) => {
+            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+            tt.stream().into_iter()
+        }
+        _ => panic!("wrong token {:?}", tokens[0]),
+    };
+
+    match tokens.next().unwrap() {
+        proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
+        t => panic!("wrong token {:?}", t),
+    }
+    match tokens.next().unwrap() {
+        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
+        t => panic!("wrong token {:?}", t),
+    }
+    match tokens.next().unwrap() {
+        proc_macro2::TokenTree::Literal(ref tt) => {
+            assert_eq!(tt.to_string(), "\" doc\"");
+        }
+        t => panic!("wrong token {:?}", t),
+    }
+    assert!(tokens.next().is_none());
+
+    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+    let tokens = stream.into_iter().collect::<Vec<_>>();
+    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
 }
 
 #[test]