Commit 839c286

Liberalize attributes.

1 parent 68c1cc6, commit 839c286
17 files changed, +257 -172 lines

src/librustc_resolve/lib.rs (+2)

@@ -1165,6 +1165,7 @@ pub struct Resolver<'a> {

     privacy_errors: Vec<PrivacyError<'a>>,
     ambiguity_errors: Vec<AmbiguityError<'a>>,
+    gated_errors: FxHashSet<Span>,
     disallowed_shadowing: Vec<&'a LegacyBinding<'a>>,

     arenas: &'a ResolverArenas<'a>,

@@ -1355,6 +1356,7 @@ impl<'a> Resolver<'a> {

            privacy_errors: Vec::new(),
            ambiguity_errors: Vec::new(),
+           gated_errors: FxHashSet(),
            disallowed_shadowing: Vec::new(),

            arenas: arenas,
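The new `gated_errors` set records the spans for which the `use_extern_macros` gate error has already been emitted, so the resolver reports it at most once per span (see the last `macros.rs` hunk below). A minimal standalone sketch of that deduplication pattern, using `std::collections::HashSet` and a toy `Span` type in place of `FxHashSet` and `syntax_pos::Span`:

```rust
use std::collections::HashSet;

// Stand-in for syntax_pos::Span: just a byte range.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Span { lo: u32, hi: u32 }

struct GateErrors {
    seen: HashSet<Span>,
}

impl GateErrors {
    fn new() -> Self { GateErrors { seen: HashSet::new() } }

    // Returns true only the first time a given span is reported,
    // mirroring the `self.gated_errors.insert(span)` guard in the resolver.
    fn report_once(&mut self, span: Span, msg: &str) -> bool {
        if self.seen.insert(span) {
            eprintln!("error[feature gate]: {} at {:?}", msg, span);
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut errors = GateErrors::new();
    let sp = Span { lo: 10, hi: 20 };
    assert!(errors.report_once(sp, "non-ident macro paths are experimental"));
    // A second report for the same span is suppressed.
    assert!(!errors.report_once(sp, "non-ident macro paths are experimental"));
}
```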

src/librustc_resolve/macros.rs (+38, -16)

@@ -28,9 +28,11 @@ use syntax::ext::placeholders::placeholder;
 use syntax::ext::tt::macro_rules;
 use syntax::feature_gate::{self, emit_feature_err, GateIssue};
 use syntax::fold::{self, Folder};
+use syntax::parse::parser::PathStyle;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::TokenStream;
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::{Span, DUMMY_SP};

@@ -200,16 +202,22 @@ impl<'a> base::Resolver for Resolver<'a> {
            let name = unwrap_or!(attrs[i].name(), continue);

            if name == "derive" {
-               let mut traits = match attrs[i].meta_item_list() {
-                   Some(traits) => traits,
-                   _ => continue,
+               let result = attrs[i].parse_list(&self.session.parse_sess,
+                                                |parser| parser.parse_path(PathStyle::Mod));
+               let mut traits = match result {
+                   Ok(traits) => traits,
+                   Err(mut e) => {
+                       e.cancel();
+                       continue
+                   }
                };

                for j in 0..traits.len() {
-                   let legacy_name = Symbol::intern(&match traits[j].word() {
-                       Some(..) => format!("derive_{}", traits[j].name().unwrap()),
-                       None => continue,
-                   });
+                   if traits[j].segments.len() > 1 {
+                       continue
+                   }
+                   let trait_name = traits[j].segments[0].identifier.name;
+                   let legacy_name = Symbol::intern(&format!("derive_{}", trait_name));
                    if !self.builtin_macros.contains_key(&legacy_name) {
                        continue
                    }

@@ -218,7 +226,23 @@ impl<'a> base::Resolver for Resolver<'a> {
                if traits.is_empty() {
                    attrs.remove(i);
                } else {
-                   attrs[i].tokens = ast::MetaItemKind::List(traits).tokens(attrs[i].span);
+                   let mut tokens = Vec::new();
+                   for (i, path) in traits.iter().enumerate() {
+                       if i > 0 {
+                           tokens.push(TokenTree::Token(attrs[i].span, Token::Comma).into());
+                       }
+                       for (j, segment) in path.segments.iter().enumerate() {
+                           if j > 0 {
+                               tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
+                           }
+                           let tok = Token::Ident(segment.identifier);
+                           tokens.push(TokenTree::Token(path.span, tok).into());
+                       }
+                   }
+                   attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
+                       delim: token::Paren,
+                       tts: TokenStream::concat(tokens).into(),
+                   }).into();
                }
                return Some(ast::Attribute {
                    path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)),

@@ -262,9 +286,8 @@ impl<'a> Resolver<'a> {
            InvocationKind::Bang { ref mac, .. } => {
                return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
            }
-           InvocationKind::Derive { name, span, .. } => {
-               let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
-               return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force);
+           InvocationKind::Derive { ref path, .. } => {
+               return self.resolve_macro_to_def(scope, path, MacroKind::Derive, force);
            }
        };

@@ -282,9 +305,8 @@ impl<'a> Resolver<'a> {
            1 => path.segments[0].identifier.name,
            _ => return Err(determinacy),
        };
-       for &(name, span) in traits {
-           let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
-           match self.resolve_macro(scope, &path, MacroKind::Derive, force) {
+       for path in traits {
+           match self.resolve_macro(scope, path, MacroKind::Derive, force) {
                Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
                    if inert_attrs.contains(&attr_name) {
                        // FIXME(jseyfried) Avoid `mem::replace` here.

@@ -327,7 +349,7 @@ impl<'a> Resolver<'a> {
        self.current_module = invocation.module.get();

        if path.len() > 1 {
-           if !self.use_extern_macros {
+           if !self.use_extern_macros && self.gated_errors.insert(span) {
                let msg = "non-ident macro paths are experimental";
                let feature = "use_extern_macros";
                emit_feature_err(&self.session.parse_sess, feature, span, GateIssue::Language, msg);
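With `#[derive(...)]` arguments now parsed as paths, the surviving derive entries are re-serialized into the attribute's token stream by hand: `::` tokens between path segments, `,` tokens between paths, all wrapped in a parenthesized delimiter. A rough sketch of that joining logic over plain strings (the real code emits `TokenTree::Token` values inside a `Delimited` group; the `serde::Serialize` path below is just an illustrative input):

```rust
// Minimal stand-in for rebuilding `#[derive(A, B::C)]` tokens from a list
// of paths. Each path is a list of segment names here; the real code works
// on ast::Path and produces a parenthesized token stream.
fn rebuild_derive_tokens(paths: &[Vec<&str>]) -> String {
    let mut out = String::from("(");
    for (i, path) in paths.iter().enumerate() {
        if i > 0 {
            out.push_str(", "); // Token::Comma between paths
        }
        for (j, segment) in path.iter().enumerate() {
            if j > 0 {
                out.push_str("::"); // Token::ModSep between segments
            }
            out.push_str(segment); // Token::Ident for each segment
        }
    }
    out.push(')');
    out
}

fn main() {
    let paths = vec![vec!["Clone"], vec!["serde", "Serialize"]];
    assert_eq!(rebuild_derive_tokens(&paths), "(Clone, serde::Serialize)");
}
```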

src/libsyntax/attr.rs (+44, -2)

@@ -17,7 +17,7 @@ pub use self::IntType::*;
 use ast;
 use ast::{AttrId, Attribute, Name, Ident};
 use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
-use ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind};
+use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind};
 use codemap::{Spanned, spanned, dummy_spanned, mk_sp};
 use syntax_pos::{Span, BytePos, DUMMY_SP};
 use errors::Handler;

@@ -299,14 +299,45 @@ impl Attribute {
        })
    }

+   pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
+       where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+   {
+       let mut parser = Parser::new(sess, self.tokens.clone(), None, false);
+       let result = f(&mut parser)?;
+       if parser.token != token::Eof {
+           parser.unexpected()?;
+       }
+       Ok(result)
+   }
+
+   pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec<T>>
+       where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+   {
+       if self.tokens.is_empty() {
+           return Ok(Vec::new());
+       }
+       self.parse(sess, |parser| {
+           parser.expect(&token::OpenDelim(token::Paren))?;
+           let mut list = Vec::new();
+           while !parser.eat(&token::CloseDelim(token::Paren)) {
+               list.push(f(parser)?);
+               if !parser.eat(&token::Comma) {
+                   parser.expect(&token::CloseDelim(token::Paren))?;
+                   break
+               }
+           }
+           Ok(list)
+       })
+   }
+
    pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> {
        if self.path.segments.len() > 1 {
            sess.span_diagnostic.span_err(self.path.span, "expected ident, found path");
        }

        Ok(MetaItem {
            name: self.path.segments.last().unwrap().identifier.name,
-           node: Parser::new(sess, self.tokens.clone(), None, false).parse_meta_item_kind()?,
+           node: self.parse(sess, |parser| parser.parse_meta_item_kind())?,
            span: self.span,
        })
    }

@@ -985,6 +1016,10 @@ impl MetaItem {
    {
        let (mut span, name) = match tokens.next() {
            Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
+           Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt {
+               token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
+               _ => None,
+           },
            _ => return None,
        };
        let node = match MetaItemKind::from_tokens(tokens) {

@@ -1151,6 +1186,13 @@ impl LitKind {
        match token {
            Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
            Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
+           Token::Interpolated(ref nt) => match **nt {
+               token::NtExpr(ref v) => match v.node {
+                   ExprKind::Lit(ref lit) => Some(lit.node.clone()),
+                   _ => None,
+               },
+               _ => None,
+           },
            Token::Literal(lit, suf) => {
                let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
                if suffix_illegal && suf.is_some() {
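The `Attribute::parse_list` helper added above drives a caller-supplied sub-parser over a parenthesized, comma-separated token list, tolerating a trailing comma. A self-contained sketch of the same loop shape over a toy token type and parser (both invented here for illustration; the real code uses `syntax::parse::parser::Parser` and `token::Token`):

```rust
// Simplified tokens; the real code uses syntax::parse::token::Token.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { OpenParen, CloseParen, Comma, Ident(&'static str), Eof }

struct P { toks: Vec<Tok>, pos: usize }

impl P {
    fn peek(&self) -> Tok { *self.toks.get(self.pos).unwrap_or(&Tok::Eof) }
    fn bump(&mut self) { self.pos += 1; }
    fn eat(&mut self, t: Tok) -> bool {
        if self.peek() == t { self.bump(); true } else { false }
    }
    fn expect(&mut self, t: Tok) -> Result<(), String> {
        if self.eat(t) { Ok(()) } else { Err(format!("expected {:?}, found {:?}", t, self.peek())) }
    }
}

// Mirrors the parse_list grammar: `(` item (`,` item)* `,`? `)`.
fn parse_list<T>(p: &mut P, mut f: impl FnMut(&mut P) -> Result<T, String>)
                 -> Result<Vec<T>, String> {
    p.expect(Tok::OpenParen)?;
    let mut list = Vec::new();
    while !p.eat(Tok::CloseParen) {
        list.push(f(p)?);
        if !p.eat(Tok::Comma) {
            p.expect(Tok::CloseParen)?;
            break;
        }
    }
    Ok(list)
}

fn main() {
    let mut p = P { toks: vec![Tok::OpenParen, Tok::Ident("Clone"), Tok::Comma,
                               Tok::Ident("Debug"), Tok::CloseParen], pos: 0 };
    let idents = parse_list(&mut p, |p| match p.peek() {
        Tok::Ident(name) => { p.bump(); Ok(name) }
        other => Err(format!("expected ident, found {:?}", other)),
    }).unwrap();
    assert_eq!(idents, vec!["Clone", "Debug"]);
}
```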

src/libsyntax/config.rs (+32, -40)

@@ -13,9 +13,10 @@ use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features
 use {fold, attr};
 use ast;
 use codemap::Spanned;
-use parse::ParseSess;
-use ptr::P;
+use parse::{token, ParseSess};
+use syntax_pos::Span;

+use ptr::P;
 use util::small_vector::SmallVector;

 /// A folder that strips out items that do not belong in the current configuration.

@@ -84,44 +85,33 @@ impl<'a> StripUnconfigured<'a> {
            return Some(attr);
        }

-       let attr_list = match attr.meta_item_list() {
-           Some(attr_list) => attr_list,
-           None => {
-               let msg = "expected `#[cfg_attr(<cfg pattern>, <attr>)]`";
-               self.sess.span_diagnostic.span_err(attr.span, msg);
-               return None;
-           }
-       };
-
-       let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {
-           (2, Some(cfg), Some(mi)) => (cfg, mi),
-           _ => {
-               let msg = "expected `#[cfg_attr(<cfg pattern>, <attr>)]`";
-               self.sess.span_diagnostic.span_err(attr.span, msg);
+       let (cfg, path, tokens, span) = match attr.parse(self.sess, |parser| {
+           parser.expect(&token::OpenDelim(token::Paren))?;
+           let cfg = parser.parse_meta_item()?;
+           parser.expect(&token::Comma)?;
+           let lo = parser.span.lo;
+           let (path, tokens) = parser.parse_path_and_tokens()?;
+           parser.expect(&token::CloseDelim(token::Paren))?;
+           Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span }))
+       }) {
+           Ok(result) => result,
+           Err(mut e) => {
+               e.emit();
                return None;
            }
        };

-       use attr::cfg_matches;
-       match (cfg.meta_item(), mi.meta_item()) {
-           (Some(cfg), Some(mi)) =>
-               if cfg_matches(&cfg, self.sess, self.features) {
-                   self.process_cfg_attr(ast::Attribute {
-                       id: attr::mk_attr_id(),
-                       style: attr.style,
-                       path: ast::Path::from_ident(mi.span, ast::Ident::with_empty_ctxt(mi.name)),
-                       tokens: mi.node.tokens(mi.span),
-                       is_sugared_doc: false,
-                       span: mi.span,
-                   })
-               } else {
-                   None
-               },
-           _ => {
-               let msg = "unexpected literal(s) in `#[cfg_attr(<cfg pattern>, <attr>)]`";
-               self.sess.span_diagnostic.span_err(attr.span, msg);
-               None
-           }
+       if attr::cfg_matches(&cfg, self.sess, self.features) {
+           self.process_cfg_attr(ast::Attribute {
+               id: attr::mk_attr_id(),
+               style: attr.style,
+               path: path,
+               tokens: tokens,
+               is_sugared_doc: false,
+               span: span,
+           })
+       } else {
+           None
        }
    }

@@ -133,10 +123,12 @@ impl<'a> StripUnconfigured<'a> {
            return false;
        }

-       let mis = attr.meta_item_list();
-       let mis = match mis {
-           Some(ref mis) if is_cfg(&attr) => mis,
-           _ => return true
+       let mis = if !is_cfg(&attr) {
+           return true;
+       } else if let Some(mis) = attr.meta_item_list() {
+           mis
+       } else {
+           return true;
        };

        if mis.len() != 1 {
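`cfg_attr` is now parsed directly as `(` meta item `,` path plus tokens `)`, so the attached attribute is no longer restricted to a plain meta item. For reference, the surface form this code accepts looks like the following (only built-in cfg predicates and attributes are used, so the snippet stands on its own):

```rust
// Example inputs for the `#[cfg_attr(<cfg pattern>, <attr>)]` form parsed
// above: a cfg predicate, a comma, then an arbitrary attribute.
#[cfg_attr(test, derive(Debug, Clone))]
#[cfg_attr(target_pointer_width = "64", allow(dead_code))]
struct Config {
    name: String,
    retries: u32,
}

fn main() {
    let c = Config { name: "example".to_string(), retries: 3 };
    println!("{} ({} retries)", c.name, c.retries);
}
```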

src/libsyntax/ext/derive.rs (+20, -25)

@@ -12,36 +12,31 @@ use attr::HasAttrs;
 use {ast, codemap};
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
+use parse::parser::PathStyle;
 use symbol::Symbol;
 use syntax_pos::Span;

-pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<(Symbol, Span)> {
+pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
    let mut result = Vec::new();
    attrs.retain(|attr| {
        if attr.path != "derive" {
            return true;
        }

-       if attr.value_str().is_some() {
-           cx.span_err(attr.span, "unexpected value in `derive`");
-           return false;
-       }
-
-       let traits = attr.meta_item_list().unwrap_or_else(Vec::new);
-       if traits.is_empty() {
-           cx.span_warn(attr.span, "empty trait list in `derive`");
-           return false;
-       }
-
-       for titem in traits {
-           if titem.word().is_none() {
-               cx.span_err(titem.span, "malformed `derive` entry");
-               return false;
+       match attr.parse_list(cx.parse_sess, |parser| parser.parse_path(PathStyle::Mod)) {
+           Ok(ref traits) if traits.is_empty() => {
+               cx.span_warn(attr.span, "empty trait list in `derive`");
+               false
+           }
+           Ok(traits) => {
+               result.extend(traits);
+               true
+           }
+           Err(mut e) => {
+               e.emit();
+               false
            }
-           result.push((titem.name().unwrap(), titem.span));
        }
-
-       true
    });
    result
 }

@@ -60,21 +55,21 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span {
    }
 }

-pub fn add_derived_markers<T: HasAttrs>(cx: &mut ExtCtxt, traits: &[(Symbol, Span)], item: T) -> T {
+pub fn add_derived_markers<T: HasAttrs>(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T {
    let span = match traits.get(0) {
-       Some(&(_, span)) => span,
+       Some(path) => path.span,
        None => return item,
    };

    item.map_attrs(|mut attrs| {
-       if traits.iter().any(|&(name, _)| name == "PartialEq") &&
-          traits.iter().any(|&(name, _)| name == "Eq") {
+       if traits.iter().any(|path| *path == "PartialEq") &&
+          traits.iter().any(|path| *path == "Eq") {
            let span = allow_unstable(cx, span, "derive(PartialEq, Eq)");
            let meta = cx.meta_word(span, Symbol::intern("structural_match"));
            attrs.push(cx.attribute(span, meta));
        }
-       if traits.iter().any(|&(name, _)| name == "Copy") &&
-          traits.iter().any(|&(name, _)| name == "Clone") {
+       if traits.iter().any(|path| *path == "Copy") &&
+          traits.iter().any(|path| *path == "Clone") {
            let span = allow_unstable(cx, span, "derive(Copy, Clone)");
            let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker"));
            attrs.push(cx.attribute(span, meta));
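`collect_derives` keeps the `Vec::retain` plus side-accumulator pattern: unrelated attributes are kept untouched, malformed or empty `derive` attributes are dropped with a diagnostic, and well-formed ones have their trait paths copied into the result while the attribute itself is retained. A simplified sketch of that pattern with string stand-ins for attributes and paths (the names below are illustrative only):

```rust
// Stand-in attribute: just a name and a list of path strings.
struct Attr { name: &'static str, args: Vec<String> }

// Retain non-derive attributes unchanged; drop empty derives with a warning;
// for well-formed derives, record the trait paths and keep the attribute,
// as the real collect_derives does on a successful parse.
fn collect_derives(attrs: &mut Vec<Attr>) -> Vec<String> {
    let mut result = Vec::new();
    attrs.retain(|attr| {
        if attr.name != "derive" {
            return true; // unrelated attribute: keep it
        }
        if attr.args.is_empty() {
            eprintln!("warning: empty trait list in `derive`");
            return false; // drop the useless empty derive
        }
        result.extend(attr.args.iter().cloned());
        true
    });
    result
}

fn main() {
    let mut attrs = vec![
        Attr { name: "derive", args: vec!["Clone".into(), "serde::Serialize".into()] },
        Attr { name: "doc", args: vec!["a doc comment".into()] },
    ];
    let traits = collect_derives(&mut attrs);
    assert_eq!(traits, vec!["Clone".to_string(), "serde::Serialize".to_string()]);
    assert_eq!(attrs.len(), 2); // both attributes survive; only their paths were collected
}
```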
