Skip to content

Commit 6f3737e

Browse files
authored
Rust edition upgrade, DX fix for generated token macros (#2)
# Breaking changes ## Generated token macros are no longer `#[macro_export]` The `Token` derive macro generates a `macro_rules!` macro for each of the enum variants. E.g., for Lox's `Token` enum in `examples/lox`: ```rs // examples/lox/src/tokens.rs #[derive(DebugLispToken, PartialEq, Token, Lexer)] pub enum Token { #[subset_of(Ident)] #[pattern = "and|class|else|false|for|fun|if|nil|or|print|return|super|this|true|var|while"] Keyword(Substr, Span), #[pattern = "[a-zA-Z_][a-zA-Z0-9_]*"] Ident(Substr, Span), #[pattern = r"[(){}]"] Brace(Substr, Span), #[pattern = "[,.;]"] Punct(Substr, Span), #[pattern = "[=!<>]=?"] #[pattern = "[-+*/]"] Operator(Substr, Span), #[pattern = "[0-9]+"] NumLit(Substr, Span), #[pattern = r#""[^"]*""#] StrLit(Substr, Span), } ``` We get `macro_rules!` macros named `keyword`, `ident`, `brace`, `punct`, `operator`, `num_lit` and `str_lit`. These are mostly useful for the `Parse` implementations, e.g.: ```rs // examples/lox/src/decl.rs impl Parse for ClassDecl { type Stream = ParseStream; fn parse(input: &mut Self::Stream) -> Result<Self> { // ... let superclass = if input.check(operator![<]) { input.consume(operator![<])?; Some(input.consume_kind(TokenKind::Ident)?) } else { None }; // ... } } ``` Previously, those generated macros were declared like this: ```rs #( #[macro_export] macro_rules! #snake_case_variant_ident { // ... } )* ``` That has always [caused some headaches](rust-lang/rust#52234 (comment)) which I was never really sure how to deal with. In the examples here, and in my consuming projects, I had resorted to using `*` imports everywhere to work around the problem (in hindsight though, I think that likely just hides the lint by obscuring what we're doing instead of actually addressing the issue). This PR attempts an alternative solution. `#[macro_export]` is intended for libraries to expose macros to external consumers, but I don't foresee the macros generated by the `Token` derive being actually useful in that context. 
So instead, the generated macros are now declared like this: ```rs #( macro_rules! #snake_case_variant_ident { // ... } pub(crate) use #snake_case_variant_ident; )* ``` This is a breaking change for two reasons: 1. If you were depending on those `#[macro_export]` attributes to make the generated macros available to external consumers of your library, that is no longer going to work. Again, I don't imagine this was actually a real-world use case for anyone, but I've been wrong before! Let me know if this is a problem for you and I'll see what we can do about it. 2. If you had been importing those macros from the root of your crate, but your `Token` enum is _not_ declared in the crate root, you'll need to update your import paths to instead import them from the module where the enum is declared. E.g.: ```rs mod token { use gramatika::{DebugLisp, Span, Substr, Token as _}; #[derive(DebugLispToken, PartialEq, Token, Lexer)] pub enum Token { #[pattern = ".+"] Word(Substr, Span), } } mod foo { // use crate::word; // 👎 use crate::token::word; // 👍 } ``` On the bright side, tools like `rust-analyzer` should now find and automatically suggest the correct import paths for those macros, so the fastest way to migrate will probably be to just delete your existing `use` statement and invoke your editor's suggestion feature to re-import any unresolved symbols from their correct paths. # Other changes * Updated the Rust edition to 2021 and fixed any resulting errors * Fixed any new `clippy` lints as a result of upgrading my environment to v1.77.2 * Performed some low-hanging-fruit dependency upgrades. `regex-automata` and `syn` are still out of date for now &mdash; I attempted to update the former, but couldn't figure out how to migrate after ~10 minutes of poking around, and unfortunately I have other priorities that need to take precedence. 
Didn't even attempt `syn` because it's a major version bump, and that crate is basically the backbone of this whole project, so it'll have to wait for now.
1 parent: c51ef06 · commit: 6f3737e

File tree

20 files changed

+104
-80
lines changed

20 files changed

+104
-80
lines changed

crates/gramatika-macro/Cargo.toml

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
[package]
22
name = "gramatika-macro"
3-
version = "0.4.3"
4-
edition = "2018"
3+
version = "0.5.0"
4+
edition = "2021"
55
authors = ["Danny McGee <[email protected]>"]
66
license = "MIT OR Apache-2.0"
77
readme = "../../README.md"
@@ -18,8 +18,8 @@ path = "tests/tests.rs"
1818
[dependencies]
1919
anyhow = "1"
2020
arcstr = "1.1"
21-
convert_case = "0.4"
22-
itertools = "0.10"
21+
convert_case = "0.6"
22+
itertools = "0.12"
2323
once_cell = "1"
2424
proc-macro2 = "1.0"
2525
quote = "1.0"

crates/gramatika-macro/src/common.rs

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -29,12 +29,12 @@ impl VariantIdents {
2929
| "for" | "if" | "impl"
3030
| "in" | "let" | "loop"
3131
| "macro" | "match" | "mod"
32-
| "move" | "mut" | "pub"
33-
| "ref" | "return" | "static"
34-
| "struct" | "super" | "trait"
35-
| "type" | "union" | "unsafe"
36-
| "use" | "where" | "while"
37-
| "yield"
32+
| "move" | "mut" | "path"
33+
| "pub" | "ref" | "return"
34+
| "static" | "struct"
35+
| "super" | "trait" | "type"
36+
| "union" | "unsafe" | "use"
37+
| "where" | "while" | "yield"
3838
) {
3939
format_ident!("{}_", snake)
4040
} else {

crates/gramatika-macro/src/debug_lisp.rs

Lines changed: 21 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -43,15 +43,15 @@ fn derive_debug_struct(
4343
};
4444

4545
let stream = quote! {
46-
impl#generics ::gramatika::DebugLisp for #ident#generics {
46+
impl #generics ::gramatika::DebugLisp for #ident #generics {
4747
fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>, indent: usize) -> ::core::fmt::Result {
4848
::gramatika::DebugLispStruct::new(f, indent, stringify!(#ident))
4949
#(#field_method_call)*
5050
.finish()
5151
}
5252
}
5353

54-
impl#generics ::core::fmt::Debug for #ident#generics {
54+
impl #generics ::core::fmt::Debug for #ident #generics {
5555
fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
5656
::gramatika::DebugLisp::fmt(self, f, 0)
5757
}
@@ -63,22 +63,25 @@ fn derive_debug_struct(
6363

6464
fn derive_debug_enum(ident: &Ident, generics: &Generics, data: &DataEnum) -> TokenStream {
6565
let variant_name = data.variants.iter().map(|variant| &variant.ident);
66-
let variant_inner_type = data
67-
.variants
68-
.iter()
69-
.map(|variant| match &variant.fields {
70-
Fields::Unnamed(fields) => fields.unnamed.iter().map(|field| &field.ty),
71-
Fields::Named(_) => {
72-
panic!("`#[derive(DebugLisp)]` is not supported for enum variants with named fields")
73-
}
74-
Fields::Unit => {
75-
panic!("`#[derive(DebugLisp)]` is not supported for unit enum variants")
76-
}
77-
})
78-
.flatten();
66+
let variant_inner_type =
67+
data.variants
68+
.iter()
69+
.flat_map(|variant| match &variant.fields {
70+
Fields::Unnamed(fields) => fields.unnamed.iter().map(|field| &field.ty),
71+
Fields::Named(_) => {
72+
panic!(
73+
"`#[derive(DebugLisp)]` is not supported for enum variants with named fields"
74+
)
75+
}
76+
Fields::Unit => {
77+
panic!(
78+
"`#[derive(DebugLisp)]` is not supported for unit enum variants"
79+
)
80+
}
81+
});
7982

8083
let stream = quote! {
81-
impl#generics ::gramatika::DebugLisp for #ident#generics {
84+
impl #generics ::gramatika::DebugLisp for #ident #generics {
8285
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>, indent: usize) -> ::std::fmt::Result {
8386
write!(f, "({}::", stringify!(#ident))?;
8487

@@ -93,7 +96,7 @@ fn derive_debug_enum(ident: &Ident, generics: &Generics, data: &DataEnum) -> Tok
9396
}
9497
}
9598

96-
impl#generics ::core::fmt::Debug for #ident#generics {
99+
impl #generics ::core::fmt::Debug for #ident #generics {
97100
fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
98101
::gramatika::DebugLisp::fmt(self, f, 0)
99102
}
@@ -109,7 +112,7 @@ pub fn derive_token(input: TokenStream) -> TokenStream {
109112
let generics = &ast.generics;
110113

111114
let stream = quote! {
112-
impl#generics ::gramatika::DebugLisp for #ident#generics {
115+
impl #generics ::gramatika::DebugLisp for #ident #generics {
113116
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>, _: usize) -> ::std::fmt::Result {
114117
write!(
115118
f,

crates/gramatika-macro/src/lexer.rs

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -31,6 +31,7 @@ pub fn derive(input: TokenStream) -> TokenStream {
3131
>>
3232
}
3333

34+
#[allow(non_camel_case_types)]
3435
type __TOKEN_CTOR = fn(::gramatika::Substr, ::gramatika::Span) -> #enum_ident;
3536

3637
impl ::gramatika::Lexer for #lexer_ident {

crates/gramatika-macro/src/token.rs

Lines changed: 8 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -100,7 +100,7 @@ pub fn derive(input: pm::TokenStream) -> pm::TokenStream {
100100
}
101101
}
102102

103-
impl#generics #ident#generics {
103+
impl #generics #ident #generics {
104104
pub fn as_inner(&self) -> (::gramatika::Substr, ::gramatika::Span) {
105105
match self {#(
106106
Self::#variant_ident(lexeme, span) => (lexeme.clone(), *span)
@@ -115,7 +115,7 @@ pub fn derive(input: pm::TokenStream) -> pm::TokenStream {
115115
}
116116

117117
#(
118-
#[macro_export]
118+
#[allow(unused_macros)]
119119
macro_rules! #ctor_ident {
120120
($lexeme:literal) => {
121121
#ident::#ctor_ident(
@@ -130,9 +130,12 @@ pub fn derive(input: pm::TokenStream) -> pm::TokenStream {
130130
)
131131
};
132132
}
133+
134+
#[allow(unused)]
135+
pub(crate) use #ctor_ident;
133136
)*
134137

135-
impl#generics ::gramatika::Token for #ident#generics {
138+
impl #generics ::gramatika::Token for #ident #generics {
136139
type Kind = #kind_ident;
137140

138141
fn lexeme(&self) -> ::gramatika::Substr {
@@ -156,13 +159,13 @@ pub fn derive(input: pm::TokenStream) -> pm::TokenStream {
156159
}
157160
}
158161

159-
impl#generics ::gramatika::Spanned for #ident#generics {
162+
impl #generics ::gramatika::Spanned for #ident #generics {
160163
fn span(&self) -> ::gramatika::Span {
161164
self.as_inner().1
162165
}
163166
}
164167

165-
impl#generics Clone for #ident#generics {
168+
impl #generics Clone for #ident #generics {
166169
fn clone(&self) -> Self {
167170
match self {#(
168171
#ident::#variant_ident(lexeme, span) => #ident::#variant_ident(lexeme.clone(), *span)

crates/gramatika-macro/src/traversal/codegen.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -21,7 +21,7 @@ impl ToTokens for VisitorDef {
2121
});
2222

2323
if let Some(Walk) = &self.walker_ident {
24-
if let Some(signature) = self.signatures.get(0) {
24+
if let Some(signature) = self.signatures.first() {
2525
let swn = signature.param_type.ownership;
2626
let pwn = self.receiver.ownership;
2727
let walk = format_ident!("{}", Walk.to_string().to_case(Case::Snake));

crates/gramatika/Cargo.toml

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
[package]
22
name = "gramatika"
3-
version = "0.4.3"
4-
edition = "2018"
3+
version = "0.5.0"
4+
edition = "2021"
55
authors = ["Danny McGee <[email protected]>"]
66
license = "MIT OR Apache-2.0"
77
readme = "../../README.md"
@@ -17,7 +17,7 @@ once_cell = { version = "1.8", optional = true }
1717
regex-automata = { version = "0.1", optional = true }
1818

1919
[dependencies.gramatika-macro]
20-
version = "0.4.3"
20+
version = "0.5.0"
2121
path = "../gramatika-macro"
2222
optional = true
2323

crates/gramatika/src/span.rs

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -24,19 +24,19 @@ pub struct Position {
2424
pub character: usize,
2525
}
2626

27-
impl PartialOrd for Position {
28-
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
27+
impl Ord for Position {
28+
fn cmp(&self, other: &Self) -> Ordering {
2929
if self.line == other.line {
30-
Some(self.character.cmp(&other.character))
30+
self.character.cmp(&other.character)
3131
} else {
32-
Some(self.line.cmp(&other.line))
32+
self.line.cmp(&other.line)
3333
}
3434
}
3535
}
3636

37-
impl Ord for Position {
38-
fn cmp(&self, other: &Self) -> Ordering {
39-
self.partial_cmp(other).unwrap()
37+
impl PartialOrd for Position {
38+
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
39+
Some(self.cmp(other))
4040
}
4141
}
4242

examples/expand/src/main.rs

Lines changed: 2 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -168,14 +168,9 @@ pub struct ExprStmt;
168168

169169
pub enum Expr {}
170170

171-
#[derive(Clone, Copy, PartialEq, Eq)]
171+
#[derive(Clone, Copy, Default, PartialEq, Eq)]
172172
pub enum FlowControl {
173+
#[default]
173174
Continue,
174175
Break,
175176
}
176-
177-
impl Default for FlowControl {
178-
fn default() -> Self {
179-
FlowControl::Continue
180-
}
181-
}

examples/lox/src/decl.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,10 @@
11
use gramatika::{Parse, ParseStreamer, Result, Spanned, SpannedError, Token as _};
22

3-
use crate::*;
3+
use crate::{
4+
expr::{Expr, FunExpr},
5+
tokens::{brace, operator, punct, Token, TokenKind},
6+
ParseStream,
7+
};
48

59
#[derive(DebugLisp)]
610
pub enum Decl {

0 commit comments

Comments (0)