Commit 8eeb8114 authored by Geoffry Song

Update to new proc-macro, quote, and syn

parent 958dba75
@@ -20,6 +20,6 @@ crate-type = ["proc-macro"]
 lalr = "0.0.2"
 redfa = "0.0.2"
-syn = { version = "0.13.1", features = ["extra-traits", "full"] }
-proc-macro2 = { version = "0.3.6", features = ["nightly"] }
-quote = "0.5.1"
+syn = { version = "0.14.0", features = ["extra-traits", "full"] }
+proc-macro2 = { version = "0.4.3", features = ["nightly"] }
+quote = "0.6.0"
@@ -8,16 +8,14 @@ use syn;
 use syn::buffer::Cursor;
 use syn::synom::{PResult, Synom};
 use syn::{Expr, Ident, Lifetime, LitStr, Type, Visibility};
-use quote::Tokens;
-use proc_macro2::{Delimiter, Span};
-use proc_macro::TokenStream;
+use proc_macro2::{Delimiter, Span, TokenStream};

 fn dfa_fn<T>(
     dfa: &Dfa<char, T>,
     state_enum: Ident,
-    state_paths: &[Tokens],
+    state_paths: &[TokenStream],
     fn_name: Ident,
-) -> Tokens {
+) -> TokenStream {
     let mut arms = vec![];
     for (tr, state_name) in dfa.states.iter().zip(state_paths.iter().cloned()) {
         let mut subarms = vec![];
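The signature change above is the heart of the migration: quote 0.6 drops `quote::Tokens`, and `quote!` now builds a `proc_macro2::TokenStream`, so code-generation helpers pass `TokenStream` values around instead. A minimal sketch of that style (the `variant_match_fn` helper is made up for illustration, not this crate's `dfa_fn`):

```rust
// Sketch only: `variant_match_fn` is a hypothetical helper showing the
// `Tokens` -> `TokenStream` signature style, not code from this commit.
use proc_macro2::{Ident, TokenStream};
use quote::quote;

fn variant_match_fn(state_enum: &Ident, state_paths: &[TokenStream]) -> TokenStream {
    // Number the states 0..n and emit one match arm per state path.
    let indices = 0u32..state_paths.len() as u32;
    quote! {
        fn state_index(state: #state_enum) -> u32 {
            match state {
                #(#state_paths => #indices,)*
            }
        }
    }
}
```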
@@ -152,7 +150,7 @@ pub fn lexer(input: TokenStream) -> TokenStream {
         lifetime,
         return_type,
         rules,
-    } = syn::parse(input).unwrap_or_else(|e| {
+    } = syn::parse(input.into()).unwrap_or_else(|e| {
         panic!("parse error: {:?}", e);
     });
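With `lexer` now taking a `proc_macro2::TokenStream`, the input is converted back with `.into()` before being handed to `syn::parse`, which expects a `proc_macro::TokenStream`. A small sketch of the same shape, parsing a plain `syn::Expr` as a stand-in for the crate's own lexer definition type:

```rust
// Sketch: `parse_expr` is hypothetical; it parses a bare expression rather
// than this crate's lexer DSL, but shows the TokenStream conversion at the
// syn::parse boundary.
extern crate proc_macro;

use proc_macro2::TokenStream;

fn parse_expr(input: TokenStream) -> syn::Expr {
    // .into() turns the proc_macro2 stream back into a proc_macro one,
    // which is what syn::parse consumes.
    syn::parse(input.into()).unwrap_or_else(|e| {
        panic!("parse error: {:?}", e);
    })
}
```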
@@ -195,13 +193,13 @@ pub fn lexer(input: TokenStream) -> TokenStream {
     // Construct "human-readable" names for each of the DFA states.
     // This is purely to make the generated code nicer.
-    let mut names: Vec<Ident> = dfa_make_names(&dfa).into_iter().map(Ident::from).collect();
+    let mut names: Vec<Ident> = dfa_make_names(&dfa).into_iter().map(|n| Ident::new(&n, Span::call_site())).collect();
     // If we've identified an error state, give it the special name "Error".
     if let Some(ix) = error_state_ix {
-        names[ix] = Ident::from("Error");
+        names[ix] = Ident::new("Error", Span::call_site());
     }
     // The full paths to each of the state names (e.g. `State::Error`).
-    let state_paths: Vec<Tokens> = names.iter().map(|name| quote!(State::#name)).collect();
+    let state_paths: Vec<TokenStream> = names.iter().map(|name| quote!(State::#name)).collect();
     let initial_state = state_paths[0].clone();
     let error_state = error_state_ix.map(|ix| state_paths[ix].clone());
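The other recurring change is identifier construction: `Ident::from(&str)` is gone in the new syn/proc-macro2 versions, so every identifier is created with an explicit span, as the hunk above and the next one show. A tiny sketch of the pattern (the `state_name` helper is hypothetical):

```rust
// Sketch: `state_name` is a made-up helper showing the Ident::from ->
// Ident::new(name, span) replacement used throughout this commit.
use proc_macro2::{Ident, Span};

fn state_name(raw: &str) -> Ident {
    // Ident::new panics if `raw` is not a valid Rust identifier;
    // Span::call_site() is the span this crate uses for generated names.
    Ident::new(raw, Span::call_site())
}
```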
@@ -209,9 +207,9 @@ pub fn lexer(input: TokenStream) -> TokenStream {
     // Construct the actual DFA transition function, which, given a `State` and the next character, returns the next `State`.
     let transition_fn = dfa_fn(
         &dfa,
-        Ident::from("State"),
+        Ident::new("State", Span::call_site()),
         &state_paths,
-        Ident::from("transition"),
+        Ident::new("transition", Span::call_site()),
     );
     let accepting_fn = {
@@ -243,6 +241,7 @@ pub fn lexer(input: TokenStream) -> TokenStream {
             _ => unreachable!()
         })
     };
+    let lifetime = &lifetime; // jank
     quote!(
         #vis fn #name #(<#lifetime>)* (input: &#(#lifetime)* str) -> Option<(#return_type, &#(#lifetime)* str)> {
             #[derive(Copy, Clone)]
@@ -283,5 +282,5 @@ pub fn lexer(input: TokenStream) -> TokenStream {
                 None
             }
         }
-    ).into()
+    )
 }
@@ -18,10 +18,10 @@ use proc_macro::TokenStream;
 #[proc_macro]
 pub fn lexer(tok: TokenStream) -> TokenStream {
-    lexer::lexer(tok)
+    lexer::lexer(tok.into()).into()
 }

 #[proc_macro]
 pub fn parser(tok: TokenStream) -> TokenStream {
-    parser::parser(tok)
+    parser::parser(tok.into()).into()
 }
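Taken together, the `#[proc_macro]` entry points are now the only place that touches `proc_macro::TokenStream`: they convert to `proc_macro2::TokenStream` on the way in and back on the way out, so the `quote!` output inside `lexer` no longer carries its own `.into()`. A sketch of that boundary pattern (with a hypothetical `expand` standing in for `lexer::lexer` / `parser::parser`):

```rust
// Sketch of the crate-boundary pattern; `expand` is hypothetical.
extern crate proc_macro;

use proc_macro2::TokenStream;

// Internal code generation works entirely on proc_macro2 types.
fn expand(input: TokenStream) -> TokenStream {
    // The real implementation parses `input` and builds code with quote!;
    // echoing it back is enough to show the type flow.
    input
}

#[proc_macro]
pub fn lexer(tok: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // From impls exist in both directions, so .into() is all that's needed.
    expand(tok.into()).into()
}
```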
This diff is collapsed.