Commit b889183a authored by Geoffry Song's avatar Geoffry Song

Convert to using proc macro

parent fcddb337
[package]
name = "plex"
version = "0.0.3"
version = "0.1.0"
authors = ["Geoffry Song <goffrie@gmail.com>"]
description = "A syntax extension for writing lexers and parsers."
......@@ -14,10 +14,13 @@ keywords = ["tokenizer", "scanner", "lexer", "parser", "generator"]
[lib]
name = "plex"
plugin = true
crate-type = ["dylib"]
crate-type = ["proc-macro"]
[dependencies]
lalr = "0.0.2"
redfa = "0.0.2"
literalext = { version = "0.1.0", default-features = false, features = ["proc-macro"] }
syn = { version = "0.12", features = ["extra-traits", "full"] }
proc-macro2 = { version = "0.2.1", features = ["nightly"] }
quote = "0.4"
#![feature(plugin)]
#![plugin(plex)]
#![feature(use_extern_macros)]
extern crate plex;
use std::io::Read;
mod lexer {
use plex::lexer;
#[derive(Debug, Clone)]
pub enum Token {
Ident(String),
......@@ -41,7 +43,7 @@ mod lexer {
} else {
panic!("integer {} is out of range", text)
}, text)
},
}
r#"[a-zA-Z_][a-zA-Z0-9_]*"# => (Token::Ident(text.to_owned()), text),
......@@ -86,7 +88,8 @@ mod lexer {
type Item = (Token, Span);
fn next(&mut self) -> Option<(Token, Span)> {
loop {
let tok = if let Some(tok) = next_token(&mut self.remaining) {
let tok = if let Some((tok, new_remaining)) = next_token(self.remaining) {
self.remaining = new_remaining;
tok
} else {
return None
......@@ -132,9 +135,10 @@ mod ast {
}
mod parser {
use ::ast::*;
use ::lexer::Token::*;
use ::lexer::*;
use ast::*;
use lexer::Token::*;
use lexer::*;
use plex::parser;
parser! {
fn parse_(Token, Span);
......@@ -214,7 +218,7 @@ mod parser {
}
mod interp {
use ::ast::*;
use ast::*;
use std::collections::HashMap;
pub fn interp<'a>(p: &'a Program) {
......@@ -224,7 +228,7 @@ mod interp {
}
}
fn interp_expr<'a>(env: &mut HashMap<&'a str, i64>, expr: &'a Expr) -> i64 {
use ::ast::Expr_::*;
use ast::Expr_::*;
match expr.node {
Add(ref a, ref b) => interp_expr(env, a) + interp_expr(env, b),
Sub(ref a, ref b) => interp_expr(env, a) - interp_expr(env, b),
......@@ -249,7 +253,8 @@ mod interp {
/// Entry point: reads a whole program from stdin, tokenizes it, parses it,
/// and interprets the resulting AST.
fn main() {
    let mut s = String::new();
    // Slurp the entire program source from stdin before lexing.
    std::io::stdin().read_to_string(&mut s).unwrap();
    // Build the lexer over the input; `inspect` echoes each produced token
    // to stderr for debugging without consuming the iterator.
    // (The diff had left an extra, immediately-shadowed `Lexer::new` binding
    // here; the dead construction is removed.)
    let lexer = lexer::Lexer::new(&s)
        .inspect(|tok| eprintln!("tok: {:?}", tok));
    let program = parser::parse(lexer).unwrap();
    interp::interp(&program);
}
This diff is collapsed.
#![feature(plugin_registrar, quote, rustc_private, i128_type)]
#![recursion_limit = "128"]
#![feature(proc_macro)]
extern crate lalr;
//extern crate literalext;
extern crate proc_macro2;
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate redfa;
extern crate syntax;
extern crate rustc_plugin;
#[macro_use]
extern crate syn;
pub mod lexer;
pub mod parser;
mod lexer;
mod parser;
use syntax::ext::base;
use syntax::symbol::Symbol;
use rustc_plugin as plugin;
use proc_macro::TokenStream;
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut plugin::Registry) {
reg.register_syntax_extension(
Symbol::intern("parser"),
base::SyntaxExtension::NormalTT {
expander: Box::new(parser::expand_parser),
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
});
reg.register_syntax_extension(
Symbol::intern("lexer"),
base::SyntaxExtension::NormalTT {
expander: Box::new(lexer::expand_lexer),
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
});
/// Function-like procedural macro backing `lexer! { ... }` invocations.
///
/// Delegates directly to the implementation in the crate's `lexer` module.
/// The `lexer::` path resolves to the module rather than to this function,
/// since functions and modules live in separate namespaces in Rust.
#[proc_macro]
pub fn lexer(tok: TokenStream) -> TokenStream {
    lexer::lexer(tok)
}
/// Function-like procedural macro backing `parser! { ... }` invocations.
///
/// Delegates directly to the implementation in the crate's `parser` module
/// (the `parser::` path names the module, not this function).
#[proc_macro]
pub fn parser(tok: TokenStream) -> TokenStream {
    parser::parser(tok)
}
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment