From ec29011346ac91f2acdc0455ad6dc19a6f9614ca Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Sat, 21 Jan 2017 08:32:11 +0000 Subject: [PATCH 1/4] Remove duplicate `TokenStream` quoter tests (modulo imports). --- .../auxiliary/cond_noprelude_plugin.rs | 65 ------------------- .../auxiliary/cond_prelude_plugin.rs | 60 ----------------- .../macro-quote-noprelude.rs | 54 --------------- .../run-pass-fulldeps/macro-quote-prelude.rs | 54 --------------- 4 files changed, 233 deletions(-) delete mode 100644 src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs delete mode 100644 src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs delete mode 100644 src/test/run-pass-fulldeps/macro-quote-noprelude.rs delete mode 100644 src/test/run-pass-fulldeps/macro-quote-prelude.rs diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs deleted file mode 100644 index 664bb9da89a57..0000000000000 --- a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#![allow(unused_parens)] -#![feature(plugin)] -#![feature(plugin_registrar)] -#![feature(rustc_private)] -#![plugin(proc_macro_plugin)] - -extern crate rustc_plugin; -extern crate proc_macro_tokens; -extern crate syntax; - -use proc_macro_tokens::build::ident_eq; - -use syntax::ast::Ident; -use syntax::ext::base::{ExtCtxt, MacResult}; -use syntax::ext::proc_macro_shim::build_block_emitter; -use syntax::tokenstream::{TokenTree, TokenStream}; -use syntax::codemap::Span; - -use rustc_plugin::Registry; - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_macro("cond", cond); -} - -fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned())); - build_block_emitter(cx, sp, output) -} - -fn cond_rec(input: TokenStream) -> TokenStream { - if input.is_empty() { - return qquote!(); - } - - let next = input.slice(0..1); - let rest = input.slice_from(1..); - - let clause : TokenStream = match next.maybe_delimited() { - Some(ts) => ts, - _ => panic!("Invalid input"), - }; - - // clause is ([test]) [rhs] - if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } - - let test: TokenStream = clause.slice(0..1); - let rhs: TokenStream = clause.slice_from(1..); - - if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() { - qquote!({unquote(rhs)}) - } else { - qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) - } -} diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs deleted file mode 100644 index 6a2d159a4bdd9..0000000000000 --- a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. 
This file may not be copied, modified, or distributed -// except according to those terms. - -#![allow(unused_parens)] -#![feature(plugin)] -#![feature(plugin_registrar)] -#![feature(rustc_private)] -#![plugin(proc_macro_plugin)] - -extern crate rustc_plugin; -extern crate proc_macro_tokens; -extern crate syntax; - -use syntax::ext::proc_macro_shim::prelude::*; -use proc_macro_tokens::prelude::*; - -use rustc_plugin::Registry; - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_macro("cond", cond); -} - -fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned())); - build_block_emitter(cx, sp, output) -} - -fn cond_rec(input: TokenStream) -> TokenStream { - if input.is_empty() { - return qquote!(); - } - - let next = input.slice(0..1); - let rest = input.slice_from(1..); - - let clause : TokenStream = match next.maybe_delimited() { - Some(ts) => ts, - _ => panic!("Invalid input"), - }; - - // clause is ([test]) [rhs] - if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } - - let test: TokenStream = clause.slice(0..1); - let rhs: TokenStream = clause.slice_from(1..); - - if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() { - qquote!({unquote(rhs)}) - } else { - qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) - } -} diff --git a/src/test/run-pass-fulldeps/macro-quote-noprelude.rs b/src/test/run-pass-fulldeps/macro-quote-noprelude.rs deleted file mode 100644 index 4184ca7be372f..0000000000000 --- a/src/test/run-pass-fulldeps/macro-quote-noprelude.rs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// aux-build:cond_noprelude_plugin.rs -// ignore-stage1 - -#![feature(plugin)] -#![feature(rustc_private)] -#![plugin(cond_noprelude_plugin)] - -fn fact(n : i64) -> i64 { - if n == 0 { - 1 - } else { - n * fact(n - 1) - } -} - -fn fact_cond(n : i64) -> i64 { - cond!( - ((n == 0) 1) - (else (n * fact_cond(n-1))) - ) -} - -fn fib(n : i64) -> i64 { - if n == 0 || n == 1 { - 1 - } else { - fib(n-1) + fib(n-2) - } -} - -fn fib_cond(n : i64) -> i64 { - cond!( - ((n == 0) 1) - ((n == 1) 1) - (else (fib_cond(n-1) + fib_cond(n-2))) - ) -} - -fn main() { - assert_eq!(fact(3), fact_cond(3)); - assert_eq!(fact(5), fact_cond(5)); - assert_eq!(fib(5), fib_cond(5)); - assert_eq!(fib(8), fib_cond(8)); -} diff --git a/src/test/run-pass-fulldeps/macro-quote-prelude.rs b/src/test/run-pass-fulldeps/macro-quote-prelude.rs deleted file mode 100644 index 5b703a5bc2668..0000000000000 --- a/src/test/run-pass-fulldeps/macro-quote-prelude.rs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -// aux-build:cond_prelude_plugin.rs -// ignore-stage1 - -#![feature(plugin)] -#![feature(rustc_private)] -#![plugin(cond_prelude_plugin)] - -fn fact(n : i64) -> i64 { - if n == 0 { - 1 - } else { - n * fact(n - 1) - } -} - -fn fact_cond(n : i64) -> i64 { - cond!( - ((n == 0) 1) - (else (n * fact_cond(n-1))) - ) -} - -fn fib(n : i64) -> i64 { - if n == 0 || n == 1 { - 1 - } else { - fib(n-1) + fib(n-2) - } -} - -fn fib_cond(n : i64) -> i64 { - cond!( - ((n == 0) 1) - ((n == 1) 1) - (else (fib_cond(n-1) + fib_cond(n-2))) - ) -} - -fn main() { - assert_eq!(fact(3), fact_cond(3)); - assert_eq!(fact(5), fact_cond(5)); - assert_eq!(fib(5), fib_cond(5)); - assert_eq!(fib(8), fib_cond(8)); -} From 2dc60b1180b2974b8966c33100e9541845e1d2e8 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 18 Jan 2017 03:27:09 +0000 Subject: [PATCH 2/4] Refactor `TokenStream`. --- mk/crates.mk | 2 +- src/Cargo.lock | 3 +- src/libproc_macro/lib.rs | 11 +- src/libproc_macro_plugin/Cargo.toml | 3 +- src/libproc_macro_plugin/lib.rs | 16 +- src/libproc_macro_plugin/qquote.rs | 558 ++++-------- src/libproc_macro_tokens/build.rs | 31 +- src/libproc_macro_tokens/parse.rs | 9 +- src/libsyntax/codemap.rs | 17 - src/libsyntax/ext/expand.rs | 10 +- src/libsyntax/ext/proc_macro_shim.rs | 72 -- src/libsyntax/lib.rs | 4 +- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/tokenstream.rs | 811 +++--------------- src/libsyntax/util/rc_slice.rs | 50 ++ .../auxiliary/cond_plugin.rs | 69 +- .../auxiliary/hello_macro.rs | 15 +- .../auxiliary/proc_macro_def.rs | 17 +- src/test/run-pass-fulldeps/macro-quote-1.rs | 12 +- 19 files changed, 424 insertions(+), 1288 deletions(-) delete mode 100644 src/libsyntax/ext/proc_macro_shim.rs create mode 100644 src/libsyntax/util/rc_slice.rs diff --git a/mk/crates.mk b/mk/crates.mk index 9624a51dda01b..d7218b6bc8970 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -103,7 +103,7 @@ DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro DEPS_proc_macro := syntax syntax_pos rustc_plugin log DEPS_syntax_pos := serialize DEPS_proc_macro_tokens := syntax syntax_pos log -DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin log proc_macro_tokens +DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin DEPS_rustc_const_math := std syntax log serialize rustc_i128 DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \ diff --git a/src/Cargo.lock b/src/Cargo.lock index 2c10272916fdc..86fdece237b9b 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -208,10 +208,9 @@ dependencies = [ name = "proc_macro_plugin" version = "0.0.0" dependencies = [ - "log 0.0.0", - "proc_macro_tokens 0.0.0", "rustc_plugin 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index ab5962779feeb..f962c888f42cc 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -82,14 +82,15 @@ pub mod __internal { use syntax::ast; use syntax::ptr::P; use syntax::parse::{self, token, ParseSess}; - use syntax::tokenstream::TokenStream as TokenStream_; + use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_}; use super::{TokenStream, LexError}; pub fn new_token_stream(item: P) -> TokenStream { - TokenStream { inner: TokenStream_::from_tokens(vec![ - token::Interpolated(Rc::new(token::NtItem(item))) - ])} + TokenStream { + inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item)))) + .into() + } } pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream { @@ -175,7 +176,7 @@ impl 
FromStr for TokenStream { let tts = try!(parse::parse_tts_from_source_str(name, src, sess) .map_err(parse_to_lex_err)); - Ok(__internal::token_stream_wrap(TokenStream_::from_tts(tts))) + Ok(__internal::token_stream_wrap(tts.into_iter().collect())) }) } } diff --git a/src/libproc_macro_plugin/Cargo.toml b/src/libproc_macro_plugin/Cargo.toml index 33fd814cd5f8b..146a66cdf01cb 100644 --- a/src/libproc_macro_plugin/Cargo.toml +++ b/src/libproc_macro_plugin/Cargo.toml @@ -8,7 +8,6 @@ path = "lib.rs" crate-type = ["dylib"] [dependencies] -log = { path = "../liblog" } rustc_plugin = { path = "../librustc_plugin" } syntax = { path = "../libsyntax" } -proc_macro_tokens = { path = "../libproc_macro_tokens" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro_plugin/lib.rs b/src/libproc_macro_plugin/lib.rs index 9d8bb7fa0f57c..e904290957619 100644 --- a/src/libproc_macro_plugin/lib.rs +++ b/src/libproc_macro_plugin/lib.rs @@ -15,11 +15,8 @@ //! ## Usage //! This crate provides the `qquote!` macro for syntax creation. //! -//! The `qquote!` macro imports `syntax::ext::proc_macro_shim::prelude::*`, so you -//! will need to `extern crate syntax` for usage. (This is a temporary solution until more -//! of the external API in libproc_macro_tokens is stabilized to support the token construction -//! operations that the qausiquoter relies on.) The shim file also provides additional -//! operations, such as `build_block_emitter` (as used in the `cond` example below). +//! The `qquote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;` +//! at the crate root. This is a temporary solution until we have better hygiene. //! //! ## Quasiquotation //! @@ -88,19 +85,20 @@ extern crate rustc_plugin; extern crate syntax; -extern crate proc_macro_tokens; -#[macro_use] extern crate log; +extern crate syntax_pos; mod qquote; - use qquote::qquote; use rustc_plugin::Registry; +use syntax::ext::base::SyntaxExtension; +use syntax::symbol::Symbol; // ____________________________________________________________________________________________ // Main macro definition #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - reg.register_macro("qquote", qquote); + reg.register_syntax_extension(Symbol::intern("qquote"), + SyntaxExtension::ProcMacro(Box::new(qquote))); } diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/qquote.rs index 03873b20c18cb..69c6eba6c0f89 100644 --- a/src/libproc_macro_plugin/qquote.rs +++ b/src/libproc_macro_plugin/qquote.rs @@ -9,463 +9,223 @@ // except according to those terms. //! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `quote!`. -//! -//! ## Ouput -//! The quasiquoter produces output of the form: -//! let tmp0 = ...; -//! let tmp1 = ...; -//! ... -//! concat(from_tokens(...), concat(...)) -//! -//! To the more explicit, the quasiquoter produces a series of bindings that each -//! construct TokenStreams via constructing Tokens and using `from_tokens`, ultimately -//! invoking `concat` on these bindings (and inlined expressions) to construct a -//! TokenStream that resembles the output syntax. -//! - -use proc_macro_tokens::build::*; -use proc_macro_tokens::parse::lex; - -use qquote::int_build::*; +//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. 
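For orientation, a minimal sketch of what the quoting machinery in this file produces (the `sketch` function is hypothetical, not part of the patch, and assumes this module's imports): each token is wrapped as a one-tree stream with a dummy span, and the pieces are collected into a single stream, mirroring the `quote!`/`quote_tree!` macros defined below.

    // Hypothetical sketch (not in the patch): roughly what `quote!(a . b)`
    // expands to -- each token becomes a `TokenTree::Token` with a dummy
    // span, and the one-tree streams are collected into a single stream.
    fn sketch() -> TokenStream {
        [
            TokenStream::from(TokenTree::Token(DUMMY_SP, Token::Ident(Ident::from_str("a")))),
            TokenStream::from(TokenTree::Token(DUMMY_SP, Token::Dot)),
            TokenStream::from(TokenTree::Token(DUMMY_SP, Token::Ident(Ident::from_str("b")))),
        ].iter().cloned().collect::<TokenStream>()
    }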
use syntax::ast::Ident; -use syntax::codemap::Span; -use syntax::ext::base::*; -use syntax::ext::base; -use syntax::ext::proc_macro_shim::build_block_emitter; -use syntax::parse::token::{self, Token}; -use syntax::print::pprust; +use syntax::parse::token::{self, Token, Lit}; use syntax::symbol::Symbol; -use syntax::tokenstream::{TokenTree, TokenStream}; +use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream}; +use syntax_pos::DUMMY_SP; -// ____________________________________________________________________________________________ -// Main definition -/// The user should use the macro, not this procedure. -pub fn qquote<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) - -> Box { +use std::rc::Rc; - debug!("\nTTs in: {:?}\n", pprust::tts_to_string(&tts[..])); - let output = qquoter(cx, TokenStream::from_tts(tts.clone().to_owned())); - debug!("\nQQ out: {}\n", pprust::tts_to_string(&output.to_tts()[..])); - let imports = concat(lex("use syntax::ext::proc_macro_shim::prelude::*;"), - lex("use proc_macro_tokens::prelude::*;")); - build_block_emitter(cx, sp, build_brace_delimited(concat(imports, output))) +pub fn qquote<'cx>(stream: TokenStream) -> TokenStream { + stream.quote() } -// ____________________________________________________________________________________________ -// Datatype Definitions - -#[derive(Debug)] -struct QDelimited { - delim: token::DelimToken, - open_span: Span, - tts: Vec, - close_span: Span, +trait Quote { + fn quote(&self) -> TokenStream; } -#[derive(Debug)] -enum Qtt { - TT(TokenTree), - Delimited(QDelimited), - QIdent(TokenTree), +macro_rules! quote_tok { + (,) => { Token::Comma }; + (.) => { Token::Dot }; + (:) => { Token::Colon }; + (::) => { Token::ModSep }; + (!) => { Token::Not }; + (<) => { Token::Lt }; + (>) => { Token::Gt }; + (_) => { Token::Underscore }; + ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) }; } -type Bindings = Vec<(Ident, TokenStream)>; - -// ____________________________________________________________________________________________ -// Quasiquoter Algorithm -// This algorithm works as follows: -// Input: TokenStream -// 1. Walk the TokenStream, gathering up the unquoted expressions and marking them separately. -// 2. Hoist any unquoted term into its own let-binding via a gensym'd identifier -// 3. Convert the body from a `complex expression` into a simplified one via `convert_complex_tts -// 4. Stitch everything together with `concat`. -fn qquoter<'cx>(cx: &'cx mut ExtCtxt, ts: TokenStream) -> TokenStream { - if ts.is_empty() { - return lex("TokenStream::mk_empty()"); - } - let qq_res = qquote_iter(cx, 0, ts); - let mut bindings = qq_res.0; - let body = qq_res.1; - let mut cct_res = convert_complex_tts(cx, body); - - bindings.append(&mut cct_res.0); - - if bindings.is_empty() { - cct_res.1 - } else { - debug!("BINDINGS"); - for b in bindings.clone() { - debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..])); - } - TokenStream::concat(unravel(bindings), cct_res.1) - } +macro_rules! 
quote_tree { + ((unquote $($t:tt)*)) => { $($t)* }; + ((quote $($t:tt)*)) => { ($($t)*).quote() }; + (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; + ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; + ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; + ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) }; } -fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindings, Vec) { - let mut depth = depth; - let mut bindings: Bindings = Vec::new(); - let mut output: Vec = Vec::new(); - - let mut iter = ts.iter(); - - loop { - let next = iter.next(); - if next.is_none() { - break; - } - let next = next.unwrap().clone(); - match next { - TokenTree::Token(_, Token::Ident(id)) if is_unquote(id) => { - if depth == 0 { - let exp = iter.next(); - if exp.is_none() { - break; - } // produce an error or something first - let exp = vec![exp.unwrap().to_owned()]; - debug!("RHS: {:?}", exp.clone()); - let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp")); - debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone())); - debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec()); - bindings.push((new_id, TokenStream::from_tts(exp))); - debug!("BINDINGS"); - for b in bindings.clone() { - debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..])); - } - output.push(Qtt::QIdent(as_tt(Token::Ident(new_id.clone())))); - } else { - depth = depth - 1; - output.push(Qtt::TT(next.clone())); - } - } - TokenTree::Token(_, Token::Ident(id)) if is_qquote(id) => { - depth = depth + 1; - } - TokenTree::Delimited(_, ref dl) => { - let br = qquote_iter(cx, depth, TokenStream::from_tts(dl.tts.clone().to_owned())); - let mut nested_bindings = br.0; - let nested = br.1; - bindings.append(&mut nested_bindings); - - let new_dl = QDelimited { - delim: dl.delim, - open_span: dl.open_span, - tts: nested, - close_span: dl.close_span, - }; - - output.push(Qtt::Delimited(new_dl)); - } - t => { - output.push(Qtt::TT(t)); - } - } - } +fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { + TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited { + delim: delim, + tts: stream.trees().cloned().collect(), + open_span: DUMMY_SP, + close_span: DUMMY_SP, + })).into() +} - (bindings, output) +macro_rules! quote { + () => { TokenStream::empty() }; + ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; } -// ____________________________________________________________________________________________ -// Turns QQTs into a TokenStream and some Bindings. -/// Construct a chain of concatenations. -fn unravel_concats(tss: Vec) -> TokenStream { - let mut pushes: Vec = - tss.into_iter().filter(|&ref ts| !ts.is_empty()).collect(); - let mut output = match pushes.pop() { - Some(ts) => ts, - None => { - return TokenStream::mk_empty(); +impl Quote for Option { + fn quote(&self) -> TokenStream { + match *self { + Some(ref t) => quote!(::std::option::Option::Some((quote t))), + None => quote!(::std::option::Option::None), } - }; - - while let Some(ts) = pushes.pop() { - output = build_fn_call(Ident::from_str("concat"), - concat(concat(ts, - from_tokens(vec![Token::Comma])), - output)); } - output } -/// This converts the vector of Qtts into a set of Bindings for construction and the main -/// body as a TokenStream. 
-fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec) -> (Bindings, TokenStream) { - let mut pushes: Vec = Vec::new(); - let mut bindings: Bindings = Vec::new(); +impl Quote for TokenStream { + fn quote(&self) -> TokenStream { + if self.is_empty() { + return quote!(::syntax::tokenstream::TokenStream::empty()); + } - let mut iter = tts.into_iter(); + struct Quote<'a>(tokenstream::Cursor<'a>); - loop { - let next = iter.next(); - if next.is_none() { - break; - } - let next = next.unwrap(); - match next { - Qtt::TT(TokenTree::Token(_, t)) => { - let token_out = emit_token(t); - pushes.push(token_out); - } - // FIXME handle sequence repetition tokens - Qtt::Delimited(qdl) => { - debug!(" Delimited: {:?} ", qdl.tts); - let fresh_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp")); - let (mut nested_bindings, nested_toks) = convert_complex_tts(cx, qdl.tts); + impl<'a> Iterator for Quote<'a> { + type Item = TokenStream; - let body = if nested_toks.is_empty() { - assert!(nested_bindings.is_empty()); - build_mod_call(vec![Ident::from_str("TokenStream"), - Ident::from_str("mk_empty")], - TokenStream::mk_empty()) - } else { - bindings.append(&mut nested_bindings); - bindings.push((fresh_id, nested_toks)); - TokenStream::from_tokens(vec![Token::Ident(fresh_id)]) + fn next(&mut self) -> Option { + let is_unquote = match self.0.peek() { + Some(&TokenTree::Token(_, Token::Ident(ident))) if ident.name == "unquote" => { + self.0.next(); + true + } + _ => false, }; - let delimitiers = build_delim_tok(qdl.delim); - - pushes.push(build_mod_call(vec![Ident::from_str("proc_macro_tokens"), - Ident::from_str("build"), - Ident::from_str("build_delimited")], - flatten(vec![body, - lex(","), - delimitiers].into_iter()))); + self.0.next().cloned().map(|tree| { + let quoted_tree = if is_unquote { tree.into() } else { tree.quote() }; + quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),) + }) } - Qtt::QIdent(t) => { - pushes.push(TokenStream::from_tts(vec![t])); - pushes.push(TokenStream::mk_empty()); - } - _ => panic!("Unhandled case!"), } + let quoted = Quote(self.trees()).collect::(); + quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>()) } - - (bindings, unravel_concats(pushes)) } -// ____________________________________________________________________________________________ -// Utilities - -/// Unravels Bindings into a TokenStream of `let` declarations. -fn unravel(bindings: Bindings) -> TokenStream { - flatten(bindings.into_iter().map(|(a, b)| build_let(a, b))) +impl Quote for Vec { + fn quote(&self) -> TokenStream { + let stream = self.iter().cloned().collect::(); + quote!((quote stream).trees().cloned().collect::<::std::vec::Vec<_> >()) + } } -/// Checks if the Ident is `unquote`. -fn is_unquote(id: Ident) -> bool { - let qq = Ident::from_str("unquote"); - id.name == qq.name // We disregard context; unquote is _reserved_ +impl Quote for TokenTree { + fn quote(&self) -> TokenStream { + match *self { + TokenTree::Token(_, ref token) => quote! { + ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP, + (quote token)) + }, + TokenTree::Delimited(_, ref delimited) => quote! { + ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP, + (quote delimited)) + }, + _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"), + } + } } -/// Checks if the Ident is `quote`. 
-fn is_qquote(id: Ident) -> bool { - let qq = Ident::from_str("qquote"); - id.name == qq.name // We disregard context; qquote is _reserved_ +impl Quote for Rc { + fn quote(&self) -> TokenStream { + quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited { + open_span: ::syntax::ext::quote::rt::DUMMY_SP, + close_span: ::syntax::ext::quote::rt::DUMMY_SP, + delim: (quote self.delim), + tts: (quote self.tts), + })) + } } -mod int_build { - use proc_macro_tokens::build::*; - use proc_macro_tokens::parse::*; - - use syntax::ast::{self, Ident}; - use syntax::codemap::{DUMMY_SP}; - use syntax::parse::token::{self, Token, Lit}; - use syntax::symbol::keywords; - use syntax::tokenstream::{TokenTree, TokenStream}; - - // ____________________________________________________________________________________________ - // Emitters - - pub fn emit_token(t: Token) -> TokenStream { - concat(lex("TokenStream::from_tokens"), - build_paren_delimited(build_vec(build_token_tt(t)))) +impl<'a> Quote for &'a str { + fn quote(&self) -> TokenStream { + TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) + .into() } +} - pub fn emit_lit(l: Lit, n: Option) -> TokenStream { - let suf = match n { - Some(n) => format!("Some(ast::Name({}))", n.as_u32()), - None => "None".to_string(), - }; - - let lit = match l { - Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()), - Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()), - Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()), - Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()), - Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()), - Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()), - _ => panic!("Unsupported literal"), - }; - - let res = format!("Token::Literal({},{})", lit, suf); - debug!("{}", res); - lex(&res) +impl Quote for Ident { + fn quote(&self) -> TokenStream { + // FIXME(jseyfried) quote hygiene + quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str()))) } +} - // ____________________________________________________________________________________________ - // Token Builders - - pub fn build_binop_tok(bot: token::BinOpToken) -> TokenStream { - match bot { - token::BinOpToken::Plus => lex("Token::BinOp(BinOpToken::Plus)"), - token::BinOpToken::Minus => lex("Token::BinOp(BinOpToken::Minus)"), - token::BinOpToken::Star => lex("Token::BinOp(BinOpToken::Star)"), - token::BinOpToken::Slash => lex("Token::BinOp(BinOpToken::Slash)"), - token::BinOpToken::Percent => lex("Token::BinOp(BinOpToken::Percent)"), - token::BinOpToken::Caret => lex("Token::BinOp(BinOpToken::Caret)"), - token::BinOpToken::And => lex("Token::BinOp(BinOpToken::And)"), - token::BinOpToken::Or => lex("Token::BinOp(BinOpToken::Or)"), - token::BinOpToken::Shl => lex("Token::BinOp(BinOpToken::Shl)"), - token::BinOpToken::Shr => lex("Token::BinOp(BinOpToken::Shr)"), - } +impl Quote for Symbol { + fn quote(&self) -> TokenStream { + quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str()))) } +} - pub fn build_binopeq_tok(bot: token::BinOpToken) -> TokenStream { - match bot { - token::BinOpToken::Plus => lex("Token::BinOpEq(BinOpToken::Plus)"), - token::BinOpToken::Minus => lex("Token::BinOpEq(BinOpToken::Minus)"), - token::BinOpToken::Star => lex("Token::BinOpEq(BinOpToken::Star)"), - token::BinOpToken::Slash => lex("Token::BinOpEq(BinOpToken::Slash)"), - token::BinOpToken::Percent => 
lex("Token::BinOpEq(BinOpToken::Percent)"), - token::BinOpToken::Caret => lex("Token::BinOpEq(BinOpToken::Caret)"), - token::BinOpToken::And => lex("Token::BinOpEq(BinOpToken::And)"), - token::BinOpToken::Or => lex("Token::BinOpEq(BinOpToken::Or)"), - token::BinOpToken::Shl => lex("Token::BinOpEq(BinOpToken::Shl)"), - token::BinOpToken::Shr => lex("Token::BinOpEq(BinOpToken::Shr)"), +impl Quote for Token { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($t:tt)*) => { + match *self { + $( Token::$i => quote!(::syntax::parse::token::$i), )* + $( $t )* + } + } } - } - pub fn build_delim_tok(dt: token::DelimToken) -> TokenStream { - match dt { - token::DelimToken::Paren => lex("DelimToken::Paren"), - token::DelimToken::Bracket => lex("DelimToken::Bracket"), - token::DelimToken::Brace => lex("DelimToken::Brace"), - token::DelimToken::NoDelim => lex("DelimToken::NoDelim"), + gen_match! { + Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, + Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, + Underscore; + + Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))), + Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))), + Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))), + Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))), + Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))), + Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))), + Token::Literal(lit, sfx) => quote! { + ::syntax::parse::token::Literal((quote lit), (quote sfx)) + }, + _ => panic!("Unhandled case!"), } } +} - pub fn build_token_tt(t: Token) -> TokenStream { - match t { - Token::Eq => lex("Token::Eq"), - Token::Lt => lex("Token::Lt"), - Token::Le => lex("Token::Le"), - Token::EqEq => lex("Token::EqEq"), - Token::Ne => lex("Token::Ne"), - Token::Ge => lex("Token::Ge"), - Token::Gt => lex("Token::Gt"), - Token::AndAnd => lex("Token::AndAnd"), - Token::OrOr => lex("Token::OrOr"), - Token::Not => lex("Token::Not"), - Token::Tilde => lex("Token::Tilde"), - Token::BinOp(tok) => build_binop_tok(tok), - Token::BinOpEq(tok) => build_binopeq_tok(tok), - Token::At => lex("Token::At"), - Token::Dot => lex("Token::Dot"), - Token::DotDot => lex("Token::DotDot"), - Token::DotDotDot => lex("Token::DotDotDot"), - Token::Comma => lex("Token::Comma"), - Token::Semi => lex("Token::Semi"), - Token::Colon => lex("Token::Colon"), - Token::ModSep => lex("Token::ModSep"), - Token::RArrow => lex("Token::RArrow"), - Token::LArrow => lex("Token::LArrow"), - Token::FatArrow => lex("Token::FatArrow"), - Token::Pound => lex("Token::Pound"), - Token::Dollar => lex("Token::Dollar"), - Token::Question => lex("Token::Question"), - Token::OpenDelim(dt) => { - match dt { - token::DelimToken::Paren => lex("Token::OpenDelim(DelimToken::Paren)"), - token::DelimToken::Bracket => lex("Token::OpenDelim(DelimToken::Bracket)"), - token::DelimToken::Brace => lex("Token::OpenDelim(DelimToken::Brace)"), - token::DelimToken::NoDelim => lex("DelimToken::NoDelim"), +impl Quote for token::BinOpToken { + fn quote(&self) -> TokenStream { + macro_rules! 
gen_match { + ($($i:ident),*) => { + match *self { + $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )* } } - Token::CloseDelim(dt) => { - match dt { - token::DelimToken::Paren => lex("Token::CloseDelim(DelimToken::Paren)"), - token::DelimToken::Bracket => lex("Token::CloseDelim(DelimToken::Bracket)"), - token::DelimToken::Brace => lex("Token::CloseDelim(DelimToken::Brace)"), - token::DelimToken::NoDelim => lex("DelimToken::NoDelim"), - } - } - Token::Underscore => lex("_"), - Token::Literal(lit, sfx) => emit_lit(lit, sfx), - // fix ident expansion information... somehow - Token::Ident(ident) => - lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)), - Token::Lifetime(ident) => - lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)), - _ => panic!("Unhandled case!"), } - } - - // ____________________________________________________________________________________________ - // Conversion operators - pub fn as_tt(t: Token) -> TokenTree { - // FIXME do something nicer with the spans - TokenTree::Token(DUMMY_SP, t) - } - - // ____________________________________________________________________________________________ - // Build Procedures - - /// Takes `input` and returns `vec![input]`. - pub fn build_vec(ts: TokenStream) -> TokenStream { - build_mac_call(Ident::from_str("vec"), ts) - // tts.clone().to_owned() - } - - /// Takes `ident` and `rhs` and produces `let ident = rhs;`. - pub fn build_let(id: Ident, tts: TokenStream) -> TokenStream { - concat(from_tokens(vec![keyword_to_token_ident(keywords::Let), - Token::Ident(id), - Token::Eq]), - concat(tts, from_tokens(vec![Token::Semi]))) - } - - /// Takes `ident ...`, and `args ...` and produces `ident::...(args ...)`. - pub fn build_mod_call(ids: Vec, args: TokenStream) -> TokenStream { - let call = from_tokens(intersperse(ids.into_iter().map(|id| Token::Ident(id)).collect(), - Token::ModSep)); - concat(call, build_paren_delimited(args)) - } - - /// Takes `ident` and `args ...` and produces `ident(args ...)`. - pub fn build_fn_call(name: Ident, args: TokenStream) -> TokenStream { - concat(from_tokens(vec![Token::Ident(name)]), build_paren_delimited(args)) - } - - /// Takes `ident` and `args ...` and produces `ident!(args ...)`. - pub fn build_mac_call(name: Ident, args: TokenStream) -> TokenStream { - concat(from_tokens(vec![Token::Ident(name), Token::Not]), - build_paren_delimited(args)) + gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) } +} - // ____________________________________________________________________________________________ - // Utilities +impl Quote for Lit { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*) => { + match *self { + $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )* + _ => panic!("Unsupported literal"), + } + } + } - /// A wrapper around `TokenStream::from_tokens` to avoid extra namespace specification and - /// provide it as a generic operator. - pub fn from_tokens(tokens: Vec) -> TokenStream { - TokenStream::from_tokens(tokens) + gen_match!(Byte, Char, Float, Str_, Integer, ByteStr) } +} - pub fn intersperse(vs: Vec, t: T) -> Vec - where T: Clone - { - if vs.len() < 2 { - return vs; +impl Quote for token::DelimToken { + fn quote(&self) -> TokenStream { + macro_rules! 
gen_match { + ($($i:ident),*) => { + match *self { + $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })* + } + } } - let mut output = vec![vs.get(0).unwrap().to_owned()]; - for v in vs.into_iter().skip(1) { - output.push(t.clone()); - output.push(v); - } - output + gen_match!(Paren, Bracket, Brace, NoDelim) } } diff --git a/src/libproc_macro_tokens/build.rs b/src/libproc_macro_tokens/build.rs index 18aa60f9df16e..ed97d57cccd91 100644 --- a/src/libproc_macro_tokens/build.rs +++ b/src/libproc_macro_tokens/build.rs @@ -18,20 +18,7 @@ use std::rc::Rc; /// A wrapper around `TokenStream::concat` to avoid extra namespace specification and /// provide TokenStream concatenation as a generic operator. pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream { - TokenStream::concat(ts1, ts2) -} - -/// Flatten a sequence of TokenStreams into a single TokenStream. -pub fn flatten>(mut iter: T) -> TokenStream { - match iter.next() { - Some(mut ts) => { - for next in iter { - ts = TokenStream::concat(ts, next); - } - ts - } - None => TokenStream::mk_empty() - } + TokenStream::concat([ts1, ts2].iter().cloned()) } /// Checks if two identifiers have the same name, disregarding context. This allows us to @@ -69,14 +56,12 @@ pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token { /// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified /// delimiter. pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream { - let tts = ts.to_tts(); - TokenStream::from_tts(vec![TokenTree::Delimited(DUMMY_SP, - Rc::new(tokenstream::Delimited { - delim: delim, - open_span: DUMMY_SP, - tts: tts, - close_span: DUMMY_SP, - }))]) + TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited { + delim: delim, + open_span: DUMMY_SP, + tts: ts.trees().cloned().collect(), + close_span: DUMMY_SP, + })).into() } /// Takes `ts` and returns `[ts]`. @@ -96,5 +81,5 @@ pub fn build_paren_delimited(ts: TokenStream) -> TokenStream { /// Constructs `()`. pub fn build_empty_args() -> TokenStream { - build_paren_delimited(TokenStream::mk_empty()) + build_paren_delimited(TokenStream::empty()) } diff --git a/src/libproc_macro_tokens/parse.rs b/src/libproc_macro_tokens/parse.rs index 73268d0e66283..d1c1e99bcdc7a 100644 --- a/src/libproc_macro_tokens/parse.rs +++ b/src/libproc_macro_tokens/parse.rs @@ -16,9 +16,8 @@ use syntax::tokenstream::TokenStream; /// Map a string to tts, using a made-up filename. For example, `lex("15")` will return a /// TokenStream containing the literal 15. pub fn lex(source_str: &str) -> TokenStream { - let ps = ParseSess::new(); - TokenStream::from_tts(filemap_to_tts(&ps, - ps.codemap().new_filemap("".to_string(), - None, - source_str.to_owned()))) + let sess = ParseSess::new(); + let filemap = + sess.codemap().new_filemap("".to_string(), None, source_str.to_owned()); + filemap_to_tts(&sess, filemap).into_iter().collect() } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 12ce642891173..0f4b844b0eac8 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -73,23 +73,6 @@ pub fn dummy_spanned(t: T) -> Spanned { respan(DUMMY_SP, t) } -/// Build a span that covers the two provided spans. 
-pub fn combine_spans(sp1: Span, sp2: Span) -> Span { - if sp1 == DUMMY_SP && sp2 == DUMMY_SP { - DUMMY_SP - } else if sp1 == DUMMY_SP { - sp2 - } else if sp2 == DUMMY_SP { - sp1 - } else { - Span { - lo: if sp1.lo < sp2.lo { sp1.lo } else { sp2.lo }, - hi: if sp1.hi > sp2.hi { sp1.hi } else { sp2.hi }, - expn_id: if sp1.expn_id == sp2.expn_id { sp1.expn_id } else { NO_EXPANSION }, - } - } -} - #[derive(Clone, Hash, Debug)] pub struct NameAndSpan { /// The format with which the macro was invoked. diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d748eec73e850..226625ebc8e5e 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -364,10 +364,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let attr_toks = TokenStream::from_tts(tts_for_attr_args(&attr, - &self.cx.parse_sess)); - - let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess)); + let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect(); + let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect(); let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks); self.parse_expansion(tok_result, kind, name, attr.span) @@ -467,7 +465,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }, }); - let toks = TokenStream::from_tts(marked_tts); + let toks = marked_tts.into_iter().collect(); let tok_result = expandfun.expand(self.cx, span, toks); Some(self.parse_expansion(tok_result, kind, extname, span)) } @@ -490,7 +488,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span) -> Expansion { - let mut parser = self.cx.new_parser_from_tts(&toks.to_tts()); + let mut parser = self.cx.new_parser_from_tts(&toks.trees().cloned().collect::>()); let expansion = match parser.parse_expansion(kind, false) { Ok(expansion) => expansion, Err(mut err) => { diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs deleted file mode 100644 index 21ce89a6dd5be..0000000000000 --- a/src/libsyntax/ext/proc_macro_shim.rs +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! This is a shim file to ease the transition to the final procedural macro interface for -//! Macros 2.0. It currently exposes the `libsyntax` operations that the quasiquoter's -//! output needs to compile correctly, along with the following operators: -//! -//! - `build_block_emitter`, which produces a `block` output macro result from the -//! provided TokenStream. - -use ast; -use codemap::Span; -use parse::parser::Parser; -use ptr::P; -use tokenstream::TokenStream; -use ext::base::*; - -/// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses -/// the TokenStream as a block and returns it as an `Expr`. 
-pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, - sp: Span, - output: TokenStream) - -> Box { - let parser = cx.new_parser_from_tts(&output.to_tts()); - - struct Result<'a> { - prsr: Parser<'a>, - span: Span, - }; //FIXME is this the right lifetime - - impl<'a> Result<'a> { - fn block(&mut self) -> P { - let res = self.prsr.parse_block().unwrap(); - res - } - } - - impl<'a> MacResult for Result<'a> { - fn make_expr(self: Box) -> Option> { - let mut me = *self; - Some(P(ast::Expr { - id: ast::DUMMY_NODE_ID, - node: ast::ExprKind::Block(me.block()), - span: me.span, - attrs: ast::ThinVec::new(), - })) - - } - } - - Box::new(Result { - prsr: parser, - span: sp, - }) -} - -pub mod prelude { - pub use super::build_block_emitter; - pub use ast::Ident; - pub use codemap::{DUMMY_SP, Span}; - pub use ext::base::{ExtCtxt, MacResult}; - pub use parse::token::{self, Token, DelimToken}; - pub use symbol::keywords; - pub use tokenstream::{TokenTree, TokenStream}; -} diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index f3c5a49bcf8b6..871e6b3783a41 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -89,6 +89,9 @@ pub mod util { mod thin_vec; pub use self::thin_vec::ThinVec; + + mod rc_slice; + pub use self::rc_slice::RcSlice; } pub mod json; @@ -129,7 +132,6 @@ pub mod ext { pub mod expand; pub mod placeholders; pub mod hygiene; - pub mod proc_macro_shim; pub mod quote; pub mod source_util; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 08f5df4515ba6..65e7ec0a34cf7 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -196,7 +196,7 @@ pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> { - tts_to_parser(sess, ts.to_tts()) + tts_to_parser(sess, ts.trees().cloned().collect()) } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index ab5dc8181e05b..26e976dc0762f 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -25,19 +25,19 @@ //! ownership of the original. use ast::{self, AttrStyle, LitKind}; -use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION}; -use codemap::{Spanned, combine_spans}; +use syntax_pos::Span; +use codemap::Spanned; use ext::base; use ext::tt::macro_parser; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::{self, Directory}; use parse::token::{self, Token, Lit, Nonterminal}; use print::pprust; +use serialize::{Decoder, Decodable, Encoder, Encodable}; use symbol::Symbol; +use util::RcSlice; -use std::fmt; -use std::iter::*; -use std::ops::{self, Index}; +use std::{fmt, iter}; use std::rc::Rc; /// A delimited sequence of token trees @@ -323,555 +323,158 @@ impl TokenTree { } } -/// #Token Streams +/// # Token Streams /// -/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural -/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we -/// are going to cut a few corners (i.e., use some of the AST structure) when we need to -/// for backwards compatibility. - -/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The -/// struct itself shouldn't be directly manipulated; the internal structure is not stable, -/// and may be changed at any time in the future. The operators will not, however (except -/// for signatures, later on). 
-#[derive(Clone, Eq, Hash, RustcEncodable, RustcDecodable)] +/// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s. +/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s +/// instead of a representation of the abstract syntax tree. +/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat. +#[derive(Clone, Debug)] pub struct TokenStream { - ts: InternalTS, + kind: TokenStreamKind, } -// This indicates the maximum size for a leaf in the concatenation algorithm. -// If two leafs will be collectively smaller than this, they will be merged. -// If a leaf is larger than this, it will be concatenated at the top. -const LEAF_SIZE : usize = 32; - -// NB If Leaf access proves to be slow, inroducing a secondary Leaf without the bounds -// for unsliced Leafs may lead to some performance improvemenet. -#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] -pub enum InternalTS { - Empty(Span), - Leaf { - tts: Rc>, - offset: usize, - len: usize, - sp: Span, - }, - Node { - left: Rc, - right: Rc, - len: usize, - sp: Span, - }, +#[derive(Clone, Debug)] +enum TokenStreamKind { + Empty, + Tree(TokenTree), + Stream(RcSlice), } -impl fmt::Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.ts.fmt(f) +impl From for TokenStream { + fn from(tt: TokenTree) -> TokenStream { + TokenStream { kind: TokenStreamKind::Tree(tt) } } } -impl fmt::Debug for InternalTS { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - InternalTS::Empty(..) => Ok(()), - InternalTS::Leaf { ref tts, offset, len, .. } => { - for t in tts.iter().skip(offset).take(len) { - try!(write!(f, "{:?}", t)); - } - Ok(()) - } - InternalTS::Node { ref left, ref right, .. } => { - try!(left.fmt(f)); - right.fmt(f) - } - } +impl> iter::FromIterator for TokenStream { + fn from_iter>(iter: I) -> Self { + TokenStream::concat(iter.into_iter().map(Into::into)) } } -/// Checks if two TokenStreams are equivalent (including spans). For unspanned -/// equality, see `eq_unspanned`. +impl Eq for TokenStream {} + impl PartialEq for TokenStream { fn eq(&self, other: &TokenStream) -> bool { - self.iter().eq(other.iter()) - } -} - -// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span -// will be at {2,13}. Without finer-grained span structures, however, this seems to be -// our only recourse. -// FIXME Do something smarter to compute the expansion id. -fn covering_span(trees: &[TokenTree]) -> Span { - // disregard any dummy spans we have - let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::>(); - - // if we're out of spans, stop - if trees.len() < 1 { - return DUMMY_SP; - } - - // set up the initial values - let fst_span = trees[0].span(); - - let mut lo_span = fst_span.lo; - let mut hi_span = fst_span.hi; - let mut expn_id = fst_span.expn_id; - - // compute the spans iteratively - for t in trees.iter().skip(1) { - let sp = t.span(); - if sp.lo < lo_span { - lo_span = sp.lo; - } - if hi_span < sp.hi { - hi_span = sp.hi; - } - if expn_id != sp.expn_id { - expn_id = NO_EXPANSION; - } - } - - Span { - lo: lo_span, - hi: hi_span, - expn_id: expn_id, + self.trees().eq(other.trees()) } } -impl InternalTS { - fn len(&self) -> usize { - match *self { - InternalTS::Empty(..) => 0, - InternalTS::Leaf { len, .. } => len, - InternalTS::Node { len, .. } => len, - } - } - - fn span(&self) -> Span { - match *self { - InternalTS::Empty(sp) | - InternalTS::Leaf { sp, .. 
} | - InternalTS::Node { sp, .. } => sp, - } - } - - fn slice(&self, range: ops::Range) -> TokenStream { - let from = range.start; - let to = range.end; - if from == to { - return TokenStream::mk_empty(); - } - if from > to { - panic!("Invalid range: {} to {}", from, to); - } - if from == 0 && to == self.len() { - return TokenStream { ts: self.clone() }; /* should be cheap */ - } - match *self { - InternalTS::Empty(..) => panic!("Invalid index"), - InternalTS::Leaf { ref tts, offset, .. } => { - let offset = offset + from; - let len = to - from; - TokenStream::mk_sub_leaf(tts.clone(), - offset, - len, - covering_span(&tts[offset..offset + len])) - } - InternalTS::Node { ref left, ref right, .. } => { - let left_len = left.len(); - if to <= left_len { - left.slice(range) - } else if from >= left_len { - right.slice(from - left_len..to - left_len) - } else { - TokenStream::concat(left.slice(from..left_len), right.slice(0..to - left_len)) - } - } - } - } - - fn to_vec(&self) -> Vec<&TokenTree> { - let mut res = Vec::with_capacity(self.len()); - fn traverse_and_append<'a>(res: &mut Vec<&'a TokenTree>, ts: &'a InternalTS) { - match *ts { - InternalTS::Empty(..) => {}, - InternalTS::Leaf { ref tts, offset, len, .. } => { - let mut to_app = tts[offset..offset + len].iter().collect(); - res.append(&mut to_app); - } - InternalTS::Node { ref left, ref right, .. } => { - traverse_and_append(res, left); - traverse_and_append(res, right); - } - } - } - traverse_and_append(&mut res, self); - res - } - - fn to_tts(&self) -> Vec { - self.to_vec().into_iter().cloned().collect::>() - } - - // Returns an internal node's children. - fn children(&self) -> Option<(Rc, Rc)> { - match *self { - InternalTS::Node { ref left, ref right, .. } => Some((left.clone(), right.clone())), - _ => None, - } - } -} - -/// TokenStream operators include basic destructuring, boolean operations, `maybe_...` -/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward, -/// indicating information about the structure of the stream. The `maybe_...` operations -/// return `Some<...>` if the tokenstream contains the appropriate item. -/// -/// Similarly, the `maybe_..._prefix` operations potentially return a -/// partially-destructured stream as a pair where the first element is the expected item -/// and the second is the remainder of the stream. As anb example, -/// -/// `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")` impl TokenStream { - // Construct an empty node with a dummy span. - pub fn mk_empty() -> TokenStream { - TokenStream { ts: InternalTS::Empty(DUMMY_SP) } - } - - // Construct an empty node with the provided span. - fn mk_spanned_empty(sp: Span) -> TokenStream { - TokenStream { ts: InternalTS::Empty(sp) } - } - - // Construct a leaf node with a 0 offset and length equivalent to the input. - fn mk_leaf(tts: Rc>, sp: Span) -> TokenStream { - let len = tts.len(); - TokenStream { - ts: InternalTS::Leaf { - tts: tts, - offset: 0, - len: len, - sp: sp, - }, - } - } - - // Construct a leaf node with the provided values. - fn mk_sub_leaf(tts: Rc>, offset: usize, len: usize, sp: Span) -> TokenStream { - TokenStream { - ts: InternalTS::Leaf { - tts: tts, - offset: offset, - len: len, - sp: sp, - }, - } + pub fn empty() -> TokenStream { + TokenStream { kind: TokenStreamKind::Empty } } - // Construct an internal node with the provided values. 
- fn mk_int_node(left: Rc, - right: Rc, - len: usize, - sp: Span) - -> TokenStream { - TokenStream { - ts: InternalTS::Node { - left: left, - right: right, - len: len, - sp: sp, - }, - } - } - - /// Convert a vector of `TokenTree`s into a `TokenStream`. - pub fn from_tts(trees: Vec) -> TokenStream { - let span = covering_span(&trees[..]); - TokenStream::mk_leaf(Rc::new(trees), span) - } - - /// Convert a vector of Tokens into a TokenStream. - pub fn from_tokens(tokens: Vec) -> TokenStream { - // FIXME do something nicer with the spans - TokenStream::from_tts(tokens.into_iter().map(|t| TokenTree::Token(DUMMY_SP, t)).collect()) - } - - /// Manually change a TokenStream's span. - pub fn respan(self, span: Span) -> TokenStream { - match self.ts { - InternalTS::Empty(..) => TokenStream::mk_spanned_empty(span), - InternalTS::Leaf { tts, offset, len, .. } => { - TokenStream::mk_sub_leaf(tts, offset, len, span) - } - InternalTS::Node { left, right, len, .. } => { - TokenStream::mk_int_node(left, right, len, span) - } - } - } - - /// Concatenates two TokenStreams into a new TokenStream. - pub fn concat(left: TokenStream, right: TokenStream) -> TokenStream { - // This internal procedure performs 'aggressive compacting' during concatenation as - // follows: - // - If the nodes' combined total total length is less than 32, we copy both of - // them into a new vector and build a new leaf node. - // - If one node is an internal node and the other is a 'small' leaf (length<32), - // we recur down the internal node on the appropriate side. - // - Otherwise, we construct a new internal node that points to them as left and - // right. - fn concat_internal(left: Rc, right: Rc) -> TokenStream { - let llen = left.len(); - let rlen = right.len(); - let len = llen + rlen; - let span = combine_spans(left.span(), right.span()); - if len <= LEAF_SIZE { - let mut new_vec = left.to_tts(); - let mut rvec = right.to_tts(); - new_vec.append(&mut rvec); - return TokenStream::mk_leaf(Rc::new(new_vec), span); - } - - match (left.children(), right.children()) { - (Some((lleft, lright)), None) => { - if rlen <= LEAF_SIZE { - let new_right = concat_internal(lright, right); - TokenStream::mk_int_node(lleft, Rc::new(new_right.ts), len, span) - } else { - TokenStream::mk_int_node(left, right, len, span) - } - } - (None, Some((rleft, rright))) => { - if rlen <= LEAF_SIZE { - let new_left = concat_internal(left, rleft); - TokenStream::mk_int_node(Rc::new(new_left.ts), rright, len, span) - } else { - TokenStream::mk_int_node(left, right, len, span) - } - } - (_, _) => TokenStream::mk_int_node(left, right, len, span), - } - } - - if left.is_empty() { - right - } else if right.is_empty() { - left - } else { - concat_internal(Rc::new(left.ts), Rc::new(right.ts)) - } - } - - /// Indicate if the TokenStream is empty. pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Return a TokenStream's length. - pub fn len(&self) -> usize { - self.ts.len() - } - - /// Convert a TokenStream into a vector of borrowed TokenTrees. - pub fn to_vec(&self) -> Vec<&TokenTree> { - self.ts.to_vec() - } - - /// Convert a TokenStream into a vector of TokenTrees (by cloning the TokenTrees). - /// (This operation is an O(n) deep copy of the underlying structure.) - pub fn to_tts(&self) -> Vec { - self.ts.to_tts() - } - - /// Return the TokenStream's span. - pub fn span(&self) -> Span { - self.ts.span() - } - - /// Returns an iterator over a TokenStream (as a sequence of TokenTrees). 
- pub fn iter<'a>(&self) -> Iter { - Iter { vs: self, idx: 0 } - } - - /// Splits a TokenStream based on the provided `&TokenTree -> bool` predicate. - pub fn split<P>(&self, pred: P) -> Split<P>
- where P: FnMut(&TokenTree) -> bool - { - Split { - vs: self, - pred: pred, - finished: false, - idx: 0, + match self.kind { + TokenStreamKind::Empty => true, + _ => false, } } - /// Produce a slice of the input TokenStream from the `from` index, inclusive, to the - /// `to` index, non-inclusive. - pub fn slice(&self, range: ops::Range) -> TokenStream { - self.ts.slice(range) - } - - /// Slice starting at the provided index, inclusive. - pub fn slice_from(&self, from: ops::RangeFrom) -> TokenStream { - self.slice(from.start..self.len()) - } - - /// Slice up to the provided index, non-inclusive. - pub fn slice_to(&self, to: ops::RangeTo) -> TokenStream { - self.slice(0..to.end) - } - - /// Indicates where the stream is a single, delimited expression (e.g., `(a,b,c)` or - /// `{a,b,c}`). - pub fn is_delimited(&self) -> bool { - self.maybe_delimited().is_some() - } - - /// Returns the inside of the delimited term as a new TokenStream. - pub fn maybe_delimited(&self) -> Option { - if !(self.len() == 1) { - return None; - } - - // FIXME It would be nice to change Delimited to move the Rc around the TokenTree - // vector directly in order to avoid the clone here. - match self[0] { - TokenTree::Delimited(_, ref rc) => Some(TokenStream::from_tts(rc.tts.clone())), - _ => None, - } + pub fn concat>(streams: I) -> TokenStream { + let mut streams = streams.into_iter().filter(|stream| !stream.is_empty()); + let first_stream = match streams.next() { + Some(stream) => stream, + None => return TokenStream::empty(), + }; + let second_stream = match streams.next() { + Some(stream) => stream, + None => return first_stream, + }; + let mut vec = vec![first_stream, second_stream]; + vec.extend(streams); + TokenStream { kind: TokenStreamKind::Stream(RcSlice::new(vec)) } } - /// Indicates if the stream is exactly one identifier. - pub fn is_ident(&self) -> bool { - self.maybe_ident().is_some() - } - - /// Returns an identifier - pub fn maybe_ident(&self) -> Option { - if !(self.len() == 1) { - return None; - } - - match self[0] { - TokenTree::Token(_, Token::Ident(t)) => Some(t), - _ => None, - } + pub fn trees<'a>(&'a self) -> Cursor { + Cursor::new(self) } /// Compares two TokenStreams, checking equality without regarding span information. pub fn eq_unspanned(&self, other: &TokenStream) -> bool { - for (t1, t2) in self.iter().zip(other.iter()) { + for (t1, t2) in self.trees().zip(other.trees()) { if !t1.eq_unspanned(t2) { return false; } } true } - - /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream. - pub fn as_delimited_stream(tts: Vec, delim: token::DelimToken) -> TokenStream { - let new_sp = covering_span(&tts); - - let new_delim = Rc::new(Delimited { - delim: delim, - open_span: DUMMY_SP, - tts: tts, - close_span: DUMMY_SP, - }); - - TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)]) - } } -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(&pprust::tts_to_string(&self.to_tts())) - } -} - -// FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the -// next leaf's iterator when the current one is exhausted. 
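For orientation, a minimal usage sketch of the cursor API introduced below (the `count_idents` helper is hypothetical, not part of the patch, and assumes this module's imports): `trees()` returns a `Cursor` that walks nested streams depth-first with an explicit stack, so callers iterate borrowed `TokenTree`s instead of using the old index and slice operations.

    // Hypothetical sketch (not in the patch): counting identifier tokens by
    // iterating the stream with the new depth-first cursor.
    fn count_idents(stream: &TokenStream) -> usize {
        stream.trees()
              .filter(|tree| match **tree {
                  TokenTree::Token(_, Token::Ident(..)) => true,
                  _ => false,
              })
              .count()
    }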
-pub struct Iter<'a> { - vs: &'a TokenStream, - idx: usize, +pub struct Cursor<'a> { + current_frame: CursorFrame<'a>, + stack: Vec>, } -impl<'a> Iterator for Iter<'a> { +impl<'a> Iterator for Cursor<'a> { type Item = &'a TokenTree; fn next(&mut self) -> Option<&'a TokenTree> { - if self.vs.is_empty() || self.idx >= self.vs.len() { - return None; - } - - let ret = Some(&self.vs[self.idx]); - self.idx = self.idx + 1; - ret + let tree = self.peek(); + self.current_frame = self.stack.pop().unwrap_or(CursorFrame::Empty); + tree } } -pub struct Split<'a, P> - where P: FnMut(&TokenTree) -> bool -{ - vs: &'a TokenStream, - pred: P, - finished: bool, - idx: usize, +enum CursorFrame<'a> { + Empty, + Tree(&'a TokenTree), + Stream(&'a RcSlice, usize), } -impl<'a, P> Iterator for Split<'a, P> - where P: FnMut(&TokenTree) -> bool -{ - type Item = TokenStream; +impl<'a> CursorFrame<'a> { + fn new(stream: &'a TokenStream) -> Self { + match stream.kind { + TokenStreamKind::Empty => CursorFrame::Empty, + TokenStreamKind::Tree(ref tree) => CursorFrame::Tree(tree), + TokenStreamKind::Stream(ref stream) => CursorFrame::Stream(stream, 0), + } + } +} - fn next(&mut self) -> Option { - if self.finished { - return None; +impl<'a> Cursor<'a> { + fn new(stream: &'a TokenStream) -> Self { + Cursor { + current_frame: CursorFrame::new(stream), + stack: Vec::new(), } - if self.idx >= self.vs.len() { - self.finished = true; - return None; + } + + pub fn peek(&mut self) -> Option<&'a TokenTree> { + while let CursorFrame::Stream(stream, index) = self.current_frame { + self.current_frame = if index == stream.len() { + self.stack.pop().unwrap_or(CursorFrame::Empty) + } else { + self.stack.push(CursorFrame::Stream(stream, index + 1)); + CursorFrame::new(&stream[index]) + }; } - let mut lookup = self.vs.iter().skip(self.idx); - match lookup.position(|x| (self.pred)(&x)) { - None => { - self.finished = true; - Some(self.vs.slice_from(self.idx..)) - } - Some(edx) => { - let ret = Some(self.vs.slice(self.idx..self.idx + edx)); - self.idx += edx + 1; - ret - } + match self.current_frame { + CursorFrame::Empty => None, + CursorFrame::Tree(tree) => Some(tree), + CursorFrame::Stream(..) => unreachable!(), } } } -impl Index for TokenStream { - type Output = TokenTree; - - fn index(&self, index: usize) -> &TokenTree { - &self.ts[index] +impl fmt::Display for TokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(&pprust::tts_to_string(&self.trees().cloned().collect::>())) } } -impl Index for InternalTS { - type Output = TokenTree; +impl Encodable for TokenStream { + fn encode(&self, encoder: &mut E) -> Result<(), E::Error> { + self.trees().cloned().collect::>().encode(encoder) + } +} - fn index(&self, index: usize) -> &TokenTree { - if self.len() <= index { - panic!("Index {} too large for {:?}", index, self); - } - match *self { - InternalTS::Empty(..) => panic!("Invalid index"), - InternalTS::Leaf { ref tts, offset, .. } => tts.get(index + offset).unwrap(), - InternalTS::Node { ref left, ref right, .. 
} => { - let left_len = left.len(); - if index < left_len { - Index::index(&**left, index) - } else { - Index::index(&**right, index - left_len) - } - } - } +impl Decodable for TokenStream { + fn decode(decoder: &mut D) -> Result { + Vec::::decode(decoder).map(|vec| vec.into_iter().collect()) } } @@ -880,10 +483,13 @@ impl Index for InternalTS { mod tests { use super::*; use syntax::ast::Ident; - use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP}; - use parse::token::{self, Token}; + use syntax_pos::{Span, BytePos, NO_EXPANSION}; + use parse::token::Token; use util::parser_testing::string_to_tts; - use std::rc::Rc; + + fn string_to_ts(string: &str) -> TokenStream { + string_to_tts(string.to_owned()).into_iter().collect() + } fn sp(a: u32, b: u32) -> Span { Span { @@ -893,239 +499,76 @@ mod tests { } } - fn as_paren_delimited_stream(tts: Vec) -> TokenStream { - TokenStream::as_delimited_stream(tts, token::DelimToken::Paren) - } - #[test] fn test_concat() { - let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string())); - let test_fst = TokenStream::from_tts(string_to_tts("foo::bar".to_string())); - let test_snd = TokenStream::from_tts(string_to_tts("::baz".to_string())); - let eq_res = TokenStream::concat(test_fst, test_snd); - assert_eq!(test_res.len(), 5); - assert_eq!(eq_res.len(), 5); + let test_res = string_to_ts("foo::bar::baz"); + let test_fst = string_to_ts("foo::bar"); + let test_snd = string_to_ts("::baz"); + let eq_res = TokenStream::concat([test_fst, test_snd].iter().cloned()); + assert_eq!(test_res.trees().count(), 5); + assert_eq!(eq_res.trees().count(), 5); assert_eq!(test_res.eq_unspanned(&eq_res), true); } #[test] fn test_from_to_bijection() { let test_start = string_to_tts("foo::bar(baz)".to_string()); - let test_end = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string())).to_tts(); + let ts = test_start.iter().cloned().collect::(); + let test_end: Vec = ts.trees().cloned().collect(); assert_eq!(test_start, test_end) } #[test] fn test_to_from_bijection() { - let test_start = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string())); - let test_end = TokenStream::from_tts(test_start.clone().to_tts()); + let test_start = string_to_ts("foo::bar(baz)"); + let test_end = test_start.trees().cloned().collect(); assert_eq!(test_start, test_end) } #[test] fn test_eq_0() { - let test_res = TokenStream::from_tts(string_to_tts("foo".to_string())); - let test_eqs = TokenStream::from_tts(string_to_tts("foo".to_string())); + let test_res = string_to_ts("foo"); + let test_eqs = string_to_ts("foo"); assert_eq!(test_res, test_eqs) } #[test] fn test_eq_1() { - let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string())); - let test_eqs = TokenStream::from_tts(string_to_tts("::bar::baz".to_string())); + let test_res = string_to_ts("::bar::baz"); + let test_eqs = string_to_ts("::bar::baz"); assert_eq!(test_res, test_eqs) } - #[test] - fn test_eq_2() { - let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string())); - let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string())); - assert_eq!(test_res, test_eqs.slice(0..3)) - } - #[test] fn test_eq_3() { - let test_res = TokenStream::from_tts(string_to_tts("".to_string())); - let test_eqs = TokenStream::from_tts(string_to_tts("".to_string())); + let test_res = string_to_ts(""); + let test_eqs = string_to_ts(""); assert_eq!(test_res, test_eqs) } #[test] fn test_diseq_0() { - let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string())); 
- let test_eqs = TokenStream::from_tts(string_to_tts("bar::baz".to_string())); + let test_res = string_to_ts("::bar::baz"); + let test_eqs = string_to_ts("bar::baz"); assert_eq!(test_res == test_eqs, false) } #[test] fn test_diseq_1() { - let test_res = TokenStream::from_tts(string_to_tts("(bar,baz)".to_string())); - let test_eqs = TokenStream::from_tts(string_to_tts("bar,baz".to_string())); + let test_res = string_to_ts("(bar,baz)"); + let test_eqs = string_to_ts("bar,baz"); assert_eq!(test_res == test_eqs, false) } - #[test] - fn test_slice_0() { - let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string())); - let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string())); - assert_eq!(test_res, test_eqs.slice(0..3)) - } - - #[test] - fn test_slice_1() { - let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string())) - .slice(2..3); - let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8), - token::Ident(Ident::from_str("bar")))]); - assert_eq!(test_res, test_eqs) - } - #[test] fn test_is_empty() { - let test0 = TokenStream::from_tts(Vec::new()); - let test1 = TokenStream::from_tts( - vec![TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a")))] - ); - - let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string())); + let test0: TokenStream = Vec::::new().into_iter().collect(); + let test1: TokenStream = + TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into(); + let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); assert_eq!(test1.is_empty(), false); assert_eq!(test2.is_empty(), false); } - - #[test] - fn test_is_delimited() { - let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string())); - let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string())); - let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string())); - let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string())); - let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string())); - let test5 = TokenStream::from_tts(string_to_tts("".to_string())); - - assert_eq!(test0.is_delimited(), false); - assert_eq!(test1.is_delimited(), true); - assert_eq!(test2.is_delimited(), true); - assert_eq!(test3.is_delimited(), false); - assert_eq!(test4.is_delimited(), false); - assert_eq!(test5.is_delimited(), false); - } - - #[test] - fn test_is_ident() { - let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())); - let test1 = TokenStream::from_tts(string_to_tts("5".to_string())); - let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())); - let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())); - let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())); - - assert_eq!(test0.is_ident(), false); - assert_eq!(test1.is_ident(), false); - assert_eq!(test2.is_ident(), true); - assert_eq!(test3.is_ident(), false); - assert_eq!(test4.is_ident(), false); - } - - #[test] - fn test_maybe_delimited() { - let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string())); - let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string())); - let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string())); - let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)" - .to_string())); - let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string())); - let test5_input = 
TokenStream::from_tts(string_to_tts("".to_string())); - - let test0 = test0_input.maybe_delimited(); - let test1 = test1_input.maybe_delimited(); - let test2 = test2_input.maybe_delimited(); - let test3 = test3_input.maybe_delimited(); - let test4 = test4_input.maybe_delimited(); - let test5 = test5_input.maybe_delimited(); - - assert_eq!(test0, None); - - let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4), - token::Ident(Ident::from_str("bar"))), - TokenTree::Token(sp(4, 6), token::ModSep), - TokenTree::Token(sp(6, 9), - token::Ident(Ident::from_str("baz")))]); - assert_eq!(test1, Some(test1_expected)); - - let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4), - token::Ident(Ident::from_str("foo"))), - TokenTree::Token(sp(4, 5), token::Comma), - TokenTree::Token(sp(5, 8), - token::Ident(Ident::from_str("bar"))), - TokenTree::Token(sp(8, 9), token::Comma), - TokenTree::Token(sp(9, 12), - token::Ident(Ident::from_str("baz")))]); - assert_eq!(test2, Some(test2_expected)); - - assert_eq!(test3, None); - - assert_eq!(test4, None); - - assert_eq!(test5, None); - } - - // pub fn maybe_ident(&self) -> Option - #[test] - fn test_maybe_ident() { - let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident(); - let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident(); - let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident(); - let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident(); - let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident(); - - assert_eq!(test0, None); - assert_eq!(test1, None); - assert_eq!(test2, Some(Ident::from_str("foo"))); - assert_eq!(test3, None); - assert_eq!(test4, None); - } - - #[test] - fn test_as_delimited_stream() { - let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string())); - let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string())); - - let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("foo"))), - TokenTree::Token(sp(3, 4), token::Comma), - TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("bar"))), - TokenTree::Token(sp(7, 8), token::Comma)]; - let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8), - Rc::new(Delimited { - delim: token::DelimToken::Paren, - open_span: DUMMY_SP, - tts: test0_tts, - close_span: DUMMY_SP, - }))]); - - assert_eq!(test0, test0_stream); - - - let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("foo"))), - TokenTree::Token(sp(7, 8), token::Comma), - TokenTree::Token(sp(8, 11), token::Ident(Ident::from_str("bar")))]; - - let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("baz"))), - TokenTree::Delimited(sp(3, 12), - Rc::new(Delimited { - delim: token::DelimToken::Paren, - open_span: sp(3, 4), - tts: test1_tts, - close_span: sp(11, 12), - }))]; - - let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12), - Rc::new(Delimited { - delim: token::DelimToken::Paren, - open_span: DUMMY_SP, - tts: test1_parse, - close_span: DUMMY_SP, - }))]); - - assert_eq!(test1, test1_stream); - } } diff --git a/src/libsyntax/util/rc_slice.rs b/src/libsyntax/util/rc_slice.rs new file mode 100644 index 0000000000000..cb3becf83f682 --- /dev/null +++ b/src/libsyntax/util/rc_slice.rs @@ -0,0 +1,50 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::hash::{self, Hash}; +use std::fmt; +use std::ops::Deref; +use std::rc::Rc; + +#[derive(Clone)] +pub struct RcSlice { + data: Rc>, + offset: u32, + len: u32, +} + +impl RcSlice { + pub fn new(vec: Vec) -> Self { + RcSlice { + offset: 0, + len: vec.len() as u32, + data: Rc::new(vec.into_boxed_slice()), + } + } +} + +impl Deref for RcSlice { + type Target = [T]; + fn deref(&self) -> &[T] { + &self.data[self.offset as usize .. (self.offset + self.len) as usize] + } +} + +impl Hash for RcSlice { + fn hash(&self, state: &mut H) { + self.deref().hash(state); + } +} + +impl fmt::Debug for RcSlice { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(self.deref(), f) + } +} diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs index 31a5f5968bab6..730e112c70016 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -15,52 +15,45 @@ #![plugin(proc_macro_plugin)] extern crate rustc_plugin; -extern crate proc_macro_tokens; extern crate syntax; -use proc_macro_tokens::prelude::*; - use rustc_plugin::Registry; -use syntax::ast::Ident; -use syntax::codemap::{DUMMY_SP, Span}; -use syntax::ext::proc_macro_shim::build_block_emitter; -use syntax::ext::base::{ExtCtxt, MacResult}; -use syntax::parse::token::{self, Token, DelimToken}; +use syntax::ext::base::SyntaxExtension; +use syntax::parse::token::Token; +use syntax::symbol::Symbol; use syntax::tokenstream::{TokenTree, TokenStream}; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - reg.register_macro("cond", cond); -} - -fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned())); - build_block_emitter(cx, sp, output) + reg.register_syntax_extension(Symbol::intern("cond"), + SyntaxExtension::ProcMacro(Box::new(cond))); } -fn cond_rec(input: TokenStream) -> TokenStream { - if input.is_empty() { - return qquote!(); - } - - let next = input.slice(0..1); - let rest = input.slice_from(1..); - - let clause : TokenStream = match next.maybe_delimited() { - Some(ts) => ts, - _ => panic!("Invalid input"), - }; - - // clause is ([test]) [rhs] - if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } - - let test: TokenStream = clause.slice(0..1); - let rhs: TokenStream = clause.slice_from(1..); - - if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() { - qquote!({unquote(rhs)}) - } else { - qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) - } +fn cond(input: TokenStream) -> TokenStream { + let mut conds = Vec::new(); + let mut input = input.trees(); + while let Some(tree) = input.next() { + let cond: TokenStream = match *tree { + TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(), + _ => panic!("Invalid input"), + }; + let mut trees = cond.trees().cloned(); + let test = trees.next(); + let rhs = trees.collect::(); + if rhs.is_empty() { + panic!("Invalid macro usage in cond: {}", cond); + } + let is_else = match test { + Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true, + _ => 
false, + }; + conds.push(if is_else || input.peek().is_none() { + qquote!({ unquote rhs }) + } else { + qquote!(if unquote(test.unwrap()) { unquote rhs } else) + }); + } + + conds.into_iter().collect() } diff --git a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs index a2dbb0a96d967..91075276a3020 100644 --- a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs +++ b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs @@ -14,22 +14,21 @@ #![plugin(proc_macro_plugin)] extern crate rustc_plugin; -extern crate proc_macro_tokens; extern crate syntax; -use syntax::ext::proc_macro_shim::prelude::*; -use proc_macro_tokens::prelude::*; - use rustc_plugin::Registry; +use syntax::ext::base::SyntaxExtension; +use syntax::symbol::Symbol; +use syntax::tokenstream::TokenStream; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - reg.register_macro("hello", hello); + reg.register_syntax_extension(Symbol::intern("hello"), + SyntaxExtension::ProcMacro(Box::new(hello))); } // This macro is not very interesting, but it does contain delimited tokens with // no content - `()` and `{}` - which has caused problems in the past. -fn hello<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let output = qquote!({ fn hello() {} hello(); }); - build_block_emitter(cx, sp, output) +fn hello(_: TokenStream) -> TokenStream { + qquote!({ fn hello() {} hello(); }) } diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs index f97fb04aadf6a..612c199e8281a 100644 --- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs +++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs @@ -9,15 +9,14 @@ // except according to those terms. 
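The registration pattern repeated across these test plugins reduces to the following skeleton (the macro name `noop` and its do-nothing expander are made up for illustration; the registration calls mirror the diffs above):

    #![feature(plugin_registrar, rustc_private)]

    extern crate rustc_plugin;
    extern crate syntax;

    use rustc_plugin::Registry;
    use syntax::ext::base::SyntaxExtension;
    use syntax::symbol::Symbol;
    use syntax::tokenstream::TokenStream;

    #[plugin_registrar]
    pub fn plugin_registrar(reg: &mut Registry) {
        // Expanders are now plain TokenStream -> TokenStream functions;
        // no ExtCtxt, Span, or MacResult plumbing is needed.
        reg.register_syntax_extension(Symbol::intern("noop"),
                                      SyntaxExtension::ProcMacro(Box::new(noop)));
    }

    fn noop(input: TokenStream) -> TokenStream {
        input
    }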
#![feature(plugin, plugin_registrar, rustc_private)] +#![plugin(proc_macro_plugin)] -extern crate proc_macro_tokens; extern crate rustc_plugin; extern crate syntax; -use proc_macro_tokens::prelude::*; use rustc_plugin::Registry; use syntax::ext::base::SyntaxExtension; -use syntax::ext::proc_macro_shim::prelude::*; +use syntax::tokenstream::TokenStream; use syntax::symbol::Symbol; #[plugin_registrar] @@ -35,23 +34,21 @@ pub fn plugin_registrar(reg: &mut Registry) { } fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream { - lex("fn f1() -> bool { true }") + qquote!(fn f1() -> bool { true }) } fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { - let source = item.to_string(); - lex(&source) + qquote!(unquote item) } fn tru(_ts: TokenStream) -> TokenStream { - lex("true") + qquote!(true) } fn ret_tru(_ts: TokenStream) -> TokenStream { - lex("return true;") + qquote!(return true;) } fn identity(ts: TokenStream) -> TokenStream { - let source = ts.to_string(); - lex(&source) + qquote!(unquote ts) } diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs index 948b20c14771f..57b6c3f0adb89 100644 --- a/src/test/run-pass-fulldeps/macro-quote-1.rs +++ b/src/test/run-pass-fulldeps/macro-quote-1.rs @@ -14,12 +14,14 @@ #![feature(rustc_private)] #![plugin(proc_macro_plugin)] -extern crate proc_macro_tokens; -use proc_macro_tokens::prelude::*; - extern crate syntax; +extern crate syntax_pos; + +use syntax::ast::Ident; +use syntax::parse::token; +use syntax::tokenstream::TokenTree; fn main() { - let lex_true = lex("true"); - assert_eq!(qquote!(true).eq_unspanned(&lex_true), true); + let true_tok = TokenTree::Token(syntax_pos::DUMMY_SP, token::Ident(Ident::from_str("true"))); + assert!(qquote!(true).eq_unspanned(&true_tok.into())); } From 31417efcd3e739b48c1cf78214e8c4ff82dc424f Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Sun, 22 Jan 2017 09:09:33 +0000 Subject: [PATCH 3/4] Remove crate `proc_macro_tokens`. 
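Callers lose little from the crate's removal: `lex` has in-tree equivalents (the tests above now build streams with `qquote!` or by collecting parsed token trees), and the `build_*_delimited` helpers collapse to one-liners against `syntax::tokenstream`. A sketch of the parenthesizing case, written against the API as it stands at this commit (the two delimiter-span fields it fills with `DUMMY_SP` are removed again by the next patch in the series):

    use std::rc::Rc;

    use syntax::codemap::DUMMY_SP;
    use syntax::parse::token;
    use syntax::tokenstream::{self, TokenStream, TokenTree};

    // Roughly what proc_macro_tokens::build::build_paren_delimited did,
    // inlined onto the surviving libsyntax API.
    fn paren_delimited(ts: TokenStream) -> TokenStream {
        TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
            delim: token::DelimToken::Paren,
            open_span: DUMMY_SP,
            tts: ts.trees().cloned().collect(),
            close_span: DUMMY_SP,
        })).into()
    }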
--- mk/crates.mk | 3 +- src/Cargo.lock | 7 --- src/libproc_macro_tokens/Cargo.toml | 12 ---- src/libproc_macro_tokens/build.rs | 85 ----------------------------- src/libproc_macro_tokens/lib.rs | 64 ---------------------- src/libproc_macro_tokens/parse.rs | 23 -------- src/libproc_macro_tokens/prelude.rs | 12 ---- 7 files changed, 1 insertion(+), 205 deletions(-) delete mode 100644 src/libproc_macro_tokens/Cargo.toml delete mode 100644 src/libproc_macro_tokens/build.rs delete mode 100644 src/libproc_macro_tokens/lib.rs delete mode 100644 src/libproc_macro_tokens/parse.rs delete mode 100644 src/libproc_macro_tokens/prelude.rs diff --git a/mk/crates.mk b/mk/crates.mk index d7218b6bc8970..f801605d9fa97 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -60,7 +60,7 @@ RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_ rustc_data_structures rustc_platform_intrinsics rustc_errors \ rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \ rustc_const_eval rustc_const_math rustc_incremental proc_macro -HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos $(RUSTC_CRATES) \ +HOST_CRATES := syntax syntax_ext proc_macro_plugin syntax_pos $(RUSTC_CRATES) \ rustdoc fmt_macros flate arena graphviz log serialize TOOLS := compiletest rustdoc rustc rustbook error_index_generator @@ -102,7 +102,6 @@ DEPS_syntax := std term serialize log arena libc rustc_bitflags std_unicode rust DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro DEPS_proc_macro := syntax syntax_pos rustc_plugin log DEPS_syntax_pos := serialize -DEPS_proc_macro_tokens := syntax syntax_pos log DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin DEPS_rustc_const_math := std syntax log serialize rustc_i128 diff --git a/src/Cargo.lock b/src/Cargo.lock index 86fdece237b9b..7db243c5eb9d0 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -213,13 +213,6 @@ dependencies = [ "syntax_pos 0.0.0", ] -[[package]] -name = "proc_macro_tokens" -version = "0.0.0" -dependencies = [ - "syntax 0.0.0", -] - [[package]] name = "rand" version = "0.0.0" diff --git a/src/libproc_macro_tokens/Cargo.toml b/src/libproc_macro_tokens/Cargo.toml deleted file mode 100644 index 2cec4d7af54cb..0000000000000 --- a/src/libproc_macro_tokens/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "proc_macro_tokens" -version = "0.0.0" -build = false - -[lib] -path = "lib.rs" -crate-type = ["dylib"] - -[dependencies] -syntax = { path = "../libsyntax" } diff --git a/src/libproc_macro_tokens/build.rs b/src/libproc_macro_tokens/build.rs deleted file mode 100644 index ed97d57cccd91..0000000000000 --- a/src/libproc_macro_tokens/build.rs +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use syntax::ast::Ident; -use syntax::codemap::DUMMY_SP; -use syntax::parse::token::{self, Token}; -use syntax::symbol::keywords; -use syntax::tokenstream::{self, TokenTree, TokenStream}; -use std::rc::Rc; - -/// A wrapper around `TokenStream::concat` to avoid extra namespace specification and -/// provide TokenStream concatenation as a generic operator. 
-pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream { - TokenStream::concat([ts1, ts2].iter().cloned()) -} - -/// Checks if two identifiers have the same name, disregarding context. This allows us to -/// fake 'reserved' keywords. -// FIXME We really want `free-identifier-=?` (a la Dybvig 1993). von Tander 2007 is -// probably the easiest way to do that. -pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool { - let tid = match *tident { - TokenTree::Token(_, Token::Ident(ref id)) => id, - _ => { - return false; - } - }; - - tid.name == id.name -} - -// ____________________________________________________________________________________________ -// Conversion operators - -/// Convert a `&str` into a Token. -pub fn str_to_token_ident(s: &str) -> Token { - Token::Ident(Ident::from_str(s)) -} - -/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that -/// corresponds to it. -pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token { - Token::Ident(Ident::from_str(&kw.name().as_str()[..])) -} - -// ____________________________________________________________________________________________ -// Build Procedures - -/// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified -/// delimiter. -pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream { - TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited { - delim: delim, - open_span: DUMMY_SP, - tts: ts.trees().cloned().collect(), - close_span: DUMMY_SP, - })).into() -} - -/// Takes `ts` and returns `[ts]`. -pub fn build_bracket_delimited(ts: TokenStream) -> TokenStream { - build_delimited(ts, token::DelimToken::Bracket) -} - -/// Takes `ts` and returns `{ts}`. -pub fn build_brace_delimited(ts: TokenStream) -> TokenStream { - build_delimited(ts, token::DelimToken::Brace) -} - -/// Takes `ts` and returns `(ts)`. -pub fn build_paren_delimited(ts: TokenStream) -> TokenStream { - build_delimited(ts, token::DelimToken::Paren) -} - -/// Constructs `()`. -pub fn build_empty_args() -> TokenStream { - build_paren_delimited(TokenStream::empty()) -} diff --git a/src/libproc_macro_tokens/lib.rs b/src/libproc_macro_tokens/lib.rs deleted file mode 100644 index e20ed6899154e..0000000000000 --- a/src/libproc_macro_tokens/lib.rs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Proc_Macro -//! -//! A library for procedural macro writers. -//! -//! ## Usage -//! This crate provides the prelude (at libproc_macro_tokens::prelude), which -//! provides a number of operations: -//! - `concat`, for concatenating two TokenStreams. -//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context. -//! - `str_to_token_ident`, for converting an `&str` into a Token. -//! - `keyword_to_token_delim`, for converting a `parse::token::keywords::Keyword` into a -//! Token. -//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter -//! by wrapping the TokenStream in the delimiter. -//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for -//! easing the above. -//! - `build_empty_args`, which returns a TokenStream containing `()`. -//! 
- `lex`, which takes an `&str` and returns the TokenStream it represents. -//! -//! ## TokenStreams -//! -//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of -//! TokenTrees, where indexing treats delimited values as a single term. That is, the term -//! `even(a+c) && even(b)` will be indexibly encoded as `even | (a+c) | even | (b)` where, -//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`. -//! -//! If a user has a TokenStream that is a single, delimited value, they can use -//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream -//! as: -//! ``` -//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)` -//! ``` -//! -//! Check the TokenStream documentation for more information; the structure also provides -//! cheap concatenation and slicing. -//! - -#![crate_name = "proc_macro_tokens"] -#![unstable(feature = "rustc_private", issue = "27812")] -#![crate_type = "dylib"] -#![crate_type = "rlib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] -#![deny(warnings)] - -#![feature(staged_api)] -#![feature(rustc_private)] - -extern crate syntax; - -pub mod build; -pub mod parse; -pub mod prelude; diff --git a/src/libproc_macro_tokens/parse.rs b/src/libproc_macro_tokens/parse.rs deleted file mode 100644 index d1c1e99bcdc7a..0000000000000 --- a/src/libproc_macro_tokens/parse.rs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Parsing utilities for writing procedural macros. - -use syntax::parse::{ParseSess, filemap_to_tts}; -use syntax::tokenstream::TokenStream; - -/// Map a string to tts, using a made-up filename. For example, `lex("15")` will return a -/// TokenStream containing the literal 15. -pub fn lex(source_str: &str) -> TokenStream { - let sess = ParseSess::new(); - let filemap = - sess.codemap().new_filemap("".to_string(), None, source_str.to_owned()); - filemap_to_tts(&sess, filemap).into_iter().collect() -} diff --git a/src/libproc_macro_tokens/prelude.rs b/src/libproc_macro_tokens/prelude.rs deleted file mode 100644 index 4c0c8ba6c6684..0000000000000 --- a/src/libproc_macro_tokens/prelude.rs +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -pub use build::*; -pub use parse::*; From 49f5b0a8cf1a2d588a55f6cb8ea43942e147c66b Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Mon, 23 Jan 2017 04:58:15 +0000 Subject: [PATCH 4/4] Remove `open_span` and `close_span` from `Delimited`. 
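The spans are now derived on demand: `open_tt` takes the first `delim.len()` bytes of the covering span and `close_tt` the last `delim.len()`, with `DUMMY_SP` passed through unchanged. A standalone check of that arithmetic on plain byte offsets (the helper is hypothetical; `len()` is 1 for real delimiters and 0 for `NoDelim`):

    // Covering span lo..hi; the delimiters sit at its two ends.
    fn delim_spans(lo: u32, hi: u32, delim_len: u32) -> ((u32, u32), (u32, u32)) {
        let open = (lo, lo + delim_len);
        let close = (hi - delim_len, hi);
        (open, close)
    }

    fn main() {
        // `(b: i32)` spanning bytes 5..14: `(` is 5..6 and `)` is 13..14,
        // matching the open_span/close_span values deleted from the tests below.
        assert_eq!(delim_spans(5, 14, 1), ((5, 6), (13, 14)));
    }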
--- src/libproc_macro_plugin/qquote.rs | 4 --- .../calculate_svh/svh_visitor.rs | 4 --- src/libsyntax/ext/quote.rs | 10 ++----- src/libsyntax/ext/tt/macro_rules.rs | 13 ++++---- src/libsyntax/fold.rs | 2 -- src/libsyntax/parse/lexer/tokentrees.rs | 6 +--- src/libsyntax/parse/mod.rs | 4 --- src/libsyntax/parse/parser.rs | 11 ++----- src/libsyntax/parse/token.rs | 6 ++++ src/libsyntax/tokenstream.rs | 30 ++++++++++--------- 10 files changed, 35 insertions(+), 55 deletions(-) diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/qquote.rs index 69c6eba6c0f89..300b4df892943 100644 --- a/src/libproc_macro_plugin/qquote.rs +++ b/src/libproc_macro_plugin/qquote.rs @@ -52,8 +52,6 @@ fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited { delim: delim, tts: stream.trees().cloned().collect(), - open_span: DUMMY_SP, - close_span: DUMMY_SP, })).into() } @@ -129,8 +127,6 @@ impl Quote for TokenTree { impl Quote for Rc { fn quote(&self) -> TokenStream { quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited { - open_span: ::syntax::ext::quote::rt::DUMMY_SP, - close_span: ::syntax::ext::quote::rt::DUMMY_SP, delim: (quote self.delim), tts: (quote self.tts), })) diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index f0e86e81c076d..3427a42526181 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -1034,18 +1034,14 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { hash_span!(self, span); let tokenstream::Delimited { ref delim, - open_span, ref tts, - close_span, } = **delimited; delim.hash(self.st); - hash_span!(self, open_span); tts.len().hash(self.st); for sub_tt in tts { self.hash_token_tree(sub_tt); } - hash_span!(self, close_span); } tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => { hash_span!(self, span); diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index c0cbda4ba1226..8258a7427b62b 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -231,9 +231,7 @@ pub mod rt { } r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited { delim: token::Bracket, - open_span: self.span, tts: self.value.to_tokens(cx), - close_span: self.span, }))); r } @@ -250,9 +248,7 @@ pub mod rt { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited { delim: token::Paren, - open_span: DUMMY_SP, tts: vec![], - close_span: DUMMY_SP, }))] } } @@ -757,11 +753,11 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec { - statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter() + TokenTree::Delimited(span, ref delimed) => { + statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter() .chain(delimed.tts.iter() .flat_map(|tt| statements_mk_tt(cx, tt, matcher))) - .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher)) + .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher)) .collect() }, TokenTree::Sequence(sp, ref seq) => { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f6a25d4aceed7..d0c1c0efea7a3 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -350,9 +350,9 @@ impl FirstSets { TokenTree::Token(sp, ref tok) => { first.replace_with((sp, tok.clone())); } - TokenTree::Delimited(_, ref delimited) => { + 
TokenTree::Delimited(span, ref delimited) => { build_recur(sets, &delimited.tts[..]); - first.replace_with((delimited.open_span, + first.replace_with((delimited.open_tt(span).span(), Token::OpenDelim(delimited.delim))); } TokenTree::Sequence(sp, ref seq_rep) => { @@ -410,8 +410,8 @@ impl FirstSets { first.add_one((sp, tok.clone())); return first; } - TokenTree::Delimited(_, ref delimited) => { - first.add_one((delimited.open_span, + TokenTree::Delimited(span, ref delimited) => { + first.add_one((delimited.open_tt(span).span(), Token::OpenDelim(delimited.delim))); return first; } @@ -603,8 +603,9 @@ fn check_matcher_core(sess: &ParseSess, suffix_first = build_suffix_first(); } } - TokenTree::Delimited(_, ref d) => { - let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim))); + TokenTree::Delimited(span, ref d) => { + let my_suffix = TokenSet::singleton((d.close_tt(span).span(), + Token::CloseDelim(d.delim))); check_matcher_core(sess, first_sets, &d.tts, &my_suffix); // don't track non NT tokens last.replace_with_irrelevant(); diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 2e5ce739fb34a..c42bf24578f0a 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -543,9 +543,7 @@ pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { TokenTree::Delimited(fld.new_span(span), Rc::new( Delimited { delim: delimed.delim, - open_span: fld.new_span(delimed.open_span), tts: fld.fold_tts(&delimed.tts), - close_span: fld.new_span(delimed.close_span), } )) }, diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 7b6f00e0e8265..eafc3f77ab052 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -59,7 +59,6 @@ impl<'a> StringReader<'a> { // Parse the open delimiter. self.open_braces.push((delim, self.span)); - let open_span = self.span; self.real_token(); // Parse the token trees within the delimiters. @@ -67,9 +66,8 @@ impl<'a> StringReader<'a> { // uses an incorrect delimiter. let tts = self.parse_token_trees_until_close_delim(); - let close_span = self.span; // Expand to cover the entire delimited token tree - let span = Span { hi: close_span.hi, ..pre_span }; + let span = Span { hi: self.span.hi, ..pre_span }; match self.token { // Correct delimiter. 
@@ -115,9 +113,7 @@ impl<'a> StringReader<'a> { Ok(TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, - open_span: open_span, tts: tts, - close_span: close_span, }))) }, token::CloseDelim(_) => { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 65e7ec0a34cf7..f1a3b523cfd93 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -725,24 +725,20 @@ mod tests { sp(5, 14), Rc::new(tokenstream::Delimited { delim: token::DelimToken::Paren, - open_span: sp(5, 6), tts: vec![ TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))), TokenTree::Token(sp(8, 9), token::Colon), TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))), ], - close_span: sp(13, 14), })), TokenTree::Delimited( sp(15, 21), Rc::new(tokenstream::Delimited { delim: token::DelimToken::Brace, - open_span: sp(15, 16), tts: vec![ TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))), TokenTree::Token(sp(18, 19), token::Semi), ], - close_span: sp(20, 21), })) ]; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 5dd772041e271..f172f52104c3d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -254,9 +254,7 @@ impl<'a> Parser<'a> { -> Self { let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited { delim: token::NoDelim, - open_span: syntax_pos::DUMMY_SP, tts: tokens, - close_span: syntax_pos::DUMMY_SP, })); let mut parser = Parser { sess: sess, @@ -2717,7 +2715,7 @@ impl<'a> Parser<'a> { } let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true); - let open_span = self.span; + let lo = self.span.lo; self.bump(); let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace), &token::CloseDelim(token::Paren), @@ -2726,16 +2724,11 @@ impl<'a> Parser<'a> { |p| p.parse_token_tree(), |mut e| e.emit()); self.parsing_token_tree = parsing_token_tree; - - let close_span = self.span; self.bump(); - let span = Span { lo: open_span.lo, ..close_span }; - Ok(TokenTree::Delimited(span, Rc::new(Delimited { + Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited { delim: delim, - open_span: open_span, tts: tts, - close_span: close_span, }))) }, token::CloseDelim(_) | token::Eof => unreachable!(), diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 8ac39dd462e7c..bf790b96e37f6 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -49,6 +49,12 @@ pub enum DelimToken { NoDelim, } +impl DelimToken { + pub fn len(&self) -> u32 { + if *self == NoDelim { 0 } else { 1 } + } +} + #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum Lit { Byte(ast::Name), diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 26e976dc0762f..f29a168e6df19 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -25,7 +25,7 @@ //! ownership of the original. 
use ast::{self, AttrStyle, LitKind}; -use syntax_pos::Span; +use syntax_pos::{BytePos, Span, DUMMY_SP}; use codemap::Spanned; use ext::base; use ext::tt::macro_parser; @@ -45,12 +45,8 @@ use std::rc::Rc; pub struct Delimited { /// The type of delimiter pub delim: token::DelimToken, - /// The span covering the opening delimiter - pub open_span: Span, /// The delimited sequence of token trees pub tts: Vec, - /// The span covering the closing delimiter - pub close_span: Span, } impl Delimited { @@ -65,13 +61,21 @@ impl Delimited { } /// Returns the opening delimiter as a token tree. - pub fn open_tt(&self) -> TokenTree { - TokenTree::Token(self.open_span, self.open_token()) + pub fn open_tt(&self, span: Span) -> TokenTree { + let open_span = match span { + DUMMY_SP => DUMMY_SP, + _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span }, + }; + TokenTree::Token(open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. - pub fn close_tt(&self) -> TokenTree { - TokenTree::Token(self.close_span, self.close_token()) + pub fn close_tt(&self, span: Span) -> TokenTree { + let close_span = match span { + DUMMY_SP => DUMMY_SP, + _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span }, + }; + TokenTree::Token(close_span, self.close_token()) } /// Returns the token trees inside the delimiters. @@ -175,23 +179,21 @@ impl TokenTree { TokenTree::Delimited(sp, Rc::new(Delimited { delim: token::Bracket, - open_span: sp, tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))), TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Literal( token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))], - close_span: sp, })) } (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { delimed.tts[index].clone() } - (&TokenTree::Delimited(_, ref delimed), _) => { + (&TokenTree::Delimited(span, ref delimed), _) => { if index == 0 { - return delimed.open_tt(); + return delimed.open_tt(span); } if index == delimed.tts.len() + 1 { - return delimed.close_tt(); + return delimed.close_tt(span); } delimed.tts[index - 1].clone() }
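One consequence of the new representation worth spelling out: the `Cursor` added earlier in this series is a depth-first walk over the nested stream with an explicit stack of frames, so iteration needs no recursion and each frame is visited once. A self-contained mirror of that traversal over a toy stream type (all names illustrative, not the libsyntax API):

    use std::rc::Rc;

    enum Stream {
        Empty,
        Tree(char),
        Stream(Rc<Vec<Stream>>),
    }

    // Frames are cheap (a reference plus an index), mirroring CursorFrame.
    #[derive(Clone, Copy)]
    enum Frame<'a> {
        Empty,
        Tree(char),
        Stream(&'a [Stream], usize),
    }

    impl<'a> Frame<'a> {
        fn new(stream: &'a Stream) -> Self {
            match *stream {
                Stream::Empty => Frame::Empty,
                Stream::Tree(c) => Frame::Tree(c),
                Stream::Stream(ref v) => Frame::Stream(&v[..], 0),
            }
        }
    }

    struct Cursor<'a> {
        current: Frame<'a>,
        stack: Vec<Frame<'a>>,
    }

    impl<'a> Iterator for Cursor<'a> {
        type Item = char;

        fn next(&mut self) -> Option<char> {
            loop {
                match self.current {
                    // Descend into the stream, remembering where to resume.
                    Frame::Stream(v, i) => {
                        self.current = if i == v.len() {
                            self.stack.pop().unwrap_or(Frame::Empty)
                        } else {
                            self.stack.push(Frame::Stream(v, i + 1));
                            Frame::new(&v[i])
                        };
                    }
                    // A leaf: yield it and pop back up.
                    Frame::Tree(c) => {
                        self.current = self.stack.pop().unwrap_or(Frame::Empty);
                        return Some(c);
                    }
                    // As in libsyntax, concat never stores an empty child,
                    // so Empty here means the walk is done.
                    Frame::Empty => return None,
                }
            }
        }
    }

    fn main() {
        let s = Stream::Stream(Rc::new(vec![
            Stream::Tree('a'),
            Stream::Stream(Rc::new(vec![Stream::Tree('b'), Stream::Tree('c')])),
        ]));
        let cursor = Cursor { current: Frame::new(&s), stack: Vec::new() };
        assert_eq!(cursor.collect::<String>(), "abc");
    }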