Add proper test for literals and fix typo bug

Edwin Cheng 2019-04-05 20:58:24 +08:00
parent 1ea0238e53
commit 7abc06bd57
3 changed files with 51 additions and 5 deletions

View file

@@ -167,7 +167,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         )
     }
-    fn create_rules(macro_definition: &str) -> MacroRules {
+    pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
         let source_file = ast::SourceFile::parse(macro_definition);
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -176,7 +176,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
-    fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
+    pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
         let source_file = ast::SourceFile::parse(invocation);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -186,7 +186,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         rules.expand(&invocation_tt).unwrap()
     }
-    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+    pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
         let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
     }
@@ -338,7 +338,7 @@ SOURCE_FILE@[0; 40)
     }
     #[test]
-    fn expand_literals_to_item_list() {
+    fn expand_literals_to_token_tree() {
         fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
             if let tt::TokenTree::Subtree(subtree) = tt {
                 return &subtree;
@@ -361,6 +361,7 @@ SOURCE_FILE@[0; 40)
                         let a = 'c';
                         let c = 1000;
                         let f = 12E+99_f64;
+                        let s = "rust1";
                     }
                 }
             }
@@ -375,5 +376,7 @@ SOURCE_FILE@[0; 40)
         assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000");
         // [let] [f] [=] [12E+99_f64] [;]
         assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64");
+        // [let] [s] [=] ["rust1"] [;]
+        assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\"");
     }
 }
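
The offsets in these asserts follow from the shape of the expanded block: each `let <ident> = <literal> ;` statement contributes five token trees, with the literal at offset 3, so statement `k`'s literal sits at flat index `5 * k + 3` (the extra `1 +` in the token-source test in the next file additionally skips the block's opening `{`). A small self-contained sketch of that arithmetic, using a hypothetical helper that is not part of the commit:

    // Hypothetical helper mirroring the index arithmetic in the asserts above:
    // each `let <ident> = <literal> ;` statement is five tokens, and the literal
    // is the fourth of them, so statement k's literal lives at index 5 * k + 3.
    fn literal_index(statement: usize) -> usize {
        5 * statement + 3
    }

    fn main() {
        assert_eq!(literal_index(0), 3);      // 'c'
        assert_eq!(literal_index(1), 5 + 3);  // 1000
        assert_eq!(literal_index(2), 10 + 3); // 12E+99_f64
        assert_eq!(literal_index(3), 15 + 3); // "rust1"
    }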

View file

@@ -103,10 +103,12 @@ fn convert_tt(
     Some(res)
 }
+#[derive(Debug)]
 struct TtTokenSource {
     tokens: Vec<TtToken>,
 }
+#[derive(Debug)]
 struct TtToken {
     kind: SyntaxKind,
     is_joint_to_next: bool,
@@ -355,3 +357,44 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         self.inner.error(error, self.text_pos)
     }
 }
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::tests::{expand, create_rules};
+    #[test]
+    fn convert_tt_token_source() {
+        let rules = create_rules(
+            r#"
+            macro_rules! literals {
+                ($i:ident) => {
+                    {
+                        let a = 'c';
+                        let c = 1000;
+                        let f = 12E+99_f64;
+                        let s = "rust1";
+                    }
+                }
+            }
+            "#,
+        );
+        let expansion = expand(&rules, "literals!(foo)");
+        let tt_src = TtTokenSource::new(&expansion);
+        // [{]
+        // [let] [a] [=] ['c'] [;]
+        assert_eq!(tt_src.tokens[1 + 3].text, "'c'");
+        assert_eq!(tt_src.tokens[1 + 3].kind, CHAR);
+        // [let] [c] [=] [1000] [;]
+        assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000");
+        assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER);
+        // [let] [f] [=] [12E+99_f64] [;]
+        assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64");
+        assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER);
+        // [let] [s] [=] ["rust1"] [;]
+        assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\"");
+        assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING);
+    }
+}

View file

@@ -217,7 +217,7 @@ fn scan_literal_suffix(ptr: &mut Ptr) {
 pub fn classify_literal(text: &str) -> Option<Token> {
     let tkn = next_token(text);
-    if tkn.kind.is_literal() || tkn.len.to_usize() != text.len() {
+    if !tkn.kind.is_literal() || tkn.len.to_usize() != text.len() {
         return None;
     }
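
The guard change above is the typo fix named in the commit message: without the `!`, `classify_literal` returned `None` precisely when the token was a literal. A toy, self-contained sketch of the corrected logic (a digits-only stand-in, not the real `next_token`-based lexer) shows the intended behaviour: reject unless the entire input lexes as a single literal token.

    // Toy stand-in for the corrected guard in `classify_literal`: accept only
    // when the whole input is one literal token, mirroring
    // `if !tkn.kind.is_literal() || tkn.len.to_usize() != text.len() { return None; }`.
    fn classify_int_literal(text: &str) -> Option<&str> {
        let lexed_len = text.bytes().take_while(|b| b.is_ascii_digit()).count();
        let is_literal = lexed_len > 0;
        if !is_literal || lexed_len != text.len() {
            return None;
        }
        Some(text)
    }

    fn main() {
        assert_eq!(classify_int_literal("1000"), Some("1000")); // whole input is a literal
        assert_eq!(classify_int_literal("1000;"), None);        // trailing token rejected
        assert_eq!(classify_int_literal("foo"), None);          // not a literal at all
    }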