rust/tests/lexer.rs
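//! Data-driven lexer tests: each collected test case is tokenized and the
//! resulting token dump is compared against the expected `.txt` file that
//! sits next to the input.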

extern crate file;
extern crate libsyntax2;
extern crate testutils;

use std::path::Path;
use std::fmt::Write;

use libsyntax2::{Token, tokenize};
use testutils::{assert_equal_text, collect_tests};

#[test]
fn lexer_tests() {
    // Run the lexer over every test case collected for the "lexer" suite.
    for test_case in collect_tests(&["lexer"]) {
        lexer_test_case(&test_case);
    }
}

fn lexer_test_case(path: &Path) {
    let actual = {
        let text = file::get_text(path).unwrap();
        let tokens = tokenize(&text);
        dump_tokens(&tokens, &text)
    };
    // The expected token dump lives next to the input file, with a `.txt` extension.
    let path = path.with_extension("txt");
    let expected = file::get_text(&path).unwrap();
    let expected = expected.as_str();
    let actual = actual.as_str();
    assert_equal_text(expected, actual, &path)
}

fn dump_tokens(tokens: &[Token], text: &str) -> String {
    let mut acc = String::new();
    let mut offset = 0;
    for token in tokens {
        let len: u32 = token.len.into();
        let len = len as usize;
        // Slice the token's text out of the source by its length, then advance the offset.
        let token_text = &text[offset..offset + len];
        offset += len;
        write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
    }
    acc
}