//! rott/rottlib/tests/diagnostics_expressions.rs
//!
//! Integration tests for the diagnostics the parser emits while parsing
//! expressions (codes P0001–P0003).

use std::collections::HashMap;
use rottlib::arena::Arena;
use rottlib::diagnostics::Diagnostic;
use rottlib::lexer::{TokenPosition, TokenSpan, TokenizedFile};
use rottlib::parser::Parser;
/// One diagnostic test case: a source snippet paired with the diagnostic
/// code it is expected to trigger.
#[derive(Debug, Clone, Copy)]
pub struct Fixture {
    /// Diagnostic code this fixture should produce (e.g. "P0001").
    pub code: &'static str,
    /// Pseudo file name used when rendering the diagnostic.
    pub label: &'static str,
    /// Source text fed to the tokenizer and parser.
    pub source: &'static str,
}
/// Table of fixtures, grouped by expected diagnostic code.
/// Each snippet is deliberately malformed so the parser reports exactly
/// one diagnostic; the assertions in the `check_*` tests below pin the
/// headline, code, span, and primary-label message for each entry.
pub const FIXTURES: &[Fixture] = &[
    Fixture {
        code: "P0001",
        label: "files/P0001_01.uc",
        source: "c && ( /*lol*/ ** calc_it())",
    },
    Fixture {
        code: "P0001",
        label: "files/P0001_02.uc",
        source: "\r\na + (\n//AAA\n//BBB\n//CCC\n//DDD\n//EEE\n//FFF\n ]",
    },
    Fixture {
        code: "P0001",
        label: "files/P0001_03.uc",
        source: "(\n// nothing here, bucko",
    },
    Fixture {
        code: "P0002",
        label: "files/P0002_01.uc",
        source: "a + [",
    },
    Fixture {
        code: "P0002",
        label: "files/P0002_02.uc",
        source: "a * \n//some\n//empty lines\n *",
    },
    Fixture {
        code: "P0002",
        label: "files/P0002_03.uc",
        source: "a &&",
    },
    Fixture {
        code: "P0002",
        label: "files/P0002_04.uc",
        source: "a * * *",
    },
    Fixture {
        code: "P0003",
        label: "files/P0003_01.uc",
        source: "(a + b && c / d ^ e @ f",
    },
    Fixture {
        code: "P0003",
        label: "files/P0003_02.uc",
        source: "(a]",
    },
    Fixture {
        code: "P0003",
        label: "files/P0003_03.uc",
        source: "(a\n;",
    },
];
/// Outcome of running a single fixture through the parser.
pub struct FixtureRun<'src> {
    /// The fixture that was executed.
    pub fixture: &'static Fixture,
    /// Tokenized form of the fixture source; needed to render diagnostics.
    pub file: TokenizedFile<'src>,
    /// Diagnostics the parser emitted while parsing the expression.
    pub diagnostics: Vec<Diagnostic>,
}
/// Collection of fixture runs, keyed by fixture label.
pub struct FixtureRuns<'src> {
    runs: HashMap<&'static str, FixtureRun<'src>>,
}
impl<'src> FixtureRuns<'src> {
    /// Returns a clone of the diagnostics produced for `label`, or `None`
    /// when no fixture with that label was run.
    pub fn get(&self, label: &str) -> Option<Vec<Diagnostic>> {
        self.runs
            .get(label)
            .map(|fixture_run| fixture_run.diagnostics.clone())
    }

    /// Returns the first diagnostic produced for `label`.
    ///
    /// # Panics
    ///
    /// Panics with a descriptive message when the label is unknown or the
    /// fixture produced no diagnostics. (The previous implementation used a
    /// bare `unwrap()` plus an unchecked `[0]` index, so a failing fixture
    /// aborted the test with a message that named neither the label nor the
    /// reason.)
    pub fn get_any(&self, label: &str) -> Diagnostic {
        let fixture_run = self
            .runs
            .get(label)
            .unwrap_or_else(|| panic!("no fixture run recorded for label `{label}`"));
        fixture_run
            .diagnostics
            .first()
            .cloned()
            .unwrap_or_else(|| panic!("fixture `{label}` produced no diagnostics"))
    }

    /// Iterates over `(label, run)` pairs in arbitrary (`HashMap`) order.
    pub fn iter(&self) -> impl Iterator<Item = (&'static str, &FixtureRun<'src>)> {
        self.runs.iter().map(|(label, run)| (*label, run))
    }
}
/// Tokenizes a fixture's source and parses it as a single expression,
/// capturing whatever diagnostics the parser emitted.
fn run_fixture(fixture: &'static Fixture) -> FixtureRun<'static> {
    let arena = Arena::new();
    let file = TokenizedFile::tokenize(fixture.source);
    let mut parser = Parser::new(&file, &arena);
    // The parse result itself is discarded: only the diagnostics emitted
    // as a side effect are under test here.
    let _ = parser.parse_expression();
    // Cloned out so the run does not have to keep the parser (and its
    // borrow of the arena) alive.
    let diagnostics = parser.diagnostics.clone();
    FixtureRun {
        fixture,
        file,
        diagnostics,
    }
}
/// Executes every fixture whose diagnostic code matches `code`, renders
/// each produced diagnostic to stdout (useful when a test fails), and
/// returns the collected runs keyed by label.
pub fn run_fixtures(code: &str) -> FixtureRuns<'static> {
    let mut runs = HashMap::new();
    for fixture in FIXTURES {
        if fixture.code != code {
            continue;
        }
        runs.insert(fixture.label, run_fixture(fixture));
    }
    // Render everything up front so diagnostic output is visible in the
    // captured test log even when an assertion fails later.
    for (label, run) in &runs {
        for diagnostic in &run.diagnostics {
            diagnostic.render(&run.file, *label);
        }
        println!();
    }
    FixtureRuns { runs }
}
#[test]
fn check_p0001_fixtures() {
    let runs = run_fixtures("P0001");

    // Every P0001 fixture must produce exactly one diagnostic.
    for label in [
        "files/P0001_01.uc",
        "files/P0001_02.uc",
        "files/P0001_03.uc",
    ] {
        assert_eq!(runs.get(label).unwrap().len(), 1);
    }

    // Look each diagnostic up once instead of re-fetching per assertion.
    let first = runs.get_any("files/P0001_01.uc");
    let second = runs.get_any("files/P0001_02.uc");
    let third = runs.get_any("files/P0001_03.uc");

    // Headlines.
    assert_eq!(
        first.headline(),
        "expected expression inside parentheses, found `**`"
    );
    assert_eq!(
        second.headline(),
        "expected expression inside parentheses, found `]`"
    );
    assert_eq!(third.headline(), "expected expression, found end of file");

    // Diagnostic codes.
    assert_eq!(first.code(), Some("P0001"));
    assert_eq!(second.code(), Some("P0001"));
    assert_eq!(third.code(), Some("P0001"));

    // Primary-label spans.
    assert_eq!(
        first.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(8),
            end: TokenPosition(8)
        }
    );
    assert_eq!(
        second.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(5),
            end: TokenPosition(20)
        }
    );
    assert_eq!(
        third.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(0),
            end: TokenPosition(3)
        }
    );

    // Primary-label messages.
    assert_eq!(first.primary_label().unwrap().message, "unexpected `**`");
    assert_eq!(second.primary_label().unwrap().message, "unexpected `]`");
    assert_eq!(
        third.primary_label().unwrap().message,
        "reached end of file here"
    );
}
#[test]
fn check_p0002_fixtures() {
    let runs = run_fixtures("P0002");

    // Every P0002 fixture must produce exactly one diagnostic.
    for label in [
        "files/P0002_01.uc",
        "files/P0002_02.uc",
        "files/P0002_03.uc",
        "files/P0002_04.uc",
    ] {
        assert_eq!(runs.get(label).unwrap().len(), 1);
    }

    // Look each diagnostic up once instead of re-fetching per assertion.
    let first = runs.get_any("files/P0002_01.uc");
    let second = runs.get_any("files/P0002_02.uc");
    let third = runs.get_any("files/P0002_03.uc");
    let fourth = runs.get_any("files/P0002_04.uc");

    // Headlines.
    assert_eq!(
        first.headline(),
        "expected expression after `+`, found `[`"
    );
    assert_eq!(
        second.headline(),
        "expected expression after `*`, found `*`"
    );
    assert_eq!(
        third.headline(),
        "expected expression after `&&`, found end of file"
    );
    assert_eq!(
        fourth.headline(),
        "expected expression after `*`, found `*`"
    );

    // Diagnostic codes.
    assert_eq!(first.code(), Some("P0002"));
    assert_eq!(second.code(), Some("P0002"));
    assert_eq!(third.code(), Some("P0002"));
    assert_eq!(fourth.code(), Some("P0002"));

    // Primary-label spans.
    assert_eq!(
        first.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(4),
            end: TokenPosition(4),
        }
    );
    assert_eq!(
        second.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(10),
            end: TokenPosition(10),
        }
    );
    assert_eq!(
        third.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(3),
            end: TokenPosition(3),
        }
    );
    assert_eq!(
        fourth.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(4),
            end: TokenPosition(4),
        }
    );

    // Primary-label messages.
    assert_eq!(first.primary_label().unwrap().message, "unexpected `[`");
    assert_eq!(second.primary_label().unwrap().message, "unexpected `*`");
    assert_eq!(
        third.primary_label().unwrap().message,
        "reached end of file here"
    );
    assert_eq!(fourth.primary_label().unwrap().message, "unexpected `*`");
}
#[test]
fn check_p0003_fixtures() {
    let runs = run_fixtures("P0003");

    // Every P0003 fixture must produce exactly one diagnostic.
    for label in [
        "files/P0003_01.uc",
        "files/P0003_02.uc",
        "files/P0003_03.uc",
    ] {
        assert_eq!(runs.get(label).unwrap().len(), 1);
    }

    // Look each diagnostic up once instead of re-fetching per assertion.
    let first = runs.get_any("files/P0003_01.uc");
    let second = runs.get_any("files/P0003_02.uc");
    let third = runs.get_any("files/P0003_03.uc");

    // All P0003 fixtures share the same headline.
    assert_eq!(
        first.headline(),
        "missing `)` to close parenthesized expression"
    );
    assert_eq!(
        second.headline(),
        "missing `)` to close parenthesized expression"
    );
    assert_eq!(
        third.headline(),
        "missing `)` to close parenthesized expression"
    );

    // Diagnostic codes.
    assert_eq!(first.code(), Some("P0003"));
    assert_eq!(second.code(), Some("P0003"));
    assert_eq!(third.code(), Some("P0003"));

    // Primary-label spans.
    assert_eq!(
        first.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(22),
            end: TokenPosition(22),
        }
    );
    assert_eq!(
        second.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(2),
            end: TokenPosition(2),
        }
    );
    assert_eq!(
        third.primary_label().unwrap().span,
        TokenSpan {
            start: TokenPosition(0),
            end: TokenPosition(3),
        }
    );

    // Primary-label messages.
    assert_eq!(
        first.primary_label().unwrap().message,
        "expected `)` before end of file"
    );
    assert_eq!(
        second.primary_label().unwrap().message,
        "expected `)` before `]`"
    );
    assert_eq!(
        third.primary_label().unwrap().message,
        "expected `)` before `;`"
    );
}