test: Add proc macro DSL for declarative tests.

This commit is contained in:
2026-03-13 21:58:17 -04:00
parent cb7911e5c9
commit 522b9f2894
4 changed files with 1103 additions and 0 deletions

View File

@@ -0,0 +1,455 @@
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
use crate::parse::{ActionExpr, Fixtures, PiklTests, TestCase, TestKind, TestModule};
/// Generate all test modules from the parsed DSL.
pub fn generate(input: &PiklTests) -> syn::Result<TokenStream> {
let mut output = TokenStream::new();
for module in &input.modules {
output.extend(gen_module(module)?);
}
Ok(output)
}
/// Generate a single `mod name { ... }` block with test
/// functions and the right imports for the test kind.
fn gen_module(module: &TestModule) -> syn::Result<TokenStream> {
let mod_name = &module.name;
let mut test_fns = Vec::new();
for case in &module.tests {
let tokens = match module.kind {
TestKind::Headless => gen_headless(case, &module.fixtures)?,
TestKind::Filter => gen_filter(case, &module.fixtures)?,
TestKind::Nav => gen_nav(case, &module.fixtures)?,
TestKind::Menu => gen_menu(case, &module.fixtures)?,
};
test_fns.push(tokens);
}
let imports = gen_imports(module.kind);
Ok(quote! {
mod #mod_name {
#imports
#(#test_fns)*
}
})
}
/// Emit the `use` imports needed for a given test kind.
fn gen_imports(kind: TestKind) -> TokenStream {
    // Each arm yields its token stream directly; no intermediate blocks.
    match kind {
        TestKind::Headless => quote! { use super::*; },
        TestKind::Filter => quote! {
            use pikl_core::item::Item;
            use pikl_core::filter::{Filter, FuzzyFilter};
        },
        TestKind::Nav => quote! {
            use pikl_core::navigation::Viewport;
        },
        TestKind::Menu => quote! {
            use pikl_core::item::Item;
            use pikl_core::event::{Action, MenuEvent, MenuResult};
            use pikl_core::menu::MenuRunner;
            use pikl_core::json_menu::JsonMenu;
        },
    }
}
// ---------------------------------------------------------------------------
// Headless
// ---------------------------------------------------------------------------
/// Generate a headless integration test that spawns the pikl
/// binary, feeds it items and a script, and asserts on
/// stdout/stderr/exit code.
fn gen_headless(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
let test_name = &case.name;
// Build items string: "item1\nitem2\n"
let items_str = match &fixtures.items {
Some(items) => {
let mut s = String::new();
for item in items {
s.push_str(item);
s.push('\n');
}
s
}
None => String::new(),
};
// Build script string from actions
let script = build_headless_script(&case.actions);
// Build extra CLI args (e.g. --label-key)
let extra_args: Vec<TokenStream> = if let Some(ref key) = fixtures.label_key {
vec![quote! { "--label-key" }, quote! { #key }]
} else {
Vec::new()
};
// Build assertions
let mut asserts = Vec::new();
if let Some(exit) = case.exit_code {
asserts.push(quote! {
assert_eq!(
code, #exit,
"expected exit {}, got {}, stderr: {}", #exit, code, stderr
);
});
}
if let Some(ref expected) = case.stdout {
if expected.is_empty() {
asserts.push(quote! {
assert!(
stdout.trim().is_empty(),
"expected empty stdout, got: {}", stdout
);
});
} else {
asserts.push(quote! {
assert!(
stdout.contains(#expected),
"expected stdout to contain {:?}, got: {}", #expected, stdout
);
});
}
}
if let Some(ref expected) = case.stderr_contains {
asserts.push(quote! {
assert!(
stderr.contains(#expected),
"expected stderr to contain {:?}, got: {}", #expected, stderr
);
});
}
Ok(quote! {
#[test]
fn #test_name() {
let (stdout, stderr, code) = run_pikl(#items_str, #script, &[#(#extra_args),*]);
#(#asserts)*
}
})
}
/// Turn action expressions into the newline-delimited script
/// text that gets piped to `--action-fd`.
fn build_headless_script(actions: &[ActionExpr]) -> String {
    actions
        .iter()
        .filter_map(|action| match action {
            ActionExpr::Simple(name) => Some(format!("{name}\n")),
            ActionExpr::Filter(query) => Some(format!("filter {query}\n")),
            ActionExpr::Raw(line) => Some(format!("{line}\n")),
            // Not applicable for headless: items come from stdin.
            ActionExpr::AddItems(_) => None,
        })
        .collect()
}
// ---------------------------------------------------------------------------
// Filter
// ---------------------------------------------------------------------------
/// Generate a filter unit test: create items, push them
/// into a `FuzzyFilter`, set the query, and assert on
/// matched labels.
fn gen_filter(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
    let test_name = &case.name;
    let item_exprs = gen_item_constructors(fixtures);
    let query = case.query.as_deref().unwrap_or("");
    // Optional label assertion; `Option<TokenStream>` interpolates to
    // nothing when the case has no `matches:` expectation.
    let label_assert = case.match_labels.as_ref().map(|expected| {
        quote! {
            let labels: Vec<&str> = (0..f.matched_count())
                .filter_map(|i| f.matched_index(i))
                .map(|idx| items[idx].label())
                .collect();
            let expected: Vec<&str> = vec![#(#expected),*];
            assert_eq!(
                labels, expected,
                "query {:?}: expected {:?}, got {:?}", #query, expected, labels
            );
        }
    });
    Ok(quote! {
        #[test]
        fn #test_name() {
            let items: Vec<Item> = vec![#(#item_exprs),*];
            let mut f = FuzzyFilter::new();
            for (i, item) in items.iter().enumerate() {
                f.push(i, item.label());
            }
            f.set_query(#query);
            #label_assert
        }
    })
}
// ---------------------------------------------------------------------------
// Nav
// ---------------------------------------------------------------------------
/// Generate a navigation unit test: create a viewport, run
/// movement actions, and assert on cursor/offset.
fn gen_nav(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
    let test_name = &case.name;
    // Fall back to a 10-row viewport over 20 items when no fixture is given.
    let (height, count) = fixtures.viewport.unwrap_or((10, 20));
    let action_calls = gen_nav_actions(&case.actions)?;
    // Optional assertions interpolate to nothing when unset.
    let cursor_assert = case.cursor.map(|cursor| {
        quote! {
            assert_eq!(
                v.cursor(), #cursor,
                "expected cursor {}, got {}", #cursor, v.cursor()
            );
        }
    });
    let offset_assert = case.offset.map(|offset| {
        quote! {
            assert_eq!(
                v.scroll_offset(), #offset,
                "expected offset {}, got {}", #offset, v.scroll_offset()
            );
        }
    });
    Ok(quote! {
        #[test]
        fn #test_name() {
            let mut v = Viewport::new();
            v.set_height(#height);
            v.set_filtered_count(#count);
            #(#action_calls)*
            #cursor_assert
            #offset_assert
        }
    })
}
/// Convert DSL action names to `Viewport` method calls
/// (e.g. `move-down` becomes `v.move_down(1)`).
///
/// Only `ActionExpr::Simple` is supported here; any other
/// action form is a compile error for the DSL user.
fn gen_nav_actions(actions: &[ActionExpr]) -> syn::Result<Vec<TokenStream>> {
    let mut calls = Vec::new();
    for action in actions {
        match action {
            ActionExpr::Simple(name) => {
                // DSL names are kebab-case; Viewport methods are snake_case.
                let method = Ident::new(&name.replace('-', "_"), Span::call_site());
                // Movement methods take a step count; the others are nullary.
                let needs_count = matches!(
                    name.as_str(),
                    "move-up" | "move-down" | "page-up" | "page-down"
                );
                if needs_count {
                    calls.push(quote! { v.#method(1); });
                } else {
                    calls.push(quote! { v.#method(); });
                }
            }
            _ => {
                // `action_debug` already renders a human-readable form, so
                // format it with `{}`; `{:?}` would re-quote and escape it
                // (e.g. `"filter \"q\""`).
                return Err(syn::Error::new(
                    Span::call_site(),
                    format!(
                        "nav tests only support simple actions, got: {}",
                        action_debug(action)
                    ),
                ));
            }
        }
    }
    Ok(calls)
}
// ---------------------------------------------------------------------------
// Menu
// ---------------------------------------------------------------------------
/// Generate an async menu state machine test: create a menu,
/// send actions, and assert on the final result (selected
/// item or cancellation).
fn gen_menu(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
    let test_name = &case.name;
    let item_exprs = gen_item_constructors(fixtures);
    // Fall back to the "label" key when the fixtures don't set one.
    let label_key = fixtures.label_key.as_deref().unwrap_or("label");
    let action_sends = gen_menu_actions(&case.actions)?;
    // Final-result assertion: `cancelled` takes precedence over `selected`;
    // with neither, the generated test only checks that it runs to completion.
    let result_assert = if case.cancelled {
        quote! {
            assert!(
                matches!(result, Ok(MenuResult::Cancelled)),
                "expected Cancelled, got: {:?}", result.as_ref().map(|r| format!("{:?}", r))
            );
        }
    } else if let Some(ref expected) = case.selected {
        quote! {
            match &result {
                Ok(MenuResult::Selected(value)) => {
                    // The selected value is either a plain string or an object
                    // whose label lives under the configured label key.
                    let got = value.as_str()
                        .or_else(|| value.get(#label_key).and_then(|v| v.as_str()))
                        .unwrap_or("");
                    assert_eq!(
                        got, #expected,
                        "expected selected {:?}, got value: {:?}", #expected, value
                    );
                }
                other => panic!("expected Selected, got: {:?}", other),
            }
        }
    } else {
        // No assertion on result. Probably an error, but let it compile.
        quote! {}
    };
    // If test expects cancellation via sender drop (no cancel action, no confirm),
    // we need to drop tx after sending actions.
    // NOTE(review): drop_sender is emitted unconditionally, not only in the
    // sender-drop case described above — presumably the menu loop tolerates a
    // closed channel after confirm/cancel too; confirm against MenuRunner.
    let drop_sender = quote! { drop(tx); };
    Ok(quote! {
        #[tokio::test]
        async fn #test_name() {
            let items = vec![#(#item_exprs),*];
            let (menu, tx) = MenuRunner::new(JsonMenu::new(items, #label_key.to_string()));
            let mut rx = menu.subscribe();
            let handle = tokio::spawn(async move { menu.run().await });
            // Wait for initial state broadcast.
            let _ = rx.recv().await;
            // Give the viewport some height so confirms work.
            let _ = tx.send(Action::Resize { height: 50 }).await;
            let _ = rx.recv().await;
            // Send all actions.
            #(#action_sends)*
            // Drop sender so menu loop can exit.
            #drop_sender
            let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
            #result_assert
        }
    })
}
/// Convert DSL actions to `tx.send(Action::...)` calls for
/// menu tests.
fn gen_menu_actions(actions: &[ActionExpr]) -> syn::Result<Vec<TokenStream>> {
let mut sends = Vec::new();
for action in actions {
let expr = match action {
ActionExpr::Simple(name) => {
let variant = menu_action_variant(name)?;
quote! { let _ = tx.send(#variant).await; }
}
ActionExpr::Filter(query) => {
quote! {
let _ = tx.send(Action::UpdateFilter(#query.to_string())).await;
}
}
ActionExpr::AddItems(items) => {
let item_exprs: Vec<TokenStream> = items
.iter()
.map(|s| quote! { serde_json::Value::String(#s.to_string()) })
.collect();
quote! {
let _ = tx.send(Action::AddItems(vec![#(#item_exprs),*])).await;
}
}
ActionExpr::Raw(_) => {
return Err(syn::Error::new(
Span::call_site(),
"raw actions are only supported in headless tests",
));
}
};
sends.push(expr);
}
Ok(sends)
}
/// Map a DSL action name like `"move-down"` to the
/// corresponding `Action::MoveDown` token stream.
fn menu_action_variant(name: &str) -> syn::Result<TokenStream> {
let tokens = match name {
"confirm" => quote! { Action::Confirm },
"cancel" => quote! { Action::Cancel },
"move-down" => quote! { Action::MoveDown(1) },
"move-up" => quote! { Action::MoveUp(1) },
"move-to-top" => quote! { Action::MoveToTop },
"move-to-bottom" => quote! { Action::MoveToBottom },
"page-up" => quote! { Action::PageUp(1) },
"page-down" => quote! { Action::PageDown(1) },
_ => {
return Err(syn::Error::new(
Span::call_site(),
format!("unknown menu action: '{name}'"),
));
}
};
Ok(tokens)
}
// ---------------------------------------------------------------------------
// Shared helpers
// ---------------------------------------------------------------------------
/// Generate `Item::from_plain_text("...")` expressions for
/// each item in the fixtures. Empty when no items are set.
fn gen_item_constructors(fixtures: &Fixtures) -> Vec<TokenStream> {
    fixtures
        .items
        .as_deref()
        .unwrap_or(&[])
        .iter()
        .map(|label| quote! { Item::from_plain_text(#label) })
        .collect()
}
/// Format an action expression for use in error messages.
fn action_debug(action: &ActionExpr) -> String {
match action {
ActionExpr::Simple(name) => name.clone(),
ActionExpr::Filter(q) => format!("filter \"{q}\""),
ActionExpr::Raw(r) => format!("raw \"{r}\""),
ActionExpr::AddItems(items) => format!("add-items {:?}", items),
}
}

View File

@@ -0,0 +1,25 @@
extern crate proc_macro;
mod codegen;
mod parse;
use proc_macro::TokenStream;
/// Test DSL for pikl-menu. Generates individual test
/// functions from a concise block-based syntax.
///
/// Supported test kinds:
/// - `headless`: integration tests that spawn the pikl binary
/// - `filter`: unit tests for fuzzy filter matching
/// - `nav`: unit tests for viewport/cursor math
/// - `menu`: async unit tests for the menu state machine
///
/// See the project's test files for usage examples.
#[proc_macro]
pub fn pikl_tests(input: TokenStream) -> TokenStream {
let parsed = syn::parse_macro_input!(input as parse::PiklTests);
match codegen::generate(&parsed) {
Ok(tokens) => tokens.into(),
Err(err) => err.to_compile_error().into(),
}
}

View File

@@ -0,0 +1,428 @@
use proc_macro2::Span;
use syn::{
Ident, LitInt, LitStr, Token, braced, bracketed,
parse::{Parse, ParseStream},
};
// ---------------------------------------------------------------------------
// AST types
// ---------------------------------------------------------------------------
/// Root of the DSL: the full macro input, a sequence of
/// test modules.
pub struct PiklTests {
    pub modules: Vec<TestModule>,
}
/// One `<kind> mod <name> { ... }` block: module-level
/// fixtures plus the test cases that share them.
pub struct TestModule {
    pub kind: TestKind,
    pub name: Ident,
    pub fixtures: Fixtures,
    pub tests: Vec<TestCase>,
}
/// Which code generator a module's tests are routed through.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum TestKind {
    Headless,
    Filter,
    Nav,
    Menu,
}
/// Module-level fixtures shared by every test in a module.
/// All fields are optional; generators supply their own
/// defaults when a fixture is absent.
pub struct Fixtures {
    pub items: Option<Vec<String>>,
    pub label_key: Option<String>,
    pub viewport: Option<(usize, usize)>, // (height, count)
}
/// A single `test name { ... }` case. Which fields are
/// meaningful depends on the module's kind; fields left as
/// `None` simply produce no assertion.
pub struct TestCase {
    pub name: Ident,
    pub actions: Vec<ActionExpr>,
    pub stdout: Option<String>,
    pub stderr_contains: Option<String>,
    pub exit_code: Option<i32>,
    pub query: Option<String>,
    pub match_labels: Option<Vec<String>>,
    pub cursor: Option<usize>,
    pub offset: Option<usize>,
    pub selected: Option<String>,
    pub cancelled: bool,
}
/// One entry of a test's `actions: [...]` list.
pub enum ActionExpr {
    /// A simple action like "confirm", "cancel", "move-down".
    Simple(String),
    /// `filter "query text"`
    Filter(String),
    /// `raw "literal script line"`
    Raw(String),
    /// `add-items ["a", "b", "c"]`
    AddItems(Vec<String>),
}
// ---------------------------------------------------------------------------
// Top-level parse
// ---------------------------------------------------------------------------
impl Parse for PiklTests {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut modules = Vec::new();
while !input.is_empty() {
modules.push(input.parse()?);
}
Ok(PiklTests { modules })
}
}
// ---------------------------------------------------------------------------
// Module parse
// ---------------------------------------------------------------------------
impl Parse for TestModule {
    /// Parse `<kind> mod <name> { <fixtures and tests> }`.
    ///
    /// Fixture fields (`items`, `label_key`, `viewport`) and `test`
    /// blocks may appear in any order inside the braces.
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let kind = parse_kind(input)?;
        input.parse::<Token![mod]>()?;
        let name: Ident = input.parse()?;
        let content;
        braced!(content in input);
        let mut fixtures = Fixtures {
            items: None,
            label_key: None,
            viewport: None,
        };
        let mut tests = Vec::new();
        while !content.is_empty() {
            // parse_ident_or_keyword only peeks (it forks internally and
            // never consumes), so we can dispatch on the upcoming field
            // name directly — no extra fork needed.
            let ident_str = parse_ident_or_keyword(&content)?;
            match ident_str.as_str() {
                "test" => {
                    tests.push(parse_test_case(&content)?);
                }
                "items" => {
                    consume_ident_or_keyword(&content)?;
                    content.parse::<Token![:]>()?;
                    fixtures.items = Some(parse_string_list(&content)?);
                    eat_semi(&content);
                }
                "label_key" => {
                    consume_ident_or_keyword(&content)?;
                    content.parse::<Token![:]>()?;
                    let val: LitStr = content.parse()?;
                    fixtures.label_key = Some(val.value());
                    eat_semi(&content);
                }
                "viewport" => {
                    consume_ident_or_keyword(&content)?;
                    content.parse::<Token![:]>()?;
                    fixtures.viewport = Some(parse_viewport_def(&content)?);
                    eat_semi(&content);
                }
                _ => {
                    return Err(syn::Error::new(
                        content.span(),
                        format!(
                            "unexpected field '{ident_str}', expected test, items, label_key, or viewport"
                        ),
                    ));
                }
            }
        }
        Ok(TestModule {
            kind,
            name,
            fixtures,
            tests,
        })
    }
}
// ---------------------------------------------------------------------------
// Test case parse
// ---------------------------------------------------------------------------
/// Parse a single `test name { ... }` block inside a module.
///
/// Recognised fields: `actions`, `stdout`, `stderr contains`, `exit`,
/// `query`, `matches`, `cursor`, `offset`, `selected`, and `cancelled`
/// (bare, `cancelled: true/false`, or legacy `cancelled: <ident>`).
fn parse_test_case(input: ParseStream) -> syn::Result<TestCase> {
    consume_ident_or_keyword(input)?; // "test"
    let name: Ident = input.parse()?;
    let content;
    braced!(content in input);
    let mut case = TestCase {
        name,
        actions: Vec::new(),
        stdout: None,
        stderr_contains: None,
        exit_code: None,
        query: None,
        match_labels: None,
        cursor: None,
        offset: None,
        selected: None,
        cancelled: false,
    };
    while !content.is_empty() {
        let field = parse_ident_or_keyword(&content)?;
        // Consume the ident we just peeked.
        consume_ident_or_keyword(&content)?;
        match field.as_str() {
            "actions" => {
                content.parse::<Token![:]>()?;
                case.actions = parse_action_list(&content)?;
            }
            "stdout" => {
                content.parse::<Token![:]>()?;
                let val: LitStr = content.parse()?;
                case.stdout = Some(val.value());
            }
            "stderr" => {
                // stderr contains: "text"
                let kw = parse_ident_or_keyword(&content)?;
                consume_ident_or_keyword(&content)?;
                if kw != "contains" {
                    return Err(syn::Error::new(
                        content.span(),
                        "expected 'contains' after 'stderr'",
                    ));
                }
                content.parse::<Token![:]>()?;
                let val: LitStr = content.parse()?;
                case.stderr_contains = Some(val.value());
            }
            "exit" => {
                content.parse::<Token![:]>()?;
                let val: LitInt = content.parse()?;
                case.exit_code = Some(val.base10_parse()?);
            }
            "query" => {
                content.parse::<Token![:]>()?;
                let val: LitStr = content.parse()?;
                case.query = Some(val.value());
            }
            "matches" => {
                content.parse::<Token![:]>()?;
                case.match_labels = Some(parse_string_list(&content)?);
            }
            "cursor" => {
                content.parse::<Token![:]>()?;
                let val: LitInt = content.parse()?;
                case.cursor = Some(val.base10_parse()?);
            }
            "offset" => {
                content.parse::<Token![:]>()?;
                let val: LitInt = content.parse()?;
                case.offset = Some(val.base10_parse()?);
            }
            "selected" => {
                content.parse::<Token![:]>()?;
                let val: LitStr = content.parse()?;
                case.selected = Some(val.value());
            }
            "cancelled" => {
                // Bare `cancelled` means true. `cancelled: true/false` is
                // also accepted — note that `true`/`false` are Rust keywords
                // and lex as `LitBool`, not `Ident`, so they must be parsed
                // as bool literals (peeking `Ident` would miss them and leave
                // the token behind, breaking the next field parse).
                if content.peek(Token![:]) {
                    content.parse::<Token![:]>()?;
                    if content.peek(syn::LitBool) {
                        let val: syn::LitBool = content.parse()?;
                        case.cancelled = val.value;
                    } else {
                        // Legacy form: any ident after the colon counts as true.
                        if content.peek(Ident) {
                            consume_ident_or_keyword(&content)?;
                        }
                        case.cancelled = true;
                    }
                } else {
                    case.cancelled = true;
                }
            }
            _ => {
                return Err(syn::Error::new(
                    content.span(),
                    format!("unknown test field: '{field}'"),
                ));
            }
        }
    }
    Ok(case)
}
// ---------------------------------------------------------------------------
// Action parsing
// ---------------------------------------------------------------------------
/// Parse `[action1, action2, ...]` inside a test case's
/// `actions:` field. Separating commas are optional.
fn parse_action_list(input: ParseStream) -> syn::Result<Vec<ActionExpr>> {
    let inner;
    bracketed!(inner in input);
    let mut actions = Vec::new();
    while !inner.is_empty() {
        actions.push(parse_action_expr(&inner)?);
        // Consume a separator comma if one is present.
        let _: Option<Token![,]> = inner.parse()?;
    }
    Ok(actions)
}
/// Parse a single action expression: `confirm`, `filter "text"`,
/// `raw "line"`, or `add-items ["a", "b"]`.
fn parse_action_expr(input: ParseStream) -> syn::Result<ActionExpr> {
    let head = parse_hyphenated_name(input)?;
    if head == "filter" {
        let lit: LitStr = input.parse()?;
        Ok(ActionExpr::Filter(lit.value()))
    } else if head == "raw" {
        let lit: LitStr = input.parse()?;
        Ok(ActionExpr::Raw(lit.value()))
    } else if head == "add-items" {
        Ok(ActionExpr::AddItems(parse_string_list(input)?))
    } else {
        // Anything else is a plain action name like "confirm".
        Ok(ActionExpr::Simple(head))
    }
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Parse the test module kind keyword: `headless`, `filter`,
/// `nav`, or `menu`.
fn parse_kind(input: ParseStream) -> syn::Result<TestKind> {
let ident: Ident = input.parse()?;
match ident.to_string().as_str() {
"headless" => Ok(TestKind::Headless),
"filter" => Ok(TestKind::Filter),
"nav" => Ok(TestKind::Nav),
"menu" => Ok(TestKind::Menu),
other => Err(syn::Error::new(
ident.span(),
format!("unknown test kind '{other}', expected headless, filter, nav, or menu"),
)),
}
}
/// Parse a potentially hyphenated name like "move-down" or "move-to-top".
/// Handles the `move` keyword specially since it's reserved in Rust
/// (syn's `Ident` parser rejects keywords).
fn parse_hyphenated_name(input: ParseStream) -> syn::Result<String> {
    let mut name = String::new();
    // First segment: might be the `move` keyword or a regular ident.
    if input.peek(Token![move]) {
        input.parse::<Token![move]>()?;
        name.push_str("move");
    } else {
        let ident: Ident = input.parse()?;
        name.push_str(&ident.to_string());
    }
    // Consume hyphenated continuations: `-ident`
    // Be careful not to consume `-` that's actually a negative number.
    while input.peek(Token![-]) && !input.peek2(LitInt) {
        input.parse::<Token![-]>()?;
        name.push('-');
        // A continuation segment may itself be the `move` keyword,
        // so handle it the same way as the first segment.
        if input.peek(Token![move]) {
            input.parse::<Token![move]>()?;
            name.push_str("move");
        } else {
            let next: Ident = input.parse()?;
            name.push_str(&next.to_string());
        }
    }
    Ok(name)
}
/// Parse `["a", "b", "c"]`. Separating commas are optional.
fn parse_string_list(input: ParseStream) -> syn::Result<Vec<String>> {
    let inner;
    bracketed!(inner in input);
    let mut items = Vec::new();
    while !inner.is_empty() {
        items.push(inner.parse::<LitStr>()?.value());
        // Consume a separator comma if one is present.
        let _: Option<Token![,]> = inner.parse()?;
    }
    Ok(items)
}
/// Parse `{ height: N, count: N }`.
fn parse_viewport_def(input: ParseStream) -> syn::Result<(usize, usize)> {
let content;
braced!(content in input);
let mut height: Option<usize> = None;
let mut count: Option<usize> = None;
while !content.is_empty() {
let key: Ident = content.parse()?;
content.parse::<Token![:]>()?;
let val: LitInt = content.parse()?;
let n: usize = val.base10_parse()?;
match key.to_string().as_str() {
"height" => height = Some(n),
"count" => count = Some(n),
other => {
return Err(syn::Error::new(
key.span(),
format!("unknown viewport field '{other}', expected height or count"),
));
}
}
if content.peek(Token![,]) {
content.parse::<Token![,]>()?;
}
}
let h =
height.ok_or_else(|| syn::Error::new(Span::call_site(), "viewport missing 'height'"))?;
let c = count.ok_or_else(|| syn::Error::new(Span::call_site(), "viewport missing 'count'"))?;
Ok((h, c))
}
/// Peek at the next ident-like token without consuming it.
/// Handles Rust keywords that might appear as DSL field names.
fn parse_ident_or_keyword(input: ParseStream) -> syn::Result<String> {
    if input.peek(Token![move]) {
        return Ok("move".to_string());
    }
    if input.peek(Token![match]) {
        return Ok("match".to_string());
    }
    if input.peek(Ident) {
        // Parse on a fork so the real stream stays untouched.
        return Ok(input.fork().parse::<Ident>()?.to_string());
    }
    Err(input.error("expected identifier"))
}
/// Consume an ident-like token (including keywords used as DSL fields).
fn consume_ident_or_keyword(input: ParseStream) -> syn::Result<()> {
    if input.peek(Token![move]) {
        input.parse::<Token![move]>().map(drop)
    } else if input.peek(Token![match]) {
        input.parse::<Token![match]>().map(drop)
    } else {
        input.parse::<Ident>().map(drop)
    }
}
/// Eat an optional semicolon.
fn eat_semi(input: ParseStream) {
    // Parsing an `Option<Token![;]>` never fails; it consumes the `;`
    // when present and is a no-op otherwise.
    let _ = input.parse::<Option<Token![;]>>();
}