Compare commits
10 Commits
9ed8e898a5
...
d9ed49e7d9
| Author | SHA1 | Date | |
|---|---|---|---|
|
d9ed49e7d9
|
|||
|
6a4cc85285
|
|||
|
6187b83f26
|
|||
|
fdfb4eaab5
|
|||
|
73de161a09
|
|||
|
3f2e5c779b
|
|||
|
522b9f2894
|
|||
|
cb7911e5c9
|
|||
|
c74e4ea9fb
|
|||
|
d62b136a64
|
33
Cargo.lock
generated
33
Cargo.lock
generated
@@ -100,7 +100,16 @@ version = "0.5.3"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
|
checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bit-vec",
|
"bit-vec 0.6.3",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bit-set"
|
||||||
|
version = "0.8.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
|
||||||
|
dependencies = [
|
||||||
|
"bit-vec 0.8.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -109,6 +118,12 @@ version = "0.6.3"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
|
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bit-vec"
|
||||||
|
version = "0.8.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "1.3.2"
|
version = "1.3.2"
|
||||||
@@ -422,10 +437,21 @@ version = "0.11.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2"
|
checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bit-set",
|
"bit-set 0.5.3",
|
||||||
"regex",
|
"regex",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fancy-regex"
|
||||||
|
version = "0.14.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
|
||||||
|
dependencies = [
|
||||||
|
"bit-set 0.8.0",
|
||||||
|
"regex-automata",
|
||||||
|
"regex-syntax",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "filedescriptor"
|
name = "filedescriptor"
|
||||||
version = "0.8.3"
|
version = "0.8.3"
|
||||||
@@ -1052,6 +1078,7 @@ dependencies = [
|
|||||||
name = "pikl-core"
|
name = "pikl-core"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"fancy-regex 0.14.0",
|
||||||
"nucleo-matcher",
|
"nucleo-matcher",
|
||||||
"pikl-test-macros",
|
"pikl-test-macros",
|
||||||
"serde",
|
"serde",
|
||||||
@@ -1510,7 +1537,7 @@ dependencies = [
|
|||||||
"anyhow",
|
"anyhow",
|
||||||
"base64",
|
"base64",
|
||||||
"bitflags 2.11.0",
|
"bitflags 2.11.0",
|
||||||
"fancy-regex",
|
"fancy-regex 0.11.0",
|
||||||
"filedescriptor",
|
"filedescriptor",
|
||||||
"finl_unicode",
|
"finl_unicode",
|
||||||
"fixedbitset",
|
"fixedbitset",
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ serde_json = "1.0.149"
|
|||||||
thiserror = "2.0.18"
|
thiserror = "2.0.18"
|
||||||
tokio = { version = "1.50.0", features = ["sync", "io-util", "rt"] }
|
tokio = { version = "1.50.0", features = ["sync", "io-util", "rt"] }
|
||||||
nucleo-matcher = "0.3.1"
|
nucleo-matcher = "0.3.1"
|
||||||
|
fancy-regex = "0.14"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tokio = { version = "1.50.0", features = ["sync", "process", "io-util", "rt", "macros", "rt-multi-thread"] }
|
tokio = { version = "1.50.0", features = ["sync", "process", "io-util", "rt", "macros", "rt-multi-thread"] }
|
||||||
|
|||||||
@@ -14,8 +14,12 @@ pub mod error;
|
|||||||
pub use model::event;
|
pub use model::event;
|
||||||
pub use model::item;
|
pub use model::item;
|
||||||
pub use model::traits;
|
pub use model::traits;
|
||||||
|
pub use query::exact;
|
||||||
pub use query::filter;
|
pub use query::filter;
|
||||||
pub use query::navigation;
|
pub use query::navigation;
|
||||||
|
pub use query::pipeline;
|
||||||
|
pub use query::regex_filter;
|
||||||
|
pub use query::strategy;
|
||||||
pub use runtime::hook;
|
pub use runtime::hook;
|
||||||
pub use runtime::input;
|
pub use runtime::input;
|
||||||
pub use runtime::json_menu;
|
pub use runtime::json_menu;
|
||||||
|
|||||||
@@ -11,6 +11,15 @@ use std::sync::Arc;
|
|||||||
|
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
|
/// Input mode. Insert mode sends keystrokes to the filter,
|
||||||
|
/// normal mode uses vim-style navigation keybinds.
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
|
||||||
|
pub enum Mode {
|
||||||
|
#[default]
|
||||||
|
Insert,
|
||||||
|
Normal,
|
||||||
|
}
|
||||||
|
|
||||||
/// A command the menu should process. Frontends and headless
|
/// A command the menu should process. Frontends and headless
|
||||||
/// scripts both produce these. The menu loop consumes them
|
/// scripts both produce these. The menu loop consumes them
|
||||||
/// sequentially.
|
/// sequentially.
|
||||||
@@ -23,6 +32,9 @@ pub enum Action {
|
|||||||
MoveToBottom,
|
MoveToBottom,
|
||||||
PageUp(usize),
|
PageUp(usize),
|
||||||
PageDown(usize),
|
PageDown(usize),
|
||||||
|
HalfPageUp(usize),
|
||||||
|
HalfPageDown(usize),
|
||||||
|
SetMode(Mode),
|
||||||
Confirm,
|
Confirm,
|
||||||
Cancel,
|
Cancel,
|
||||||
Resize { height: u16 },
|
Resize { height: u16 },
|
||||||
@@ -51,6 +63,7 @@ pub struct ViewState {
|
|||||||
pub filter_text: Arc<str>,
|
pub filter_text: Arc<str>,
|
||||||
pub total_items: usize,
|
pub total_items: usize,
|
||||||
pub total_filtered: usize,
|
pub total_filtered: usize,
|
||||||
|
pub mode: Mode,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A single item in the current viewport window. Has the
|
/// A single item in the current viewport window. Has the
|
||||||
|
|||||||
125
crates/pikl-core/src/query/exact.rs
Normal file
125
crates/pikl-core/src/query/exact.rs
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
//! Exact substring filter. Case-insensitive matching,
|
||||||
|
//! results in insertion order. Fast enough that incremental
|
||||||
|
//! narrowing isn't worth the complexity.
|
||||||
|
|
||||||
|
use crate::filter::Filter;
|
||||||
|
|
||||||
|
/// Case-insensitive substring filter. Matches items whose
|
||||||
|
/// label contains the query as a substring (both lowercased).
|
||||||
|
/// Results are returned in insertion order, not scored.
|
||||||
|
pub struct ExactFilter {
|
||||||
|
items: Vec<(usize, String)>,
|
||||||
|
query_lower: String,
|
||||||
|
results: Vec<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for ExactFilter {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExactFilter {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
items: Vec::new(),
|
||||||
|
query_lower: String::new(),
|
||||||
|
results: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Filter for ExactFilter {
|
||||||
|
fn push(&mut self, index: usize, label: &str) {
|
||||||
|
self.items.push((index, label.to_lowercase()));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_query(&mut self, query: &str) {
|
||||||
|
self.query_lower = query.to_lowercase();
|
||||||
|
if self.query_lower.is_empty() {
|
||||||
|
self.results = self.items.iter().map(|(idx, _)| *idx).collect();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
self.results = self
|
||||||
|
.items
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, label)| label.contains(&self.query_lower))
|
||||||
|
.map(|(idx, _)| *idx)
|
||||||
|
.collect();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_count(&self) -> usize {
|
||||||
|
self.results.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_index(&self, match_position: usize) -> Option<usize> {
|
||||||
|
self.results.get(match_position).copied()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn push_items(f: &mut ExactFilter, labels: &[&str]) {
|
||||||
|
for (i, label) in labels.iter().enumerate() {
|
||||||
|
f.push(i, label);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty_query_returns_all() {
|
||||||
|
let mut f = ExactFilter::new();
|
||||||
|
push_items(&mut f, &["apple", "banana", "cherry"]);
|
||||||
|
f.set_query("");
|
||||||
|
assert_eq!(f.matched_count(), 3);
|
||||||
|
assert_eq!(f.matched_index(0), Some(0));
|
||||||
|
assert_eq!(f.matched_index(1), Some(1));
|
||||||
|
assert_eq!(f.matched_index(2), Some(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn case_insensitive_match() {
|
||||||
|
let mut f = ExactFilter::new();
|
||||||
|
push_items(&mut f, &["Apple", "BANANA", "Cherry"]);
|
||||||
|
f.set_query("apple");
|
||||||
|
assert_eq!(f.matched_count(), 1);
|
||||||
|
assert_eq!(f.matched_index(0), Some(0));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn substring_match() {
|
||||||
|
let mut f = ExactFilter::new();
|
||||||
|
push_items(&mut f, &["error_log", "warning_temp", "info_log"]);
|
||||||
|
f.set_query("log");
|
||||||
|
assert_eq!(f.matched_count(), 2);
|
||||||
|
assert_eq!(f.matched_index(0), Some(0));
|
||||||
|
assert_eq!(f.matched_index(1), Some(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_match() {
|
||||||
|
let mut f = ExactFilter::new();
|
||||||
|
push_items(&mut f, &["apple", "banana"]);
|
||||||
|
f.set_query("xyz");
|
||||||
|
assert_eq!(f.matched_count(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn results_in_insertion_order() {
|
||||||
|
let mut f = ExactFilter::new();
|
||||||
|
push_items(&mut f, &["cat", "concatenate", "catalog"]);
|
||||||
|
f.set_query("cat");
|
||||||
|
assert_eq!(f.matched_count(), 3);
|
||||||
|
assert_eq!(f.matched_index(0), Some(0));
|
||||||
|
assert_eq!(f.matched_index(1), Some(1));
|
||||||
|
assert_eq!(f.matched_index(2), Some(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty_items() {
|
||||||
|
let mut f = ExactFilter::new();
|
||||||
|
f.set_query("test");
|
||||||
|
assert_eq!(f.matched_count(), 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
234
crates/pikl-core/src/query/filter.rs
Normal file
234
crates/pikl-core/src/query/filter.rs
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
//! Item filtering. Currently just fuzzy matching via nucleo,
|
||||||
|
//! but the [`Filter`] trait is here so we can swap in regex,
|
||||||
|
//! exact, or custom strategies later.
|
||||||
|
|
||||||
|
use nucleo_matcher::pattern::{CaseMatching, Normalization, Pattern};
|
||||||
|
use nucleo_matcher::{Config, Matcher, Utf32String};
|
||||||
|
|
||||||
|
/// Trait for incremental filter strategies. Items are pushed
|
||||||
|
/// in once, queries are updated, and results are read back
|
||||||
|
/// by position. Implementations own their item text and
|
||||||
|
/// match state.
|
||||||
|
pub trait Filter: Send {
|
||||||
|
/// Notify the filter about a new item. Called once per
|
||||||
|
/// item at insertion time.
|
||||||
|
fn push(&mut self, index: usize, label: &str);
|
||||||
|
|
||||||
|
/// Update the query. Implementations may defer actual
|
||||||
|
/// matching until results are read.
|
||||||
|
fn set_query(&mut self, query: &str);
|
||||||
|
|
||||||
|
/// Number of items matching the current query.
|
||||||
|
fn matched_count(&self) -> usize;
|
||||||
|
|
||||||
|
/// Get the original item index for the nth match
|
||||||
|
/// (sorted by relevance, best first).
|
||||||
|
fn matched_index(&self, match_position: usize) -> Option<usize>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fuzzy matching powered by nucleo. Smart case, smart
|
||||||
|
/// unicode normalization. Results sorted by score, best
|
||||||
|
/// match first. Supports incremental narrowing: if the new
|
||||||
|
/// query extends the previous one, only items that matched
|
||||||
|
/// before are re-scored.
|
||||||
|
pub struct FuzzyFilter {
|
||||||
|
matcher: Matcher,
|
||||||
|
items: Vec<(usize, Utf32String)>,
|
||||||
|
last_query: String,
|
||||||
|
results: Vec<(usize, u32)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for FuzzyFilter {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FuzzyFilter {
|
||||||
|
/// Create a new fuzzy filter with default nucleo config.
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
matcher: Matcher::new(Config::DEFAULT),
|
||||||
|
items: Vec::new(),
|
||||||
|
last_query: String::new(),
|
||||||
|
results: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Re-score a set of (index, haystack) pairs against a pattern.
|
||||||
|
fn score_items<'a>(
|
||||||
|
matcher: &mut Matcher,
|
||||||
|
pattern: &Pattern,
|
||||||
|
candidates: impl Iterator<Item = (usize, &'a Utf32String)>,
|
||||||
|
) -> Vec<(usize, u32)> {
|
||||||
|
let mut matches: Vec<(usize, u32)> = candidates
|
||||||
|
.filter_map(|(idx, haystack)| {
|
||||||
|
pattern
|
||||||
|
.score(haystack.slice(..), matcher)
|
||||||
|
.map(|score| (idx, score))
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
matches.sort_by(|a, b| b.1.cmp(&a.1));
|
||||||
|
matches
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Filter for FuzzyFilter {
|
||||||
|
fn push(&mut self, index: usize, label: &str) {
|
||||||
|
debug_assert_eq!(
|
||||||
|
index,
|
||||||
|
self.items.len(),
|
||||||
|
"FuzzyFilter::push requires sequential indices starting from 0"
|
||||||
|
);
|
||||||
|
let haystack = Utf32String::from(label);
|
||||||
|
self.items.push((index, haystack));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_query(&mut self, query: &str) {
|
||||||
|
if query.is_empty() {
|
||||||
|
// Empty query matches everything in insertion order.
|
||||||
|
self.results = self.items.iter().map(|(idx, _)| (*idx, 0)).collect();
|
||||||
|
self.last_query = String::new();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let pattern = Pattern::parse(query, CaseMatching::Smart, Normalization::Smart);
|
||||||
|
|
||||||
|
// Incremental: if the new query extends the previous one,
|
||||||
|
// only re-score items that already matched.
|
||||||
|
if !self.last_query.is_empty() && query.starts_with(&self.last_query) {
|
||||||
|
let prev_results = std::mem::take(&mut self.results);
|
||||||
|
let candidates = prev_results.into_iter().filter_map(|(idx, _)| {
|
||||||
|
// Items are pushed sequentially (enforced by debug_assert in push),
|
||||||
|
// so idx == position in self.items. Direct index is O(1).
|
||||||
|
self.items.get(idx).map(|(_, h)| (idx, h))
|
||||||
|
});
|
||||||
|
self.results = Self::score_items(&mut self.matcher, &pattern, candidates);
|
||||||
|
} else {
|
||||||
|
let candidates = self.items.iter().map(|(idx, h)| (*idx, h));
|
||||||
|
self.results = Self::score_items(&mut self.matcher, &pattern, candidates);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.last_query = query.to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_count(&self) -> usize {
|
||||||
|
self.results.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_index(&self, match_position: usize) -> Option<usize> {
|
||||||
|
self.results.get(match_position).map(|(idx, _)| *idx)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn push_text_items(f: &mut FuzzyFilter, labels: &[&str]) {
|
||||||
|
for (i, label) in labels.iter().enumerate() {
|
||||||
|
f.push(i, label);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty_query_returns_all() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
push_text_items(&mut f, &["apple", "banana", "cherry"]);
|
||||||
|
f.set_query("");
|
||||||
|
assert_eq!(f.matched_count(), 3);
|
||||||
|
assert_eq!(f.matched_index(0), Some(0));
|
||||||
|
assert_eq!(f.matched_index(1), Some(1));
|
||||||
|
assert_eq!(f.matched_index(2), Some(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn fuzzy_match() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
push_text_items(&mut f, &["apple", "banana", "apricot"]);
|
||||||
|
f.set_query("ap");
|
||||||
|
assert!(f.matched_count() >= 2);
|
||||||
|
let indices: Vec<usize> = (0..f.matched_count())
|
||||||
|
.filter_map(|i| f.matched_index(i))
|
||||||
|
.collect();
|
||||||
|
assert!(indices.contains(&0)); // apple
|
||||||
|
assert!(indices.contains(&2)); // apricot
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_matches() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
push_text_items(&mut f, &["apple", "banana"]);
|
||||||
|
f.set_query("xyz");
|
||||||
|
assert_eq!(f.matched_count(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty_items() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
f.set_query("test");
|
||||||
|
assert_eq!(f.matched_count(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn incremental_narrowing() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
push_text_items(&mut f, &["apple", "banana", "apricot", "avocado"]);
|
||||||
|
|
||||||
|
f.set_query("a");
|
||||||
|
let count_a = f.matched_count();
|
||||||
|
assert!(count_a >= 3); // apple, apricot, avocado at minimum
|
||||||
|
|
||||||
|
// Extending the query should narrow results
|
||||||
|
f.set_query("ap");
|
||||||
|
assert!(f.matched_count() <= count_a);
|
||||||
|
assert!(f.matched_count() >= 2); // apple, apricot
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn incremental_narrowing_large_set() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
// Push 1000 items, all starting with "item-"
|
||||||
|
for i in 0..1000 {
|
||||||
|
f.push(i, &format!("item-{i:04}"));
|
||||||
|
}
|
||||||
|
|
||||||
|
f.set_query("item");
|
||||||
|
let count_broad = f.matched_count();
|
||||||
|
assert_eq!(count_broad, 1000);
|
||||||
|
|
||||||
|
// Incremental narrowing: "item-00" should match ~10 items
|
||||||
|
f.set_query("item-00");
|
||||||
|
assert!(f.matched_count() < count_broad);
|
||||||
|
assert!(f.matched_count() >= 10);
|
||||||
|
|
||||||
|
// Further narrowing
|
||||||
|
f.set_query("item-001");
|
||||||
|
assert!(f.matched_count() >= 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[should_panic(expected = "sequential indices")]
|
||||||
|
fn non_sequential_push_panics_in_debug() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
f.push(0, "first");
|
||||||
|
f.push(5, "non-sequential"); // should panic in debug
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn non_incremental_new_query() {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
push_text_items(&mut f, &["apple", "banana", "cherry"]);
|
||||||
|
|
||||||
|
f.set_query("ap");
|
||||||
|
assert!(f.matched_count() >= 1);
|
||||||
|
|
||||||
|
// Completely different query, not incremental
|
||||||
|
f.set_query("ban");
|
||||||
|
assert!(f.matched_count() >= 1);
|
||||||
|
let indices: Vec<usize> = (0..f.matched_count())
|
||||||
|
.filter_map(|i| f.matched_index(i))
|
||||||
|
.collect();
|
||||||
|
assert!(indices.contains(&1)); // banana
|
||||||
|
}
|
||||||
|
}
|
||||||
6
crates/pikl-core/src/query/mod.rs
Normal file
6
crates/pikl-core/src/query/mod.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
pub mod exact;
|
||||||
|
pub mod filter;
|
||||||
|
pub mod navigation;
|
||||||
|
pub mod pipeline;
|
||||||
|
pub mod regex_filter;
|
||||||
|
pub mod strategy;
|
||||||
486
crates/pikl-core/src/query/navigation.rs
Normal file
486
crates/pikl-core/src/query/navigation.rs
Normal file
@@ -0,0 +1,486 @@
|
|||||||
|
//! Cursor and scroll state for a list of filtered items.
|
||||||
|
//! Pure logic, no rendering, no channels. The menu owns a
|
||||||
|
//! [`Viewport`] and calls its methods in response to
|
||||||
|
//! movement actions.
|
||||||
|
|
||||||
|
/// Tracks cursor position and scroll offset within a
|
||||||
|
/// filtered item list. Height comes from the frontend
|
||||||
|
/// (terminal rows minus chrome). Filtered count comes
|
||||||
|
/// from the filter engine. Everything else is derived.
|
||||||
|
pub struct Viewport {
|
||||||
|
cursor: usize,
|
||||||
|
scroll_offset: usize,
|
||||||
|
height: usize,
|
||||||
|
filtered_count: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Viewport {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Viewport {
|
||||||
|
/// Create a viewport with everything zeroed out. Call
|
||||||
|
/// [`set_height`](Self::set_height) and
|
||||||
|
/// [`set_filtered_count`](Self::set_filtered_count) to
|
||||||
|
/// initialize.
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
cursor: 0,
|
||||||
|
scroll_offset: 0,
|
||||||
|
height: 0,
|
||||||
|
filtered_count: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Current cursor position in the filtered item list.
|
||||||
|
pub fn cursor(&self) -> usize {
|
||||||
|
self.cursor
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Index of the first visible item in the viewport.
|
||||||
|
pub fn scroll_offset(&self) -> usize {
|
||||||
|
self.scroll_offset
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the viewport height (visible rows). Clamps cursor
|
||||||
|
/// and scroll offset if they fall outside the new bounds.
|
||||||
|
pub fn set_height(&mut self, height: usize) {
|
||||||
|
self.height = height;
|
||||||
|
self.clamp();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update the total number of filtered items. Resets
|
||||||
|
/// cursor and scroll to the top.
|
||||||
|
pub fn set_filtered_count(&mut self, count: usize) {
|
||||||
|
self.filtered_count = count;
|
||||||
|
self.cursor = 0;
|
||||||
|
self.scroll_offset = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move cursor up by `n` items. Clamps to the top.
|
||||||
|
/// Scrolls the viewport if the cursor leaves the visible
|
||||||
|
/// range.
|
||||||
|
pub fn move_up(&mut self, n: usize) {
|
||||||
|
if self.cursor > 0 {
|
||||||
|
self.cursor = self.cursor.saturating_sub(n);
|
||||||
|
if self.cursor < self.scroll_offset {
|
||||||
|
self.scroll_offset = self.cursor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move cursor down by `n` items. Clamps to the last
|
||||||
|
/// item. Scrolls the viewport if needed.
|
||||||
|
pub fn move_down(&mut self, n: usize) {
|
||||||
|
if self.filtered_count > 0 {
|
||||||
|
self.cursor = (self.cursor + n).min(self.filtered_count - 1);
|
||||||
|
if self.height > 0 && self.cursor >= self.scroll_offset + self.height {
|
||||||
|
self.scroll_offset = self.cursor - self.height + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Jump cursor to the first item and reset scroll.
|
||||||
|
pub fn move_to_top(&mut self) {
|
||||||
|
self.cursor = 0;
|
||||||
|
self.scroll_offset = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Jump cursor to the last item and scroll to show it.
|
||||||
|
pub fn move_to_bottom(&mut self) {
|
||||||
|
if self.filtered_count > 0 {
|
||||||
|
self.cursor = self.filtered_count - 1;
|
||||||
|
if self.height > 0 && self.cursor >= self.height {
|
||||||
|
self.scroll_offset = self.cursor - self.height + 1;
|
||||||
|
} else {
|
||||||
|
self.scroll_offset = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move cursor up by `n` pages (viewport height each).
|
||||||
|
/// Clamps to the top of the list.
|
||||||
|
pub fn page_up(&mut self, n: usize) {
|
||||||
|
if self.height == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let distance = self.height.saturating_mul(n);
|
||||||
|
self.cursor = self.cursor.saturating_sub(distance);
|
||||||
|
if self.cursor < self.scroll_offset {
|
||||||
|
self.scroll_offset = self.cursor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move cursor up by `n` half-pages (half viewport height
|
||||||
|
/// each). Clamps to the top.
|
||||||
|
pub fn half_page_up(&mut self, n: usize) {
|
||||||
|
if self.height == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let half = (self.height / 2).max(1);
|
||||||
|
let distance = half.saturating_mul(n);
|
||||||
|
self.cursor = self.cursor.saturating_sub(distance);
|
||||||
|
if self.cursor < self.scroll_offset {
|
||||||
|
self.scroll_offset = self.cursor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move cursor down by `n` half-pages (half viewport height
|
||||||
|
/// each). Clamps to the last item.
|
||||||
|
pub fn half_page_down(&mut self, n: usize) {
|
||||||
|
if self.height == 0 || self.filtered_count == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let half = (self.height / 2).max(1);
|
||||||
|
let distance = half.saturating_mul(n);
|
||||||
|
self.cursor = (self.cursor + distance).min(self.filtered_count - 1);
|
||||||
|
if self.cursor >= self.scroll_offset + self.height {
|
||||||
|
self.scroll_offset = self.cursor - self.height + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Move cursor down by `n` pages (viewport height each).
|
||||||
|
/// Clamps to the last item.
|
||||||
|
pub fn page_down(&mut self, n: usize) {
|
||||||
|
if self.height == 0 || self.filtered_count == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
let distance = self.height.saturating_mul(n);
|
||||||
|
self.cursor = (self.cursor + distance).min(self.filtered_count - 1);
|
||||||
|
if self.cursor >= self.scroll_offset + self.height {
|
||||||
|
self.scroll_offset = self.cursor - self.height + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Range of indices into the filtered list that are
|
||||||
|
/// currently visible. Returns `0..0` if height or count
|
||||||
|
/// is zero.
|
||||||
|
pub fn visible_range(&self) -> std::ops::Range<usize> {
|
||||||
|
if self.height == 0 || self.filtered_count == 0 {
|
||||||
|
return 0..0;
|
||||||
|
}
|
||||||
|
let end = (self.scroll_offset + self.height).min(self.filtered_count);
|
||||||
|
self.scroll_offset..end
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Clamp cursor and scroll offset to valid positions after
|
||||||
|
/// a height or count change.
|
||||||
|
fn clamp(&mut self) {
|
||||||
|
if self.filtered_count == 0 {
|
||||||
|
self.cursor = 0;
|
||||||
|
self.scroll_offset = 0;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if self.cursor >= self.filtered_count {
|
||||||
|
self.cursor = self.filtered_count - 1;
|
||||||
|
}
|
||||||
|
if self.height > 0 && self.cursor >= self.scroll_offset + self.height {
|
||||||
|
self.scroll_offset = self.cursor - self.height + 1;
|
||||||
|
}
|
||||||
|
if self.cursor < self.scroll_offset {
|
||||||
|
self.scroll_offset = self.cursor;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn viewport(height: usize, count: usize) -> Viewport {
|
||||||
|
let mut v = Viewport::new();
|
||||||
|
v.set_height(height);
|
||||||
|
v.set_filtered_count(count);
|
||||||
|
v
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn initial_state() {
|
||||||
|
let v = viewport(10, 20);
|
||||||
|
assert_eq!(v.cursor(), 0);
|
||||||
|
assert_eq!(v.scroll_offset(), 0);
|
||||||
|
assert_eq!(v.visible_range(), 0..10);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_down_basic() {
|
||||||
|
let mut v = viewport(5, 10);
|
||||||
|
v.move_down(1);
|
||||||
|
assert_eq!(v.cursor(), 1);
|
||||||
|
assert_eq!(v.scroll_offset(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_down_scrolls() {
|
||||||
|
let mut v = viewport(3, 10);
|
||||||
|
for _ in 0..4 {
|
||||||
|
v.move_down(1);
|
||||||
|
}
|
||||||
|
assert_eq!(v.cursor(), 4);
|
||||||
|
assert_eq!(v.scroll_offset(), 2);
|
||||||
|
assert_eq!(v.visible_range(), 2..5);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_down_at_bottom() {
|
||||||
|
let mut v = viewport(5, 3);
|
||||||
|
v.move_down(1);
|
||||||
|
v.move_down(1);
|
||||||
|
v.move_down(1); // should be no-op
|
||||||
|
assert_eq!(v.cursor(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_up_basic() {
|
||||||
|
let mut v = viewport(5, 10);
|
||||||
|
v.move_down(1);
|
||||||
|
v.move_down(1);
|
||||||
|
v.move_up(1);
|
||||||
|
assert_eq!(v.cursor(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_up_at_top() {
|
||||||
|
let mut v = viewport(5, 10);
|
||||||
|
v.move_up(1); // no-op
|
||||||
|
assert_eq!(v.cursor(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_up_scrolls() {
|
||||||
|
let mut v = viewport(3, 10);
|
||||||
|
// Go down to trigger scroll
|
||||||
|
for _ in 0..5 {
|
||||||
|
v.move_down(1);
|
||||||
|
}
|
||||||
|
assert_eq!(v.scroll_offset(), 3);
|
||||||
|
// Now go back up past scroll offset
|
||||||
|
for _ in 0..3 {
|
||||||
|
v.move_up(1);
|
||||||
|
}
|
||||||
|
assert_eq!(v.cursor(), 2);
|
||||||
|
assert_eq!(v.scroll_offset(), 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_to_top() {
|
||||||
|
let mut v = viewport(5, 20);
|
||||||
|
for _ in 0..10 {
|
||||||
|
v.move_down(1);
|
||||||
|
}
|
||||||
|
v.move_to_top();
|
||||||
|
assert_eq!(v.cursor(), 0);
|
||||||
|
assert_eq!(v.scroll_offset(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_to_bottom() {
|
||||||
|
let mut v = viewport(5, 20);
|
||||||
|
v.move_to_bottom();
|
||||||
|
assert_eq!(v.cursor(), 19);
|
||||||
|
assert_eq!(v.scroll_offset(), 15);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn move_to_bottom_small_list() {
|
||||||
|
let mut v = viewport(10, 3);
|
||||||
|
v.move_to_bottom();
|
||||||
|
assert_eq!(v.cursor(), 2);
|
||||||
|
assert_eq!(v.scroll_offset(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn page_down() {
|
||||||
|
let mut v = viewport(5, 20);
|
||||||
|
v.page_down(1);
|
||||||
|
assert_eq!(v.cursor(), 5);
|
||||||
|
assert_eq!(v.scroll_offset(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn page_down_near_end() {
|
||||||
|
let mut v = viewport(5, 8);
|
||||||
|
v.page_down(1);
|
||||||
|
assert_eq!(v.cursor(), 5);
|
||||||
|
v.page_down(1);
|
||||||
|
assert_eq!(v.cursor(), 7); // clamped to last item
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn page_up() {
|
||||||
|
let mut v = viewport(5, 20);
|
||||||
|
// Go to middle
|
||||||
|
for _ in 0..3 {
|
||||||
|
v.page_down(1);
|
||||||
|
}
|
||||||
|
let cursor_before = v.cursor();
|
||||||
|
v.page_up(1);
|
||||||
|
assert_eq!(v.cursor(), cursor_before - 5);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn page_up_near_top() {
|
||||||
|
let mut v = viewport(5, 20);
|
||||||
|
v.page_down(1);
|
||||||
|
v.page_up(1);
|
||||||
|
assert_eq!(v.cursor(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn empty_list() {
    // An empty list yields cursor 0 and an empty visible range.
    let v = viewport(10, 0);
    assert_eq!(v.cursor(), 0);
    assert_eq!(v.visible_range(), 0..0);
}

#[test]
fn empty_list_movement() {
    // Every movement operation must be a safe no-op on an empty list.
    let mut v = viewport(10, 0);
    v.move_down(1);
    v.move_up(1);
    v.page_down(1);
    v.page_up(1);
    v.move_to_top();
    v.move_to_bottom();
    assert_eq!(v.cursor(), 0);
}

#[test]
fn zero_height() {
    // A zero-height viewport shows nothing regardless of item count.
    let v = viewport(0, 10);
    assert_eq!(v.visible_range(), 0..0);
}

#[test]
fn height_larger_than_count() {
    // When the viewport is taller than the list, all items are visible.
    let v = viewport(20, 5);
    assert_eq!(v.visible_range(), 0..5);
}

#[test]
fn set_filtered_count_resets_cursor() {
    let mut v = viewport(5, 20);
    for _ in 0..10 {
        v.move_down(1);
    }
    assert_eq!(v.cursor(), 10);
    // Changing the filtered count resets both cursor and scroll.
    v.set_filtered_count(5);
    assert_eq!(v.cursor(), 0);
    assert_eq!(v.scroll_offset(), 0);
}

#[test]
fn set_height_clamps() {
    let mut v = viewport(10, 20);
    for _ in 0..8 {
        v.move_down(1);
    }
    // Shrink viewport, cursor should remain visible
    v.set_height(3);
    assert!(v.cursor() < v.scroll_offset() + 3);
}
|
||||||
|
|
||||||
|
// -- half_page unit tests --

#[test]
fn half_page_down_basic() {
    // Half of height 10 is 5 items; no scrolling yet.
    let mut v = viewport(10, 30);
    v.half_page_down(1);
    assert_eq!(v.cursor(), 5);
    assert_eq!(v.scroll_offset(), 0);
}

#[test]
fn half_page_down_scrolls() {
    let mut v = viewport(10, 30);
    // Move cursor near viewport edge, then half page down
    v.half_page_down(1); // cursor 5
    v.half_page_down(1); // cursor 10, should scroll
    assert_eq!(v.cursor(), 10);
    assert_eq!(v.scroll_offset(), 1);
}

#[test]
fn half_page_down_clamps() {
    // Cursor clamps to the last item (index 11 of 12).
    let mut v = viewport(10, 12);
    v.half_page_down(1); // cursor 5
    v.half_page_down(1); // cursor 10
    v.half_page_down(1); // clamps to 11
    assert_eq!(v.cursor(), 11);
}

#[test]
fn half_page_down_n_multiplier() {
    // The count argument multiplies the half-page distance.
    let mut v = viewport(10, 30);
    v.half_page_down(3); // (10/2)*3 = 15
    assert_eq!(v.cursor(), 15);
}

#[test]
fn half_page_up_basic() {
    let mut v = viewport(10, 30);
    v.half_page_down(2); // cursor 10
    v.half_page_up(1); // cursor 5
    assert_eq!(v.cursor(), 5);
}

#[test]
fn half_page_up_clamps_at_top() {
    // Moving up past the first item stays clamped at 0.
    let mut v = viewport(10, 30);
    v.half_page_down(1); // cursor 5
    v.half_page_up(1); // cursor 0
    assert_eq!(v.cursor(), 0);
    v.half_page_up(1); // still 0
    assert_eq!(v.cursor(), 0);
}

#[test]
fn half_page_up_scrolls() {
    let mut v = viewport(10, 30);
    // Scroll down far enough that offset > 0
    v.half_page_down(3); // cursor 15, offset 6
    assert!(v.scroll_offset() > 0);
    // Now half page up should track cursor back
    v.half_page_up(1); // cursor 10
    v.half_page_up(1); // cursor 5
    assert_eq!(v.cursor(), 5);
    // Offset should have followed cursor if it went above
    assert!(v.scroll_offset() <= v.cursor());
}

#[test]
fn half_page_height_one() {
    let mut v = viewport(1, 10);
    // max(1/2, 1) = 1, moves 1 item
    v.half_page_down(1);
    assert_eq!(v.cursor(), 1);
}

#[test]
fn half_page_height_two() {
    let mut v = viewport(2, 10);
    // 2/2 = 1, moves 1 item
    v.half_page_down(1);
    assert_eq!(v.cursor(), 1);
}

#[test]
fn half_page_zero_height() {
    // Zero height must not panic; cursor stays put.
    let mut v = viewport(0, 10);
    v.half_page_down(1);
    assert_eq!(v.cursor(), 0);
    v.half_page_up(1);
    assert_eq!(v.cursor(), 0);
}

#[test]
fn half_page_empty_list() {
    // Empty list: half-page moves are safe no-ops.
    let mut v = viewport(10, 0);
    v.half_page_down(1);
    assert_eq!(v.cursor(), 0);
    v.half_page_up(1);
    assert_eq!(v.cursor(), 0);
}
|
||||||
|
}
|
||||||
617
crates/pikl-core/src/query/pipeline.rs
Normal file
617
crates/pikl-core/src/query/pipeline.rs
Normal file
@@ -0,0 +1,617 @@
|
|||||||
|
//! Filter pipeline with `|` chaining. Splits a query into
|
||||||
|
//! segments, applies the appropriate filter strategy to each,
|
||||||
|
//! and chains results through stages. Supports incremental
|
||||||
|
//! caching: unchanged stages keep their results.
|
||||||
|
|
||||||
|
use super::filter::{Filter, FuzzyFilter};
|
||||||
|
use super::strategy::{self, FilterKind};
|
||||||
|
|
||||||
|
/// A multi-stage filter pipeline. Each `|` in the query
/// creates a new stage that filters the previous stage's
/// output. Implements [`Filter`] so it can be used as a
/// drop-in replacement for a single filter.
pub struct FilterPipeline {
    /// Master item list: (original index, label).
    items: Vec<(usize, String)>,
    /// Pipeline stages, one per `|`-separated segment.
    /// Empty when the query is empty (all items match).
    stages: Vec<PipelineStage>,
    /// The last raw query string, used for diffing.
    // NOTE(review): written in `set_query` but not read anywhere in
    // this file — presumably retained for callers/debugging; confirm.
    last_raw_query: String,
}
|
||||||
|
|
||||||
|
/// One pipeline stage, corresponding to a single `|`-separated
/// query segment.
struct PipelineStage {
    /// The raw segment text (including prefix chars).
    raw_segment: String,
    /// Which filter strategy this segment selected.
    kind: FilterKind,
    /// Whether the match is negated (`!`-prefixed segment).
    inverse: bool,
    /// The query text after prefix stripping.
    query_text: String,
    /// The strategy-specific filter (only used for fuzzy stages).
    fuzzy: Option<FuzzyFilter>,
    /// Items passing this stage (indices into master list).
    cached_indices: Vec<usize>,
    /// True when this stage must be re-evaluated on the next pass.
    dirty: bool,
}
|
||||||
|
|
||||||
|
/// Split a raw query on unescaped `|` characters, respecting
/// regex delimiters (`/pattern/` and `!/pattern/`). Returns the
/// trimmed segments with `\|` unescaped to a literal `|`;
/// empty segments are dropped.
fn split_pipeline(query: &str) -> Vec<String> {
    let mut segments = Vec::new();
    let mut current = String::new();
    // True while inside a `/.../` regex literal; pipes there do not split.
    let mut in_regex = false;
    // Byte position (within `current`) of the opening `/`.
    let mut open_at: usize = 0;

    let mut chars = query.chars().peekable();
    while let Some(c) = chars.next() {
        // Escaped pipe: always produce a literal `|`.
        if c == '\\' && chars.peek() == Some(&'|') {
            chars.next();
            current.push('|');
            continue;
        }

        match c {
            // Regex opening: `/` or `!/` at the start of a segment
            // (only whitespace or a lone `!` seen so far).
            '/' if !in_regex && matches!(current.trim(), "" | "!") => {
                in_regex = true;
                open_at = current.len();
                current.push(c);
            }
            // Regex closing: any `/` after the opening slash ends it.
            '/' if in_regex => {
                if current.len() > open_at {
                    in_regex = false;
                }
                current.push(c);
            }
            // Unescaped pipe outside a regex: segment boundary.
            '|' if !in_regex => {
                segments.push(current.trim().to_string());
                current.clear();
            }
            _ => current.push(c),
        }
    }

    segments.push(current.trim().to_string());

    // Drop empty segments (e.g. from `a | | b` or a trailing pipe).
    segments.into_iter().filter(|s| !s.is_empty()).collect()
}
|
||||||
|
|
||||||
|
impl Default for FilterPipeline {
    /// Equivalent to [`FilterPipeline::new`].
    fn default() -> Self {
        Self::new()
    }
}
|
||||||
|
|
||||||
|
impl FilterPipeline {
    /// Create an empty pipeline: no items, no stages, empty query.
    pub fn new() -> Self {
        Self {
            items: Vec::new(),
            stages: Vec::new(),
            last_raw_query: String::new(),
        }
    }

    /// Evaluate all dirty stages in order. Each stage filters
    /// against the previous stage's cached_indices.
    fn evaluate(&mut self) {
        for stage_idx in 0..self.stages.len() {
            if !self.stages[stage_idx].dirty {
                continue;
            }

            // Stage 0 filters the full item list; later stages filter
            // the previous stage's survivors. Cloned so we can take a
            // mutable borrow of the current stage below.
            let input_indices: Vec<usize> = if stage_idx == 0 {
                self.items.iter().map(|(idx, _)| *idx).collect()
            } else {
                self.stages[stage_idx - 1].cached_indices.clone()
            };

            let stage = &mut self.stages[stage_idx];

            let result = match stage.kind {
                FilterKind::Fuzzy => Self::eval_fuzzy(stage, &input_indices, stage_idx),
                FilterKind::Exact => {
                    // Case-insensitive substring containment.
                    Self::eval_simple(stage, &input_indices, &self.items, |label, query| {
                        label.to_lowercase().contains(&query.to_lowercase())
                    })
                }
                FilterKind::Regex => {
                    // Compiled once per evaluation, not per item.
                    let re = fancy_regex::Regex::new(&stage.query_text).ok();
                    Self::eval_simple(stage, &input_indices, &self.items, |label, _query| {
                        match &re {
                            Some(r) => r.is_match(label).unwrap_or(false),
                            None => true, // invalid regex matches everything
                        }
                    })
                }
            };

            self.stages[stage_idx].cached_indices = result;
            self.stages[stage_idx].dirty = false;
        }
    }

    /// Evaluate a fuzzy stage. The stage's own [`FuzzyFilter`] is
    /// queried against the full item set, then the results are
    /// combined with `input_indices` (the previous stage's output).
    /// A fuzzy stage with no filter attached matches nothing.
    fn eval_fuzzy(
        stage: &mut PipelineStage,
        input_indices: &[usize],
        stage_idx: usize,
    ) -> Vec<usize> {
        let Some(fuzzy) = stage.fuzzy.as_mut() else {
            // NOTE(review): a Fuzzy-kind stage whose `fuzzy` is `None`
            // silently yields zero matches.
            return Vec::new();
        };
        fuzzy.set_query(&stage.query_text);
        let fuzzy_results: Vec<usize> = (0..fuzzy.matched_count())
            .filter_map(|i| fuzzy.matched_index(i))
            .collect();
        if stage.inverse {
            // Keep input items that the fuzzy query did NOT match.
            let fuzzy_set: std::collections::HashSet<usize> = fuzzy_results.into_iter().collect();
            input_indices
                .iter()
                .copied()
                .filter(|idx| !fuzzy_set.contains(idx))
                .collect()
        } else if stage_idx == 0 {
            // First stage: the fuzzy results ARE the output (no
            // intersection needed, and fuzzy match order is kept).
            fuzzy_results
        } else {
            // Later stage: intersect fuzzy results with the previous
            // stage's survivors; output follows fuzzy match order.
            let input_set: std::collections::HashSet<usize> =
                input_indices.iter().copied().collect();
            fuzzy_results
                .into_iter()
                .filter(|idx| input_set.contains(idx))
                .collect()
        }
    }

    /// Evaluate an exact or regex stage via a label predicate.
    /// An empty query passes everything through; `inverse` keeps
    /// the items the predicate rejects.
    fn eval_simple(
        stage: &PipelineStage,
        input_indices: &[usize],
        items: &[(usize, String)],
        matcher: impl Fn(&str, &str) -> bool,
    ) -> Vec<usize> {
        if stage.query_text.is_empty() {
            return input_indices.to_vec();
        }
        if stage.inverse {
            input_indices
                .iter()
                .copied()
                .filter(|&idx| !matcher(&items[idx].1, &stage.query_text))
                .collect()
        } else {
            input_indices
                .iter()
                .copied()
                .filter(|&idx| matcher(&items[idx].1, &stage.query_text))
                .collect()
        }
    }
}
|
||||||
|
|
||||||
|
impl Filter for FilterPipeline {
|
||||||
|
fn push(&mut self, index: usize, label: &str) {
|
||||||
|
self.items.push((index, label.to_string()));
|
||||||
|
// Push to any existing fuzzy filters in stages
|
||||||
|
for stage in &mut self.stages {
|
||||||
|
if let Some(ref mut fuzzy) = stage.fuzzy {
|
||||||
|
fuzzy.push(index, label);
|
||||||
|
}
|
||||||
|
stage.dirty = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_query(&mut self, query: &str) {
|
||||||
|
self.last_raw_query = query.to_string();
|
||||||
|
let segments = split_pipeline(query);
|
||||||
|
|
||||||
|
// Reconcile stages with new segments
|
||||||
|
let mut new_len = segments.len();
|
||||||
|
|
||||||
|
// If query is empty, clear everything
|
||||||
|
if segments.is_empty() {
|
||||||
|
self.stages.clear();
|
||||||
|
new_len = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compare position-by-position
|
||||||
|
for (i, seg) in segments.iter().enumerate() {
|
||||||
|
if i < self.stages.len() {
|
||||||
|
if self.stages[i].raw_segment == *seg {
|
||||||
|
// Unchanged: keep cache
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// Changed: update this stage, mark dirty
|
||||||
|
let parsed = strategy::parse_segment(seg);
|
||||||
|
self.stages[i].raw_segment = seg.clone();
|
||||||
|
self.stages[i].kind = parsed.kind;
|
||||||
|
self.stages[i].inverse = parsed.inverse;
|
||||||
|
self.stages[i].query_text = parsed.query.to_string();
|
||||||
|
self.stages[i].dirty = true;
|
||||||
|
// Mark all downstream stages dirty too
|
||||||
|
for j in (i + 1)..self.stages.len() {
|
||||||
|
self.stages[j].dirty = true;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// New stage
|
||||||
|
let parsed = strategy::parse_segment(seg);
|
||||||
|
let fuzzy = if parsed.kind == FilterKind::Fuzzy {
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
for (idx, label) in &self.items {
|
||||||
|
f.push(*idx, label);
|
||||||
|
}
|
||||||
|
Some(f)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
self.stages.push(PipelineStage {
|
||||||
|
raw_segment: seg.clone(),
|
||||||
|
kind: parsed.kind,
|
||||||
|
inverse: parsed.inverse,
|
||||||
|
query_text: parsed.query.to_string(),
|
||||||
|
fuzzy,
|
||||||
|
cached_indices: Vec::new(),
|
||||||
|
dirty: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Truncate extra stages
|
||||||
|
self.stages.truncate(new_len);
|
||||||
|
|
||||||
|
// Evaluate dirty stages
|
||||||
|
self.evaluate();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_count(&self) -> usize {
|
||||||
|
match self.stages.last() {
|
||||||
|
Some(stage) => stage.cached_indices.len(),
|
||||||
|
None => self.items.len(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_index(&self, match_position: usize) -> Option<usize> {
|
||||||
|
match self.stages.last() {
|
||||||
|
Some(stage) => stage.cached_indices.get(match_position).copied(),
|
||||||
|
None => self.items.get(match_position).map(|(idx, _)| *idx),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Push `labels` into the pipeline using their positions as indices.
    fn push_items(p: &mut FilterPipeline, labels: &[&str]) {
        for (i, label) in labels.iter().enumerate() {
            p.push(i, label);
        }
    }

    /// Collect the labels of all matched items, in match order.
    fn matched_labels<'a>(p: &FilterPipeline, labels: &'a [&str]) -> Vec<&'a str> {
        (0..p.matched_count())
            .filter_map(|i| p.matched_index(i))
            .map(|idx| labels[idx])
            .collect()
    }

    #[test]
    fn empty_query_returns_all() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        p.set_query("");
        assert_eq!(p.matched_count(), 3);
    }

    #[test]
    fn single_fuzzy_stage() {
        // No prefix: fuzzy matching.
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        p.set_query("ban");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["banana"]);
    }

    #[test]
    fn single_exact_stage() {
        // `'` prefix: exact substring matching.
        let mut p = FilterPipeline::new();
        let labels = &["apple", "pineapple", "cherry"];
        push_items(&mut p, labels);
        p.set_query("'apple");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"apple"));
        assert!(result.contains(&"pineapple"));
        assert!(!result.contains(&"cherry"));
    }

    #[test]
    fn two_stage_pipeline() {
        let mut p = FilterPipeline::new();
        let labels = &["error_log", "warning_temp", "info_log", "debug_temp"];
        push_items(&mut p, labels);
        p.set_query("'log | !temp");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"error_log"));
        assert!(result.contains(&"info_log"));
        assert!(!result.contains(&"warning_temp"));
        assert!(!result.contains(&"debug_temp"));
    }

    #[test]
    fn three_stage_pipeline() {
        let mut p = FilterPipeline::new();
        let labels = &[
            "error_log_123",
            "warning_temp_456",
            "info_log_789",
            "debug_temp_012",
        ];
        push_items(&mut p, labels);
        p.set_query("'log | !temp | /[0-9]+/");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"error_log_123"));
        assert!(result.contains(&"info_log_789"));
        assert_eq!(result.len(), 2);
    }

    #[test]
    fn incremental_stage_1_preserved() {
        let mut p = FilterPipeline::new();
        let labels = &["error_log", "warning_temp", "info_log", "debug_temp"];
        push_items(&mut p, labels);
        // First query
        p.set_query("'log | !error");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["info_log"]);

        // Edit stage 2 only: stage 1 cache should be preserved
        p.set_query("'log | !info");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["error_log"]);
    }

    #[test]
    fn pop_stage_on_backspace() {
        let mut p = FilterPipeline::new();
        let labels = &["error_log", "warning_temp", "info_log"];
        push_items(&mut p, labels);
        p.set_query("'log | !error");
        assert_eq!(matched_labels(&p, labels), vec!["info_log"]);

        // Backspace over the pipe: now just "'log"
        p.set_query("'log");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"error_log"));
        assert!(result.contains(&"info_log"));
        assert_eq!(result.len(), 2);
    }

    #[test]
    fn empty_segments_skipped() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana"];
        push_items(&mut p, labels);
        p.set_query("apple | | banana");
        // Middle empty segment should be ignored
        // This should be equivalent to "apple | banana"
        // which is fuzzy "apple" then fuzzy "banana".
        // "apple" matches apple, "banana" matches banana.
        // Pipeline: first stage matches apple, second stage filters that for banana.
        // Neither "apple" nor "banana" matches both, so 0 results.
        assert_eq!(p.matched_count(), 0);
    }

    #[test]
    fn escaped_pipe() {
        // `\|` is a literal pipe, not a stage separator.
        let mut p = FilterPipeline::new();
        let labels = &["foo|bar", "foobar", "baz"];
        push_items(&mut p, labels);
        p.set_query("'foo\\|bar");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["foo|bar"]);
    }

    #[test]
    fn pipe_inside_regex_not_split() {
        // `|` inside `/.../` is regex alternation, not a separator.
        let mut p = FilterPipeline::new();
        let labels = &["foo", "bar", "baz"];
        push_items(&mut p, labels);
        p.set_query("/foo|bar/");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"foo"));
        assert!(result.contains(&"bar"));
        assert!(!result.contains(&"baz"));
    }

    #[test]
    fn inverse_exact() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        p.set_query("!'banana");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"apple"));
        assert!(result.contains(&"cherry"));
        assert!(!result.contains(&"banana"));
    }

    #[test]
    fn inverse_regex() {
        let mut p = FilterPipeline::new();
        let labels = &["item-001", "item-abc", "item-123"];
        push_items(&mut p, labels);
        p.set_query("!/[0-9]+/");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["item-abc"]);
    }

    #[test]
    fn add_items_picked_up() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana"];
        push_items(&mut p, labels);
        p.set_query("'cherry");
        assert_eq!(p.matched_count(), 0);

        // Add new item
        p.push(2, "cherry");
        // Re-evaluate with same query
        p.set_query("'cherry");
        assert_eq!(p.matched_count(), 1);
        assert_eq!(p.matched_index(0), Some(2));
    }

    #[test]
    fn split_pipeline_basic() {
        let segs = split_pipeline("foo | bar");
        assert_eq!(segs, vec!["foo", "bar"]);
    }

    #[test]
    fn split_pipeline_escaped() {
        let segs = split_pipeline("foo\\|bar");
        assert_eq!(segs, vec!["foo|bar"]);
    }

    #[test]
    fn split_pipeline_regex() {
        let segs = split_pipeline("/foo|bar/ | baz");
        assert_eq!(segs, vec!["/foo|bar/", "baz"]);
    }

    #[test]
    fn split_pipeline_empty_segments() {
        let segs = split_pipeline("foo | | bar");
        assert_eq!(segs, vec!["foo", "bar"]);
    }

    #[test]
    fn split_pipeline_inverse_regex() {
        let segs = split_pipeline("!/foo|bar/ | baz");
        assert_eq!(segs, vec!["!/foo|bar/", "baz"]);
    }

    // -- Pipeline edge case tests --

    #[test]
    fn fuzzy_as_second_stage() {
        let mut p = FilterPipeline::new();
        let labels = &["error_log", "warning_temp", "info_log", "debug_log"];
        push_items(&mut p, labels);
        // Exact first, then fuzzy second
        p.set_query("'log | debug");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["debug_log"]);
    }

    #[test]
    fn three_stage_edit_stage_one() {
        let mut p = FilterPipeline::new();
        let labels = &[
            "error_log_123",
            "warning_temp_456",
            "info_log_789",
            "debug_temp_012",
        ];
        push_items(&mut p, labels);
        p.set_query("'log | !error | /[0-9]+/");
        assert_eq!(matched_labels(&p, labels), vec!["info_log_789"]);

        // Edit stage 1: now match "temp" instead of "log"
        p.set_query("'temp | !error | /[0-9]+/");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"warning_temp_456"));
        assert!(result.contains(&"debug_temp_012"));
        assert!(!result.contains(&"error_log_123"));
    }

    #[test]
    fn invalid_regex_in_pipeline() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        // Invalid regex: unclosed bracket. Should match everything (graceful degradation).
        p.set_query("/[invalid/");
        assert_eq!(p.matched_count(), 3);
    }

    #[test]
    fn same_query_twice_stable() {
        // Re-setting the identical query must not change results.
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        p.set_query("ban");
        let first = matched_labels(&p, labels);
        p.set_query("ban");
        let second = matched_labels(&p, labels);
        assert_eq!(first, second);
    }

    #[test]
    fn query_shrink_to_single() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        p.set_query("'ban | !x");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["banana"]);

        // Shrink back to single stage
        p.set_query("'ban");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["banana"]);
    }

    #[test]
    fn all_items_excluded() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana"];
        push_items(&mut p, labels);
        p.set_query("xyz");
        assert_eq!(p.matched_count(), 0);
    }

    #[test]
    fn single_regex_stage() {
        let mut p = FilterPipeline::new();
        let labels = &["item-001", "item-abc", "item-123"];
        push_items(&mut p, labels);
        p.set_query("/[0-9]+/");
        let result = matched_labels(&p, labels);
        assert_eq!(result, vec!["item-001", "item-123"]);
    }

    #[test]
    fn inverse_fuzzy_stage() {
        let mut p = FilterPipeline::new();
        let labels = &["apple", "banana", "cherry"];
        push_items(&mut p, labels);
        p.set_query("!ban");
        let result = matched_labels(&p, labels);
        assert!(result.contains(&"apple"));
        assert!(result.contains(&"cherry"));
        assert!(!result.contains(&"banana"));
    }
}
|
||||||
201
crates/pikl-core/src/query/regex_filter.rs
Normal file
201
crates/pikl-core/src/query/regex_filter.rs
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
//! Regex filter using fancy-regex. Gracefully degrades on
|
||||||
|
//! invalid patterns (matches everything) so the user can
|
||||||
|
//! type patterns incrementally without errors.
|
||||||
|
|
||||||
|
use fancy_regex::Regex;
|
||||||
|
|
||||||
|
use crate::filter::Filter;
|
||||||
|
|
||||||
|
/// Regex filter. Case-sensitive by default; use `(?i)` in
/// the pattern for case-insensitive matching. Invalid
/// patterns match everything (graceful degradation while
/// typing). Results in insertion order.
pub struct RegexFilter {
    /// All pushed items: (original index, label).
    items: Vec<(usize, String)>,
    /// Compiled pattern; `None` for an empty or invalid query.
    pattern: Option<Regex>,
    /// Original indices of items matching the current pattern.
    results: Vec<usize>,
}
|
||||||
|
|
||||||
|
impl Default for RegexFilter {
    /// Equivalent to [`RegexFilter::new`].
    fn default() -> Self {
        Self::new()
    }
}
|
||||||
|
|
||||||
|
impl RegexFilter {
    /// Create an empty filter: no items, no pattern, no results.
    pub fn new() -> Self {
        Self {
            items: Vec::new(),
            pattern: None,
            results: Vec::new(),
        }
    }
}
|
||||||
|
|
||||||
|
impl Filter for RegexFilter {
|
||||||
|
fn push(&mut self, index: usize, label: &str) {
|
||||||
|
self.items.push((index, label.to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn set_query(&mut self, query: &str) {
|
||||||
|
if query.is_empty() {
|
||||||
|
self.pattern = None;
|
||||||
|
self.results = self.items.iter().map(|(idx, _)| *idx).collect();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile pattern. If invalid, match everything.
|
||||||
|
self.pattern = Regex::new(query).ok();
|
||||||
|
|
||||||
|
match &self.pattern {
|
||||||
|
Some(re) => {
|
||||||
|
self.results = self
|
||||||
|
.items
|
||||||
|
.iter()
|
||||||
|
.filter(|(_, label)| re.is_match(label).unwrap_or(false))
|
||||||
|
.map(|(idx, _)| *idx)
|
||||||
|
.collect();
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
// Invalid pattern: match everything
|
||||||
|
self.results = self.items.iter().map(|(idx, _)| *idx).collect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_count(&self) -> usize {
|
||||||
|
self.results.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matched_index(&self, match_position: usize) -> Option<usize> {
|
||||||
|
self.results.get(match_position).copied()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Push `labels` into the filter using their positions as indices.
    fn push_items(f: &mut RegexFilter, labels: &[&str]) {
        for (i, label) in labels.iter().enumerate() {
            f.push(i, label);
        }
    }

    #[test]
    fn empty_query_returns_all() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["apple", "banana", "cherry"]);
        f.set_query("");
        assert_eq!(f.matched_count(), 3);
    }

    #[test]
    fn valid_pattern() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["item-001", "item-abc", "item-123"]);
        f.set_query("[0-9]+");
        assert_eq!(f.matched_count(), 2);
        assert_eq!(f.matched_index(0), Some(0));
        assert_eq!(f.matched_index(1), Some(2));
    }

    #[test]
    fn invalid_pattern_returns_all() {
        // Unclosed bracket: graceful degradation, everything matches.
        let mut f = RegexFilter::new();
        push_items(&mut f, &["apple", "banana"]);
        f.set_query("[invalid");
        assert_eq!(f.matched_count(), 2);
    }

    #[test]
    fn case_sensitive_by_default() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["Apple", "apple", "APPLE"]);
        f.set_query("^apple$");
        assert_eq!(f.matched_count(), 1);
        assert_eq!(f.matched_index(0), Some(1));
    }

    #[test]
    fn case_insensitive_flag() {
        // `(?i)` inline flag enables case-insensitive matching.
        let mut f = RegexFilter::new();
        push_items(&mut f, &["Apple", "apple", "APPLE"]);
        f.set_query("(?i)^apple$");
        assert_eq!(f.matched_count(), 3);
    }

    #[test]
    fn results_in_insertion_order() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["log-3", "log-1", "log-2"]);
        f.set_query("log-[0-9]");
        assert_eq!(f.matched_count(), 3);
        assert_eq!(f.matched_index(0), Some(0));
        assert_eq!(f.matched_index(1), Some(1));
        assert_eq!(f.matched_index(2), Some(2));
    }

    #[test]
    fn no_match() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["apple", "banana"]);
        f.set_query("^xyz$");
        assert_eq!(f.matched_count(), 0);
    }

    #[test]
    fn empty_items() {
        // Querying before any push is safe and yields no matches.
        let mut f = RegexFilter::new();
        f.set_query("test");
        assert_eq!(f.matched_count(), 0);
    }

    #[test]
    fn alternation_in_pattern() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["foo", "bar", "baz"]);
        f.set_query("foo|bar");
        assert_eq!(f.matched_count(), 2);
        assert_eq!(f.matched_index(0), Some(0));
        assert_eq!(f.matched_index(1), Some(1));
    }

    // -- State transition and fancy-regex tests --

    #[test]
    fn requery_valid_invalid_valid() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["apple", "banana", "cherry"]);
        // Valid pattern
        f.set_query("^apple$");
        assert_eq!(f.matched_count(), 1);
        // Invalid pattern: matches everything
        f.set_query("[invalid");
        assert_eq!(f.matched_count(), 3);
        // Valid again
        f.set_query("^cherry$");
        assert_eq!(f.matched_count(), 1);
        assert_eq!(f.matched_index(0), Some(2));
    }

    #[test]
    fn anchored_match() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["apple", "pineapple"]);
        f.set_query("^apple$");
        assert_eq!(f.matched_count(), 1);
        assert_eq!(f.matched_index(0), Some(0));
    }

    #[test]
    fn lookbehind() {
        let mut f = RegexFilter::new();
        push_items(&mut f, &["log_error", "log_warning", "not_a_log"]);
        // fancy-regex specific: positive lookbehind
        f.set_query("(?<=log_)\\w+");
        assert_eq!(f.matched_count(), 2);
        assert_eq!(f.matched_index(0), Some(0));
        assert_eq!(f.matched_index(1), Some(1));
    }
}
|
||||||
231
crates/pikl-core/src/query/strategy.rs
Normal file
231
crates/pikl-core/src/query/strategy.rs
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
//! Filter segment prefix parsing. Determines which filter
|
||||||
|
//! strategy to use based on the query prefix.
|
||||||
|
|
||||||
|
/// The type of filter to apply for a query segment.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FilterKind {
    Fuzzy,
    Exact,
    Regex,
}

/// One parsed segment: which strategy to run, whether the match is
/// negated, and the query text with its prefix stripped.
#[derive(Debug, PartialEq, Eq)]
pub struct ParsedSegment<'a> {
    pub kind: FilterKind,
    pub inverse: bool,
    pub query: &'a str,
}

/// Classify a filter segment by its prefix and extract the query text.
///
/// Recognized prefixes:
/// * `!/pattern/` -> Regex, inverted, inner pattern
/// * `/pattern/`  -> Regex, inner pattern
/// * `!'query`    -> Exact, inverted
/// * `!query`     -> Fuzzy, inverted
/// * `'query`     -> Exact
/// * anything else -> Fuzzy
///
/// A `/` with no closing slash is treated as fuzzy text (the user is
/// presumably still typing the regex delimiter); the slash stays part
/// of the query.
pub fn parse_segment(segment: &str) -> ParsedSegment<'_> {
    // Peel off a single leading `!` first; inversion composes with
    // every kind, so this removes the duplicated branches.
    let (inverse, body) = match segment.strip_prefix('!') {
        Some(rest) => (true, rest),
        None => (false, segment),
    };

    // Regex needs both delimiters. An unclosed `/...` falls back to
    // fuzzy over the body (slash included, `!` already consumed).
    if let Some(after_open) = body.strip_prefix('/') {
        return match after_open.strip_suffix('/') {
            Some(pattern) => ParsedSegment {
                kind: FilterKind::Regex,
                inverse,
                query: pattern,
            },
            None => ParsedSegment {
                kind: FilterKind::Fuzzy,
                inverse,
                query: body,
            },
        };
    }

    // Exact-match prefix.
    if let Some(text) = body.strip_prefix('\'') {
        return ParsedSegment {
            kind: FilterKind::Exact,
            inverse,
            query: text,
        };
    }

    // Default: fuzzy over the remaining text.
    ParsedSegment {
        kind: FilterKind::Fuzzy,
        inverse,
        query: body,
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Assert that `input` parses into exactly the given
    /// kind / inverse / query triple.
    fn check(input: &str, kind: FilterKind, inverse: bool, query: &str) {
        assert_eq!(parse_segment(input), ParsedSegment { kind, inverse, query });
    }

    #[test]
    fn plain_text_is_fuzzy() {
        check("hello", FilterKind::Fuzzy, false, "hello");
    }

    #[test]
    fn empty_is_fuzzy() {
        check("", FilterKind::Fuzzy, false, "");
    }

    #[test]
    fn exact_prefix() {
        check("'exact match", FilterKind::Exact, false, "exact match");
    }

    #[test]
    fn regex_delimiters() {
        check("/[0-9]+/", FilterKind::Regex, false, "[0-9]+");
    }

    #[test]
    fn inverse_fuzzy() {
        check("!temp", FilterKind::Fuzzy, true, "temp");
    }

    #[test]
    fn inverse_exact() {
        check("!'temp", FilterKind::Exact, true, "temp");
    }

    #[test]
    fn inverse_regex() {
        check("!/[0-9]+/", FilterKind::Regex, true, "[0-9]+");
    }

    #[test]
    fn unclosed_regex_is_fuzzy() {
        check("/still typing", FilterKind::Fuzzy, false, "/still typing");
    }

    #[test]
    fn unclosed_inverse_regex_is_fuzzy_inverse() {
        check("!/still typing", FilterKind::Fuzzy, true, "/still typing");
    }

    #[test]
    fn just_slash_is_fuzzy() {
        check("/", FilterKind::Fuzzy, false, "/");
    }

    #[test]
    fn empty_regex_pattern() {
        check("//", FilterKind::Regex, false, "");
    }

    #[test]
    fn just_exclamation() {
        check("!", FilterKind::Fuzzy, true, "");
    }

    #[test]
    fn just_quote() {
        check("'", FilterKind::Exact, false, "");
    }

    // -- Double-prefix edge cases --

    #[test]
    fn double_exclamation() {
        // "!!query": the first `!` inverts; the rest is plain fuzzy text.
        check("!!query", FilterKind::Fuzzy, true, "!query");
    }

    #[test]
    fn inverse_exact_regex_like() {
        // "!'[0-9]": exact inverse; the bracket text is NOT a regex.
        check("!'[0-9]", FilterKind::Exact, true, "[0-9]");
    }
}
|
||||||
15
crates/pikl-core/src/runtime/hook.rs
Normal file
15
crates/pikl-core/src/runtime/hook.rs
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
//! Hook trait for lifecycle events. The core library defines
|
||||||
|
//! the interface; concrete implementations (shell hooks, IPC
|
||||||
|
//! hooks, etc.) live in frontend crates.
|
||||||
|
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
use crate::error::PiklError;
|
||||||
|
|
||||||
|
/// A lifecycle hook that fires on menu events. Implementations
/// live outside pikl-core (e.g. in the CLI binary) so the core
/// library stays free of process/libc deps.
// `async fn` in a public trait normally warns because it leaks an
// unnameable future type into the API; accepted here since
// implementors live in workspace crates, not external consumers.
// NOTE(review): async-fn-in-trait does not promise a `Send` future —
// confirm callers never need to spawn hook futures across threads.
#[allow(async_fn_in_trait)]
pub trait Hook: Send + Sync {
    /// Invoked with the JSON payload of the event (e.g. the selected
    /// item). An `Err` propagates to the caller as [`PiklError`].
    async fn run(&self, value: &Value) -> Result<(), PiklError>;
}
|
||||||
117
crates/pikl-core/src/runtime/input.rs
Normal file
117
crates/pikl-core/src/runtime/input.rs
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
//! Input parsing. Reads lines from stdin (or any reader)
|
||||||
|
//! and turns them into [`Item`]s. Each line is tried as
|
||||||
|
//! JSON first. If that doesn't parse, it's treated as plain
|
||||||
|
//! text. Empty lines are skipped.
|
||||||
|
|
||||||
|
use tokio::io::AsyncBufReadExt;
|
||||||
|
|
||||||
|
use crate::error::PiklError;
|
||||||
|
use crate::item::Item;
|
||||||
|
|
||||||
|
/// Try to parse a line as JSON. Falls back to wrapping
|
||||||
|
/// it as a plain-text string. The `label_key` controls
|
||||||
|
/// which JSON key is used as the display label for object
|
||||||
|
/// items.
|
||||||
|
fn parse_line(line: &str, label_key: &str) -> Item {
|
||||||
|
match serde_json::from_str::<serde_json::Value>(line) {
|
||||||
|
Ok(value) => Item::new(value, label_key),
|
||||||
|
Err(_) => Item::from_plain_text(line),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read items from a synchronous reader. Use this for stdin so tokio
|
||||||
|
/// never registers the pipe fd. Avoids conflicts with crossterm's
|
||||||
|
/// event polling on fd 0 after dup2.
|
||||||
|
pub fn read_items_sync(
|
||||||
|
reader: impl std::io::BufRead,
|
||||||
|
label_key: &str,
|
||||||
|
) -> Result<Vec<Item>, PiklError> {
|
||||||
|
let mut items = Vec::new();
|
||||||
|
for line in reader.lines() {
|
||||||
|
let line = line?;
|
||||||
|
if line.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
items.push(parse_line(&line, label_key));
|
||||||
|
}
|
||||||
|
Ok(items)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Async version of [`read_items_sync`]. For streaming
|
||||||
|
/// input sources where items arrive over time.
|
||||||
|
pub async fn read_items(
|
||||||
|
reader: impl tokio::io::AsyncBufRead + Unpin,
|
||||||
|
label_key: &str,
|
||||||
|
) -> Result<Vec<Item>, PiklError> {
|
||||||
|
let mut items = Vec::new();
|
||||||
|
let mut lines = reader.lines();
|
||||||
|
while let Some(line) = lines.next_line().await? {
|
||||||
|
if line.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
items.push(parse_line(&line, label_key));
|
||||||
|
}
|
||||||
|
Ok(items)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Run `read_items` over an in-memory reader with "label" as the
    /// label key. Errors collapse to an empty Vec (fine for tests:
    /// in-memory reads cannot fail).
    async fn parse(input: &str) -> Vec<Item> {
        let reader = tokio::io::BufReader::new(input.as_bytes());
        read_items(reader, "label").await.unwrap_or_default()
    }

    #[test]
    fn _doc_marker() {}

    #[tokio::test]
    async fn plain_text_lines() {
        let items = parse("foo\nbar\nbaz\n").await;
        assert_eq!(items.len(), 3);
        assert_eq!(items[0].label(), "foo");
        assert_eq!(items[1].label(), "bar");
        assert_eq!(items[2].label(), "baz");
    }

    #[tokio::test]
    async fn json_objects() {
        let items = parse("{\"label\": \"one\"}\n{\"label\": \"two\"}\n").await;
        assert_eq!(items.len(), 2);
        assert_eq!(items[0].label(), "one");
        assert_eq!(items[1].label(), "two");
    }

    // JSON and plain-text lines can be interleaved freely.
    #[tokio::test]
    async fn mixed_input() {
        let items = parse("plain line\n{\"label\": \"json\"}\nanother plain\n").await;
        assert_eq!(items.len(), 3);
        assert_eq!(items[0].label(), "plain line");
        assert_eq!(items[1].label(), "json");
        assert_eq!(items[2].label(), "another plain");
    }

    #[tokio::test]
    async fn skips_empty_lines() {
        let items = parse("foo\n\n\nbar\n").await;
        assert_eq!(items.len(), 2);
    }

    // Malformed JSON must degrade to a plain-text item, never an error.
    #[tokio::test]
    async fn invalid_json_treated_as_text() {
        let items = parse("{not valid json}\n").await;
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].label(), "{not valid json}");
    }

    #[tokio::test]
    async fn empty_input() {
        let items = parse("").await;
        assert!(items.is_empty());
    }

    // A bare JSON string parses as JSON; its label is the unquoted text.
    #[tokio::test]
    async fn json_string_values() {
        let items = parse("\"quoted string\"\n").await;
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].label(), "quoted string");
    }
}
|
||||||
69
crates/pikl-core/src/runtime/json_menu.rs
Normal file
69
crates/pikl-core/src/runtime/json_menu.rs
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
//! JSON-backed menu implementation. Wraps `Vec<Item>` with
|
||||||
|
//! pipeline filtering. This is the default backend
|
||||||
|
//! for `ls | pikl` style usage.
|
||||||
|
|
||||||
|
use crate::filter::Filter;
|
||||||
|
use crate::item::Item;
|
||||||
|
use crate::model::traits::Menu;
|
||||||
|
use crate::pipeline::FilterPipeline;
|
||||||
|
|
||||||
|
/// A menu backed by a flat list of JSON items. Handles
|
||||||
|
/// filtering internally using the [`FilterPipeline`] which
|
||||||
|
/// supports fuzzy, exact, regex, and `|`-chained queries.
|
||||||
|
/// The `label_key` controls which JSON key is used for
|
||||||
|
/// display labels on object items.
|
||||||
|
pub struct JsonMenu {
|
||||||
|
items: Vec<Item>,
|
||||||
|
label_key: String,
|
||||||
|
filter: FilterPipeline,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl JsonMenu {
|
||||||
|
/// Create a new JSON menu with the given items and label key.
|
||||||
|
pub fn new(items: Vec<Item>, label_key: String) -> Self {
|
||||||
|
let mut filter = FilterPipeline::new();
|
||||||
|
for (i, item) in items.iter().enumerate() {
|
||||||
|
filter.push(i, item.label());
|
||||||
|
}
|
||||||
|
Self {
|
||||||
|
items,
|
||||||
|
label_key,
|
||||||
|
filter,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Menu for JsonMenu {
|
||||||
|
fn total(&self) -> usize {
|
||||||
|
self.items.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn apply_filter(&mut self, query: &str) {
|
||||||
|
self.filter.set_query(query);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn filtered_count(&self) -> usize {
|
||||||
|
self.filter.matched_count()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn filtered_label(&self, filtered_index: usize) -> Option<&str> {
|
||||||
|
self.filter
|
||||||
|
.matched_index(filtered_index)
|
||||||
|
.map(|idx| self.items[idx].label())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_raw(&mut self, values: Vec<serde_json::Value>) {
|
||||||
|
for value in values {
|
||||||
|
let idx = self.items.len();
|
||||||
|
let item = Item::new(value, &self.label_key);
|
||||||
|
self.filter.push(idx, item.label());
|
||||||
|
self.items.push(item);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serialize_filtered(&self, filtered_index: usize) -> Option<&serde_json::Value> {
|
||||||
|
self.filter
|
||||||
|
.matched_index(filtered_index)
|
||||||
|
.map(|idx| &self.items[idx].value)
|
||||||
|
}
|
||||||
|
}
|
||||||
736
crates/pikl-core/src/runtime/menu.rs
Normal file
736
crates/pikl-core/src/runtime/menu.rs
Normal file
@@ -0,0 +1,736 @@
|
|||||||
|
//! The main event loop. [`MenuRunner`] wraps any [`Menu`]
|
||||||
|
//! implementation, drives the action/event channel loop,
|
||||||
|
//! and broadcasts state changes. Frontends never mutate
|
||||||
|
//! state directly. They send actions and react to events.
|
||||||
|
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use tokio::sync::{broadcast, mpsc};
|
||||||
|
|
||||||
|
use crate::error::PiklError;
|
||||||
|
use crate::event::{Action, MenuEvent, MenuResult, Mode, ViewState, VisibleItem};
|
||||||
|
use crate::model::traits::Menu;
|
||||||
|
use crate::navigation::Viewport;
|
||||||
|
use serde_json::Value;
|
||||||
|
|
||||||
|
/// Result of applying a single action to the menu state.
/// The run loop uses this to decide what to broadcast.
#[derive(Debug)]
pub enum ActionOutcome {
    /// State changed, broadcast to subscribers.
    Broadcast,
    /// User confirmed a selection. Carries the serialized JSON
    /// of the chosen item.
    Selected(Value),
    /// User cancelled.
    Cancelled,
    /// Nothing happened (e.g. confirm on empty list). No event
    /// is emitted and the loop keeps running.
    NoOp,
}
|
||||||
|
|
||||||
|
/// The menu engine. Wraps any [`Menu`] implementation and
/// drives it with an action/event channel loop. Create one,
/// grab the action sender and event subscriber, then call
/// [`MenuRunner::run`] to start the event loop.
pub struct MenuRunner<M: Menu> {
    // The backend holding items and filter results.
    menu: M,
    // Cursor + scroll window over the filtered results.
    viewport: Viewport,
    // Current filter text; Arc<str> so ViewState snapshots can
    // share it without copying.
    filter_text: Arc<str>,
    // Current UI mode, echoed back in every ViewState.
    mode: Mode,
    // Receiving end of the action channel created in `new`.
    action_rx: mpsc::Receiver<Action>,
    // Broadcast side for state/selection/cancel events.
    event_tx: broadcast::Sender<MenuEvent>,
}
|
||||||
|
|
||||||
|
impl<M: Menu> MenuRunner<M> {
    /// Create a menu runner wrapping the given menu backend.
    /// Returns the runner and an action sender. Call
    /// [`subscribe`](Self::subscribe) to get an event handle,
    /// then [`run`](Self::run) to start the event loop.
    pub fn new(menu: M) -> (Self, mpsc::Sender<Action>) {
        // 256 pending actions is ample for keypress-rate input.
        let (action_tx, action_rx) = mpsc::channel(256);
        // 1024 slots: large enough that a burst of rapid state changes
        // (e.g. streaming AddItems + filter updates) won't cause lag for
        // subscribers. If a subscriber does fall behind, it gets a Lagged
        // error and can catch up from the next StateChanged.
        let (event_tx, _) = broadcast::channel(1024);
        let runner = Self {
            menu,
            viewport: Viewport::new(),
            filter_text: Arc::from(""),
            mode: Mode::default(),
            action_rx,
            event_tx,
        };
        (runner, action_tx)
    }

    /// Subscribe to menu events. Returns a broadcast receiver
    /// that gets state changes, selections, and cancellations.
    pub fn subscribe(&self) -> broadcast::Receiver<MenuEvent> {
        self.event_tx.subscribe()
    }

    /// Re-run the filter against all items with the current
    /// filter text. Updates the viewport with the new count.
    fn run_filter(&mut self) {
        self.menu.apply_filter(&self.filter_text);
        self.viewport.set_filtered_count(self.menu.filtered_count());
    }

    /// Build a [`ViewState`] snapshot from the current filter
    /// results and viewport position.
    fn build_view_state(&self) -> ViewState {
        let range = self.viewport.visible_range();
        let visible_items: Vec<VisibleItem> = range
            .clone()
            .filter_map(|i| {
                self.menu.filtered_label(i).map(|label| VisibleItem {
                    label: label.to_string(),
                    index: i,
                })
            })
            .collect();

        // Cursor is viewport-relative for rendering.
        // NOTE(review): assumes Viewport keeps cursor() >= range.start
        // whenever any items match — this subtraction underflows (panics
        // in debug builds) otherwise; confirm Viewport's invariant.
        let cursor = if self.menu.filtered_count() == 0 {
            0
        } else {
            self.viewport.cursor() - range.start
        };

        ViewState {
            visible_items,
            cursor,
            filter_text: Arc::clone(&self.filter_text),
            total_items: self.menu.total(),
            total_filtered: self.menu.filtered_count(),
            mode: self.mode,
        }
    }

    /// Send the current view state to all subscribers.
    fn broadcast_state(&self) {
        // send() only errors when there are no subscribers; that is a
        // legitimate state (headless use), so the result is ignored.
        let _ = self
            .event_tx
            .send(MenuEvent::StateChanged(self.build_view_state()));
    }

    /// Apply a single action to the menu state. Pure state
    /// transition: no channels, no async. Testable in isolation.
    pub fn apply_action(&mut self, action: Action) -> ActionOutcome {
        match action {
            Action::UpdateFilter(text) => {
                self.filter_text = Arc::from(text);
                self.run_filter();
                ActionOutcome::Broadcast
            }
            Action::MoveUp(n) => {
                self.viewport.move_up(n);
                ActionOutcome::Broadcast
            }
            Action::MoveDown(n) => {
                self.viewport.move_down(n);
                ActionOutcome::Broadcast
            }
            Action::MoveToTop => {
                self.viewport.move_to_top();
                ActionOutcome::Broadcast
            }
            Action::MoveToBottom => {
                self.viewport.move_to_bottom();
                ActionOutcome::Broadcast
            }
            Action::PageUp(n) => {
                self.viewport.page_up(n);
                ActionOutcome::Broadcast
            }
            Action::PageDown(n) => {
                self.viewport.page_down(n);
                ActionOutcome::Broadcast
            }
            Action::Confirm => {
                if self.menu.filtered_count() == 0 {
                    return ActionOutcome::NoOp;
                }
                // cursor is an absolute index into the filtered results,
                // matching serialize_filtered's expectation.
                let cursor = self.viewport.cursor();
                match self.menu.serialize_filtered(cursor) {
                    Some(value) => ActionOutcome::Selected(value.clone()),
                    None => ActionOutcome::NoOp,
                }
            }
            Action::Cancel => ActionOutcome::Cancelled,
            Action::Resize { height } => {
                // Widening cast (height is a small integer type); cannot
                // truncate.
                self.viewport.set_height(height as usize);
                ActionOutcome::Broadcast
            }
            Action::HalfPageUp(n) => {
                self.viewport.half_page_up(n);
                ActionOutcome::Broadcast
            }
            Action::HalfPageDown(n) => {
                self.viewport.half_page_down(n);
                ActionOutcome::Broadcast
            }
            Action::SetMode(m) => {
                self.mode = m;
                ActionOutcome::Broadcast
            }
            Action::AddItems(values) => {
                // New items must pass through the current filter before
                // the next snapshot, hence the immediate re-filter.
                self.menu.add_raw(values);
                self.run_filter();
                ActionOutcome::Broadcast
            }
        }
    }

    /// Set the initial mode before running the event loop.
    /// Used by `--start-mode` CLI flag.
    pub fn set_initial_mode(&mut self, mode: Mode) {
        self.mode = mode;
    }

    /// Run the menu event loop. Consumes actions and
    /// broadcasts events.
    ///
    /// **Ordering guarantee:** Actions are processed
    /// sequentially in the order received. Each action's
    /// state change is fully applied before the next action
    /// begins. A `Confirm` sent right after an
    /// `UpdateFilter` will always select from the filtered
    /// results, never stale pre-filter state.
    ///
    /// This holds regardless of how actions are sent (TUI
    /// keypresses, headless scripts, programmatic sends).
    /// It's enforced by the single `recv()` loop below and
    /// must be preserved by any future refactors.
    pub async fn run(mut self) -> Result<MenuResult, PiklError> {
        // Publish the initial snapshot so subscribers render before
        // the first user action arrives.
        self.run_filter();
        self.broadcast_state();

        while let Some(action) = self.action_rx.recv().await {
            match self.apply_action(action) {
                ActionOutcome::Broadcast => self.broadcast_state(),
                ActionOutcome::Selected(value) => {
                    // Mirror the result on the event channel for passive
                    // observers, then end the loop with the selection.
                    let _ = self.event_tx.send(MenuEvent::Selected(value.clone()));
                    return Ok(MenuResult::Selected(value));
                }
                ActionOutcome::Cancelled => {
                    let _ = self.event_tx.send(MenuEvent::Cancelled);
                    return Ok(MenuResult::Cancelled);
                }
                ActionOutcome::NoOp => {}
            }
        }

        // Sender dropped
        Ok(MenuResult::Cancelled)
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use crate::event::MenuEvent;
|
||||||
|
use crate::item::Item;
|
||||||
|
use crate::runtime::json_menu::JsonMenu;
|
||||||
|
|
||||||
|
fn test_menu() -> (MenuRunner<JsonMenu>, mpsc::Sender<Action>) {
|
||||||
|
let items = vec![
|
||||||
|
Item::from_plain_text("alpha"),
|
||||||
|
Item::from_plain_text("beta"),
|
||||||
|
Item::from_plain_text("gamma"),
|
||||||
|
Item::from_plain_text("delta"),
|
||||||
|
];
|
||||||
|
MenuRunner::new(JsonMenu::new(items, "label".to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set up a menu runner with filter applied and viewport sized.
|
||||||
|
/// Ready for sync apply_action tests.
|
||||||
|
fn ready_menu() -> MenuRunner<JsonMenu> {
|
||||||
|
let (mut m, _tx) = test_menu();
|
||||||
|
m.run_filter();
|
||||||
|
m.apply_action(Action::Resize { height: 10 });
|
||||||
|
m
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Sync apply_action tests --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_move_down_updates_viewport() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
let outcome = m.apply_action(Action::MoveDown(1));
|
||||||
|
assert!(matches!(outcome, ActionOutcome::Broadcast));
|
||||||
|
assert_eq!(m.viewport.cursor(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_move_up_updates_viewport() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
m.apply_action(Action::MoveDown(2));
|
||||||
|
let outcome = m.apply_action(Action::MoveUp(1));
|
||||||
|
assert!(matches!(outcome, ActionOutcome::Broadcast));
|
||||||
|
assert_eq!(m.viewport.cursor(), 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_filter_changes_results() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
let outcome = m.apply_action(Action::UpdateFilter("al".to_string()));
|
||||||
|
assert!(matches!(outcome, ActionOutcome::Broadcast));
|
||||||
|
assert_eq!(&*m.filter_text, "al");
|
||||||
|
// alpha matches "al"
|
||||||
|
assert!(m.menu.filtered_count() >= 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_confirm_returns_selected() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
m.apply_action(Action::MoveDown(1));
|
||||||
|
let outcome = m.apply_action(Action::Confirm);
|
||||||
|
assert!(matches!(&outcome, ActionOutcome::Selected(v) if v.as_str() == Some("beta")));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_confirm_on_empty_is_noop() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
m.apply_action(Action::UpdateFilter("zzzzz".to_string()));
|
||||||
|
assert_eq!(m.menu.filtered_count(), 0);
|
||||||
|
let outcome = m.apply_action(Action::Confirm);
|
||||||
|
assert!(matches!(outcome, ActionOutcome::NoOp));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_cancel_returns_cancelled() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
let outcome = m.apply_action(Action::Cancel);
|
||||||
|
assert!(matches!(outcome, ActionOutcome::Cancelled));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_resize_updates_viewport() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
let outcome = m.apply_action(Action::Resize { height: 3 });
|
||||||
|
assert!(matches!(outcome, ActionOutcome::Broadcast));
|
||||||
|
assert_eq!(m.viewport.visible_range(), 0..3);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_add_items_runs_filter() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
assert_eq!(m.menu.total(), 4);
|
||||||
|
let outcome = m.apply_action(Action::AddItems(vec![serde_json::Value::String(
|
||||||
|
"epsilon".to_string(),
|
||||||
|
)]));
|
||||||
|
assert!(matches!(outcome, ActionOutcome::Broadcast));
|
||||||
|
assert_eq!(m.menu.total(), 5);
|
||||||
|
assert_eq!(m.menu.filtered_count(), 5);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_move_to_top_and_bottom() {
|
||||||
|
let mut m = ready_menu();
|
||||||
|
m.apply_action(Action::MoveToBottom);
|
||||||
|
assert_eq!(m.viewport.cursor(), 3);
|
||||||
|
m.apply_action(Action::MoveToTop);
|
||||||
|
assert_eq!(m.viewport.cursor(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn apply_page_movement() {
|
||||||
|
let (mut m, _tx) = test_menu();
|
||||||
|
m.run_filter();
|
||||||
|
m.apply_action(Action::Resize { height: 2 });
|
||||||
|
m.apply_action(Action::PageDown(1));
|
||||||
|
assert_eq!(m.viewport.cursor(), 2);
|
||||||
|
m.apply_action(Action::PageUp(1));
|
||||||
|
assert_eq!(m.viewport.cursor(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn initial_state_broadcast() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
// Should receive initial state
|
||||||
|
if let Ok(MenuEvent::StateChanged(vs)) = rx.recv().await {
|
||||||
|
assert_eq!(vs.total_items, 4);
|
||||||
|
assert_eq!(vs.total_filtered, 4);
|
||||||
|
assert_eq!(vs.cursor, 0);
|
||||||
|
assert!(vs.filter_text.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cancel to exit
|
||||||
|
let _ = tx.send(Action::Cancel).await;
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Cancelled)));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn filter_updates() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
// Skip initial state
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Filter to "al"
|
||||||
|
let _ = tx.send(Action::UpdateFilter("al".to_string())).await;
|
||||||
|
if let Ok(MenuEvent::StateChanged(vs)) = rx.recv().await {
|
||||||
|
assert_eq!(vs.total_items, 4);
|
||||||
|
// alpha should match "al"
|
||||||
|
assert!(vs.total_filtered >= 1);
|
||||||
|
assert_eq!(&*vs.filter_text, "al");
|
||||||
|
}
|
||||||
|
|
||||||
|
let _ = tx.send(Action::Cancel).await;
|
||||||
|
let _ = handle.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn confirm_selection() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
// Skip initial state
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Need to send resize first so viewport has height
|
||||||
|
let _ = tx.send(Action::Resize { height: 10 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Move down and confirm
|
||||||
|
let _ = tx.send(Action::MoveDown(1)).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
// Should get Selected event
|
||||||
|
if let Ok(MenuEvent::Selected(value)) = rx.recv().await {
|
||||||
|
assert_eq!(value.as_str(), Some("beta"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Selected(_))));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn confirm_on_empty_is_noop() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
|
||||||
|
// Filter to something that matches nothing
|
||||||
|
let _ = tx.send(Action::UpdateFilter("zzzzz".to_string())).await;
|
||||||
|
if let Ok(MenuEvent::StateChanged(vs)) = rx.recv().await {
|
||||||
|
assert_eq!(vs.total_filtered, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Confirm should be no-op
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
// Cancel to exit (should still work)
|
||||||
|
let _ = tx.send(Action::Cancel).await;
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Cancelled)));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn sender_drop_cancels() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let _rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
// Drop the only sender.
|
||||||
|
drop(tx);
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Cancelled)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- End-to-end output correctness --
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn confirm_first_item_value() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial state
|
||||||
|
let _ = tx.send(Action::Resize { height: 10 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Confirm at cursor 0, should get "alpha"
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let event = rx.recv().await;
|
||||||
|
assert!(matches!(&event, Ok(MenuEvent::Selected(v)) if v.as_str() == Some("alpha")));
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("alpha")));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn confirm_third_item_value() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
let _ = tx.send(Action::Resize { height: 10 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Move down twice -> cursor at index 2 -> "gamma"
|
||||||
|
let _ = tx.send(Action::MoveDown(1)).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
let _ = tx.send(Action::MoveDown(1)).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("gamma")));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn filter_then_confirm_correct_item() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
let _ = tx.send(Action::Resize { height: 10 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Filter to "del", should match "delta"
|
||||||
|
let _ = tx.send(Action::UpdateFilter("del".to_string())).await;
|
||||||
|
if let Ok(MenuEvent::StateChanged(vs)) = rx.recv().await {
|
||||||
|
assert!(vs.total_filtered >= 1);
|
||||||
|
assert_eq!(vs.visible_items[0].label, "delta");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Confirm, should select "delta"
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("delta")));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn add_items_then_confirm_new_item() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
let _ = tx.send(Action::Resize { height: 10 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Add a new item
|
||||||
|
let _ = tx
|
||||||
|
.send(Action::AddItems(vec![serde_json::Value::String(
|
||||||
|
"epsilon".to_string(),
|
||||||
|
)]))
|
||||||
|
.await;
|
||||||
|
if let Ok(MenuEvent::StateChanged(vs)) = rx.recv().await {
|
||||||
|
assert_eq!(vs.total_items, 5);
|
||||||
|
assert_eq!(vs.total_filtered, 5);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter to "eps", only epsilon should match
|
||||||
|
let _ = tx.send(Action::UpdateFilter("eps".to_string())).await;
|
||||||
|
if let Ok(MenuEvent::StateChanged(vs)) = rx.recv().await {
|
||||||
|
assert!(vs.total_filtered >= 1);
|
||||||
|
assert_eq!(vs.visible_items[0].label, "epsilon");
|
||||||
|
}
|
||||||
|
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("epsilon")));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn cancel_returns_no_item() {
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
|
||||||
|
let _ = tx.send(Action::Cancel).await;
|
||||||
|
|
||||||
|
// Should get Cancelled event
|
||||||
|
assert!(matches!(rx.recv().await, Ok(MenuEvent::Cancelled)));
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
assert!(matches!(result, Ok(MenuResult::Cancelled)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Ordering invariant tests --
|
||||||
|
// These tests verify the sequential processing guarantee documented on
|
||||||
|
// MenuRunner::run(). Actions sent back-to-back without waiting for
|
||||||
|
// intermediate events must still be processed in order, each fully
|
||||||
|
// applied before the next begins.
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn actions_process_in_order_filter_then_confirm() {
|
||||||
|
// Send filter + confirm back-to-back with no recv between them.
|
||||||
|
// Confirm must act on the filtered results, not the pre-filter state.
|
||||||
|
let items = vec![
|
||||||
|
Item::from_plain_text("alpha"),
|
||||||
|
Item::from_plain_text("beta"),
|
||||||
|
Item::from_plain_text("banana"),
|
||||||
|
];
|
||||||
|
let (menu, tx) = MenuRunner::new(JsonMenu::new(items, "label".to_string()));
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
let _ = tx.send(Action::Resize { height: 50 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Back-to-back, no waiting between these
|
||||||
|
let _ = tx.send(Action::UpdateFilter("ban".to_string())).await;
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
// Must get "banana". Filter was applied before confirm ran.
|
||||||
|
assert!(matches!(
|
||||||
|
result,
|
||||||
|
Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("banana")
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn actions_process_in_order_move_then_confirm() {
|
||||||
|
// Send multiple moves + confirm back-to-back.
|
||||||
|
// Confirm must reflect the final cursor position.
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
let _ = tx.send(Action::Resize { height: 50 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// Three moves down back-to-back, then confirm
|
||||||
|
let _ = tx.send(Action::MoveDown(1)).await;
|
||||||
|
let _ = tx.send(Action::MoveDown(1)).await;
|
||||||
|
let _ = tx.send(Action::MoveDown(1)).await;
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
// Cursor at index 3 -> "delta"
|
||||||
|
assert!(matches!(
|
||||||
|
result,
|
||||||
|
Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("delta")
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- New action variant tests --

#[test]
fn apply_half_page_down() {
    let (mut m, _tx) = test_menu();
    m.run_filter();
    m.apply_action(Action::Resize { height: 4 });
    let outcome = m.apply_action(Action::HalfPageDown(1));
    assert!(matches!(outcome, ActionOutcome::Broadcast));
    // Half of a 4-row viewport is 2 rows.
    assert_eq!(m.viewport.cursor(), 2);
}

#[test]
fn apply_half_page_up() {
    let (mut m, _tx) = test_menu();
    m.run_filter();
    m.apply_action(Action::Resize { height: 4 });
    m.apply_action(Action::HalfPageDown(1));
    let outcome = m.apply_action(Action::HalfPageUp(1));
    assert!(matches!(outcome, ActionOutcome::Broadcast));
    assert_eq!(m.viewport.cursor(), 0);
}

#[test]
fn apply_set_mode_normal() {
    let mut m = ready_menu();
    let outcome = m.apply_action(Action::SetMode(Mode::Normal));
    assert!(matches!(outcome, ActionOutcome::Broadcast));
    assert_eq!(m.build_view_state().mode, Mode::Normal);
}

#[test]
fn apply_set_mode_insert() {
    let mut m = ready_menu();
    m.apply_action(Action::SetMode(Mode::Normal));
    let outcome = m.apply_action(Action::SetMode(Mode::Insert));
    assert!(matches!(outcome, ActionOutcome::Broadcast));
    assert_eq!(m.build_view_state().mode, Mode::Insert);
}

#[test]
fn apply_set_mode_preserves_filter() {
    let mut m = ready_menu();
    m.apply_action(Action::UpdateFilter("al".to_string()));
    let filtered_before = m.menu.filtered_count();
    let text_before = m.filter_text.clone();
    // Switching modes must not disturb filter state.
    m.apply_action(Action::SetMode(Mode::Normal));
    assert_eq!(&*m.filter_text, &*text_before);
    assert_eq!(m.menu.filtered_count(), filtered_before);
}

#[test]
fn set_initial_mode_works() {
    let (mut m, _tx) = test_menu();
    m.set_initial_mode(Mode::Normal);
    m.run_filter();
    assert_eq!(m.build_view_state().mode, Mode::Normal);
}

#[test]
fn view_state_mode_after_switch() {
    let mut m = ready_menu();
    m.apply_action(Action::SetMode(Mode::Normal));
    let state = m.build_view_state();
    assert_eq!(state.mode, Mode::Normal);
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn actions_process_in_order_add_items_then_filter_then_confirm() {
|
||||||
|
// AddItems + filter + confirm, all back-to-back.
|
||||||
|
// The filter must see the newly added items.
|
||||||
|
let (menu, tx) = test_menu();
|
||||||
|
let mut rx = menu.subscribe();
|
||||||
|
let handle = tokio::spawn(async move { menu.run().await });
|
||||||
|
|
||||||
|
let _ = rx.recv().await; // initial
|
||||||
|
let _ = tx.send(Action::Resize { height: 50 }).await;
|
||||||
|
let _ = rx.recv().await;
|
||||||
|
|
||||||
|
// All back-to-back
|
||||||
|
let _ = tx
|
||||||
|
.send(Action::AddItems(vec![serde_json::Value::String(
|
||||||
|
"zephyr".to_string(),
|
||||||
|
)]))
|
||||||
|
.await;
|
||||||
|
let _ = tx.send(Action::UpdateFilter("zep".to_string())).await;
|
||||||
|
let _ = tx.send(Action::Confirm).await;
|
||||||
|
|
||||||
|
let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
|
||||||
|
// Must find "zephyr". It was added before the filter ran.
|
||||||
|
assert!(matches!(
|
||||||
|
result,
|
||||||
|
Ok(MenuResult::Selected(ref v)) if v.as_str() == Some("zephyr")
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
4
crates/pikl-core/src/runtime/mod.rs
Normal file
4
crates/pikl-core/src/runtime/mod.rs
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
pub mod hook;
|
||||||
|
pub mod input;
|
||||||
|
pub mod json_menu;
|
||||||
|
pub mod menu;
|
||||||
146
crates/pikl-core/src/script/action_fd/error.rs
Normal file
146
crates/pikl-core/src/script/action_fd/error.rs
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
//! Error types for action-fd script parsing and validation.
|
||||||
|
//! Errors carry line numbers and source text for rustc-style
|
||||||
|
//! diagnostic output.
|
||||||
|
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
/// Parse or validation error from an action-fd script.
///
/// Carries the offending 1-based line number and its raw source text
/// so `Display` can render rustc-style diagnostics with carets.
#[derive(Debug)]
pub struct ScriptError {
    pub line: usize,
    pub source_line: String,
    pub kind: ScriptErrorKind,
}

/// The specific failure detected on a script line.
#[derive(Debug)]
pub enum ScriptErrorKind {
    /// The command word is not a recognized action.
    UnknownAction(String),
    /// The action is known but its argument is missing or malformed.
    InvalidArgument { action: String, message: String },
    /// A show-ui/show-tui/show-gui command was followed by more actions.
    ActionsAfterShowUi,
}
|
||||||
|
|
||||||
|
/// Emit the shared diagnostic preamble — the error message, an empty
/// gutter row, and the offending source line — and hand back the
/// gutter indent (one space per digit of `line`) so the caller can
/// align its annotation rows underneath.
fn write_diagnostic_header(
    f: &mut fmt::Formatter<'_>,
    error_msg: &str,
    line: usize,
    source: &str,
) -> Result<String, fmt::Error> {
    let indent = " ".repeat(line.to_string().len());
    writeln!(f, "error: {error_msg} on action-fd line {line}")?;
    writeln!(f, "{indent}|")?;
    writeln!(f, "{line} | {source}")?;
    Ok(indent)
}
|
||||||
|
|
||||||
|
impl fmt::Display for ScriptError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match &self.kind {
|
||||||
|
ScriptErrorKind::UnknownAction(action) => {
|
||||||
|
let indent =
|
||||||
|
write_diagnostic_header(f, "unknown action", self.line, &self.source_line)?;
|
||||||
|
let underline = "^".repeat(action.len().max(1));
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{indent}| {underline} not a valid action\n\
|
||||||
|
{indent}|\n\
|
||||||
|
{indent}= help: valid actions: filter, move-up, move-down, move-to-top, \
|
||||||
|
move-to-bottom, page-up, page-down, resize, confirm, cancel, \
|
||||||
|
show-ui, show-tui, show-gui",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
ScriptErrorKind::InvalidArgument { action, message } => {
|
||||||
|
let indent =
|
||||||
|
write_diagnostic_header(f, "invalid argument", self.line, &self.source_line)?;
|
||||||
|
let underline = "^".repeat(self.source_line.len().max(1));
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{indent}| {underline} {message}\n\
|
||||||
|
{indent}|\n\
|
||||||
|
{indent}= help: usage: {action} <value>",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
ScriptErrorKind::ActionsAfterShowUi => {
|
||||||
|
let indent = write_diagnostic_header(
|
||||||
|
f,
|
||||||
|
"actions after show-ui",
|
||||||
|
self.line,
|
||||||
|
&self.source_line,
|
||||||
|
)?;
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{indent}| show-ui/show-tui/show-gui must be the last action in the script\n\
|
||||||
|
{indent}|",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::error::Error for ScriptError {}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn display_unknown_action() {
        let err = ScriptError {
            line: 3,
            source_line: "bogus".to_string(),
            kind: ScriptErrorKind::UnknownAction("bogus".to_string()),
        };
        let rendered = err.to_string();
        assert!(rendered.contains("error: unknown action on action-fd line 3"));
        assert!(rendered.contains("bogus"));
        assert!(rendered.contains("not a valid action"));
        assert!(rendered.contains("help: valid actions:"));
    }

    #[test]
    fn display_invalid_argument() {
        let err = ScriptError {
            line: 5,
            source_line: "resize abc".to_string(),
            kind: ScriptErrorKind::InvalidArgument {
                action: "resize".to_string(),
                message: "'abc' is not a valid positive integer".to_string(),
            },
        };
        let rendered = err.to_string();
        assert!(rendered.contains("error: invalid argument on action-fd line 5"));
        assert!(rendered.contains("resize abc"));
    }

    #[test]
    fn display_gutter_width_multi_digit_line() {
        let err = ScriptError {
            line: 100,
            source_line: "bogus".to_string(),
            kind: ScriptErrorKind::UnknownAction("bogus".to_string()),
        };
        let rendered = err.to_string();
        // Line 100 has 3 digits, so the gutter indent is 3 spaces.
        assert!(rendered.contains("   |"));
        assert!(rendered.contains("100 | bogus"));
    }

    #[test]
    fn display_actions_after_show() {
        let err = ScriptError {
            line: 4,
            source_line: "confirm".to_string(),
            kind: ScriptErrorKind::ActionsAfterShowUi,
        };
        let rendered = err.to_string();
        assert!(rendered.contains("actions after show-ui on action-fd line 4"));
        assert!(rendered.contains("must be the last action"));
    }
}
|
||||||
107
crates/pikl-core/src/script/action_fd/mod.rs
Normal file
107
crates/pikl-core/src/script/action_fd/mod.rs
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
//! Action-fd scripting: drive the menu from a script piped
|
||||||
|
//! in via `--action-fd`.
|
||||||
|
//!
|
||||||
|
//! Scripts are line-oriented text, one action per line.
|
||||||
|
//! The script is parsed and validated upfront (fail-fast),
|
||||||
|
//! then replayed into the menu's action channel. A script
|
||||||
|
//! can optionally end with `show-ui`/`show-tui`/`show-gui`
|
||||||
|
//! to hand off to an interactive frontend after the scripted
|
||||||
|
//! actions complete.
|
||||||
|
|
||||||
|
pub mod error;
|
||||||
|
pub mod parse;
|
||||||
|
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
|
||||||
|
use crate::error::PiklError;
|
||||||
|
use crate::event::Action;
|
||||||
|
|
||||||
|
pub use error::{ScriptError, ScriptErrorKind};
|
||||||
|
pub use parse::{load_script, parse_action};
|
||||||
|
|
||||||
|
/// A parsed action from an action-fd script.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum ScriptAction {
|
||||||
|
Core(Action),
|
||||||
|
ShowUi,
|
||||||
|
ShowTui,
|
||||||
|
ShowGui,
|
||||||
|
Comment,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Which frontend the script asked to hand off to once the scripted
/// actions have been delivered.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ShowAction {
    Ui,
    Tui,
    Gui,
}
|
||||||
|
|
||||||
|
/// Send all Core actions from the script into the channel.
|
||||||
|
/// Returns the Show* variant if the script ends with one, or None.
|
||||||
|
pub async fn run_script(
|
||||||
|
script: Vec<ScriptAction>,
|
||||||
|
tx: &mpsc::Sender<Action>,
|
||||||
|
) -> Result<Option<ShowAction>, PiklError> {
|
||||||
|
let mut show = None;
|
||||||
|
|
||||||
|
for action in script {
|
||||||
|
match action {
|
||||||
|
ScriptAction::Core(action) => {
|
||||||
|
tx.send(action)
|
||||||
|
.await
|
||||||
|
.map_err(|_| PiklError::ChannelClosed)?;
|
||||||
|
}
|
||||||
|
ScriptAction::ShowUi => show = Some(ShowAction::Ui),
|
||||||
|
ScriptAction::ShowTui => show = Some(ShowAction::Tui),
|
||||||
|
ScriptAction::ShowGui => show = Some(ShowAction::Gui),
|
||||||
|
ScriptAction::Comment => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(show)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn run_script_sends_actions_in_order() {
        let script = vec![
            ScriptAction::Core(Action::UpdateFilter("hello".to_string())),
            ScriptAction::Core(Action::MoveDown(1)),
            ScriptAction::Core(Action::Confirm),
        ];
        let (tx, mut rx) = mpsc::channel(16);
        let outcome = run_script(script, &tx).await;
        assert!(outcome.is_ok());
        assert_eq!(outcome.unwrap_or(Some(ShowAction::Ui)), None);

        // Actions must come out of the channel in script order.
        assert!(matches!(rx.recv().await, Some(Action::UpdateFilter(s)) if s == "hello"));
        assert!(matches!(rx.recv().await, Some(Action::MoveDown(1))));
        assert!(matches!(rx.recv().await, Some(Action::Confirm)));
    }

    #[tokio::test]
    async fn run_script_returns_none_without_show() {
        let script = vec![ScriptAction::Core(Action::Confirm)];
        let (tx, _rx) = mpsc::channel(16);
        let outcome = run_script(script, &tx).await;
        assert!(outcome.is_ok());
        assert_eq!(outcome.unwrap_or(Some(ShowAction::Ui)), None);
    }

    #[tokio::test]
    async fn run_script_returns_show_action() {
        let script = vec![
            ScriptAction::Core(Action::UpdateFilter("test".to_string())),
            ScriptAction::ShowUi,
        ];
        let (tx, _rx) = mpsc::channel(16);
        let outcome = run_script(script, &tx).await;
        assert!(outcome.is_ok());
        assert_eq!(outcome.unwrap_or(None), Some(ShowAction::Ui));
    }
}
|
||||||
687
crates/pikl-core/src/script/action_fd/parse.rs
Normal file
687
crates/pikl-core/src/script/action_fd/parse.rs
Normal file
@@ -0,0 +1,687 @@
|
|||||||
|
//! Line-oriented parser for action-fd scripts. Handles
|
||||||
|
//! single-line parsing, count arguments, and full script
|
||||||
|
//! loading with validation.
|
||||||
|
|
||||||
|
use std::io::BufRead;
|
||||||
|
|
||||||
|
use crate::event::{Action, Mode};
|
||||||
|
|
||||||
|
use super::ScriptAction;
|
||||||
|
use super::error::{ScriptError, ScriptErrorKind};
|
||||||
|
|
||||||
|
/// Build an InvalidArgument error for the given action and message.
|
||||||
|
fn invalid_arg(line_number: usize, line: &str, action: &str, message: String) -> ScriptError {
|
||||||
|
ScriptError {
|
||||||
|
line: line_number,
|
||||||
|
source_line: line.to_string(),
|
||||||
|
kind: ScriptErrorKind::InvalidArgument {
|
||||||
|
action: action.to_string(),
|
||||||
|
message,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse an optional positive integer count from a command argument.
|
||||||
|
/// Returns 1 if no argument is given.
|
||||||
|
fn parse_count(
|
||||||
|
line_number: usize,
|
||||||
|
line: &str,
|
||||||
|
action: &str,
|
||||||
|
arg: Option<&str>,
|
||||||
|
) -> Result<usize, ScriptError> {
|
||||||
|
match arg {
|
||||||
|
None => Ok(1),
|
||||||
|
Some(s) => match s.trim().parse::<usize>() {
|
||||||
|
Ok(0) => Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
action,
|
||||||
|
"count must be a positive number".to_string(),
|
||||||
|
)),
|
||||||
|
Ok(n) => Ok(n),
|
||||||
|
Err(_) => Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
action,
|
||||||
|
format!("'{}' is not a valid positive integer", s.trim()),
|
||||||
|
)),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a required positive u16 from a command argument.
|
||||||
|
/// Used for values like resize height.
|
||||||
|
fn parse_positive_u16(
|
||||||
|
line_number: usize,
|
||||||
|
line: &str,
|
||||||
|
action: &str,
|
||||||
|
arg: Option<&str>,
|
||||||
|
) -> Result<u16, ScriptError> {
|
||||||
|
let Some(s) = arg else {
|
||||||
|
return Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
action,
|
||||||
|
format!("missing {action} value"),
|
||||||
|
));
|
||||||
|
};
|
||||||
|
match s.trim().parse::<u16>() {
|
||||||
|
Ok(0) => Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
action,
|
||||||
|
format!("{action} must be a positive number"),
|
||||||
|
)),
|
||||||
|
Ok(n) => Ok(n),
|
||||||
|
Err(_) => Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
action,
|
||||||
|
format!("'{}' is not a valid positive integer", s.trim()),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a single line into a ScriptAction.
|
||||||
|
pub fn parse_action(line_number: usize, line: &str) -> Result<ScriptAction, ScriptError> {
|
||||||
|
let trimmed = line.trim();
|
||||||
|
|
||||||
|
if trimmed.is_empty() || trimmed.starts_with('#') {
|
||||||
|
return Ok(ScriptAction::Comment);
|
||||||
|
}
|
||||||
|
|
||||||
|
let (cmd, arg) = match trimmed.split_once(' ') {
|
||||||
|
Some((c, a)) => (c, Some(a)),
|
||||||
|
None => (trimmed, None),
|
||||||
|
};
|
||||||
|
|
||||||
|
match cmd {
|
||||||
|
"filter" => {
|
||||||
|
let text = arg.unwrap_or("");
|
||||||
|
Ok(ScriptAction::Core(Action::UpdateFilter(text.to_string())))
|
||||||
|
}
|
||||||
|
"move-up" => {
|
||||||
|
let n = parse_count(line_number, line, "move-up", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::MoveUp(n)))
|
||||||
|
}
|
||||||
|
"move-down" => {
|
||||||
|
let n = parse_count(line_number, line, "move-down", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::MoveDown(n)))
|
||||||
|
}
|
||||||
|
"move-to-top" => Ok(ScriptAction::Core(Action::MoveToTop)),
|
||||||
|
"move-to-bottom" => Ok(ScriptAction::Core(Action::MoveToBottom)),
|
||||||
|
"page-up" => {
|
||||||
|
let n = parse_count(line_number, line, "page-up", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::PageUp(n)))
|
||||||
|
}
|
||||||
|
"page-down" => {
|
||||||
|
let n = parse_count(line_number, line, "page-down", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::PageDown(n)))
|
||||||
|
}
|
||||||
|
"half-page-up" => {
|
||||||
|
let n = parse_count(line_number, line, "half-page-up", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::HalfPageUp(n)))
|
||||||
|
}
|
||||||
|
"half-page-down" => {
|
||||||
|
let n = parse_count(line_number, line, "half-page-down", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::HalfPageDown(n)))
|
||||||
|
}
|
||||||
|
"set-mode" => {
|
||||||
|
let Some(mode_str) = arg else {
|
||||||
|
return Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
"set-mode",
|
||||||
|
"missing mode value (insert or normal)".to_string(),
|
||||||
|
));
|
||||||
|
};
|
||||||
|
match mode_str.trim() {
|
||||||
|
"insert" => Ok(ScriptAction::Core(Action::SetMode(Mode::Insert))),
|
||||||
|
"normal" => Ok(ScriptAction::Core(Action::SetMode(Mode::Normal))),
|
||||||
|
other => Err(invalid_arg(
|
||||||
|
line_number,
|
||||||
|
line,
|
||||||
|
"set-mode",
|
||||||
|
format!("unknown mode '{other}', expected insert or normal"),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"confirm" => Ok(ScriptAction::Core(Action::Confirm)),
|
||||||
|
"cancel" => Ok(ScriptAction::Core(Action::Cancel)),
|
||||||
|
"resize" => {
|
||||||
|
let height = parse_positive_u16(line_number, line, "resize", arg)?;
|
||||||
|
Ok(ScriptAction::Core(Action::Resize { height }))
|
||||||
|
}
|
||||||
|
"show-ui" => Ok(ScriptAction::ShowUi),
|
||||||
|
"show-tui" => Ok(ScriptAction::ShowTui),
|
||||||
|
"show-gui" => Ok(ScriptAction::ShowGui),
|
||||||
|
_ => Err(ScriptError {
|
||||||
|
line: line_number,
|
||||||
|
source_line: line.to_string(),
|
||||||
|
kind: ScriptErrorKind::UnknownAction(cmd.to_string()),
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Intermediate struct to track line numbers during script loading.
|
||||||
|
struct ParsedLine {
|
||||||
|
line_number: usize,
|
||||||
|
source: String,
|
||||||
|
action: ScriptAction,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Validate that show-* actions (if present) are the last
|
||||||
|
/// action in the script. Returns an error pointing at the
|
||||||
|
/// first action after a show-* command.
|
||||||
|
fn validate_show_last(actions: &[ParsedLine]) -> Result<(), ScriptError> {
|
||||||
|
if let Some(show_pos) = actions.iter().position(|p| {
|
||||||
|
matches!(
|
||||||
|
p.action,
|
||||||
|
ScriptAction::ShowUi | ScriptAction::ShowTui | ScriptAction::ShowGui
|
||||||
|
)
|
||||||
|
}) && show_pos < actions.len() - 1
|
||||||
|
{
|
||||||
|
let offender = &actions[show_pos + 1];
|
||||||
|
return Err(ScriptError {
|
||||||
|
line: offender.line_number,
|
||||||
|
source_line: offender.source.clone(),
|
||||||
|
kind: ScriptErrorKind::ActionsAfterShowUi,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load and validate an entire script from a reader.
|
||||||
|
/// Parses all lines, then validates (e.g. nothing after show-ui).
|
||||||
|
/// Returns only the actionable entries (Comments filtered out).
|
||||||
|
pub fn load_script(reader: impl BufRead) -> Result<Vec<ScriptAction>, ScriptError> {
|
||||||
|
let mut parsed = Vec::new();
|
||||||
|
|
||||||
|
for (i, line) in reader.lines().enumerate() {
|
||||||
|
let line = line.map_err(|e| ScriptError {
|
||||||
|
line: i + 1,
|
||||||
|
source_line: String::new(),
|
||||||
|
kind: ScriptErrorKind::InvalidArgument {
|
||||||
|
action: String::new(),
|
||||||
|
message: e.to_string(),
|
||||||
|
},
|
||||||
|
})?;
|
||||||
|
let action = parse_action(i + 1, &line)?;
|
||||||
|
parsed.push(ParsedLine {
|
||||||
|
line_number: i + 1,
|
||||||
|
source: line,
|
||||||
|
action,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter out comments
|
||||||
|
let actionable: Vec<ParsedLine> = parsed
|
||||||
|
.into_iter()
|
||||||
|
.filter(|p| !matches!(p.action, ScriptAction::Comment))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
validate_show_last(&actionable)?;
|
||||||
|
|
||||||
|
Ok(actionable.into_iter().map(|p| p.action).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
// -- parse_action tests --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_filter_with_text() {
|
||||||
|
let result = parse_action(1, "filter hello world");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(
|
||||||
|
result.unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::UpdateFilter("hello world".to_string()))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_filter_no_arg_clears() {
|
||||||
|
let result = parse_action(1, "filter");
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert_eq!(
|
||||||
|
result.unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::UpdateFilter(String::new()))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_movement_actions() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "move-up").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::MoveUp(1))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "move-down").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::MoveDown(1))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "move-to-top").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::MoveToTop)
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "move-to-bottom").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::MoveToBottom)
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "page-up").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::PageUp(1))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "page-down").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::PageDown(1))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_movement_with_count() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "move-up 5").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::MoveUp(5))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "move-down 3").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::MoveDown(3))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "page-up 2").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::PageUp(2))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "page-down 10").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::PageDown(10))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_movement_count_zero_is_error() {
|
||||||
|
assert!(parse_action(1, "move-up 0").is_err());
|
||||||
|
assert!(parse_action(1, "move-down 0").is_err());
|
||||||
|
assert!(parse_action(1, "page-up 0").is_err());
|
||||||
|
assert!(parse_action(1, "page-down 0").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_movement_count_invalid_is_error() {
|
||||||
|
assert!(parse_action(1, "move-up abc").is_err());
|
||||||
|
assert!(parse_action(1, "page-down -1").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_confirm_cancel() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "confirm").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::Confirm)
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "cancel").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::Cancel)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_resize_valid() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "resize 25").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::Resize { height: 25 })
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_resize_no_arg() {
|
||||||
|
let result = parse_action(3, "resize");
|
||||||
|
assert!(result.is_err());
|
||||||
|
if let Err(e) = result {
|
||||||
|
assert_eq!(e.line, 3);
|
||||||
|
assert!(matches!(e.kind, ScriptErrorKind::InvalidArgument { .. }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_resize_not_a_number() {
|
||||||
|
let result = parse_action(2, "resize abc");
|
||||||
|
assert!(result.is_err());
|
||||||
|
if let Err(e) = result {
|
||||||
|
assert_eq!(e.line, 2);
|
||||||
|
assert!(matches!(e.kind, ScriptErrorKind::InvalidArgument { .. }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_resize_zero() {
|
||||||
|
let result = parse_action(1, "resize 0");
|
||||||
|
assert!(result.is_err());
|
||||||
|
if let Err(e) = result {
|
||||||
|
assert!(matches!(e.kind, ScriptErrorKind::InvalidArgument { .. }));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_show_actions() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "show-ui").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::ShowUi
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "show-tui").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::ShowTui
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "show-gui").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::ShowGui
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_comment_and_blank() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "").unwrap_or(ScriptAction::ShowUi),
|
||||||
|
ScriptAction::Comment
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "# this is a comment").unwrap_or(ScriptAction::ShowUi),
|
||||||
|
ScriptAction::Comment
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, " ").unwrap_or(ScriptAction::ShowUi),
|
||||||
|
ScriptAction::Comment
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_half_page_actions() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "half-page-up").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::HalfPageUp(1))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "half-page-down").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::HalfPageDown(1))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "half-page-up 3").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::HalfPageUp(3))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "half-page-down 2").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::HalfPageDown(2))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_half_page_zero_is_error() {
|
||||||
|
assert!(parse_action(1, "half-page-up 0").is_err());
|
||||||
|
assert!(parse_action(1, "half-page-down 0").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_set_mode() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "set-mode insert").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::SetMode(Mode::Insert))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "set-mode normal").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::SetMode(Mode::Normal))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_set_mode_missing_arg() {
|
||||||
|
assert!(parse_action(1, "set-mode").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_set_mode_invalid() {
|
||||||
|
assert!(parse_action(1, "set-mode visual").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_unknown_action() {
|
||||||
|
let result = parse_action(3, "bogus");
|
||||||
|
assert!(result.is_err());
|
||||||
|
if let Err(e) = result {
|
||||||
|
assert_eq!(e.line, 3);
|
||||||
|
assert_eq!(e.source_line, "bogus");
|
||||||
|
assert!(matches!(e.kind, ScriptErrorKind::UnknownAction(ref s) if s == "bogus"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- parse_positive_u16 tests --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_valid() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_positive_u16(1, "resize 25", "resize", Some("25")).unwrap(),
|
||||||
|
25
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_zero_is_error() {
|
||||||
|
assert!(parse_positive_u16(1, "resize 0", "resize", Some("0")).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_negative_is_error() {
|
||||||
|
assert!(parse_positive_u16(1, "resize -1", "resize", Some("-1")).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_non_numeric_is_error() {
|
||||||
|
assert!(parse_positive_u16(1, "resize abc", "resize", Some("abc")).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_missing_is_error() {
|
||||||
|
assert!(parse_positive_u16(1, "resize", "resize", None).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_max() {
|
||||||
|
assert_eq!(
|
||||||
|
parse_positive_u16(1, "resize 65535", "resize", Some("65535")).unwrap(),
|
||||||
|
u16::MAX
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_positive_u16_overflow_is_error() {
|
||||||
|
assert!(parse_positive_u16(1, "resize 65536", "resize", Some("65536")).is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- validate_show_last tests --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn validate_show_last_empty() {
|
||||||
|
assert!(validate_show_last(&[]).is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn validate_show_last_at_end() {
|
||||||
|
let actions = vec![
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 1,
|
||||||
|
source: "filter test".into(),
|
||||||
|
action: ScriptAction::Core(Action::UpdateFilter("test".into())),
|
||||||
|
},
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 2,
|
||||||
|
source: "show-ui".into(),
|
||||||
|
action: ScriptAction::ShowUi,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assert!(validate_show_last(&actions).is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn validate_show_last_at_start_with_more() {
|
||||||
|
let actions = vec![
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 1,
|
||||||
|
source: "show-ui".into(),
|
||||||
|
action: ScriptAction::ShowUi,
|
||||||
|
},
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 2,
|
||||||
|
source: "confirm".into(),
|
||||||
|
action: ScriptAction::Core(Action::Confirm),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let err = validate_show_last(&actions).unwrap_err();
|
||||||
|
assert!(matches!(err.kind, ScriptErrorKind::ActionsAfterShowUi));
|
||||||
|
assert_eq!(err.line, 2);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn validate_show_last_in_middle() {
|
||||||
|
let actions = vec![
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 1,
|
||||||
|
source: "filter x".into(),
|
||||||
|
action: ScriptAction::Core(Action::UpdateFilter("x".into())),
|
||||||
|
},
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 2,
|
||||||
|
source: "show-tui".into(),
|
||||||
|
action: ScriptAction::ShowTui,
|
||||||
|
},
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 3,
|
||||||
|
source: "confirm".into(),
|
||||||
|
action: ScriptAction::Core(Action::Confirm),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
let err = validate_show_last(&actions).unwrap_err();
|
||||||
|
assert!(matches!(err.kind, ScriptErrorKind::ActionsAfterShowUi));
|
||||||
|
assert_eq!(err.line, 3);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn validate_show_last_only_show() {
|
||||||
|
let actions = vec![ParsedLine {
|
||||||
|
line_number: 1,
|
||||||
|
source: "show-gui".into(),
|
||||||
|
action: ScriptAction::ShowGui,
|
||||||
|
}];
|
||||||
|
assert!(validate_show_last(&actions).is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn validate_show_last_no_show() {
|
||||||
|
let actions = vec![
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 1,
|
||||||
|
source: "filter x".into(),
|
||||||
|
action: ScriptAction::Core(Action::UpdateFilter("x".into())),
|
||||||
|
},
|
||||||
|
ParsedLine {
|
||||||
|
line_number: 2,
|
||||||
|
source: "confirm".into(),
|
||||||
|
action: ScriptAction::Core(Action::Confirm),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assert!(validate_show_last(&actions).is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- load_script tests --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_script_with_show_ui_last() {
|
||||||
|
let input = "filter hello\nmove-down\nshow-ui\n";
|
||||||
|
let result = load_script(input.as_bytes());
|
||||||
|
assert!(result.is_ok());
|
||||||
|
let actions = result.unwrap_or_default();
|
||||||
|
assert_eq!(actions.len(), 3);
|
||||||
|
assert_eq!(actions[2], ScriptAction::ShowUi);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_script_actions_after_show_ui() {
|
||||||
|
let input = "show-ui\nconfirm\n";
|
||||||
|
let result = load_script(input.as_bytes());
|
||||||
|
assert!(result.is_err());
|
||||||
|
if let Err(e) = result {
|
||||||
|
assert!(matches!(e.kind, ScriptErrorKind::ActionsAfterShowUi));
|
||||||
|
assert_eq!(e.line, 2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_script_only_comments() {
|
||||||
|
let input = "# comment\n\n# another\n";
|
||||||
|
let result = load_script(input.as_bytes());
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert!(result.unwrap_or(vec![ScriptAction::Comment]).is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_script_comments_after_show_ui_ok() {
|
||||||
|
// Comments are filtered before validation, so trailing comments are fine
|
||||||
|
let input = "filter test\nshow-ui\n# trailing comment\n";
|
||||||
|
let result = load_script(input.as_bytes());
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_script_empty() {
|
||||||
|
let input = "";
|
||||||
|
let result = load_script(input.as_bytes());
|
||||||
|
assert!(result.is_ok());
|
||||||
|
assert!(result.unwrap_or(vec![ScriptAction::Comment]).is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- New action edge case tests --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_half_page_non_numeric() {
|
||||||
|
assert!(parse_action(1, "half-page-down abc").is_err());
|
||||||
|
assert!(parse_action(1, "half-page-up xyz").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_set_mode_wrong_case() {
|
||||||
|
assert!(parse_action(1, "set-mode Insert").is_err());
|
||||||
|
assert!(parse_action(1, "set-mode Normal").is_err());
|
||||||
|
assert!(parse_action(1, "set-mode INSERT").is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_set_mode_whitespace() {
|
||||||
|
// Extra whitespace around the mode value should be trimmed
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "set-mode insert ").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::SetMode(Mode::Insert))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
parse_action(1, "set-mode normal ").unwrap_or(ScriptAction::Comment),
|
||||||
|
ScriptAction::Core(Action::SetMode(Mode::Normal))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn load_script_new_actions() {
|
||||||
|
let input = "half-page-up 2\nset-mode normal\nset-mode insert\nhalf-page-down\nconfirm\n";
|
||||||
|
let result = load_script(input.as_bytes());
|
||||||
|
assert!(result.is_ok());
|
||||||
|
let actions = result.unwrap_or_default();
|
||||||
|
assert_eq!(actions.len(), 5);
|
||||||
|
assert_eq!(actions[0], ScriptAction::Core(Action::HalfPageUp(2)));
|
||||||
|
assert_eq!(
|
||||||
|
actions[1],
|
||||||
|
ScriptAction::Core(Action::SetMode(Mode::Normal))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
actions[2],
|
||||||
|
ScriptAction::Core(Action::SetMode(Mode::Insert))
|
||||||
|
);
|
||||||
|
assert_eq!(actions[3], ScriptAction::Core(Action::HalfPageDown(1)));
|
||||||
|
assert_eq!(actions[4], ScriptAction::Core(Action::Confirm));
|
||||||
|
}
|
||||||
|
}
|
||||||
1
crates/pikl-core/src/script/mod.rs
Normal file
1
crates/pikl-core/src/script/mod.rs
Normal file
@@ -0,0 +1 @@
|
|||||||
|
pub mod action_fd;
|
||||||
298
crates/pikl-core/tests/dsl_tests.rs
Normal file
298
crates/pikl-core/tests/dsl_tests.rs
Normal file
@@ -0,0 +1,298 @@
|
|||||||
|
use pikl_test_macros::pikl_tests;
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Filter tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
pikl_tests! {
|
||||||
|
filter mod fuzzy_basics {
|
||||||
|
items: ["apple", "banana", "cherry"];
|
||||||
|
|
||||||
|
test empty_query_returns_all {
|
||||||
|
query: ""
|
||||||
|
matches: ["apple", "banana", "cherry"]
|
||||||
|
}
|
||||||
|
|
||||||
|
test no_results {
|
||||||
|
query: "xyz"
|
||||||
|
matches: []
|
||||||
|
}
|
||||||
|
|
||||||
|
test substring_match {
|
||||||
|
query: "ban"
|
||||||
|
matches: ["banana"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
filter mod empty_items {
|
||||||
|
items: [];
|
||||||
|
|
||||||
|
test query_on_empty {
|
||||||
|
query: "test"
|
||||||
|
matches: []
|
||||||
|
}
|
||||||
|
|
||||||
|
test empty_query_on_empty {
|
||||||
|
query: ""
|
||||||
|
matches: []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Navigation tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
pikl_tests! {
|
||||||
|
nav mod basic_movement {
|
||||||
|
viewport: { height: 5, count: 10 };
|
||||||
|
|
||||||
|
test initial_state {
|
||||||
|
actions: []
|
||||||
|
cursor: 0
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_down_once {
|
||||||
|
actions: [move-down]
|
||||||
|
cursor: 1
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_down_past_viewport {
|
||||||
|
actions: [move-down, move-down, move-down, move-down, move-down]
|
||||||
|
cursor: 5
|
||||||
|
offset: 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_up_at_top_stays {
|
||||||
|
actions: [move-up]
|
||||||
|
cursor: 0
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_down_then_up {
|
||||||
|
actions: [move-down, move-down, move-up]
|
||||||
|
cursor: 1
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
nav mod jumps {
|
||||||
|
viewport: { height: 5, count: 20 };
|
||||||
|
|
||||||
|
test move_to_top {
|
||||||
|
actions: [move-down, move-down, move-down, move-to-top]
|
||||||
|
cursor: 0
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_to_bottom {
|
||||||
|
actions: [move-to-bottom]
|
||||||
|
cursor: 19
|
||||||
|
offset: 15
|
||||||
|
}
|
||||||
|
|
||||||
|
test page_down_from_top {
|
||||||
|
actions: [page-down]
|
||||||
|
cursor: 5
|
||||||
|
offset: 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
nav mod small_list {
|
||||||
|
viewport: { height: 10, count: 3 };
|
||||||
|
|
||||||
|
test move_to_bottom_small {
|
||||||
|
actions: [move-to-bottom]
|
||||||
|
cursor: 2
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
test at_bottom_stays {
|
||||||
|
actions: [move-down, move-down, move-down]
|
||||||
|
cursor: 2
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
nav mod empty_list {
|
||||||
|
viewport: { height: 5, count: 0 };
|
||||||
|
|
||||||
|
test movement_on_empty {
|
||||||
|
actions: [move-down, move-up, page-down, page-up]
|
||||||
|
cursor: 0
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
nav mod half_page {
|
||||||
|
viewport: { height: 10, count: 30 };
|
||||||
|
|
||||||
|
test half_page_down_from_top {
|
||||||
|
actions: [half-page-down]
|
||||||
|
cursor: 5
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
|
||||||
|
test half_page_up_from_middle {
|
||||||
|
actions: [half-page-down, half-page-down, half-page-up]
|
||||||
|
cursor: 5
|
||||||
|
offset: 1
|
||||||
|
}
|
||||||
|
|
||||||
|
test half_page_down_clamps_at_bottom {
|
||||||
|
actions: [half-page-down, half-page-down, half-page-down,
|
||||||
|
half-page-down, half-page-down, half-page-down]
|
||||||
|
cursor: 29
|
||||||
|
}
|
||||||
|
|
||||||
|
test half_page_up_clamps_at_top {
|
||||||
|
actions: [half-page-down, half-page-up, half-page-up]
|
||||||
|
cursor: 0
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Menu tests
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
pikl_tests! {
|
||||||
|
menu mod selection {
|
||||||
|
items: ["alpha", "bravo", "charlie", "delta"];
|
||||||
|
|
||||||
|
test confirm_first_item {
|
||||||
|
actions: [confirm]
|
||||||
|
selected: "alpha"
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_down_and_confirm {
|
||||||
|
actions: [move-down, confirm]
|
||||||
|
selected: "bravo"
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_to_third {
|
||||||
|
actions: [move-down, move-down, confirm]
|
||||||
|
selected: "charlie"
|
||||||
|
}
|
||||||
|
|
||||||
|
test move_to_last {
|
||||||
|
actions: [move-down, move-down, move-down, confirm]
|
||||||
|
selected: "delta"
|
||||||
|
}
|
||||||
|
|
||||||
|
test cancel_result {
|
||||||
|
actions: [cancel]
|
||||||
|
cancelled
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod filter_then_select {
|
||||||
|
items: ["alpha", "beta", "banana"];
|
||||||
|
|
||||||
|
test filter_and_confirm {
|
||||||
|
actions: [filter "ban", confirm]
|
||||||
|
selected: "banana"
|
||||||
|
}
|
||||||
|
|
||||||
|
test filter_no_match_then_cancel {
|
||||||
|
actions: [filter "zzz", cancel]
|
||||||
|
cancelled
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod dynamic_items {
|
||||||
|
items: ["alpha", "beta"];
|
||||||
|
|
||||||
|
test add_then_filter_then_confirm {
|
||||||
|
actions: [add-items ["zephyr"], filter "zep", confirm]
|
||||||
|
selected: "zephyr"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod sender_drop {
|
||||||
|
items: ["alpha"];
|
||||||
|
|
||||||
|
test drop_sender_cancels {
|
||||||
|
actions: []
|
||||||
|
cancelled
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod pipeline_queries {
|
||||||
|
items: ["error_log", "warning_temp", "info_log", "debug_temp"];
|
||||||
|
|
||||||
|
test exact_filter {
|
||||||
|
actions: [filter "'log", confirm]
|
||||||
|
selected: "error_log"
|
||||||
|
}
|
||||||
|
|
||||||
|
test exact_then_inverse {
|
||||||
|
actions: [filter "'log | !error", confirm]
|
||||||
|
selected: "info_log"
|
||||||
|
}
|
||||||
|
|
||||||
|
test cancel_pipeline {
|
||||||
|
actions: [filter "'xyz", cancel]
|
||||||
|
cancelled
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod mode_switching {
|
||||||
|
items: ["alpha", "bravo"];
|
||||||
|
|
||||||
|
test set_mode_and_confirm {
|
||||||
|
actions: [set-mode-normal, set-mode-insert, filter "bra", confirm]
|
||||||
|
selected: "bravo"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod regex_pipeline {
|
||||||
|
items: ["item-001", "item-abc", "item-123"];
|
||||||
|
|
||||||
|
test regex_filter_confirm {
|
||||||
|
actions: [filter "/[0-9]+/", confirm]
|
||||||
|
selected: "item-001"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod inverse_fuzzy {
|
||||||
|
items: ["alpha", "bravo"];
|
||||||
|
|
||||||
|
test exclude_alpha {
|
||||||
|
actions: [filter "!alpha", confirm]
|
||||||
|
selected: "bravo"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod three_stage {
|
||||||
|
items: ["error_log_123", "warning_temp_456", "info_log_789", "debug_temp_012"];
|
||||||
|
|
||||||
|
test full_pipeline {
|
||||||
|
actions: [filter "'log | !error | /[0-9]+/", confirm]
|
||||||
|
selected: "info_log_789"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
menu mod half_page_menu {
|
||||||
|
items: ["a","b","c","d","e","f","g","h","i","j"];
|
||||||
|
|
||||||
|
test half_page_then_confirm {
|
||||||
|
actions: [half-page-down, confirm]
|
||||||
|
// viewport height=50, half=25, clamps to last item (index 9)
|
||||||
|
selected: "j"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
nav mod half_page_small_height {
|
||||||
|
viewport: { height: 2, count: 10 };
|
||||||
|
|
||||||
|
test half_page_moves_one {
|
||||||
|
actions: [half-page-down]
|
||||||
|
cursor: 1
|
||||||
|
offset: 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
464
crates/pikl-test-macros/src/codegen.rs
Normal file
464
crates/pikl-test-macros/src/codegen.rs
Normal file
@@ -0,0 +1,464 @@
|
|||||||
|
use proc_macro2::{Ident, Span, TokenStream};
|
||||||
|
use quote::quote;
|
||||||
|
|
||||||
|
use crate::parse::{ActionExpr, Fixtures, PiklTests, TestCase, TestKind, TestModule};
|
||||||
|
|
||||||
|
/// Generate all test modules from the parsed DSL.
|
||||||
|
pub fn generate(input: &PiklTests) -> syn::Result<TokenStream> {
|
||||||
|
let mut output = TokenStream::new();
|
||||||
|
for module in &input.modules {
|
||||||
|
output.extend(gen_module(module)?);
|
||||||
|
}
|
||||||
|
Ok(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate a single `mod name { ... }` block with test
|
||||||
|
/// functions and the right imports for the test kind.
|
||||||
|
fn gen_module(module: &TestModule) -> syn::Result<TokenStream> {
|
||||||
|
let mod_name = &module.name;
|
||||||
|
let mut test_fns = Vec::new();
|
||||||
|
|
||||||
|
for case in &module.tests {
|
||||||
|
let tokens = match module.kind {
|
||||||
|
TestKind::Headless => gen_headless(case, &module.fixtures)?,
|
||||||
|
TestKind::Filter => gen_filter(case, &module.fixtures)?,
|
||||||
|
TestKind::Nav => gen_nav(case, &module.fixtures)?,
|
||||||
|
TestKind::Menu => gen_menu(case, &module.fixtures)?,
|
||||||
|
};
|
||||||
|
test_fns.push(tokens);
|
||||||
|
}
|
||||||
|
|
||||||
|
let imports = gen_imports(module.kind);
|
||||||
|
|
||||||
|
Ok(quote! {
|
||||||
|
mod #mod_name {
|
||||||
|
#imports
|
||||||
|
#(#test_fns)*
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Emit the `use` imports needed for a given test kind.
|
||||||
|
fn gen_imports(kind: TestKind) -> TokenStream {
|
||||||
|
match kind {
|
||||||
|
TestKind::Headless => {
|
||||||
|
quote! { use super::*; }
|
||||||
|
}
|
||||||
|
TestKind::Filter => {
|
||||||
|
quote! {
|
||||||
|
use pikl_core::item::Item;
|
||||||
|
use pikl_core::filter::{Filter, FuzzyFilter};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TestKind::Nav => {
|
||||||
|
quote! {
|
||||||
|
use pikl_core::navigation::Viewport;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TestKind::Menu => {
|
||||||
|
quote! {
|
||||||
|
use pikl_core::item::Item;
|
||||||
|
use pikl_core::event::{Action, MenuEvent, MenuResult};
|
||||||
|
use pikl_core::menu::MenuRunner;
|
||||||
|
use pikl_core::json_menu::JsonMenu;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Headless
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Generate a headless integration test that spawns the pikl
|
||||||
|
/// binary, feeds it items and a script, and asserts on
|
||||||
|
/// stdout/stderr/exit code.
|
||||||
|
fn gen_headless(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
|
||||||
|
let test_name = &case.name;
|
||||||
|
|
||||||
|
// Build items string: "item1\nitem2\n"
|
||||||
|
let items_str = match &fixtures.items {
|
||||||
|
Some(items) => {
|
||||||
|
let mut s = String::new();
|
||||||
|
for item in items {
|
||||||
|
s.push_str(item);
|
||||||
|
s.push('\n');
|
||||||
|
}
|
||||||
|
s
|
||||||
|
}
|
||||||
|
None => String::new(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build script string from actions
|
||||||
|
let script = build_headless_script(&case.actions);
|
||||||
|
|
||||||
|
// Build extra CLI args (e.g. --label-key)
|
||||||
|
let extra_args: Vec<TokenStream> = if let Some(ref key) = fixtures.label_key {
|
||||||
|
vec![quote! { "--label-key" }, quote! { #key }]
|
||||||
|
} else {
|
||||||
|
Vec::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build assertions
|
||||||
|
let mut asserts = Vec::new();
|
||||||
|
|
||||||
|
if let Some(exit) = case.exit_code {
|
||||||
|
asserts.push(quote! {
|
||||||
|
assert_eq!(
|
||||||
|
code, #exit,
|
||||||
|
"expected exit {}, got {}, stderr: {}", #exit, code, stderr
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ref expected) = case.stdout {
|
||||||
|
if expected.is_empty() {
|
||||||
|
asserts.push(quote! {
|
||||||
|
assert!(
|
||||||
|
stdout.trim().is_empty(),
|
||||||
|
"expected empty stdout, got: {}", stdout
|
||||||
|
);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
asserts.push(quote! {
|
||||||
|
assert!(
|
||||||
|
stdout.contains(#expected),
|
||||||
|
"expected stdout to contain {:?}, got: {}", #expected, stdout
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ref expected) = case.stderr_contains {
|
||||||
|
asserts.push(quote! {
|
||||||
|
assert!(
|
||||||
|
stderr.contains(#expected),
|
||||||
|
"expected stderr to contain {:?}, got: {}", #expected, stderr
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(quote! {
|
||||||
|
#[test]
|
||||||
|
fn #test_name() {
|
||||||
|
let (stdout, stderr, code) = run_pikl(#items_str, #script, &[#(#extra_args),*]);
|
||||||
|
#(#asserts)*
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Turn action expressions into the newline-delimited script
|
||||||
|
/// text that gets piped to `--action-fd`.
|
||||||
|
fn build_headless_script(actions: &[ActionExpr]) -> String {
|
||||||
|
let mut script = String::new();
|
||||||
|
for action in actions {
|
||||||
|
match action {
|
||||||
|
ActionExpr::Simple(name) => {
|
||||||
|
script.push_str(name);
|
||||||
|
script.push('\n');
|
||||||
|
}
|
||||||
|
ActionExpr::Filter(query) => {
|
||||||
|
script.push_str("filter ");
|
||||||
|
script.push_str(query);
|
||||||
|
script.push('\n');
|
||||||
|
}
|
||||||
|
ActionExpr::Raw(line) => {
|
||||||
|
script.push_str(line);
|
||||||
|
script.push('\n');
|
||||||
|
}
|
||||||
|
ActionExpr::AddItems(_) => {
|
||||||
|
// Not applicable for headless. Items come from stdin.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
script
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Filter
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Generate a filter unit test: create items, push them
|
||||||
|
/// into a `FuzzyFilter`, set the query, and assert on
|
||||||
|
/// matched labels.
|
||||||
|
fn gen_filter(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
|
||||||
|
let test_name = &case.name;
|
||||||
|
|
||||||
|
let item_exprs = gen_item_constructors(fixtures);
|
||||||
|
|
||||||
|
let query = case.query.as_deref().unwrap_or("");
|
||||||
|
|
||||||
|
let mut asserts = Vec::new();
|
||||||
|
|
||||||
|
if let Some(ref expected) = case.match_labels {
|
||||||
|
asserts.push(quote! {
|
||||||
|
let labels: Vec<&str> = (0..f.matched_count())
|
||||||
|
.filter_map(|i| f.matched_index(i))
|
||||||
|
.map(|idx| items[idx].label())
|
||||||
|
.collect();
|
||||||
|
let expected: Vec<&str> = vec![#(#expected),*];
|
||||||
|
assert_eq!(
|
||||||
|
labels, expected,
|
||||||
|
"query {:?}: expected {:?}, got {:?}", #query, expected, labels
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(quote! {
|
||||||
|
#[test]
|
||||||
|
fn #test_name() {
|
||||||
|
let items: Vec<Item> = vec![#(#item_exprs),*];
|
||||||
|
let mut f = FuzzyFilter::new();
|
||||||
|
for (i, item) in items.iter().enumerate() {
|
||||||
|
f.push(i, item.label());
|
||||||
|
}
|
||||||
|
f.set_query(#query);
|
||||||
|
#(#asserts)*
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Nav
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Generate a navigation unit test: create a viewport, run
|
||||||
|
/// movement actions, and assert on cursor/offset.
|
||||||
|
fn gen_nav(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
|
||||||
|
let test_name = &case.name;
|
||||||
|
|
||||||
|
let (height, count) = fixtures.viewport.unwrap_or((10, 20));
|
||||||
|
let height_lit = height;
|
||||||
|
let count_lit = count;
|
||||||
|
|
||||||
|
let action_calls = gen_nav_actions(&case.actions)?;
|
||||||
|
|
||||||
|
let mut asserts = Vec::new();
|
||||||
|
|
||||||
|
if let Some(cursor) = case.cursor {
|
||||||
|
asserts.push(quote! {
|
||||||
|
assert_eq!(
|
||||||
|
v.cursor(), #cursor,
|
||||||
|
"expected cursor {}, got {}", #cursor, v.cursor()
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(offset) = case.offset {
|
||||||
|
asserts.push(quote! {
|
||||||
|
assert_eq!(
|
||||||
|
v.scroll_offset(), #offset,
|
||||||
|
"expected offset {}, got {}", #offset, v.scroll_offset()
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(quote! {
|
||||||
|
#[test]
|
||||||
|
fn #test_name() {
|
||||||
|
let mut v = Viewport::new();
|
||||||
|
v.set_height(#height_lit);
|
||||||
|
v.set_filtered_count(#count_lit);
|
||||||
|
#(#action_calls)*
|
||||||
|
#(#asserts)*
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert DSL action names to `Viewport` method calls
|
||||||
|
/// (e.g. `move-down` becomes `v.move_down()`).
|
||||||
|
fn gen_nav_actions(actions: &[ActionExpr]) -> syn::Result<Vec<TokenStream>> {
|
||||||
|
let mut calls = Vec::new();
|
||||||
|
for action in actions {
|
||||||
|
match action {
|
||||||
|
ActionExpr::Simple(name) => {
|
||||||
|
let method = Ident::new(&name.replace('-', "_"), Span::call_site());
|
||||||
|
let needs_count = matches!(
|
||||||
|
name.as_str(),
|
||||||
|
"move-up"
|
||||||
|
| "move-down"
|
||||||
|
| "page-up"
|
||||||
|
| "page-down"
|
||||||
|
| "half-page-up"
|
||||||
|
| "half-page-down"
|
||||||
|
);
|
||||||
|
if needs_count {
|
||||||
|
calls.push(quote! { v.#method(1); });
|
||||||
|
} else {
|
||||||
|
calls.push(quote! { v.#method(); });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
Span::call_site(),
|
||||||
|
format!(
|
||||||
|
"nav tests only support simple actions, got: {:?}",
|
||||||
|
action_debug(action)
|
||||||
|
),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(calls)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Menu
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Generate an async menu state machine test: create a menu,
/// send actions, and assert on the final result (selected
/// item or cancellation).
///
/// The generated test is a `#[tokio::test]` that spawns the
/// menu loop, feeds it the DSL's actions over the channel,
/// then awaits the loop's result.
fn gen_menu(case: &TestCase, fixtures: &Fixtures) -> syn::Result<TokenStream> {
    let test_name = &case.name;
    let item_exprs = gen_item_constructors(fixtures);

    // JSON field used as the display label; "label" if unspecified.
    let label_key = fixtures.label_key.as_deref().unwrap_or("label");

    let action_sends = gen_menu_actions(&case.actions)?;

    // Build the final assertion: `cancelled` wins over `selected`;
    // with neither we just let the test compile and run assertion-free.
    let result_assert = if case.cancelled {
        quote! {
            assert!(
                matches!(result, Ok(MenuResult::Cancelled)),
                "expected Cancelled, got: {:?}", result.as_ref().map(|r| format!("{:?}", r))
            );
        }
    } else if let Some(ref expected) = case.selected {
        quote! {
            match &result {
                Ok(MenuResult::Selected(value)) => {
                    // Selected value may be a plain string or an object;
                    // in the latter case compare its label field.
                    let got = value.as_str()
                        .or_else(|| value.get(#label_key).and_then(|v| v.as_str()))
                        .unwrap_or("");
                    assert_eq!(
                        got, #expected,
                        "expected selected {:?}, got value: {:?}", #expected, value
                    );
                }
                other => panic!("expected Selected, got: {:?}", other),
            }
        }
    } else {
        // No assertion on result. Probably an error, but let it compile.
        quote! {}
    };

    // If test expects cancellation via sender drop (no cancel action, no confirm),
    // we need to drop tx after sending actions.
    let drop_sender = quote! { drop(tx); };

    Ok(quote! {
        #[tokio::test]
        async fn #test_name() {
            let items = vec![#(#item_exprs),*];
            let (menu, tx) = MenuRunner::new(JsonMenu::new(items, #label_key.to_string()));
            let mut rx = menu.subscribe();
            let handle = tokio::spawn(async move { menu.run().await });

            // Wait for initial state broadcast.
            let _ = rx.recv().await;

            // Give the viewport some height so confirms work.
            let _ = tx.send(Action::Resize { height: 50 }).await;
            let _ = rx.recv().await;

            // Send all actions.
            #(#action_sends)*

            // Drop sender so menu loop can exit.
            #drop_sender

            // A join error (panicked/aborted task) is treated as Cancelled.
            let result = handle.await.unwrap_or(Ok(MenuResult::Cancelled));
            #result_assert
        }
    })
}
|
||||||
|
|
||||||
|
/// Convert DSL actions to `tx.send(Action::...)` calls for
/// menu tests.
///
/// Returns one statement per action; `raw` actions are
/// rejected since they only make sense in headless tests.
fn gen_menu_actions(actions: &[ActionExpr]) -> syn::Result<Vec<TokenStream>> {
    let mut sends = Vec::new();
    for action in actions {
        let expr = match action {
            // Named action -> its `Action::*` variant.
            ActionExpr::Simple(name) => {
                let variant = menu_action_variant(name)?;
                quote! { let _ = tx.send(#variant).await; }
            }
            // `filter "query"` -> update the fuzzy filter text.
            ActionExpr::Filter(query) => {
                quote! {
                    let _ = tx.send(Action::UpdateFilter(#query.to_string())).await;
                }
            }
            // `add-items [...]` -> append JSON string items at runtime.
            ActionExpr::AddItems(items) => {
                let item_exprs: Vec<TokenStream> = items
                    .iter()
                    .map(|s| quote! { serde_json::Value::String(#s.to_string()) })
                    .collect();
                quote! {
                    let _ = tx.send(Action::AddItems(vec![#(#item_exprs),*])).await;
                }
            }
            ActionExpr::Raw(_) => {
                return Err(syn::Error::new(
                    Span::call_site(),
                    "raw actions are only supported in headless tests",
                ));
            }
        };
        sends.push(expr);
    }
    Ok(sends)
}
|
||||||
|
|
||||||
|
/// Map a DSL action name like `"move-down"` to the
|
||||||
|
/// corresponding `Action::MoveDown` token stream.
|
||||||
|
fn menu_action_variant(name: &str) -> syn::Result<TokenStream> {
|
||||||
|
let tokens = match name {
|
||||||
|
"confirm" => quote! { Action::Confirm },
|
||||||
|
"cancel" => quote! { Action::Cancel },
|
||||||
|
"move-down" => quote! { Action::MoveDown(1) },
|
||||||
|
"move-up" => quote! { Action::MoveUp(1) },
|
||||||
|
"move-to-top" => quote! { Action::MoveToTop },
|
||||||
|
"move-to-bottom" => quote! { Action::MoveToBottom },
|
||||||
|
"page-up" => quote! { Action::PageUp(1) },
|
||||||
|
"page-down" => quote! { Action::PageDown(1) },
|
||||||
|
"half-page-up" => quote! { Action::HalfPageUp(1) },
|
||||||
|
"half-page-down" => quote! { Action::HalfPageDown(1) },
|
||||||
|
"set-mode-insert" => quote! { Action::SetMode(pikl_core::event::Mode::Insert) },
|
||||||
|
"set-mode-normal" => quote! { Action::SetMode(pikl_core::event::Mode::Normal) },
|
||||||
|
_ => {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
Span::call_site(),
|
||||||
|
format!("unknown menu action: '{name}'"),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Ok(tokens)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Shared helpers
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Generate `Item::from_plain_text("...")` expressions for
|
||||||
|
/// each item in the fixtures.
|
||||||
|
fn gen_item_constructors(fixtures: &Fixtures) -> Vec<TokenStream> {
|
||||||
|
match &fixtures.items {
|
||||||
|
Some(items) => items
|
||||||
|
.iter()
|
||||||
|
.map(|s| quote! { Item::from_plain_text(#s) })
|
||||||
|
.collect(),
|
||||||
|
None => Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Format an action expression for use in error messages.
|
||||||
|
fn action_debug(action: &ActionExpr) -> String {
|
||||||
|
match action {
|
||||||
|
ActionExpr::Simple(name) => name.clone(),
|
||||||
|
ActionExpr::Filter(q) => format!("filter \"{q}\""),
|
||||||
|
ActionExpr::Raw(r) => format!("raw \"{r}\""),
|
||||||
|
ActionExpr::AddItems(items) => format!("add-items {:?}", items),
|
||||||
|
}
|
||||||
|
}
|
||||||
25
crates/pikl-test-macros/src/lib.rs
Normal file
25
crates/pikl-test-macros/src/lib.rs
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
extern crate proc_macro;
|
||||||
|
|
||||||
|
mod codegen;
|
||||||
|
mod parse;
|
||||||
|
|
||||||
|
use proc_macro::TokenStream;
|
||||||
|
|
||||||
|
/// Test DSL for pikl-menu. Generates individual test
|
||||||
|
/// functions from a concise block-based syntax.
|
||||||
|
///
|
||||||
|
/// Supported test kinds:
|
||||||
|
/// - `headless`: integration tests that spawn the pikl binary
|
||||||
|
/// - `filter`: unit tests for fuzzy filter matching
|
||||||
|
/// - `nav`: unit tests for viewport/cursor math
|
||||||
|
/// - `menu`: async unit tests for the menu state machine
|
||||||
|
///
|
||||||
|
/// See the project's test files for usage examples.
|
||||||
|
#[proc_macro]
|
||||||
|
pub fn pikl_tests(input: TokenStream) -> TokenStream {
|
||||||
|
let parsed = syn::parse_macro_input!(input as parse::PiklTests);
|
||||||
|
match codegen::generate(&parsed) {
|
||||||
|
Ok(tokens) => tokens.into(),
|
||||||
|
Err(err) => err.to_compile_error().into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
428
crates/pikl-test-macros/src/parse.rs
Normal file
428
crates/pikl-test-macros/src/parse.rs
Normal file
@@ -0,0 +1,428 @@
|
|||||||
|
use proc_macro2::Span;
|
||||||
|
use syn::{
|
||||||
|
Ident, LitInt, LitStr, Token, braced, bracketed,
|
||||||
|
parse::{Parse, ParseStream},
|
||||||
|
};
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// AST types
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Root of the DSL: one or more `kind mod name { ... }` modules.
pub struct PiklTests {
    pub modules: Vec<TestModule>,
}

/// One `kind mod name { fixtures... tests... }` block.
pub struct TestModule {
    pub kind: TestKind,
    pub name: Ident,
    pub fixtures: Fixtures,
    pub tests: Vec<TestCase>,
}

/// Which codegen backend a module targets.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum TestKind {
    Headless,
    Filter,
    Nav,
    Menu,
}

/// Module-level fixtures shared by all test cases in the module.
pub struct Fixtures {
    // Menu/filter item labels, used by `gen_item_constructors`.
    pub items: Option<Vec<String>>,
    // JSON field to read labels from in menu tests.
    pub label_key: Option<String>,
    pub viewport: Option<(usize, usize)>, // (height, count)
}

/// A single `test name { ... }` case. All fields are optional;
/// each codegen backend reads only the fields it understands.
pub struct TestCase {
    pub name: Ident,
    pub actions: Vec<ActionExpr>,
    // Headless: exact expected stdout.
    pub stdout: Option<String>,
    // Headless: substring expected in stderr.
    pub stderr_contains: Option<String>,
    // Headless: expected process exit code.
    pub exit_code: Option<i32>,
    // Filter: query string to apply.
    pub query: Option<String>,
    // Filter: expected matched labels, in order.
    pub match_labels: Option<Vec<String>>,
    // Nav: expected cursor position.
    pub cursor: Option<usize>,
    // Nav: expected scroll offset.
    pub offset: Option<usize>,
    // Menu: expected selected label.
    pub selected: Option<String>,
    // Menu: expect the run to end in cancellation.
    pub cancelled: bool,
}

/// One entry in an `actions: [...]` list.
pub enum ActionExpr {
    /// A simple action like "confirm", "cancel", "move-down".
    Simple(String),
    /// `filter "query text"`
    Filter(String),
    /// `raw "literal script line"`
    Raw(String),
    /// `add-items ["a", "b", "c"]`
    AddItems(Vec<String>),
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Top-level parse
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
impl Parse for PiklTests {
|
||||||
|
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||||
|
let mut modules = Vec::new();
|
||||||
|
while !input.is_empty() {
|
||||||
|
modules.push(input.parse()?);
|
||||||
|
}
|
||||||
|
Ok(PiklTests { modules })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Module parse
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
impl Parse for TestModule {
|
||||||
|
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||||
|
let kind = parse_kind(input)?;
|
||||||
|
input.parse::<Token![mod]>()?;
|
||||||
|
let name: Ident = input.parse()?;
|
||||||
|
|
||||||
|
let content;
|
||||||
|
braced!(content in input);
|
||||||
|
|
||||||
|
let mut fixtures = Fixtures {
|
||||||
|
items: None,
|
||||||
|
label_key: None,
|
||||||
|
viewport: None,
|
||||||
|
};
|
||||||
|
let mut tests = Vec::new();
|
||||||
|
|
||||||
|
while !content.is_empty() {
|
||||||
|
// Peek at the next identifier to decide what we're parsing.
|
||||||
|
let fork = content.fork();
|
||||||
|
let ident_str = parse_ident_or_keyword(&fork)?;
|
||||||
|
|
||||||
|
match ident_str.as_str() {
|
||||||
|
"test" => {
|
||||||
|
tests.push(parse_test_case(&content)?);
|
||||||
|
}
|
||||||
|
"items" => {
|
||||||
|
consume_ident_or_keyword(&content)?;
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
fixtures.items = Some(parse_string_list(&content)?);
|
||||||
|
eat_semi(&content);
|
||||||
|
}
|
||||||
|
"label_key" => {
|
||||||
|
consume_ident_or_keyword(&content)?;
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitStr = content.parse()?;
|
||||||
|
fixtures.label_key = Some(val.value());
|
||||||
|
eat_semi(&content);
|
||||||
|
}
|
||||||
|
"viewport" => {
|
||||||
|
consume_ident_or_keyword(&content)?;
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
fixtures.viewport = Some(parse_viewport_def(&content)?);
|
||||||
|
eat_semi(&content);
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
content.span(),
|
||||||
|
format!(
|
||||||
|
"unexpected field '{ident_str}', expected test, items, label_key, or viewport"
|
||||||
|
),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(TestModule {
|
||||||
|
kind,
|
||||||
|
name,
|
||||||
|
fixtures,
|
||||||
|
tests,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Test case parse
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Parse a single `test name { ... }` block inside a module.
|
||||||
|
fn parse_test_case(input: ParseStream) -> syn::Result<TestCase> {
|
||||||
|
consume_ident_or_keyword(input)?; // "test"
|
||||||
|
let name: Ident = input.parse()?;
|
||||||
|
|
||||||
|
let content;
|
||||||
|
braced!(content in input);
|
||||||
|
|
||||||
|
let mut case = TestCase {
|
||||||
|
name,
|
||||||
|
actions: Vec::new(),
|
||||||
|
stdout: None,
|
||||||
|
stderr_contains: None,
|
||||||
|
exit_code: None,
|
||||||
|
query: None,
|
||||||
|
match_labels: None,
|
||||||
|
cursor: None,
|
||||||
|
offset: None,
|
||||||
|
selected: None,
|
||||||
|
cancelled: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
while !content.is_empty() {
|
||||||
|
let field = parse_ident_or_keyword(&content)?;
|
||||||
|
// Consume the ident we just peeked.
|
||||||
|
consume_ident_or_keyword(&content)?;
|
||||||
|
|
||||||
|
match field.as_str() {
|
||||||
|
"actions" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
case.actions = parse_action_list(&content)?;
|
||||||
|
}
|
||||||
|
"stdout" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitStr = content.parse()?;
|
||||||
|
case.stdout = Some(val.value());
|
||||||
|
}
|
||||||
|
"stderr" => {
|
||||||
|
// stderr contains: "text"
|
||||||
|
let kw = parse_ident_or_keyword(&content)?;
|
||||||
|
consume_ident_or_keyword(&content)?;
|
||||||
|
if kw != "contains" {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
content.span(),
|
||||||
|
"expected 'contains' after 'stderr'",
|
||||||
|
));
|
||||||
|
}
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitStr = content.parse()?;
|
||||||
|
case.stderr_contains = Some(val.value());
|
||||||
|
}
|
||||||
|
"exit" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitInt = content.parse()?;
|
||||||
|
case.exit_code = Some(val.base10_parse()?);
|
||||||
|
}
|
||||||
|
"query" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitStr = content.parse()?;
|
||||||
|
case.query = Some(val.value());
|
||||||
|
}
|
||||||
|
"matches" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
case.match_labels = Some(parse_string_list(&content)?);
|
||||||
|
}
|
||||||
|
"cursor" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitInt = content.parse()?;
|
||||||
|
case.cursor = Some(val.base10_parse()?);
|
||||||
|
}
|
||||||
|
"offset" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitInt = content.parse()?;
|
||||||
|
case.offset = Some(val.base10_parse()?);
|
||||||
|
}
|
||||||
|
"selected" => {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitStr = content.parse()?;
|
||||||
|
case.selected = Some(val.value());
|
||||||
|
}
|
||||||
|
"cancelled" => {
|
||||||
|
// Just the keyword presence means true. Optionally parse `: true`.
|
||||||
|
if content.peek(Token![:]) {
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
// Accept `true` or just skip
|
||||||
|
if content.peek(Ident) {
|
||||||
|
consume_ident_or_keyword(&content)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case.cancelled = true;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
content.span(),
|
||||||
|
format!("unknown test field: '{field}'"),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(case)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Action parsing
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Parse `[action1, action2, ...]` inside a test case's
|
||||||
|
/// `actions:` field.
|
||||||
|
fn parse_action_list(input: ParseStream) -> syn::Result<Vec<ActionExpr>> {
|
||||||
|
let content;
|
||||||
|
bracketed!(content in input);
|
||||||
|
|
||||||
|
let mut actions = Vec::new();
|
||||||
|
while !content.is_empty() {
|
||||||
|
actions.push(parse_action_expr(&content)?);
|
||||||
|
if content.peek(Token![,]) {
|
||||||
|
content.parse::<Token![,]>()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(actions)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a single action expression: `confirm`, `filter "text"`,
|
||||||
|
/// `raw "line"`, or `add-items ["a", "b"]`.
|
||||||
|
fn parse_action_expr(input: ParseStream) -> syn::Result<ActionExpr> {
|
||||||
|
let name = parse_hyphenated_name(input)?;
|
||||||
|
|
||||||
|
match name.as_str() {
|
||||||
|
"filter" => {
|
||||||
|
let val: LitStr = input.parse()?;
|
||||||
|
Ok(ActionExpr::Filter(val.value()))
|
||||||
|
}
|
||||||
|
"raw" => {
|
||||||
|
let val: LitStr = input.parse()?;
|
||||||
|
Ok(ActionExpr::Raw(val.value()))
|
||||||
|
}
|
||||||
|
"add-items" => {
|
||||||
|
let items = parse_string_list(input)?;
|
||||||
|
Ok(ActionExpr::AddItems(items))
|
||||||
|
}
|
||||||
|
_ => Ok(ActionExpr::Simple(name)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Helpers
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Parse the test module kind keyword: `headless`, `filter`,
|
||||||
|
/// `nav`, or `menu`.
|
||||||
|
fn parse_kind(input: ParseStream) -> syn::Result<TestKind> {
|
||||||
|
let ident: Ident = input.parse()?;
|
||||||
|
match ident.to_string().as_str() {
|
||||||
|
"headless" => Ok(TestKind::Headless),
|
||||||
|
"filter" => Ok(TestKind::Filter),
|
||||||
|
"nav" => Ok(TestKind::Nav),
|
||||||
|
"menu" => Ok(TestKind::Menu),
|
||||||
|
other => Err(syn::Error::new(
|
||||||
|
ident.span(),
|
||||||
|
format!("unknown test kind '{other}', expected headless, filter, nav, or menu"),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse a potentially hyphenated name like "move-down" or "move-to-top".
|
||||||
|
/// Handles the `move` keyword specially since it's reserved in Rust.
|
||||||
|
fn parse_hyphenated_name(input: ParseStream) -> syn::Result<String> {
|
||||||
|
let mut name = String::new();
|
||||||
|
|
||||||
|
// First segment: might be the `move` keyword or a regular ident.
|
||||||
|
if input.peek(Token![move]) {
|
||||||
|
input.parse::<Token![move]>()?;
|
||||||
|
name.push_str("move");
|
||||||
|
} else {
|
||||||
|
let ident: Ident = input.parse()?;
|
||||||
|
name.push_str(&ident.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Consume hyphenated continuations: `-ident`
|
||||||
|
// Be careful not to consume `-` that's actually a negative number.
|
||||||
|
while input.peek(Token![-]) && !input.peek2(LitInt) {
|
||||||
|
input.parse::<Token![-]>()?;
|
||||||
|
name.push('-');
|
||||||
|
if input.peek(Token![move]) {
|
||||||
|
input.parse::<Token![move]>()?;
|
||||||
|
name.push_str("move");
|
||||||
|
} else {
|
||||||
|
let next: Ident = input.parse()?;
|
||||||
|
name.push_str(&next.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse `["a", "b", "c"]`.
|
||||||
|
fn parse_string_list(input: ParseStream) -> syn::Result<Vec<String>> {
|
||||||
|
let content;
|
||||||
|
bracketed!(content in input);
|
||||||
|
|
||||||
|
let mut items = Vec::new();
|
||||||
|
while !content.is_empty() {
|
||||||
|
let val: LitStr = content.parse()?;
|
||||||
|
items.push(val.value());
|
||||||
|
if content.peek(Token![,]) {
|
||||||
|
content.parse::<Token![,]>()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(items)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse `{ height: N, count: N }`.
|
||||||
|
fn parse_viewport_def(input: ParseStream) -> syn::Result<(usize, usize)> {
|
||||||
|
let content;
|
||||||
|
braced!(content in input);
|
||||||
|
|
||||||
|
let mut height: Option<usize> = None;
|
||||||
|
let mut count: Option<usize> = None;
|
||||||
|
|
||||||
|
while !content.is_empty() {
|
||||||
|
let key: Ident = content.parse()?;
|
||||||
|
content.parse::<Token![:]>()?;
|
||||||
|
let val: LitInt = content.parse()?;
|
||||||
|
let n: usize = val.base10_parse()?;
|
||||||
|
|
||||||
|
match key.to_string().as_str() {
|
||||||
|
"height" => height = Some(n),
|
||||||
|
"count" => count = Some(n),
|
||||||
|
other => {
|
||||||
|
return Err(syn::Error::new(
|
||||||
|
key.span(),
|
||||||
|
format!("unknown viewport field '{other}', expected height or count"),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if content.peek(Token![,]) {
|
||||||
|
content.parse::<Token![,]>()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let h =
|
||||||
|
height.ok_or_else(|| syn::Error::new(Span::call_site(), "viewport missing 'height'"))?;
|
||||||
|
let c = count.ok_or_else(|| syn::Error::new(Span::call_site(), "viewport missing 'count'"))?;
|
||||||
|
Ok((h, c))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Peek at the next ident-like token without consuming it.
|
||||||
|
/// Handles Rust keywords that might appear as DSL field names.
|
||||||
|
fn parse_ident_or_keyword(input: ParseStream) -> syn::Result<String> {
|
||||||
|
if input.peek(Token![move]) {
|
||||||
|
Ok("move".to_string())
|
||||||
|
} else if input.peek(Token![match]) {
|
||||||
|
Ok("match".to_string())
|
||||||
|
} else if input.peek(Ident) {
|
||||||
|
let fork = input.fork();
|
||||||
|
let ident: Ident = fork.parse()?;
|
||||||
|
Ok(ident.to_string())
|
||||||
|
} else {
|
||||||
|
Err(input.error("expected identifier"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Consume an ident-like token (including keywords used as DSL fields).
|
||||||
|
fn consume_ident_or_keyword(input: ParseStream) -> syn::Result<()> {
|
||||||
|
if input.peek(Token![move]) {
|
||||||
|
input.parse::<Token![move]>()?;
|
||||||
|
} else if input.peek(Token![match]) {
|
||||||
|
input.parse::<Token![match]>()?;
|
||||||
|
} else {
|
||||||
|
input.parse::<Ident>()?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Eat an optional semicolon.
|
||||||
|
fn eat_semi(input: ParseStream) {
|
||||||
|
if input.peek(Token![;]) {
|
||||||
|
let _ = input.parse::<Token![;]>();
|
||||||
|
}
|
||||||
|
}
|
||||||
872
crates/pikl-tui/src/lib.rs
Normal file
872
crates/pikl-tui/src/lib.rs
Normal file
@@ -0,0 +1,872 @@
|
|||||||
|
//! TUI frontend for pikl-menu. Thin rendering layer on
|
||||||
|
//! top of pikl-core. Translates crossterm key events into
|
||||||
|
//! [`Action`]s and renders [`ViewState`] snapshots via
|
||||||
|
//! ratatui. All state lives in the core; this crate is
|
||||||
|
//! just I/O.
|
||||||
|
|
||||||
|
use crossterm::event::{Event, EventStream, KeyCode, KeyEvent, KeyModifiers};
|
||||||
|
use futures::StreamExt;
|
||||||
|
use ratatui::Terminal;
|
||||||
|
use ratatui::backend::CrosstermBackend;
|
||||||
|
use ratatui::layout::{Constraint, Direction, Layout};
|
||||||
|
use ratatui::style::{Color, Modifier, Style};
|
||||||
|
use ratatui::text::{Line, Span};
|
||||||
|
use ratatui::widgets::{List, ListItem, Paragraph};
|
||||||
|
use tokio::sync::{broadcast, mpsc};
|
||||||
|
|
||||||
|
use pikl_core::event::{Action, MenuEvent, Mode, ViewState};
|
||||||
|
|
||||||
|
/// Pending key state for multi-key sequences (e.g. `gg`).
/// TUI-local, not part of core state.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PendingKey {
    // No sequence in progress.
    None,
    // A `g` was pressed; waiting for the second key of the sequence.
    G,
}
|
||||||
|
|
||||||
|
/// Restore the terminal to a sane state. Called on clean
/// exit and from the panic hook so Ctrl+C or a crash
/// doesn't leave the terminal in raw mode.
///
/// Errors are deliberately ignored: there is nothing useful
/// to do if restoration fails while already tearing down.
pub fn restore_terminal() {
    let _ = crossterm::terminal::disable_raw_mode();
    // The UI lives on stderr (stdout is reserved for the selection result).
    let _ = crossterm::execute!(std::io::stderr(), crossterm::terminal::LeaveAlternateScreen);
}
|
||||||
|
|
||||||
|
/// Start the TUI. Enters the alternate screen, runs the
/// event loop, and restores the terminal on exit. Translates
/// crossterm key events into [`Action`]s and renders
/// [`ViewState`] snapshots.
///
/// `action_tx` carries user input to the core; `event_rx`
/// delivers state snapshots back for rendering. The UI is
/// drawn on stderr so stdout stays free for the result.
pub async fn run(
    action_tx: mpsc::Sender<Action>,
    mut event_rx: broadcast::Receiver<MenuEvent>,
) -> std::io::Result<()> {
    crossterm::terminal::enable_raw_mode()?;
    crossterm::execute!(std::io::stderr(), crossterm::terminal::EnterAlternateScreen,)?;

    let backend = CrosstermBackend::new(std::io::stderr());
    let mut terminal = Terminal::new(backend)?;

    // Drain any stale input that arrived between dup2 and raw mode
    // (e.g. the Enter keypress from running the command). Poll with a
    // short timeout so late-arriving bytes are caught too.
    while crossterm::event::poll(std::time::Duration::from_millis(50))? {
        let _ = crossterm::event::read()?;
    }

    let result = run_inner(&action_tx, &mut event_rx, &mut terminal).await;

    // Always clean up terminal, even on error
    restore_terminal();

    result
}
|
||||||
|
|
||||||
|
/// Inner event loop. Separated from [`run`] so terminal
/// cleanup always happens even if this returns an error.
///
/// Each iteration draws the latest [`ViewState`] (if any),
/// then waits on either a terminal input event or a core
/// menu event. Exits when the core reports a selection or
/// cancellation, or when either stream closes.
async fn run_inner(
    action_tx: &mpsc::Sender<Action>,
    event_rx: &mut broadcast::Receiver<MenuEvent>,
    terminal: &mut Terminal<CrosstermBackend<std::io::Stderr>>,
) -> std::io::Result<()> {
    // Send initial resize
    // (one line is reserved for the prompt, the rest is the list).
    let size = terminal.size()?;
    let list_height = size.height.saturating_sub(1);
    let _ = action_tx
        .send(Action::Resize {
            height: list_height,
        })
        .await;

    // Local echo of the filter line; authoritative copy lives in core.
    let mut filter_text = String::new();
    let mut view_state: Option<ViewState> = None;
    let mut event_stream = EventStream::new();
    let mut mode = Mode::Insert;
    let mut pending = PendingKey::None;

    loop {
        // Draw only once we have received a state snapshot.
        if let Some(ref vs) = view_state {
            let ft = filter_text.clone();
            terminal.draw(|frame| {
                render_menu(frame, vs, &ft);
            })?;
        }

        tokio::select! {
            term_event = event_stream.next() => {
                // Stream end means the terminal input source is gone.
                let Some(event_result) = term_event else {
                    break;
                };
                let event = event_result?;
                match event {
                    Event::Key(key) => {
                        if let Some(action) = map_key_event(key, &mut filter_text, mode, &mut pending) {
                            // Track mode locally for key mapping
                            if let Action::SetMode(m) = &action {
                                mode = *m;
                                pending = PendingKey::None;
                            }
                            let _ = action_tx.send(action).await;
                        }
                    }
                    Event::Resize(_, h) => {
                        let list_height = h.saturating_sub(1);
                        let _ = action_tx.send(Action::Resize { height: list_height }).await;
                    }
                    _ => {}
                }
            }
            menu_event = event_rx.recv() => {
                match menu_event {
                    Ok(MenuEvent::StateChanged(vs)) => {
                        // Sync filter text from core. Local keystrokes
                        // update filter_text immediately for responsiveness,
                        // but if core pushes a different value (e.g. IPC
                        // changed the filter), the core wins.
                        if &*vs.filter_text != filter_text.as_str() {
                            filter_text = vs.filter_text.to_string();
                        }
                        // Sync mode from core
                        if vs.mode != mode {
                            mode = vs.mode;
                            pending = PendingKey::None;
                        }
                        view_state = Some(vs);
                    }
                    Ok(MenuEvent::Selected(_) | MenuEvent::Cancelled) => {
                        break;
                    }
                    // Missed broadcasts are fine: the next snapshot supersedes them.
                    Err(broadcast::error::RecvError::Lagged(_)) => {}
                    Err(broadcast::error::RecvError::Closed) => {
                        break;
                    }
                }
            }
        }
    }

    Ok(())
}
|
||||||
|
|
||||||
|
/// Render the menu into the given frame. Extracted from the
|
||||||
|
/// event loop so it can be tested with a [`TestBackend`].
|
||||||
|
fn render_menu(frame: &mut ratatui::Frame, vs: &ViewState, filter_text: &str) {
|
||||||
|
let chunks = Layout::default()
|
||||||
|
.direction(Direction::Vertical)
|
||||||
|
.constraints([Constraint::Length(1), Constraint::Min(1)])
|
||||||
|
.split(frame.area());
|
||||||
|
|
||||||
|
let filtered_count = vs.total_filtered;
|
||||||
|
let total_count = vs.total_items;
|
||||||
|
|
||||||
|
let mode_indicator = match vs.mode {
|
||||||
|
Mode::Insert => "[I]",
|
||||||
|
Mode::Normal => "[N]",
|
||||||
|
};
|
||||||
|
|
||||||
|
let prompt = Paragraph::new(Line::from(vec![
|
||||||
|
Span::styled(
|
||||||
|
format!("{mode_indicator}> "),
|
||||||
|
Style::default().fg(Color::Cyan),
|
||||||
|
),
|
||||||
|
Span::raw(filter_text),
|
||||||
|
Span::styled(
|
||||||
|
format!(" {filtered_count}/{total_count}"),
|
||||||
|
Style::default().fg(Color::DarkGray),
|
||||||
|
),
|
||||||
|
]));
|
||||||
|
frame.render_widget(prompt, chunks[0]);
|
||||||
|
|
||||||
|
// mode_indicator len + "> " = mode_indicator.len() + 2
|
||||||
|
let prompt_prefix_len = mode_indicator.len() + 2;
|
||||||
|
|
||||||
|
// Show cursor in insert mode, hide in normal mode
|
||||||
|
if vs.mode == Mode::Insert {
|
||||||
|
frame.set_cursor_position(((prompt_prefix_len + filter_text.len()) as u16, 0));
|
||||||
|
}
|
||||||
|
|
||||||
|
let items: Vec<ListItem> = vs
|
||||||
|
.visible_items
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, vi)| {
|
||||||
|
let style = if i == vs.cursor {
|
||||||
|
Style::default().add_modifier(Modifier::REVERSED)
|
||||||
|
} else {
|
||||||
|
Style::default()
|
||||||
|
};
|
||||||
|
ListItem::new(vi.label.as_str()).style(style)
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let list = List::new(items);
|
||||||
|
frame.render_widget(list, chunks[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Map a crossterm key event to an [`Action`], updating
|
||||||
|
/// `filter_text` in place for character/backspace keys.
|
||||||
|
/// Returns `None` for unmapped keys.
|
||||||
|
fn map_key_event(
|
||||||
|
key: KeyEvent,
|
||||||
|
filter_text: &mut String,
|
||||||
|
mode: Mode,
|
||||||
|
pending: &mut PendingKey,
|
||||||
|
) -> Option<Action> {
|
||||||
|
match mode {
|
||||||
|
Mode::Insert => map_insert_mode(key, filter_text),
|
||||||
|
Mode::Normal => map_normal_mode(key, pending),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Insert mode: characters go to filter, navigation via
|
||||||
|
/// arrows and ctrl keys.
|
||||||
|
fn map_insert_mode(key: KeyEvent, filter_text: &mut String) -> Option<Action> {
|
||||||
|
match (key.code, key.modifiers) {
|
||||||
|
(KeyCode::Esc, _) => Some(Action::Cancel),
|
||||||
|
(KeyCode::Enter, _) => Some(Action::Confirm),
|
||||||
|
(KeyCode::Up, _) | (KeyCode::Char('p'), KeyModifiers::CONTROL) => Some(Action::MoveUp(1)),
|
||||||
|
(KeyCode::Down, _) => Some(Action::MoveDown(1)),
|
||||||
|
(KeyCode::Char('n'), KeyModifiers::CONTROL) => Some(Action::SetMode(Mode::Normal)),
|
||||||
|
(KeyCode::PageUp, _) => Some(Action::PageUp(1)),
|
||||||
|
(KeyCode::PageDown, _) => Some(Action::PageDown(1)),
|
||||||
|
(KeyCode::Backspace, _) => {
|
||||||
|
filter_text.pop();
|
||||||
|
Some(Action::UpdateFilter(filter_text.clone()))
|
||||||
|
}
|
||||||
|
(KeyCode::Char(c), mods) if !mods.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT) => {
|
||||||
|
filter_text.push(c);
|
||||||
|
Some(Action::UpdateFilter(filter_text.clone()))
|
||||||
|
}
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Normal mode: vim-style navigation keybinds.
|
||||||
|
fn map_normal_mode(key: KeyEvent, pending: &mut PendingKey) -> Option<Action> {
|
||||||
|
// Handle pending `g` key for `gg` sequence
|
||||||
|
if *pending == PendingKey::G {
|
||||||
|
*pending = PendingKey::None;
|
||||||
|
if key.code == KeyCode::Char('g')
|
||||||
|
&& !key
|
||||||
|
.modifiers
|
||||||
|
.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT)
|
||||||
|
{
|
||||||
|
return Some(Action::MoveToTop);
|
||||||
|
}
|
||||||
|
// Not `gg`, process the second key normally below.
|
||||||
|
}
|
||||||
|
|
||||||
|
match (key.code, key.modifiers) {
|
||||||
|
(KeyCode::Char('j'), m) if !m.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT) => {
|
||||||
|
Some(Action::MoveDown(1))
|
||||||
|
}
|
||||||
|
(KeyCode::Char('k'), m) if !m.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT) => {
|
||||||
|
Some(Action::MoveUp(1))
|
||||||
|
}
|
||||||
|
(KeyCode::Char('G'), _) => Some(Action::MoveToBottom),
|
||||||
|
(KeyCode::Char('g'), m) if !m.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT) => {
|
||||||
|
*pending = PendingKey::G;
|
||||||
|
None
|
||||||
|
}
|
||||||
|
(KeyCode::Char('d'), KeyModifiers::CONTROL) => Some(Action::HalfPageDown(1)),
|
||||||
|
(KeyCode::Char('u'), KeyModifiers::CONTROL) => Some(Action::HalfPageUp(1)),
|
||||||
|
(KeyCode::Char('f'), KeyModifiers::CONTROL) => Some(Action::PageDown(1)),
|
||||||
|
(KeyCode::Char('b'), KeyModifiers::CONTROL) => Some(Action::PageUp(1)),
|
||||||
|
(KeyCode::Char('e'), KeyModifiers::CONTROL) => Some(Action::SetMode(Mode::Insert)),
|
||||||
|
(KeyCode::Char('/'), m) if !m.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT) => {
|
||||||
|
Some(Action::SetMode(Mode::Insert))
|
||||||
|
}
|
||||||
|
(KeyCode::Char('q'), m) if !m.intersects(KeyModifiers::CONTROL | KeyModifiers::ALT) => {
|
||||||
|
Some(Action::Cancel)
|
||||||
|
}
|
||||||
|
(KeyCode::Enter, _) => Some(Action::Confirm),
|
||||||
|
(KeyCode::Esc, _) => Some(Action::Cancel),
|
||||||
|
(KeyCode::Up, _) => Some(Action::MoveUp(1)),
|
||||||
|
(KeyCode::Down, _) => Some(Action::MoveDown(1)),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use crossterm::event::{KeyCode, KeyEvent, KeyEventKind, KeyEventState, KeyModifiers};
    use pikl_core::event::{ViewState, VisibleItem};
    use ratatui::backend::TestBackend;
    use ratatui::style::Modifier;
    use std::sync::Arc;

    /// Key event with no modifiers.
    fn key(code: KeyCode) -> KeyEvent {
        KeyEvent {
            code,
            modifiers: KeyModifiers::NONE,
            kind: KeyEventKind::Press,
            state: KeyEventState::NONE,
        }
    }

    /// Key event with explicit modifiers.
    fn key_with_mods(code: KeyCode, modifiers: KeyModifiers) -> KeyEvent {
        KeyEvent {
            code,
            modifiers,
            kind: KeyEventKind::Press,
            state: KeyEventState::NONE,
        }
    }

    /// Three visible items of five total, cursor on the first, insert mode.
    fn sample_view_state() -> ViewState {
        ViewState {
            visible_items: vec![
                VisibleItem {
                    label: "alpha".into(),
                    index: 0,
                },
                VisibleItem {
                    label: "bravo".into(),
                    index: 1,
                },
                VisibleItem {
                    label: "charlie".into(),
                    index: 2,
                },
            ],
            cursor: 0,
            filter_text: Arc::from(""),
            total_items: 5,
            total_filtered: 3,
            mode: Mode::Insert,
        }
    }

    // -- Insert mode key mapping tests --

    #[test]
    fn esc_maps_to_cancel() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Esc), &mut ft, Mode::Insert, &mut pending),
            Some(Action::Cancel)
        );
    }

    #[test]
    fn enter_maps_to_confirm() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Enter), &mut ft, Mode::Insert, &mut pending),
            Some(Action::Confirm)
        );
    }

    #[test]
    fn arrow_up_maps_to_move_up() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Up), &mut ft, Mode::Insert, &mut pending),
            Some(Action::MoveUp(1))
        );
    }

    #[test]
    fn arrow_down_maps_to_move_down() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Down), &mut ft, Mode::Insert, &mut pending),
            Some(Action::MoveDown(1))
        );
    }

    #[test]
    fn ctrl_p_maps_to_move_up() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('p'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Insert, &mut pending),
            Some(Action::MoveUp(1))
        );
    }

    #[test]
    fn ctrl_n_maps_to_normal_mode() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('n'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Insert, &mut pending),
            Some(Action::SetMode(Mode::Normal))
        );
    }

    #[test]
    fn page_keys_map_correctly() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::PageUp), &mut ft, Mode::Insert, &mut pending),
            Some(Action::PageUp(1))
        );
        assert_eq!(
            map_key_event(key(KeyCode::PageDown), &mut ft, Mode::Insert, &mut pending),
            Some(Action::PageDown(1))
        );
    }

    #[test]
    fn char_appends_to_filter() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let action = map_key_event(key(KeyCode::Char('a')), &mut ft, Mode::Insert, &mut pending);
        assert_eq!(ft, "a");
        assert_eq!(action, Some(Action::UpdateFilter("a".into())));

        let action = map_key_event(key(KeyCode::Char('b')), &mut ft, Mode::Insert, &mut pending);
        assert_eq!(ft, "ab");
        assert_eq!(action, Some(Action::UpdateFilter("ab".into())));
    }

    #[test]
    fn backspace_pops_filter() {
        let mut ft = "abc".to_string();
        let mut pending = PendingKey::None;
        let action = map_key_event(key(KeyCode::Backspace), &mut ft, Mode::Insert, &mut pending);
        assert_eq!(ft, "ab");
        assert_eq!(action, Some(Action::UpdateFilter("ab".into())));
    }

    #[test]
    fn backspace_on_empty_filter_is_noop() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        // Backspace on an empty filter still emits UpdateFilter("") —
        // the core is expected to treat the no-op update as cheap.
        let action = map_key_event(key(KeyCode::Backspace), &mut ft, Mode::Insert, &mut pending);
        assert_eq!(ft, "");
        assert_eq!(action, Some(Action::UpdateFilter(String::new())));
    }

    #[test]
    fn ctrl_char_ignored_in_insert() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('c'), KeyModifiers::CONTROL);
        assert_eq!(map_key_event(k, &mut ft, Mode::Insert, &mut pending), None);
        assert_eq!(ft, "");
    }

    #[test]
    fn alt_char_ignored() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('x'), KeyModifiers::ALT);
        assert_eq!(map_key_event(k, &mut ft, Mode::Insert, &mut pending), None);
        assert_eq!(ft, "");
    }

    #[test]
    fn shift_char_passes_through() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('A'), KeyModifiers::SHIFT);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Insert, &mut pending),
            Some(Action::UpdateFilter("A".into()))
        );
        assert_eq!(ft, "A");
    }

    #[test]
    fn unmapped_key_returns_none() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Tab), &mut ft, Mode::Insert, &mut pending),
            None
        );
        assert_eq!(
            map_key_event(key(KeyCode::Home), &mut ft, Mode::Insert, &mut pending),
            None
        );
        assert_eq!(
            map_key_event(key(KeyCode::F(1)), &mut ft, Mode::Insert, &mut pending),
            None
        );
    }

    // -- Normal mode key mapping tests --

    #[test]
    fn normal_j_maps_to_move_down() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Char('j')), &mut ft, Mode::Normal, &mut pending),
            Some(Action::MoveDown(1))
        );
    }

    #[test]
    fn normal_k_maps_to_move_up() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Char('k')), &mut ft, Mode::Normal, &mut pending),
            Some(Action::MoveUp(1))
        );
    }

    #[test]
    fn normal_big_g_maps_to_bottom() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('G'), KeyModifiers::SHIFT);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Normal, &mut pending),
            Some(Action::MoveToBottom)
        );
    }

    #[test]
    fn normal_gg_maps_to_top() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        // First g sets pending
        let action = map_key_event(key(KeyCode::Char('g')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(action, None);
        assert_eq!(pending, PendingKey::G);
        // Second g triggers move to top
        let action = map_key_event(key(KeyCode::Char('g')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(action, Some(Action::MoveToTop));
        assert_eq!(pending, PendingKey::None);
    }

    #[test]
    fn normal_g_then_j_drops_g() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        // First g sets pending
        map_key_event(key(KeyCode::Char('g')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(pending, PendingKey::G);
        // j after g: pending cleared, j processed normally
        let action = map_key_event(key(KeyCode::Char('j')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(action, Some(Action::MoveDown(1)));
        assert_eq!(pending, PendingKey::None);
    }

    #[test]
    fn normal_ctrl_d_half_page_down() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('d'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Normal, &mut pending),
            Some(Action::HalfPageDown(1))
        );
    }

    #[test]
    fn normal_ctrl_u_half_page_up() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('u'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Normal, &mut pending),
            Some(Action::HalfPageUp(1))
        );
    }

    #[test]
    fn normal_ctrl_f_page_down() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('f'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Normal, &mut pending),
            Some(Action::PageDown(1))
        );
    }

    #[test]
    fn normal_ctrl_b_page_up() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('b'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Normal, &mut pending),
            Some(Action::PageUp(1))
        );
    }

    #[test]
    fn normal_slash_enters_insert() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Char('/')), &mut ft, Mode::Normal, &mut pending),
            Some(Action::SetMode(Mode::Insert))
        );
    }

    #[test]
    fn normal_ctrl_e_enters_insert() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        let k = key_with_mods(KeyCode::Char('e'), KeyModifiers::CONTROL);
        assert_eq!(
            map_key_event(k, &mut ft, Mode::Normal, &mut pending),
            Some(Action::SetMode(Mode::Insert))
        );
    }

    #[test]
    fn normal_q_cancels() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Char('q')), &mut ft, Mode::Normal, &mut pending),
            Some(Action::Cancel)
        );
    }

    #[test]
    fn normal_enter_confirms() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Enter), &mut ft, Mode::Normal, &mut pending),
            Some(Action::Confirm)
        );
    }

    #[test]
    fn normal_esc_cancels() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        assert_eq!(
            map_key_event(key(KeyCode::Esc), &mut ft, Mode::Normal, &mut pending),
            Some(Action::Cancel)
        );
    }

    #[test]
    fn normal_chars_dont_filter() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        // 'a' in normal mode should not append to filter
        let action = map_key_event(key(KeyCode::Char('a')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(action, None);
        assert_eq!(ft, "");
    }

    // -- Rendering tests (TestBackend) --

    /// Render `vs` into a fresh [`TestBackend`] of the given size and
    /// return the backend for buffer inspection.
    fn render_to_backend(width: u16, height: u16, vs: &ViewState, filter: &str) -> TestBackend {
        let backend = TestBackend::new(width, height);
        // `.expect` directly on the Result keeps the error's Debug output
        // on failure; `.ok().expect(..)` would discard it (clippy::ok_expect).
        let mut terminal = Terminal::new(backend).expect("test terminal");
        terminal
            .draw(|frame| {
                render_menu(frame, vs, filter);
            })
            .expect("draw");
        terminal.backend().clone()
    }

    /// Concatenate the symbols of one buffer row into a string.
    fn line_text(backend: &TestBackend, row: u16) -> String {
        let buf = backend.buffer();
        let width = buf.area.width;
        let mut s = String::new();
        for col in 0..width {
            let cell = &buf[(col, row)];
            s.push_str(cell.symbol());
        }
        // Trim trailing whitespace for easier assertions
        s.trim_end().to_string()
    }

    #[test]
    fn prompt_shows_count() {
        let vs = sample_view_state();
        let backend = render_to_backend(30, 6, &vs, "");
        let prompt = line_text(&backend, 0);
        assert!(prompt.contains(">"), "prompt should have > prefix");
        assert!(
            prompt.contains("3/5"),
            "prompt should show filtered/total: got '{prompt}'"
        );
    }

    #[test]
    fn prompt_shows_filter_text() {
        let vs = sample_view_state();
        let backend = render_to_backend(30, 6, &vs, "foo");
        let prompt = line_text(&backend, 0);
        assert!(
            prompt.contains("foo"),
            "prompt should contain filter text: got '{prompt}'"
        );
    }

    #[test]
    fn prompt_shows_mode_indicator() {
        let vs = sample_view_state();
        let backend = render_to_backend(30, 6, &vs, "");
        let prompt = line_text(&backend, 0);
        assert!(
            prompt.contains("[I]"),
            "insert mode should show [I]: got '{prompt}'"
        );

        let mut vs_normal = sample_view_state();
        vs_normal.mode = Mode::Normal;
        let backend = render_to_backend(30, 6, &vs_normal, "");
        let prompt = line_text(&backend, 0);
        assert!(
            prompt.contains("[N]"),
            "normal mode should show [N]: got '{prompt}'"
        );
    }

    #[test]
    fn items_render_in_list_area() {
        let vs = sample_view_state();
        let backend = render_to_backend(30, 6, &vs, "");
        // Items start at row 1 (row 0 is the prompt)
        let row1 = line_text(&backend, 1);
        let row2 = line_text(&backend, 2);
        let row3 = line_text(&backend, 3);
        assert!(row1.contains("alpha"), "row 1: got '{row1}'");
        assert!(row2.contains("bravo"), "row 2: got '{row2}'");
        assert!(row3.contains("charlie"), "row 3: got '{row3}'");
    }

    #[test]
    fn cursor_row_has_reversed_style() {
        let vs = sample_view_state(); // cursor at 0
        let backend = render_to_backend(30, 6, &vs, "");
        let buf = backend.buffer();
        // Row 1, col 0 should be the cursor row with REVERSED modifier
        let cell = &buf[(0, 1)];
        assert!(
            cell.modifier.contains(Modifier::REVERSED),
            "cursor row should have REVERSED style"
        );
        // Row 2 should not
        let cell2 = &buf[(0, 2)];
        assert!(
            !cell2.modifier.contains(Modifier::REVERSED),
            "non-cursor row should not have REVERSED"
        );
    }

    #[test]
    fn cursor_at_middle_item() {
        let mut vs = sample_view_state();
        vs.cursor = 1; // bravo
        let backend = render_to_backend(30, 6, &vs, "");
        let buf = backend.buffer();
        // Row 1 (alpha) should NOT be reversed
        let cell1 = &buf[(0, 1)];
        assert!(!cell1.modifier.contains(Modifier::REVERSED));
        // Row 2 (bravo) should be reversed
        let cell2 = &buf[(0, 2)];
        assert!(cell2.modifier.contains(Modifier::REVERSED));
    }

    #[test]
    fn empty_items_still_renders_prompt() {
        let vs = ViewState {
            visible_items: vec![],
            cursor: 0,
            filter_text: Arc::from(""),
            total_items: 0,
            total_filtered: 0,
            mode: Mode::Insert,
        };
        let backend = render_to_backend(30, 4, &vs, "");
        let prompt = line_text(&backend, 0);
        assert!(prompt.contains(">"), "prompt renders even with no items");
        assert!(prompt.contains("0/0"));
    }

    #[test]
    fn narrow_viewport_truncates() {
        let vs = sample_view_state();
        // 10 cols wide. Items should be truncated, not panic.
        let backend = render_to_backend(10, 5, &vs, "");
        let row1 = line_text(&backend, 1);
        // "alpha" is 5 chars, should fit in 10-wide viewport
        assert!(row1.contains("alpha"));
    }

    #[test]
    fn minimal_viewport_does_not_panic() {
        let vs = sample_view_state();
        // Absolute minimum: 1 col wide, 2 rows (1 prompt + 1 list)
        let _backend = render_to_backend(1, 2, &vs, "");
        // Just verifying it doesn't panic
    }

    #[test]
    fn prompt_cursor_position_tracks_filter() {
        let vs = sample_view_state();
        let backend = TestBackend::new(30, 6);
        let mut terminal = Terminal::new(backend).expect("test terminal");
        terminal
            .draw(|frame| {
                render_menu(frame, &vs, "hi");
            })
            .expect("draw");
        // Cursor should be at column "[I]> " (5) + 2 ("hi") = 7
        let pos = terminal.get_cursor_position().expect("cursor");
        assert_eq!(pos.x, 7, "cursor x should be after '[I]> hi'");
        assert_eq!(pos.y, 0, "cursor y should be on prompt row");
    }

    #[test]
    fn normal_mode_cursor_not_on_prompt() {
        let mut vs = sample_view_state();
        vs.mode = Mode::Normal;
        let backend = TestBackend::new(30, 6);
        let mut terminal = Terminal::new(backend).expect("test terminal");
        terminal
            .draw(|frame| {
                render_menu(frame, &vs, "");
            })
            .expect("draw");
        // In normal mode, set_cursor_position is never called,
        // so the cursor should NOT be on row 0.
        // TestBackend starts cursor at (0,0) but after a draw
        // with hide_cursor semantics, get_cursor_position returns
        // whatever ratatui left it at. The key assertion is that
        // render_menu does NOT call set_cursor_position in normal mode.
        // We verify by checking the cursor is not at the prompt text position.
        let pos = terminal.get_cursor_position().expect("cursor");
        // In insert mode the cursor would be at (5, 0) for empty filter.
        // In normal mode it should NOT be placed there.
        let not_insert_cursor = pos.x != 5 || pos.y != 0;
        assert!(
            not_insert_cursor,
            "normal mode should not position cursor on prompt: got ({}, {})",
            pos.x, pos.y
        );
    }

    #[test]
    fn pending_cleared_on_mode_key() {
        let mut ft = String::new();
        let mut pending = PendingKey::None;
        // Set pending=G
        map_key_event(key(KeyCode::Char('g')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(pending, PendingKey::G);
        // Send '/' which triggers mode switch to Insert
        let action = map_key_event(key(KeyCode::Char('/')), &mut ft, Mode::Normal, &mut pending);
        assert_eq!(action, Some(Action::SetMode(Mode::Insert)));
        // Pending should have been cleared by the mode switch key's processing.
        // The '/' key doesn't match 'g', so pending resets to None in map_normal_mode.
        assert_eq!(pending, PendingKey::None);
    }
}
|
||||||
278
crates/pikl/src/hook.rs
Normal file
278
crates/pikl/src/hook.rs
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
//! Shell hook execution. Hooks are shell commands that fire
|
||||||
|
//! on menu events (selection, cancellation). The selected
|
||||||
|
//! item's JSON is piped to the hook's stdin.
|
||||||
|
//!
|
||||||
|
//! Hook stdout is redirected to stderr so it doesn't end up
|
||||||
|
//! mixed into pikl's structured output on stdout.
|
||||||
|
|
||||||
|
use serde_json::Value;
|
||||||
|
use tokio::io::AsyncWriteExt;
|
||||||
|
use tokio::process::Command;
|
||||||
|
|
||||||
|
use pikl_core::error::PiklError;
|
||||||
|
|
||||||
|
/// Duplicate stderr as a [`Stdio`] handle for use as a
/// child process's stdout. Keeps hook output on stderr
/// so stdout stays clean for pikl's JSON output.
fn stderr_as_stdio() -> std::process::Stdio {
    #[cfg(unix)]
    {
        use std::os::unix::io::FromRawFd;
        // SAFETY: libc::dup is always safe to call; it returns a new fd
        // referring to stderr, or -1 on failure (checked below).
        let fd = unsafe { libc::dup(libc::STDERR_FILENO) };
        if fd >= 0 {
            // SAFETY: `fd` is a freshly dup'd, valid descriptor that nothing
            // else owns, so File may take exclusive ownership of it.
            return unsafe { std::process::Stdio::from(std::fs::File::from_raw_fd(fd)) };
        }
    }
    // Non-unix platforms, or dup failure: fall back to inheriting the
    // parent's stdout (hook output may then appear on stdout).
    std::process::Stdio::inherit()
}
|
||||||
|
|
||||||
|
/// Run a shell hook, piping the value as JSON to stdin.
|
||||||
|
/// Hook stdout goes to stderr (see module docs). Returns
|
||||||
|
/// an error if the command exits non-zero.
|
||||||
|
pub async fn run_hook(command: &str, value: &Value) -> Result<(), PiklError> {
|
||||||
|
run_hook_with_stdout(command, value, stderr_as_stdio()).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize a value as JSON and write it to a child process's
|
||||||
|
/// stdin. Takes ownership of stdin and drops it after writing
|
||||||
|
/// so the child sees EOF.
|
||||||
|
async fn write_json_stdin(
|
||||||
|
child: &mut tokio::process::Child,
|
||||||
|
value: &Value,
|
||||||
|
) -> Result<(), PiklError> {
|
||||||
|
if let Some(mut stdin) = child.stdin.take() {
|
||||||
|
let json = serde_json::to_string(value)?;
|
||||||
|
let _ = stdin.write_all(json.as_bytes()).await;
|
||||||
|
drop(stdin);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run a shell hook with a custom stdout handle. Used by
|
||||||
|
/// [`run_hook`] to redirect hook output to stderr.
|
||||||
|
async fn run_hook_with_stdout(
|
||||||
|
command: &str,
|
||||||
|
value: &Value,
|
||||||
|
stdout: std::process::Stdio,
|
||||||
|
) -> Result<(), PiklError> {
|
||||||
|
let mut child = Command::new("sh")
|
||||||
|
.arg("-c")
|
||||||
|
.arg(command)
|
||||||
|
.stdin(std::process::Stdio::piped())
|
||||||
|
.stdout(stdout)
|
||||||
|
.stderr(std::process::Stdio::inherit())
|
||||||
|
.spawn()?;
|
||||||
|
|
||||||
|
write_json_stdin(&mut child, value).await?;
|
||||||
|
|
||||||
|
let status = child.wait().await?;
|
||||||
|
if !status.success() {
|
||||||
|
return Err(PiklError::HookFailed {
|
||||||
|
command: command.to_string(),
|
||||||
|
status,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn successful_hook() {
|
||||||
|
let value = json!("test");
|
||||||
|
let result = run_hook("true", &value).await;
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn failed_hook() {
|
||||||
|
let value = json!("test");
|
||||||
|
let result = run_hook("false", &value).await;
|
||||||
|
assert!(result.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Hook stdin verification --
|
||||||
|
|
||||||
|
    /// Helper: run `cat` with piped stdout so we can capture what it echoes back
    /// from the value JSON written to stdin.
    ///
    /// Returns an empty string on any spawn/wait failure so callers'
    /// assertions fail with a clear mismatch rather than a panic here.
    async fn capture_hook_stdin(value: &Value) -> String {
        let child = Command::new("sh")
            .arg("-c")
            .arg("cat")
            .stdin(std::process::Stdio::piped())
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::inherit())
            .spawn();
        let Ok(mut child) = child else {
            return String::new();
        };

        // Errors are ignored: an empty capture makes the caller's
        // assertion fail, which is the signal we want.
        let _ = write_json_stdin(&mut child, value).await;

        let output = child
            .wait_with_output()
            .await
            // NOTE(review): relies on `ExitStatus: Default` (stabilized in
            // recent Rust) to synthesize an empty Output on failure — the
            // status itself is never inspected here.
            .unwrap_or_else(|_| std::process::Output {
                status: std::process::ExitStatus::default(),
                stdout: Vec::new(),
                stderr: Vec::new(),
            });
        String::from_utf8(output.stdout).unwrap_or_default()
    }
|
||||||
|
|
||||||
|
    #[tokio::test]
    async fn write_json_stdin_sends_correct_data() {
        // Pipe through `cat` directly (no shell) and confirm the bytes
        // written to stdin round-trip as valid JSON on stdout.
        let value = json!({"key": "value"});
        let mut child = Command::new("cat")
            .stdin(std::process::Stdio::piped())
            .stdout(std::process::Stdio::piped())
            .spawn()
            .unwrap();
        write_json_stdin(&mut child, &value).await.unwrap();
        let output = child.wait_with_output().await.unwrap();
        let got = String::from_utf8(output.stdout).unwrap();
        let parsed: Value = serde_json::from_str(&got).unwrap();
        assert_eq!(parsed["key"], "value");
    }
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_receives_plain_text_json() {
|
||||||
|
let value = json!("hello");
|
||||||
|
let got = capture_hook_stdin(&value).await;
|
||||||
|
assert_eq!(got, r#""hello""#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_receives_object_json() {
|
||||||
|
let value = json!({"label": "foo", "value": 42});
|
||||||
|
let got = capture_hook_stdin(&value).await;
|
||||||
|
let parsed: Value = serde_json::from_str(&got).unwrap_or_default();
|
||||||
|
assert_eq!(parsed["label"], "foo");
|
||||||
|
assert_eq!(parsed["value"], 42);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_receives_special_chars() {
|
||||||
|
let value = json!("he said \"hi\"\nand left");
|
||||||
|
let got = capture_hook_stdin(&value).await;
|
||||||
|
let parsed: Value = serde_json::from_str(&got).unwrap_or_default();
|
||||||
|
assert_eq!(
|
||||||
|
parsed.as_str().unwrap_or_default(),
|
||||||
|
"he said \"hi\"\nand left"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Hook stdout-to-stderr redirection --
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_stdout_not_on_piped_stdout() {
|
||||||
|
// With piped stdout, `echo hello` output is capturable:
|
||||||
|
let value = json!("test");
|
||||||
|
let child = Command::new("sh")
|
||||||
|
.arg("-c")
|
||||||
|
.arg("echo hello")
|
||||||
|
.stdin(std::process::Stdio::piped())
|
||||||
|
.stdout(std::process::Stdio::piped())
|
||||||
|
.stderr(std::process::Stdio::inherit())
|
||||||
|
.spawn();
|
||||||
|
assert!(child.is_ok(), "should be able to spawn echo");
|
||||||
|
if let Ok(mut child) = child {
|
||||||
|
if let Some(mut stdin) = child.stdin.take() {
|
||||||
|
let json = serde_json::to_string(&value).unwrap_or_default();
|
||||||
|
let _ = stdin.write_all(json.as_bytes()).await;
|
||||||
|
drop(stdin);
|
||||||
|
}
|
||||||
|
let output = child
|
||||||
|
.wait_with_output()
|
||||||
|
.await
|
||||||
|
.unwrap_or_else(|_| std::process::Output {
|
||||||
|
status: std::process::ExitStatus::default(),
|
||||||
|
stdout: Vec::new(),
|
||||||
|
stderr: Vec::new(),
|
||||||
|
});
|
||||||
|
let piped_out = String::from_utf8(output.stdout).unwrap_or_default();
|
||||||
|
assert_eq!(piped_out.trim(), "hello");
|
||||||
|
}
|
||||||
|
|
||||||
|
// With stderr_as_stdio(), hook stdout is redirected away from stdout.
|
||||||
|
// Verify the hook still succeeds (output goes to stderr instead).
|
||||||
|
let result = run_hook("echo hello", &value).await;
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn stderr_as_stdio_returns_valid_fd() {
|
||||||
|
// Verify stderr_as_stdio() produces a usable Stdio.
|
||||||
|
// A child process using it should spawn and exit cleanly.
|
||||||
|
let child = Command::new("sh")
|
||||||
|
.arg("-c")
|
||||||
|
.arg("echo ok >&1")
|
||||||
|
.stdin(std::process::Stdio::null())
|
||||||
|
.stdout(stderr_as_stdio())
|
||||||
|
.stderr(std::process::Stdio::inherit())
|
||||||
|
.spawn();
|
||||||
|
assert!(child.is_ok());
|
||||||
|
let output = child
|
||||||
|
.unwrap_or_else(|_| unreachable!())
|
||||||
|
.wait_with_output()
|
||||||
|
.await;
|
||||||
|
assert!(output.is_ok());
|
||||||
|
assert!(
|
||||||
|
output
|
||||||
|
.unwrap_or_else(|_| std::process::Output {
|
||||||
|
status: std::process::ExitStatus::default(),
|
||||||
|
stdout: Vec::new(),
|
||||||
|
stderr: Vec::new(),
|
||||||
|
})
|
||||||
|
.status
|
||||||
|
.success()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- Hook error propagation --
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_nonzero_exit() {
|
||||||
|
let value = json!("test");
|
||||||
|
let result = run_hook("exit 42", &value).await;
|
||||||
|
assert!(
|
||||||
|
matches!(&result, Err(PiklError::HookFailed { command, .. }) if command == "exit 42")
|
||||||
|
);
|
||||||
|
if let Err(PiklError::HookFailed { status, .. }) = &result {
|
||||||
|
assert_eq!(status.code(), Some(42));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_missing_command() {
|
||||||
|
let value = json!("test");
|
||||||
|
let result = run_hook("/nonexistent_binary_that_does_not_exist_12345", &value).await;
|
||||||
|
// sh -c will fail with 127 (command not found)
|
||||||
|
assert!(result.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_empty_command() {
|
||||||
|
let value = json!("test");
|
||||||
|
// Empty string passed to sh -c is a no-op, exits 0
|
||||||
|
let result = run_hook("", &value).await;
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_with_stdout_uses_custom_stdio() {
|
||||||
|
let value = json!("custom");
|
||||||
|
let result = run_hook_with_stdout("echo ok", &value, std::process::Stdio::piped()).await;
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn hook_with_stdout_propagates_failure() {
|
||||||
|
let value = json!("test");
|
||||||
|
let result = run_hook_with_stdout("exit 1", &value, std::process::Stdio::piped()).await;
|
||||||
|
assert!(matches!(result, Err(PiklError::HookFailed { .. })));
|
||||||
|
}
|
||||||
|
}
|
||||||
332
crates/pikl/src/main.rs
Normal file
332
crates/pikl/src/main.rs
Normal file
@@ -0,0 +1,332 @@
|
|||||||
|
mod hook;
|
||||||
|
|
||||||
|
use std::io::{BufReader, IsTerminal, Write};
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use clap::Parser;
|
||||||
|
|
||||||
|
use pikl_core::error::PiklError;
|
||||||
|
use pikl_core::event::{Action, MenuResult, Mode};
|
||||||
|
use pikl_core::input::read_items_sync;
|
||||||
|
use pikl_core::item::Item;
|
||||||
|
use pikl_core::json_menu::JsonMenu;
|
||||||
|
use pikl_core::menu::MenuRunner;
|
||||||
|
use pikl_core::script::action_fd::{self, ScriptAction, ShowAction};
|
||||||
|
|
||||||
|
#[derive(Parser)]
#[command(
    name = "pikl",
    about = "Keyboard-driven streaming menu. Pipe stuff in, pick stuff out."
)]
// CLI surface. Careful: the `///` doc comments below are rendered by
// clap as the `--help` text, so they are user-facing output, not just
// documentation.
struct Cli {
    /// JSON key to use as the display label for object items
    #[arg(long, default_value = "label")]
    label_key: String,

    /// Shell command to run on selection (item JSON piped to stdin)
    #[arg(long)]
    on_select: Option<String>,

    /// Shell command to run on cancel
    #[arg(long)]
    on_cancel: Option<String>,

    /// Read action script from this file descriptor (enables headless mode)
    // Unix-only; main() rejects it at runtime on other platforms.
    #[arg(long, value_name = "FD")]
    action_fd: Option<i32>,

    /// Timeout in seconds for reading stdin (default: 30 with --action-fd, 0 otherwise)
    // 0 means "block forever"; the conditional default is applied in main().
    #[arg(long, value_name = "SECONDS")]
    stdin_timeout: Option<u64>,

    /// Start in this input mode (insert or normal, default: insert)
    // Kept as a String and validated in main() so the error message and
    // exit code (2) match the rest of the CLI diagnostics.
    #[arg(long, value_name = "MODE", default_value = "insert")]
    start_mode: String,
}
|
||||||
|
|
||||||
|
/// Entry point. Parses the CLI, loads an optional action-fd script,
/// reads items from stdin (with optional timeout), then dispatches to
/// headless or interactive mode. All fatal errors print to stderr and
/// exit with status 2; cancellation exits with 1 via handle_result().
fn main() {
    let cli = Cli::parse();

    // Install a panic hook that restores the terminal so a crash
    // doesn't leave the user staring at a broken shell.
    let default_hook = std::panic::take_hook();
    std::panic::set_hook(Box::new(move |info| {
        pikl_tui::restore_terminal();
        default_hook(info);
    }));

    // STEP 1: If action-fd, load + validate script FIRST (fail fast on bad scripts)
    let script: Option<Vec<ScriptAction>> = match cli.action_fd {
        Some(fd) => {
            #[cfg(unix)]
            {
                use std::os::unix::io::FromRawFd;
                // SAFETY: from_raw_fd takes ownership of `fd`. We assume the
                // caller passed a descriptor that is open and not used
                // elsewhere in this process (it came from --action-fd).
                let file = unsafe { std::fs::File::from_raw_fd(fd) };
                match action_fd::load_script(BufReader::new(file)) {
                    Ok(s) => Some(s),
                    Err(e) => {
                        let _ = writeln!(std::io::stderr().lock(), "pikl: {e}");
                        std::process::exit(2);
                    }
                }
            }
            #[cfg(not(unix))]
            {
                let _ = writeln!(
                    std::io::stderr().lock(),
                    "pikl: --action-fd is only supported on unix"
                );
                std::process::exit(2);
            }
        }
        None => None,
    };

    // STEP 2: Read stdin items (terminal check only when NOT in headless mode)
    if script.is_none() && std::io::stdin().is_terminal() {
        let _ = writeln!(
            std::io::stderr().lock(),
            "pikl: no input. pipe something in (e.g. ls | pikl)"
        );
        std::process::exit(2);
    }

    // Default timeout: 30s in headless mode (a script driver shouldn't
    // hang forever), 0 (= no timeout) interactively.
    let timeout = cli
        .stdin_timeout
        .unwrap_or(if script.is_some() { 30 } else { 0 });

    let items = match read_stdin_with_timeout(timeout, &cli.label_key) {
        Ok(items) => items,
        Err(e) => {
            let _ = writeln!(std::io::stderr().lock(), "pikl: {e}");
            std::process::exit(2);
        }
    };

    if items.is_empty() {
        let _ = writeln!(
            std::io::stderr().lock(),
            "pikl: empty input. nothing to pick from"
        );
        std::process::exit(2);
    }

    // STEP 3: Build menu, start runtime
    let rt = tokio::runtime::Runtime::new().unwrap_or_else(|e| {
        let _ = writeln!(
            std::io::stderr().lock(),
            "pikl: failed to start runtime: {e}"
        );
        std::process::exit(2);
    });

    // Reopen stdin from /dev/tty before entering async context, for the
    // interactive path only (script.is_none()). The headless path reopens
    // lazily inside run_headless(), and only if the script ends in a
    // show-* action that hands off to the TUI.
    if script.is_none()
        && let Err(e) = reopen_stdin_from_tty()
    {
        let _ = writeln!(std::io::stderr().lock(), "pikl: {e}");
        std::process::exit(2);
    }

    // Parse start mode
    let start_mode = match cli.start_mode.as_str() {
        "insert" => Mode::Insert,
        "normal" => Mode::Normal,
        other => {
            let _ = writeln!(
                std::io::stderr().lock(),
                "pikl: unknown mode '{other}', expected insert or normal"
            );
            std::process::exit(2);
        }
    };

    // STEP 4: Branch on headless vs interactive
    let label_key = cli.label_key.clone();
    let result = if let Some(script) = script {
        rt.block_on(run_headless(items, label_key, script, start_mode))
    } else {
        rt.block_on(run_interactive(items, label_key, start_mode))
    };

    // STEP 5: Handle result
    handle_result(result, &cli, &rt);
}
|
||||||
|
|
||||||
|
/// Run in headless mode: replay a script, optionally hand
/// off to a TUI if the script ends with show-ui/show-tui/show-gui.
///
/// The menu runs as a spawned task while the script feeds it actions.
/// With no show-* action, dropping the sender lets the menu loop finish
/// on its own (channel closed).
async fn run_headless(
    items: Vec<Item>,
    label_key: String,
    script: Vec<ScriptAction>,
    start_mode: Mode,
) -> Result<MenuResult, PiklError> {
    let (mut menu, action_tx) = MenuRunner::new(JsonMenu::new(items, label_key));
    menu.set_initial_mode(start_mode);
    let event_rx = menu.subscribe();

    // Default headless viewport (no real terminal to measure).
    let _ = action_tx.send(Action::Resize { height: 50 }).await;

    let menu_handle = tokio::spawn(menu.run());

    // Run script. May return a Show* action.
    let show = action_fd::run_script(script, &action_tx).await?;

    if let Some(show_action) = show {
        // Hand off to interactive frontend; stdin was consumed for the
        // piped items, so reattach it to the controlling terminal first.
        reopen_stdin_from_tty()?;

        match show_action {
            ShowAction::Ui | ShowAction::Tui | ShowAction::Gui => {
                // GUI doesn't exist yet. All show-* variants launch TUI for now.
                let tui_handle = tokio::spawn(pikl_tui::run(action_tx, event_rx));
                // Join errors (panics/aborts in the menu task) are surfaced
                // as I/O errors; the inner ? propagates the menu's own result.
                let result = menu_handle
                    .await
                    .map_err(|e| PiklError::Io(std::io::Error::other(e.to_string())))??;
                let _ = tui_handle.await;
                Ok(result)
            }
        }
    } else {
        // No show-ui. Drop sender, let menu finish.
        drop(action_tx);
        menu_handle
            .await
            .map_err(|e| PiklError::Io(std::io::Error::other(e.to_string())))?
    }
}
|
||||||
|
|
||||||
|
/// Run in interactive mode: start the TUI and let the user
|
||||||
|
/// pick from the menu.
|
||||||
|
async fn run_interactive(
|
||||||
|
items: Vec<Item>,
|
||||||
|
label_key: String,
|
||||||
|
start_mode: Mode,
|
||||||
|
) -> Result<MenuResult, PiklError> {
|
||||||
|
let (mut menu, action_tx) = MenuRunner::new(JsonMenu::new(items, label_key));
|
||||||
|
menu.set_initial_mode(start_mode);
|
||||||
|
let event_rx = menu.subscribe();
|
||||||
|
|
||||||
|
// Handle SIGINT/SIGTERM: restore terminal and exit cleanly.
|
||||||
|
let signal_tx = action_tx.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
if let Ok(()) = tokio::signal::ctrl_c().await {
|
||||||
|
pikl_tui::restore_terminal();
|
||||||
|
// Send cancel so the menu loop exits cleanly.
|
||||||
|
let _ = signal_tx.send(Action::Cancel).await;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let tui_handle = tokio::spawn(async move { pikl_tui::run(action_tx, event_rx).await });
|
||||||
|
|
||||||
|
let result = menu.run().await;
|
||||||
|
|
||||||
|
let _ = tui_handle.await;
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize a value as JSON and write it to the given writer.
|
||||||
|
fn write_selected_json(
|
||||||
|
writer: &mut impl Write,
|
||||||
|
value: &serde_json::Value,
|
||||||
|
) -> Result<(), std::io::Error> {
|
||||||
|
let json = serde_json::to_string(value).unwrap_or_default();
|
||||||
|
writeln!(writer, "{json}")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run a hook command if present. On failure, print the error
|
||||||
|
/// to stderr and exit.
|
||||||
|
fn run_result_hook(
|
||||||
|
rt: &tokio::runtime::Runtime,
|
||||||
|
hook_name: &str,
|
||||||
|
command: Option<&str>,
|
||||||
|
value: &serde_json::Value,
|
||||||
|
) {
|
||||||
|
if let Some(cmd) = command
|
||||||
|
&& let Err(e) = rt.block_on(hook::run_hook(cmd, value))
|
||||||
|
{
|
||||||
|
let _ = writeln!(std::io::stderr().lock(), "pikl: {hook_name} hook: {e}");
|
||||||
|
std::process::exit(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Process the menu result: print selected item JSON to
|
||||||
|
/// stdout, run hooks, or exit with the appropriate code.
|
||||||
|
fn handle_result(result: Result<MenuResult, PiklError>, cli: &Cli, rt: &tokio::runtime::Runtime) {
|
||||||
|
match result {
|
||||||
|
Ok(MenuResult::Selected(value)) => {
|
||||||
|
run_result_hook(rt, "on-select", cli.on_select.as_deref(), &value);
|
||||||
|
let _ = write_selected_json(&mut std::io::stdout().lock(), &value);
|
||||||
|
}
|
||||||
|
Ok(MenuResult::Cancelled) => {
|
||||||
|
let empty = serde_json::Value::String(String::new());
|
||||||
|
run_result_hook(rt, "on-cancel", cli.on_cancel.as_deref(), &empty);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
let _ = writeln!(std::io::stderr().lock(), "pikl: {e}");
|
||||||
|
std::process::exit(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read items from stdin. If `timeout_secs` is non-zero,
|
||||||
|
/// spawn a thread and bail if it doesn't finish in time.
|
||||||
|
/// A timeout of 0 means no timeout (blocking read).
|
||||||
|
// TODO: The interactive path blocks on all of stdin before showing
|
||||||
|
// the menu. Switch to streaming items via Action::AddItems so the
|
||||||
|
// menu renders immediately and populates as lines arrive.
|
||||||
|
fn read_stdin_with_timeout(timeout_secs: u64, label_key: &str) -> Result<Vec<Item>, PiklError> {
|
||||||
|
if timeout_secs == 0 {
|
||||||
|
return read_items_sync(std::io::stdin().lock(), label_key);
|
||||||
|
}
|
||||||
|
|
||||||
|
let label_key = label_key.to_string();
|
||||||
|
let (tx, rx) = std::sync::mpsc::channel();
|
||||||
|
std::thread::spawn(move || {
|
||||||
|
let _ = tx.send(read_items_sync(std::io::stdin().lock(), &label_key));
|
||||||
|
});
|
||||||
|
|
||||||
|
match rx.recv_timeout(Duration::from_secs(timeout_secs)) {
|
||||||
|
Ok(result) => result,
|
||||||
|
Err(_) => {
|
||||||
|
let _ = writeln!(
|
||||||
|
std::io::stderr().lock(),
|
||||||
|
"pikl: timed out reading stdin ({timeout_secs}s)"
|
||||||
|
);
|
||||||
|
let _ = writeln!(
|
||||||
|
std::io::stderr().lock(),
|
||||||
|
" = help: use --stdin-timeout to increase or set to 0 to disable"
|
||||||
|
);
|
||||||
|
std::process::exit(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Redirect stdin to `/dev/tty` so the TUI can read keyboard
/// input after stdin was consumed for piped items. Flushes
/// stale input so crossterm starts clean.
///
/// On non-unix targets this is a no-op that returns `Ok(())`.
///
/// # Errors
/// Returns `PiklError::Io` if `/dev/tty` cannot be opened (e.g. no
/// controlling terminal) or if `dup2` fails.
fn reopen_stdin_from_tty() -> Result<(), PiklError> {
    #[cfg(unix)]
    {
        use std::os::unix::io::AsRawFd;
        let tty = std::fs::File::open("/dev/tty")?;
        // SAFETY: dup2 is a standard POSIX call. We're
        // redirecting stdin to the controlling tty so the
        // TUI can read keyboard input after stdin was
        // consumed for piped items.
        let r = unsafe { libc::dup2(tty.as_raw_fd(), libc::STDIN_FILENO) };
        if r < 0 {
            return Err(PiklError::Io(std::io::Error::last_os_error()));
        }
        // `tty` is dropped here; stdin keeps its duplicated descriptor.
        // SAFETY: tcflush is a standard POSIX call. Flush
        // stale input that arrived between dup2 and raw
        // mode so crossterm starts clean.
        unsafe { libc::tcflush(libc::STDIN_FILENO, libc::TCIFLUSH) };
    }
    Ok(())
}
|
||||||
52
crates/pikl/tests/common/mod.rs
Normal file
52
crates/pikl/tests/common/mod.rs
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
use std::io::Write;
|
||||||
|
use std::os::unix::io::FromRawFd;
|
||||||
|
use std::os::unix::process::CommandExt;
|
||||||
|
use std::process::{Command, Stdio};
|
||||||
|
|
||||||
|
/// Spawn pikl with items on stdin and an action script on fd 3.
|
||||||
|
/// Returns (stdout, stderr, exit_code).
|
||||||
|
pub fn run_pikl(items: &str, script: &str, extra_args: &[&str]) -> (String, String, i32) {
|
||||||
|
let mut fds = [0i32; 2];
|
||||||
|
unsafe { libc::pipe(fds.as_mut_ptr()) };
|
||||||
|
let [read_fd, write_fd] = fds;
|
||||||
|
|
||||||
|
let mut write_file = unsafe { std::fs::File::from_raw_fd(write_fd) };
|
||||||
|
write_file.write_all(script.as_bytes()).unwrap_or_default();
|
||||||
|
drop(write_file);
|
||||||
|
|
||||||
|
let target_fd = 3i32;
|
||||||
|
let mut cmd = Command::new(env!("CARGO_BIN_EXE_pikl"));
|
||||||
|
cmd.args(["--action-fd", "3"])
|
||||||
|
.args(extra_args)
|
||||||
|
.stdin(Stdio::piped())
|
||||||
|
.stdout(Stdio::piped())
|
||||||
|
.stderr(Stdio::piped());
|
||||||
|
|
||||||
|
unsafe {
|
||||||
|
cmd.pre_exec(move || {
|
||||||
|
if read_fd != target_fd {
|
||||||
|
libc::dup2(read_fd, target_fd);
|
||||||
|
libc::close(read_fd);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let Ok(mut child) = cmd.spawn() else {
|
||||||
|
return ("".to_string(), "failed to spawn pikl".to_string(), -1);
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(mut stdin) = child.stdin.take() {
|
||||||
|
stdin.write_all(items.as_bytes()).unwrap_or_default();
|
||||||
|
}
|
||||||
|
|
||||||
|
let Ok(output) = child.wait_with_output() else {
|
||||||
|
return ("".to_string(), "failed to wait for pikl".to_string(), -1);
|
||||||
|
};
|
||||||
|
|
||||||
|
let stdout = String::from_utf8(output.stdout).unwrap_or_default();
|
||||||
|
let stderr = String::from_utf8(output.stderr).unwrap_or_default();
|
||||||
|
let code = output.status.code().unwrap_or(-1);
|
||||||
|
|
||||||
|
(stdout, stderr, code)
|
||||||
|
}
|
||||||
69
crates/pikl/tests/headless.rs
Normal file
69
crates/pikl/tests/headless.rs
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
#![cfg(unix)]
//! End-to-end tests for headless mode: drive the real pikl binary with
//! an action script on fd 3 (via `common::run_pikl`) and assert on its
//! stdout/stderr and exit code. Exit code contract: 0 = selected,
//! 1 = cancelled, 2 = usage/script error.

mod common;

#[test]
fn headless_confirm_first() {
    let (stdout, _stderr, code) = common::run_pikl("alpha\nbeta\n", "confirm\n", &[]);
    assert_eq!(code, 0, "expected exit 0, stderr: {_stderr}");
    assert!(
        stdout.contains("alpha"),
        "expected alpha in stdout, got: {stdout}"
    );
}

#[test]
fn headless_move_then_confirm() {
    let (stdout, _stderr, code) = common::run_pikl("alpha\nbeta\n", "move-down\nconfirm\n", &[]);
    assert_eq!(code, 0, "expected exit 0, stderr: {_stderr}");
    assert!(
        stdout.contains("beta"),
        "expected beta in stdout, got: {stdout}"
    );
}

#[test]
fn headless_filter_then_confirm() {
    let (stdout, _stderr, code) =
        common::run_pikl("alpha\nbeta\nbanana\n", "filter ban\nconfirm\n", &[]);
    assert_eq!(code, 0, "expected exit 0, stderr: {_stderr}");
    assert!(
        stdout.contains("banana"),
        "expected banana in stdout, got: {stdout}"
    );
}

#[test]
fn headless_cancel() {
    let (_stdout, _stderr, code) = common::run_pikl("alpha\n", "cancel\n", &[]);
    assert_eq!(code, 1, "expected exit 1 on cancel");
}

#[test]
fn headless_empty_script_cancels() {
    // An empty script means the action sender is dropped immediately,
    // which the menu loop treats the same as a user cancel.
    let (_stdout, _stderr, code) = common::run_pikl("alpha\n", "", &[]);
    assert_eq!(
        code, 1,
        "expected exit 1 when script is empty (sender dropped → cancelled)"
    );
}

#[test]
fn headless_invalid_script_exits_2() {
    let (_stdout, stderr, code) = common::run_pikl("alpha\n", "bogus\n", &[]);
    assert_eq!(code, 2, "expected exit 2 on invalid script");
    assert!(
        stderr.contains("unknown action"),
        "expected error diagnostic, got: {stderr}"
    );
}

#[test]
fn headless_actions_after_show_ui_exits_2() {
    // show-ui must be the last script action; anything after it is a
    // script validation error.
    let (_stdout, stderr, code) = common::run_pikl("alpha\n", "show-ui\nconfirm\n", &[]);
    assert_eq!(code, 2, "expected exit 2 for actions after show-ui");
    assert!(
        stderr.contains("actions after show-ui"),
        "expected show-ui error, got: {stderr}"
    );
}
|
||||||
296
crates/pikl/tests/headless_dsl.rs
Normal file
296
crates/pikl/tests/headless_dsl.rs
Normal file
@@ -0,0 +1,296 @@
|
|||||||
|
#![cfg(unix)]
//! DSL-driven headless tests. The `pikl_tests!` proc macro expands each
//! `headless mod` into a test module; each `test` block becomes a
//! `#[test]` that pipes `items` into the pikl binary, feeds the listed
//! actions via --action-fd, and asserts on stdout/stderr/exit code
//! (presumably via `run_pikl` below — see pikl_test_macros for the
//! exact expansion).

mod common;
use common::run_pikl;

use pikl_test_macros::pikl_tests;

pikl_tests! {
    headless mod basic_selection {
        items: ["alpha", "beta", "charlie"];

        test confirm_first {
            actions: [confirm]
            stdout: "alpha"
            exit: 0
        }

        test move_down_and_confirm {
            actions: [move-down, confirm]
            stdout: "beta"
            exit: 0
        }

        test move_to_third {
            actions: [move-down, move-down, confirm]
            stdout: "charlie"
            exit: 0
        }

        test cancel_exits_1 {
            actions: [cancel]
            stdout: ""
            exit: 1
        }

        test empty_script_cancels {
            actions: []
            exit: 1
        }
    }

    headless mod filtering {
        items: ["alpha", "beta", "banana"];

        test filter_then_confirm {
            actions: [filter "ban", confirm]
            stdout: "banana"
            exit: 0
        }
    }

    headless mod errors {
        items: ["one", "two"];

        test invalid_action {
            actions: [raw "bogus"]
            stderr contains: "unknown action"
            exit: 2
        }

        test actions_after_show_ui {
            actions: [raw "show-ui", raw "confirm"]
            stderr contains: "actions after show-ui"
            exit: 2
        }
    }

    // ── Demo scenario tests ──────────────────────────────
    // These cover the scenarios from examples/demo.sh so
    // we catch regressions in the demo workflows.

    headless mod demo_plain_text {
        items: [
            "apple", "banana", "cherry", "date",
            "elderberry", "fig", "grape", "honeydew"
        ];

        test confirm_first {
            actions: [confirm]
            stdout: "apple"
            exit: 0
        }

        test navigate_to_last {
            actions: [move-to-bottom, confirm]
            stdout: "honeydew"
            exit: 0
        }

        test filter_then_confirm {
            actions: [filter "cher", confirm]
            stdout: "cherry"
            exit: 0
        }

        test filter_narrows_to_one {
            actions: [filter "elder", confirm]
            stdout: "elderberry"
            exit: 0
        }

        test cancel {
            actions: [cancel]
            exit: 1
        }
    }

    headless mod demo_big_list {
        // Bare numbers parse as JSON numbers with empty labels,
        // so we prefix with "item-" to keep them as plain text.
        items: [
            "item-1", "item-2", "item-3", "item-4", "item-5",
            "item-6", "item-7", "item-8", "item-9", "item-10",
            "item-11", "item-12", "item-13", "item-14", "item-15",
            "item-16", "item-17", "item-18", "item-19", "item-20",
            "item-50", "item-100", "item-200", "item-499", "item-500"
        ];

        test confirm_first {
            actions: [confirm]
            stdout: "item-1"
            exit: 0
        }

        test move_to_bottom {
            actions: [move-to-bottom, confirm]
            stdout: "item-500"
            exit: 0
        }

        test filter_exact {
            actions: [filter "item-499", confirm]
            stdout: "item-499"
            exit: 0
        }

        // No stdout assertion here: which item page-down lands on depends
        // on the headless viewport; we only require a clean selection.
        test page_down_then_confirm {
            actions: [page-down, confirm]
            exit: 0
        }
    }

    headless mod demo_json_objects {
        items: [
            "{\"label\": \"Arch Linux\", \"category\": \"rolling\", \"init\": \"systemd\"}",
            "{\"label\": \"NixOS\", \"category\": \"rolling\", \"init\": \"systemd\"}",
            "{\"label\": \"Void Linux\", \"category\": \"rolling\", \"init\": \"runit\"}",
            "{\"label\": \"Debian\", \"category\": \"stable\", \"init\": \"systemd\"}",
            "{\"label\": \"Alpine\", \"category\": \"stable\", \"init\": \"openrc\"}",
            "{\"label\": \"Fedora\", \"category\": \"semi-rolling\", \"init\": \"systemd\"}",
            "{\"label\": \"Gentoo\", \"category\": \"rolling\", \"init\": \"openrc\"}"
        ];

        test confirm_first {
            actions: [confirm]
            stdout: "Arch Linux"
            exit: 0
        }

        test preserves_json_fields {
            actions: [confirm]
            stdout: "rolling"
            exit: 0
        }

        test filter_by_label {
            actions: [filter "Void", confirm]
            stdout: "Void Linux"
            exit: 0
        }

        test navigate_to_debian {
            actions: [move-down, move-down, move-down, confirm]
            stdout: "Debian"
            exit: 0
        }

        test filter_and_navigate {
            actions: [filter "Linux", move-down, confirm]
            stdout: "Void Linux"
            exit: 0
        }

        test cancel {
            actions: [cancel]
            exit: 1
        }
    }

    headless mod demo_custom_label_key {
        items: [
            "{\"name\": \"Neovim\", \"type\": \"editor\", \"lang\": \"C/Lua\"}",
            "{\"name\": \"Helix\", \"type\": \"editor\", \"lang\": \"Rust\"}",
            "{\"name\": \"Kakoune\", \"type\": \"editor\", \"lang\": \"C++\"}",
            "{\"name\": \"Emacs\", \"type\": \"editor\", \"lang\": \"Lisp\"}",
            "{\"name\": \"Vim\", \"type\": \"editor\", \"lang\": \"C\"}"
        ];
        label_key: "name";

        test confirm_first {
            actions: [confirm]
            stdout: "Neovim"
            exit: 0
        }

        test filter_by_name {
            actions: [filter "Hel", confirm]
            stdout: "Helix"
            exit: 0
        }

        test preserves_lang_field {
            actions: [move-down, confirm]
            stdout: "Rust"
            exit: 0
        }

        test navigate_to_last {
            actions: [move-to-bottom, confirm]
            stdout: "Vim"
            exit: 0
        }
    }

    headless mod demo_mixed_input {
        items: [
            "just a plain string",
            "{\"label\": \"a json object\", \"extra\": 42}",
            "another plain string",
            "{\"label\": \"second object\", \"extra\": 99}"
        ];

        test confirm_plain_text {
            actions: [confirm]
            stdout: "just a plain string"
            exit: 0
        }

        test confirm_json_object {
            actions: [move-down, confirm]
            stdout: "a json object"
            exit: 0
        }

        test json_preserves_extra {
            actions: [move-down, confirm]
            stdout: "42"
            exit: 0
        }

        test navigate_to_second_plain {
            actions: [move-down, move-down, confirm]
            stdout: "another plain string"
            exit: 0
        }

        test navigate_to_second_json {
            actions: [move-down, move-down, move-down, confirm]
            stdout: "second object"
            exit: 0
        }

        test filter_across_types {
            actions: [filter "plain", confirm]
            stdout: "just a plain string"
            exit: 0
        }
    }

    headless mod new_actions {
        items: ["a","b","c","d","e","f","g","h","i","j"];

        test half_page_down_confirm {
            actions: [raw "half-page-down", confirm]
            // headless viewport=50, half=25, clamps to last (idx 9) -> "j"
            stdout: "j"
            exit: 0
        }

        test set_mode_round_trip {
            actions: [raw "set-mode normal", raw "set-mode insert", confirm]
            stdout: "a"
            exit: 0
        }
    }

    headless mod pipeline_headless {
        items: ["error_log", "warning_temp", "info_log"];

        // Filter pipeline syntax: 'log = exact-ish match, | = AND,
        // !error = negation — only "info_log" survives.
        test pipeline_filter {
            actions: [filter "'log | !error", confirm]
            stdout: "info_log"
            exit: 0
        }
    }
}
|
||||||
186
docs/lessons/unix-pipes-and-buffering.md
Normal file
186
docs/lessons/unix-pipes-and-buffering.md
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
# Unix Pipes Are Not Buffered (But Everything Else Is)
|
||||||
|
|
||||||
|
If you've ever piped a command into another and wondered why
|
||||||
|
the output seems to "lag" or arrive in chunks, this one's
|
||||||
|
for you. The pipe isn't the problem. It never was.
|
||||||
|
|
||||||
|
## Pipes are just a kernel FIFO
|
||||||
|
|
||||||
|
When the shell sets up `cmd1 | cmd2`, it does roughly this:
|
||||||
|
|
||||||
|
```c
|
||||||
|
int fds[2];
|
||||||
|
pipe(fds); // fds[0] = read end, fds[1] = write end
|
||||||
|
// fork cmd1, dup2(fds[1], STDOUT)
|
||||||
|
// cmd1's stdout writes into the pipe
|
||||||
|
// fork cmd2, dup2(fds[0], STDIN)
|
||||||
|
// cmd2's stdin reads from the pipe
|
||||||
|
```
|
||||||
|
|
||||||
|
The pipe itself is a dumb byte queue in the kernel. No
|
||||||
|
buffering strategy, no flushing, no opinions. Bytes written
|
||||||
|
to the write end are immediately available on the read end.
|
||||||
|
It has a capacity (64KB on Linux, varies elsewhere) and
|
||||||
|
`write()` blocks if it's full. That's your backpressure.
|
||||||
|
|
||||||
|
Think of it like a bounded `tokio::sync::mpsc::channel` but
|
||||||
|
for raw bytes instead of typed messages. One side writes,
|
||||||
|
the other reads, the kernel handles the queue.
|
||||||
|
|
||||||
|
## So where does the buffering come from?
|
||||||
|
|
||||||
|
The C standard library (`libc` / `glibc`). Specifically, its
|
||||||
|
`FILE*` stream layer (the thing behind `printf`, `puts`,
|
||||||
|
`fwrite` to stdout, etc.).
|
||||||
|
|
||||||
|
When a C program starts up, before your `main()` even runs,
|
||||||
|
libc's runtime initializes stdout with this rule:
|
||||||
|
|
||||||
|
| stdout points to... | Buffering mode |
|
||||||
|
|----------------------|----------------------------|
|
||||||
|
| A terminal (tty) | **Line-buffered**: flushes on every `\n` |
|
||||||
|
| A pipe or file | **Fully buffered**: flushes when the internal buffer fills (~4-8KB) |
|
||||||
|
|
||||||
|
This detection happens via `isatty(STDOUT_FILENO)`. The
|
||||||
|
program checks if its stdout is a terminal and picks a
|
||||||
|
buffering strategy accordingly.
|
||||||
|
|
||||||
|
**This is not a decision the shell makes.** The shell just
|
||||||
|
wires up the pipe. The *program* decides to buffer based on
|
||||||
|
what it sees on the other end.
|
||||||
|
|
||||||
|
## The classic surprise
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Works fine. stdout is a terminal, line-buffered, lines
|
||||||
|
# appear immediately.
|
||||||
|
tail -f /var/log/something
|
||||||
|
|
||||||
|
# Seems to lag. stdout is a pipe, fully buffered, lines
|
||||||
|
# arrive in 4KB chunks.
|
||||||
|
tail -f /var/log/something | grep error
|
||||||
|
```
|
||||||
|
|
||||||
|
The pipe between `tail` and `grep` is instant. But `tail`
|
||||||
|
detects its stdout is a pipe, switches to full buffering,
|
||||||
|
and holds onto output until its internal buffer fills. So
|
||||||
|
`grep` sits there waiting for a 4KB chunk instead of getting
|
||||||
|
lines one at a time.
|
||||||
|
|
||||||
|
Same deal with most line-oriented tools — `awk`, `sed`, `cut` —
they all go through the same libc stdio layer and its isatty
check (though some offer escape hatches, e.g. `grep
--line-buffered` and GNU `sed -u`).
|
||||||
|
|
||||||
|
## The workarounds
|
||||||
|
|
||||||
|
### `stdbuf`: override libc's buffering choice
|
||||||
|
|
||||||
|
```bash
|
||||||
|
stdbuf -oL tail -f /var/log/something | grep error
|
||||||
|
```
|
||||||
|
|
||||||
|
`-oL` means "force stdout to line-buffered." It works by
|
||||||
|
LD_PRELOADing a shim library that overrides libc's
|
||||||
|
initialization. This only works for dynamically-linked
|
||||||
|
programs that use libc's stdio (most things, but not
|
||||||
|
everything).
|
||||||
|
|
||||||
|
### `unbuffer` (from `expect`)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
unbuffer tail -f /var/log/something | grep error
|
||||||
|
```
|
||||||
|
|
||||||
|
Creates a pseudo-terminal (pty) so the program *thinks*
|
||||||
|
it's talking to a terminal and uses line buffering. Heavier
|
||||||
|
than `stdbuf` but works on programs that don't use libc's
|
||||||
|
stdio.
|
||||||
|
|
||||||
|
### In your own code: just don't add buffering
|
||||||
|
|
||||||
|
In Rust, raw `std::fs::File` writes are unbuffered. Every
|
||||||
|
`.write()` call goes straight to the kernel via the `write`
|
||||||
|
syscall:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use std::io::Write;
|
||||||
|
|
||||||
|
// Immediately available on the read end. No flush needed.
|
||||||
|
write_file.write_all(b"first line\n")?;
|
||||||
|
|
||||||
|
// Reader already has that line. Do whatever.
|
||||||
|
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||||
|
|
||||||
|
// This also lands immediately.
|
||||||
|
write_file.write_all(b"second line\n")?;
|
||||||
|
```
|
||||||
|
|
||||||
|
If you wrap it in `BufWriter`, now you've opted into the
|
||||||
|
same buffering libc does:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use std::io::{BufWriter, Write};
|
||||||
|
|
||||||
|
let mut writer = BufWriter::new(write_file);
|
||||||
|
writer.write_all(b"first line\n")?;
|
||||||
|
// NOT visible yet. Sitting in an 8KB userspace buffer.
|
||||||
|
writer.flush()?;
|
||||||
|
// NOW visible on the read end.
|
||||||
|
```
|
||||||
|
|
||||||
|
Rust's `println!` and `stdout().lock()` do *not* copy libc's tty
detection: the standard library's `Stdout` is always line-buffered
through an internal `LineWriter`, pipe or terminal alike. If you
need guaranteed unbuffered writes, use the raw fd or explicitly
flush.
|
||||||
|
|
||||||
|
## How pikl uses this
|
||||||
|
|
||||||
|
In pikl's test helpers, we create a pipe to feed action
|
||||||
|
scripts to the `--action-fd` flag:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
let mut fds = [0i32; 2];
|
||||||
|
unsafe { libc::pipe(fds.as_mut_ptr()) };
|
||||||
|
let [read_fd, write_fd] = fds;
|
||||||
|
|
||||||
|
// Wrap the write end in a File. Raw, unbuffered.
|
||||||
|
let mut write_file =
|
||||||
|
unsafe { std::fs::File::from_raw_fd(write_fd) };
|
||||||
|
|
||||||
|
// Write the script. Immediately available on read_fd.
|
||||||
|
write_file.write_all(script.as_bytes())?;
|
||||||
|
// Close the write end so the reader gets EOF.
|
||||||
|
drop(write_file);
|
||||||
|
```
|
||||||
|
|
||||||
|
Then in the child process, we remap the read end to the
|
||||||
|
expected fd:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// pre_exec runs in the child after fork(), before exec()
|
||||||
|
cmd.pre_exec(move || {
|
||||||
|
if read_fd != target_fd {
|
||||||
|
// make fd 3 point to the pipe
|
||||||
|
libc::dup2(read_fd, target_fd);
|
||||||
|
// close the original (now redundant)
|
||||||
|
libc::close(read_fd);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
For streaming/async scenarios (like feeding items to pikl
|
||||||
|
over time), the same approach works. Just don't drop the
|
||||||
|
write end. Each `write_all` call pushes bytes through the
|
||||||
|
pipe immediately, and the reader picks them up as they
|
||||||
|
arrive. No flush needed because `File` doesn't buffer.
|
||||||
|
|
||||||
|
## tl;dr
|
||||||
|
|
||||||
|
- Pipes are instant. They're a kernel FIFO with zero
|
||||||
|
buffering.
|
||||||
|
- The "buffering" you see is libc's `FILE*` layer choosing
|
||||||
|
full buffering when stdout isn't a terminal.
|
||||||
|
- `stdbuf -oL` or `unbuffer` to fix other people's
|
||||||
|
programs.
|
||||||
|
- In your own code, use raw `File` (not `BufWriter`) and
|
||||||
|
every write lands immediately.
|
||||||
|
- It was always libc. Bloody libc.
|
||||||
159
examples/demo.sh
Executable file
159
examples/demo.sh
Executable file
@@ -0,0 +1,159 @@
|
|||||||
|
#!/usr/bin/env bash
# Interactive demo launcher for pikl-menu.
# Uses pikl to pick a scenario, then runs that scenario in pikl.
#
# Usage: ./examples/demo.sh
#
# Environment:
#   PIKL  optional path to a pikl binary; overrides auto-detection.

set -euo pipefail

# Resolve the pikl command once up front. Stored as an ARRAY so that
# both a multi-word fallback ("cargo run --quiet --") and a $PIKL path
# containing spaces expand correctly -- the previous flat-string
# version relied on unquoted word splitting, which also glob-expanded
# the value.
if [[ -n "${PIKL:-}" ]]; then
    PIKL_BIN=("$PIKL")
elif command -v pikl >/dev/null 2>&1; then
    PIKL_BIN=(pikl)
else
    # No binary on PATH: build once quietly, then invoke through
    # `cargo run` so we don't have to guess the target-dir layout.
    cargo build --quiet 2>&1
    PIKL_BIN=(cargo run --quiet --)
fi

# Wrapper so scenarios can just call `pikl` with args.
pikl() {
    "${PIKL_BIN[@]}" "$@"
}
|
||||||
|
|
||||||
|
# ── Scenario runners ──────────────────────────────────────
|
||||||
|
|
||||||
|
# Scenario: a minimal plain-text list, one fruit per line.
plain_list() {
    local fruits=(apple banana cherry date elderberry fig grape honeydew)
    printf '%s\n' "${fruits[@]}" | pikl
}
|
||||||
|
|
||||||
|
# Scenario: 500 numbered items to exercise scrolling and filtering.
big_list() {
    # Each number is wrapped in double quotes so it parses as a JSON
    # string and gets a proper label (bare numbers parse as JSON
    # numbers with empty display text).
    seq 1 500 | sed 's/^.*$/"&"/' | pikl
}
|
||||||
|
|
||||||
|
# Scenario: one JSON object per line; pikl displays the "label" field.
json_objects() {
    pikl <<'ITEMS'
{"label": "Arch Linux", "category": "rolling", "init": "systemd"}
{"label": "NixOS", "category": "rolling", "init": "systemd"}
{"label": "Void Linux", "category": "rolling", "init": "runit"}
{"label": "Debian", "category": "stable", "init": "systemd"}
{"label": "Alpine", "category": "stable", "init": "openrc"}
{"label": "Fedora", "category": "semi-rolling", "init": "systemd"}
{"label": "Gentoo", "category": "rolling", "init": "openrc"}
ITEMS
}
|
||||||
|
|
||||||
|
# Scenario: objects labelled by a non-default key via --label-key.
custom_label_key() {
    pikl --label-key name <<'ITEMS'
{"name": "Neovim", "type": "editor", "lang": "C/Lua"}
{"name": "Helix", "type": "editor", "lang": "Rust"}
{"name": "Kakoune", "type": "editor", "lang": "C++"}
{"name": "Emacs", "type": "editor", "lang": "Lisp"}
{"name": "Vim", "type": "editor", "lang": "C"}
ITEMS
}
|
||||||
|
|
||||||
|
# Scenario: pick a local git branch. Bails out when not in a repo.
git_branches() {
    git rev-parse --is-inside-work-tree >/dev/null 2>&1 || {
        echo "not in a git repo" >&2
        return 1
    }
    git branch --format='%(refname:short)' | pikl
}
|
||||||
|
|
||||||
|
# Scenario: pick from the last 30 commits. Bails out when not in a repo.
git_log_picker() {
    git rev-parse --is-inside-work-tree >/dev/null 2>&1 || {
        echo "not in a git repo" >&2
        return 1
    }
    git log --oneline -30 | pikl
}
|
||||||
|
|
||||||
|
# Scenario: browse project files, skipping VCS/build noise and lockfiles.
file_picker() {
    find . -maxdepth 3 -type f \
        ! -path './.git/*' \
        ! -path './target/*' \
        ! -name '*.lock' |
        sort |
        pikl
}
|
||||||
|
|
||||||
|
# Scenario: run a shell hook on selection; the hook reads the pick on stdin.
on_select_hook() {
    local words=(one two three four five)
    printf '%s\n' "${words[@]}" |
        pikl --on-select 'echo "you picked: $(cat)"'
}
|
||||||
|
|
||||||
|
# Scenario: plain strings and JSON objects interleaved in one stream.
mixed_input() {
    pikl <<'ITEMS'
just a plain string
{"label": "a json object", "extra": 42}
another plain string
{"label": "second object", "extra": 99}
ITEMS
}
|
||||||
|
|
||||||
|
# ── Scenario menu ─────────────────────────────────────────
|
||||||
|
|
||||||
|
# Display names shown in the top-level picker. run_scenario() maps
# each entry back to a runner function by substring match, so keep
# these strings in sync with its case patterns.
scenarios=(
    "Plain text list"
    "Big list (500 items)"
    "JSON objects (distros)"
    "Custom --label-key (editors)"
    "Git branches"
    "Git log (last 30)"
    "File picker"
    "on-select hook"
    "Mixed input (plain + JSON)"
)
|
||||||
|
|
||||||
|
# Dispatch a scenario display name (as chosen from the menu) to its
# runner function. Matching is by distinctive substring so small label
# tweaks don't break the mapping.
run_scenario() {
    local name=$1
    case "$name" in
        *"Plain text"*)   plain_list ;;
        *"Big list"*)     big_list ;;
        *"JSON objects"*) json_objects ;;
        *"label-key"*)    custom_label_key ;;
        *"Git branches"*) git_branches ;;
        *"Git log"*)      git_log_picker ;;
        *"File picker"*)  file_picker ;;
        *"on-select"*)    on_select_hook ;;
        *"Mixed input"*)  mixed_input ;;
        *)
            echo "unknown scenario" >&2
            return 1
            ;;
    esac
}
|
||||||
|
|
||||||
|
# ── Main ──────────────────────────────────────────────────

# Show the scenario menu, run the chosen scenario, echo its result.
main() {
    {
        echo "pikl demo launcher"
        echo "pick a scenario, then interact with it"
        echo ""
    } >&2

    local choice
    if ! choice=$(printf '%s\n' "${scenarios[@]}" | pikl); then
        echo "cancelled" >&2
        exit 1
    fi

    # pikl outputs JSON. Strip the quotes for matching.
    choice=${choice//\"/}

    printf '\n── running: %s ──\n\n' "$choice" >&2

    local result
    result=$(run_scenario "$choice") || exit $?

    printf '\n── result ──\n' >&2
    echo "$result"
}

main "$@"
||||||
Reference in New Issue
Block a user