diff --git a/Cargo.lock b/Cargo.lock index 5acc8438..a4971517 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,13 +4,14 @@ version = 3 [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -234,6 +235,12 @@ version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "errno" version = "0.3.1" @@ -294,6 +301,17 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + [[package]] name = "glob" version = "0.3.1" @@ -302,9 +320,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ "ahash", "allocator-api2", @@ -348,6 +366,16 @@ dependencies = [ "cc", ] +[[package]] +name = "indexmap" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +dependencies = [ + "equivalent", + "hashbrown", +] + [[package]] name = "itertools" version = "0.10.5" @@ -557,6 +585,24 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + [[package]] name = "redox_syscall" version = "0.3.5" @@ -576,9 +622,11 @@ dependencies = [ "crossterm", "fd-lock", "gethostname", + "indexmap", "itertools", "nu-ansi-term", "pretty_assertions", + "rand", "rstest", "rusqlite", "serde", @@ -811,9 +859,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "239814284fd6f1a4ffe4ca893952cdd93c224b6a1571c9a9eadd670295c0c9e2" dependencies = [ "proc-macro2", "quote", @@ -1090,3 +1138,23 @@ name = "yansi" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zerocopy" +version = "0.7.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5d075cf85bbb114e933343e087b92f2146bac0d55b534cbb8188becf0039948e" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86cd5ca076997b97ef09d3ad65efe811fa68c9e874cb636ccb211223a813b0c2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml index ca1bc5ad..ff7633d8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,8 +21,13 @@ clipboard = { version = "0.5.0", optional = true } crossbeam = { version = "0.8.2", optional = true } crossterm = { version = "0.27.0", features = ["serde"] } fd-lock = "3.0.3" +indexmap = "2.1.0" itertools = "0.10.3" nu-ansi-term = "0.49.0" +rand = { version = "0.8.5", default-features = false, features = [ + "small_rng", + "getrandom", +] } rusqlite = { version = "0.29.0", optional = true } serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0.79", optional = true } diff --git a/examples/cwd_aware_hinter.rs b/examples/cwd_aware_hinter.rs index b8b9145d..c5592fb4 100644 --- a/examples/cwd_aware_hinter.rs +++ b/examples/cwd_aware_hinter.rs @@ -6,14 +6,21 @@ // pressing "a" hints to abc. // Up/Down or Ctrl p/n, to select next/previous match -use std::io; +use std::{ + io, + sync::atomic::{AtomicI64, Ordering}, +}; + +use reedline::HistoryItemId; + +static COUNTER: AtomicI64 = AtomicI64::new(0); fn create_item(cwd: &str, cmd: &str, exit_status: i64) -> reedline::HistoryItem { use std::time::Duration; use reedline::HistoryItem; HistoryItem { - id: None, + id: HistoryItemId(COUNTER.fetch_add(1, Ordering::SeqCst)), start_timestamp: None, command_line: cmd.to_string(), session_id: None, @@ -32,13 +39,13 @@ fn create_filled_example_history(home_dir: &str, orig_dir: &str) -> Box io::Result { let buffer = self.editor.get_buffer().to_string(); self.hide_hints = true; + // Additional repaint to show the content without hints etc. 
if let Some(transient_prompt) = self.transient_prompt.take() { self.repaint(transient_prompt.as_ref())?; @@ -1737,25 +1738,33 @@ impl Reedline { } else { self.repaint(prompt)?; } - if !buffer.is_empty() { - let mut entry = HistoryItem::from_command_line(&buffer); - entry.session_id = self.get_history_session_id(); - if self + if !buffer.is_empty() { + let filtered = self .history_exclusion_prefix .as_ref() - .map(|prefix| buffer.starts_with(prefix)) - .unwrap_or(false) - { - entry.id = Some(Self::FILTERED_ITEM_ID); - self.history_last_run_id = entry.id; - self.history_excluded_item = Some(entry); + .map_or(false, |prefix| buffer.starts_with(prefix)); + + let entry_id = if filtered { + Self::FILTERED_ITEM_ID + } else { + self.history.generate_id() + }; + + let mut entry = HistoryItem::from_command_line(&buffer, entry_id); + + entry.session_id = self.get_history_session_id(); + + if filtered { + self.history.replace(&entry).expect("todo: error handling"); } else { - entry = self.history.save(entry).expect("todo: error handling"); - self.history_last_run_id = entry.id; - self.history_excluded_item = None; + self.history.save(&entry).expect("todo: error handling"); } + + self.history_last_run_id = Some(entry_id); + self.history_excluded_item = if filtered { Some(entry) } else { None }; } + self.run_edit_commands(&[EditCommand::Clear]); self.editor.reset_undo_stack(); diff --git a/src/history/base.rs b/src/history/base.rs index 26d4da05..1f96b7de 100644 --- a/src/history/base.rs +++ b/src/history/base.rs @@ -186,19 +186,25 @@ impl SearchQuery { /// Represents a history file or database /// Data could be stored e.g. in a plain text file, in a `JSONL` file, in a `SQLite` database pub trait History: Send { - /// save a history item to the database - /// if given id is None, a new id is created and set in the return value - /// if given id is Some, the existing entry is updated - fn save(&mut self, h: HistoryItem) -> Result; + /// generate a unique identifier for a new [`HistoryItem`] + fn generate_id(&mut self) -> HistoryItemId; + + /// save a new history item to the database + fn save(&mut self, h: &HistoryItem) -> Result<()>; + + /// replace an existing history item in the database + fn replace(&mut self, h: &HistoryItem) -> Result<()>; + /// load a history item by its id fn load(&self, id: HistoryItemId) -> Result; /// retrieves the next unused session id /// count the results of a query - fn count(&self, query: SearchQuery) -> Result; + fn count(&self, query: SearchQuery) -> Result; + /// return the total number of history items - fn count_all(&self) -> Result { + fn count_all(&self) -> Result { self.count(SearchQuery::everything(SearchDirection::Forward, None)) } /// return the results of a query @@ -222,16 +228,21 @@ pub trait History: Send { #[cfg(test)] mod test { - #[cfg(any(feature = "sqlite", feature = "sqlite-dynlib"))] - const IS_FILE_BASED: bool = false; - #[cfg(not(any(feature = "sqlite", feature = "sqlite-dynlib")))] - const IS_FILE_BASED: bool = true; + use std::time::Duration; use crate::HistorySessionId; - fn create_item(session: i64, cwd: &str, cmd: &str, exit_status: i64) -> HistoryItem { - HistoryItem { - id: None, + fn create_item( + hist: &mut dyn History, + session: i64, + cwd: &str, + cmd: &str, + exit_status: i64, + ) -> Result<()> { + let id = hist.count_all()? 
+ 1; + + let item = HistoryItem { + id: HistoryItemId(id as i64), start_timestamp: None, command_line: cmd.to_string(), session_id: Some(HistorySessionId::new(session)), @@ -240,31 +251,38 @@ mod test { duration: Some(Duration::from_millis(1000)), exit_status: Some(exit_status), more_info: None, - } + }; + + hist.save(&item)?; + + // Ensure the item was correctly inserted + assert_eq!(hist.count_all()?, id); + + Ok(()) } - use std::time::Duration; use super::*; fn create_filled_example_history() -> Result> { #[cfg(any(feature = "sqlite", feature = "sqlite-dynlib"))] let mut history = crate::SqliteBackedHistory::in_memory()?; + #[cfg(not(any(feature = "sqlite", feature = "sqlite-dynlib")))] let mut history = crate::FileBackedHistory::default(); - #[cfg(not(any(feature = "sqlite", feature = "sqlite-dynlib")))] - history.save(create_item(1, "/", "dummy", 0))?; // add dummy item so ids start with 1 - history.save(create_item(1, "/home/me", "cd ~/Downloads", 0))?; // 1 - history.save(create_item(1, "/home/me/Downloads", "unzp foo.zip", 1))?; // 2 - history.save(create_item(1, "/home/me/Downloads", "unzip foo.zip", 0))?; // 3 - history.save(create_item(1, "/home/me/Downloads", "cd foo", 0))?; // 4 - history.save(create_item(1, "/home/me/Downloads/foo", "ls", 0))?; // 5 - history.save(create_item(1, "/home/me/Downloads/foo", "ls -alh", 0))?; // 6 - history.save(create_item(1, "/home/me/Downloads/foo", "cat x.txt", 0))?; // 7 - - history.save(create_item(1, "/home/me", "cd /etc/nginx", 0))?; // 8 - history.save(create_item(1, "/etc/nginx", "ls -l", 0))?; // 9 - history.save(create_item(1, "/etc/nginx", "vim nginx.conf", 0))?; // 10 - history.save(create_item(1, "/etc/nginx", "vim htpasswd", 0))?; // 11 - history.save(create_item(1, "/etc/nginx", "cat nginx.conf", 0))?; // 12 + + create_item(&mut history, 1, "/home/me", "cd ~/Downloads", 0)?; // 1 + create_item(&mut history, 1, "/home/me/Downloads", "unzp foo.zip", 1)?; // 2 + create_item(&mut history, 1, "/home/me/Downloads", "unzip foo.zip", 0)?; // 3 + create_item(&mut history, 1, "/home/me/Downloads", "cd foo", 0)?; // 4 + create_item(&mut history, 1, "/home/me/Downloads/foo", "ls", 0)?; // 5 + create_item(&mut history, 1, "/home/me/Downloads/foo", "ls -alh", 0)?; // 6 + create_item(&mut history, 1, "/home/me/Downloads/foo", "cat x.txt", 0)?; // 7 + + create_item(&mut history, 1, "/home/me", "cd /etc/nginx", 0)?; // 8 + create_item(&mut history, 1, "/etc/nginx", "ls -l", 0)?; // 9 + create_item(&mut history, 1, "/etc/nginx", "vim nginx.conf", 0)?; // 10 + create_item(&mut history, 1, "/etc/nginx", "vim htpasswd", 0)?; // 11 + create_item(&mut history, 1, "/etc/nginx", "cat nginx.conf", 0)?; // 12 + Ok(Box::new(history)) } @@ -298,7 +316,9 @@ mod test { .iter() .map(|id| history.load(HistoryItemId::new(*id))) .collect::>>()?; + assert_eq!(res, wanted); + Ok(()) } @@ -310,7 +330,7 @@ mod test { history.search(SearchQuery::everything(SearchDirection::Forward, None)) ); - assert_eq!(history.count_all()?, if IS_FILE_BASED { 13 } else { 12 }); + assert_eq!(history.count_all()?, 12); Ok(()) } @@ -330,7 +350,7 @@ mod test { limit: Some(1), ..SearchQuery::everything(SearchDirection::Forward, None) })?; - search_returned(&*history, res, vec![if IS_FILE_BASED { 0 } else { 1 }])?; + search_returned(&*history, res, vec![1])?; Ok(()) } @@ -409,8 +429,8 @@ mod test { // create history, add a few entries let mut history = open_history(); - history.save(create_item(1, "/home/me", "cd ~/Downloads", 0))?; // 1 - history.save(create_item(1, "/home/me/Downloads", 
"unzp foo.zip", 1))?; // 2 + create_item(history.as_mut(), 1, "/home/me", "cd ~/Downloads", 0)?; // 1 + create_item(history.as_mut(), 1, "/home/me/Downloads", "unzp foo.zip", 1)?; // 2 assert_eq!(history.count_all()?, 2); drop(history); diff --git a/src/history/cursor.rs b/src/history/cursor.rs index ab41ba4f..5d8284e3 100644 --- a/src/history/cursor.rs +++ b/src/history/cursor.rs @@ -58,6 +58,7 @@ impl HistoryCursor { filter } } + fn navigate_in_direction( &mut self, history: &dyn History, @@ -67,7 +68,9 @@ impl HistoryCursor { // if searching forward but we don't have a starting point, assume we are at the end return Ok(()); } - let start_id = self.current.as_ref().and_then(|e| e.id); + + let start_id = self.current.as_ref().map(|e| e.id); + let mut next = history.search(SearchQuery { start_id, end_id: None, @@ -77,12 +80,18 @@ impl HistoryCursor { limit: Some(1), filter: self.get_search_filter(), })?; + if next.len() == 1 { + // NOTE: .swap_remove() does not preserve the vector's order + // But is *is* faster than .remove() as it completes in O(1) + // And given we won't use this vector of results afterwards, + // we can use this function without worrying here self.current = Some(next.swap_remove(0)); } else if direction == SearchDirection::Forward { // no result and searching forward: we are at the end self.current = None; } + Ok(()) } @@ -108,6 +117,13 @@ mod tests { use super::super::*; use super::*; + fn save_from_command_line(hist: &mut dyn History, cmd_line: impl Into) -> Result<()> { + let id = hist.generate_id(); + let entry = HistoryItem::from_command_line(cmd_line, id); + + hist.save(&entry) + } + fn create_history() -> (Box, HistoryCursor) { #[cfg(any(feature = "sqlite", feature = "sqlite-dynlib"))] let hist = Box::new(SqliteBackedHistory::in_memory().unwrap()); @@ -118,6 +134,7 @@ mod tests { HistoryCursor::new(HistoryNavigationQuery::Normal(LineBuffer::default()), None), ) } + fn create_history_at(cap: usize, path: &Path) -> (Box, HistoryCursor) { let hist = Box::new(FileBackedHistory::with_file(cap, path.to_owned()).unwrap()); ( @@ -133,10 +150,10 @@ mod tests { let actual: Vec<_> = res.iter().map(|e| e.command_line.to_string()).collect(); actual } + fn add_text_entries(hist: &mut dyn History, entries: &[impl AsRef]) { entries.iter().for_each(|e| { - hist.save(HistoryItem::from_command_line(e.as_ref())) - .unwrap(); + save_from_command_line(hist, e.as_ref()).unwrap(); }); } @@ -166,8 +183,8 @@ mod tests { #[test] fn going_backwards_bottoms_out() -> Result<()> { let (mut hist, mut cursor) = create_history(); - hist.save(HistoryItem::from_command_line("command1"))?; - hist.save(HistoryItem::from_command_line("command2"))?; + save_from_command_line(hist.as_mut(), "command1")?; + save_from_command_line(hist.as_mut(), "command2")?; cursor.back(&*hist)?; cursor.back(&*hist)?; cursor.back(&*hist)?; @@ -180,8 +197,8 @@ mod tests { #[test] fn going_forwards_bottoms_out() -> Result<()> { let (mut hist, mut cursor) = create_history(); - hist.save(HistoryItem::from_command_line("command1"))?; - hist.save(HistoryItem::from_command_line("command2"))?; + save_from_command_line(hist.as_mut(), "command1")?; + save_from_command_line(hist.as_mut(), "command2")?; cursor.forward(&*hist)?; cursor.forward(&*hist)?; cursor.forward(&*hist)?; @@ -195,10 +212,10 @@ mod tests { #[test] fn appends_only_unique() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("unique_old"))?; - hist.save(HistoryItem::from_command_line("test"))?; - 
hist.save(HistoryItem::from_command_line("test"))?; - hist.save(HistoryItem::from_command_line("unique"))?; + save_from_command_line(hist.as_mut(), "unique_old")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "unique")?; assert_eq!(hist.count_all()?, 3); Ok(()) } @@ -206,9 +223,9 @@ mod tests { #[test] fn prefix_search_works() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("find me as well"))?; - hist.save(HistoryItem::from_command_line("test"))?; - hist.save(HistoryItem::from_command_line("find me"))?; + save_from_command_line(hist.as_mut(), "find me as well")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "find me")?; let mut cursor = HistoryCursor::new( HistoryNavigationQuery::PrefixSearch("find".to_string()), @@ -228,9 +245,9 @@ mod tests { #[test] fn prefix_search_bottoms_out() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("find me as well"))?; - hist.save(HistoryItem::from_command_line("test"))?; - hist.save(HistoryItem::from_command_line("find me"))?; + save_from_command_line(hist.as_mut(), "find me as well")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "find me")?; let mut cursor = HistoryCursor::new( HistoryNavigationQuery::PrefixSearch("find".to_string()), @@ -256,9 +273,9 @@ mod tests { #[test] fn prefix_search_returns_to_none() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("find me as well"))?; - hist.save(HistoryItem::from_command_line("test"))?; - hist.save(HistoryItem::from_command_line("find me"))?; + save_from_command_line(hist.as_mut(), "find me as well")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "find me")?; let mut cursor = HistoryCursor::new( HistoryNavigationQuery::PrefixSearch("find".to_string()), @@ -283,10 +300,10 @@ mod tests { #[test] fn prefix_search_ignores_consecutive_equivalent_entries_going_backwards() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("find me as well"))?; - hist.save(HistoryItem::from_command_line("find me once"))?; - hist.save(HistoryItem::from_command_line("test"))?; - hist.save(HistoryItem::from_command_line("find me once"))?; + save_from_command_line(hist.as_mut(), "find me as well")?; + save_from_command_line(hist.as_mut(), "find me once")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "find me once")?; let mut cursor = HistoryCursor::new( HistoryNavigationQuery::PrefixSearch("find".to_string()), @@ -305,41 +322,47 @@ mod tests { #[test] fn prefix_search_ignores_consecutive_equivalent_entries_going_forwards() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("find me once"))?; - hist.save(HistoryItem::from_command_line("test"))?; - hist.save(HistoryItem::from_command_line("find me once"))?; - hist.save(HistoryItem::from_command_line("find me as well"))?; + + save_from_command_line(hist.as_mut(), "find me once")?; + save_from_command_line(hist.as_mut(), "test")?; + save_from_command_line(hist.as_mut(), "find me once")?; + save_from_command_line(hist.as_mut(), "find me as well")?; let mut cursor = HistoryCursor::new( HistoryNavigationQuery::PrefixSearch("find".to_string()), None, ); + cursor.back(&*hist)?; assert_eq!( 
cursor.string_at_cursor(), Some("find me as well".to_string()) ); + cursor.back(&*hist)?; cursor.back(&*hist)?; assert_eq!(cursor.string_at_cursor(), Some("find me once".to_string())); + cursor.forward(&*hist)?; assert_eq!( cursor.string_at_cursor(), Some("find me as well".to_string()) ); + cursor.forward(&*hist)?; assert_eq!(cursor.string_at_cursor(), None); + Ok(()) } #[test] fn substring_search_works() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("substring"))?; - hist.save(HistoryItem::from_command_line("don't find me either"))?; - hist.save(HistoryItem::from_command_line("prefix substring"))?; - hist.save(HistoryItem::from_command_line("don't find me"))?; - hist.save(HistoryItem::from_command_line("prefix substring suffix"))?; + save_from_command_line(hist.as_mut(), "substring")?; + save_from_command_line(hist.as_mut(), "don't find me either")?; + save_from_command_line(hist.as_mut(), "prefix substring")?; + save_from_command_line(hist.as_mut(), "don't find me")?; + save_from_command_line(hist.as_mut(), "prefix substring suffix")?; let mut cursor = HistoryCursor::new( HistoryNavigationQuery::SubstringSearch("substring".to_string()), @@ -363,7 +386,7 @@ mod tests { #[test] fn substring_search_with_empty_value_returns_none() -> Result<()> { let (mut hist, _) = create_history(); - hist.save(HistoryItem::from_command_line("substring"))?; + save_from_command_line(hist.as_mut(), "substring")?; let cursor = HistoryCursor::new( HistoryNavigationQuery::SubstringSearch("".to_string()), @@ -392,6 +415,7 @@ mod tests { } let (reading_hist, _) = create_history_at(5, &histfile); + let actual = get_all_entry_texts(reading_hist.as_ref()); assert_eq!(entries, actual); @@ -444,27 +468,35 @@ mod tests { let expected_truncated_entries = vec!["test 4", "test 5", "test 6", "test 7", "test 8"]; { + println!("> Creating writing history..."); let (mut writing_hist, _) = create_history_at(capacity, &histfile); add_text_entries(writing_hist.as_mut(), &initial_entries); // As `hist` goes out of scope and get's dropped, its contents are flushed to disk + println!("> Flushing writing history..."); } { + println!("> Creating appending history..."); let (mut appending_hist, _) = create_history_at(capacity, &histfile); add_text_entries(appending_hist.as_mut(), &appending_entries); // As `hist` goes out of scope and get's dropped, its contents are flushed to disk let actual: Vec<_> = get_all_entry_texts(appending_hist.as_ref()); assert_eq!(expected_appended_entries, actual); + println!("> Flushing appending history..."); } { + println!("> Creating truncating history..."); let (mut truncating_hist, _) = create_history_at(capacity, &histfile); add_text_entries(truncating_hist.as_mut(), &truncating_entries); let actual: Vec<_> = get_all_entry_texts(truncating_hist.as_ref()); assert_eq!(expected_truncated_entries, actual); // As `hist` goes out of scope and get's dropped, its contents are flushed to disk + + println!("> Flushing truncating history..."); } + println!("> Creating reading history..."); let (reading_hist, _) = create_history_at(capacity, &histfile); let actual: Vec<_> = get_all_entry_texts(reading_hist.as_ref()); @@ -531,12 +563,17 @@ mod tests { { let (mut hist_a, _) = create_history_at(capacity, &histfile); + assert_eq!(get_all_entry_texts(hist_a.as_ref()), initial_entries); + { let (mut hist_b, _) = create_history_at(capacity, &histfile); + assert_eq!(get_all_entry_texts(hist_b.as_ref()), initial_entries); + add_text_entries(hist_b.as_mut(), &entries_b); 
// As `hist` goes out of scope and get's dropped, its contents are flushed to disk } + add_text_entries(hist_a.as_mut(), &entries_a); // As `hist` goes out of scope and get's dropped, its contents are flushed to disk } @@ -574,11 +611,9 @@ mod tests { let hfile = histfile.clone(); std::thread::spawn(move || { let (mut hist, _) = create_history_at(cap, &hfile); - hist.save(HistoryItem::from_command_line(format!("A{i}"))) - .unwrap(); + save_from_command_line(hist.as_mut(), format!("A{i}")).unwrap(); hist.sync().unwrap(); - hist.save(HistoryItem::from_command_line(format!("B{i}"))) - .unwrap(); + save_from_command_line(hist.as_mut(), format!("B{i}")).unwrap(); }) }) .collect::>(); diff --git a/src/history/file_backed.rs b/src/history/file_backed.rs index 28b437f7..27278768 100644 --- a/src/history/file_backed.rs +++ b/src/history/file_backed.rs @@ -1,3 +1,6 @@ +use indexmap::IndexMap; +use rand::{rngs::SmallRng, Rng, SeedableRng}; + use super::{ base::CommandLineSearch, History, HistoryItem, HistoryItemId, SearchDirection, SearchQuery, }; @@ -7,7 +10,6 @@ use crate::{ }; use std::{ - collections::VecDeque, fs::OpenOptions, io::{BufRead, BufReader, BufWriter, Seek, SeekFrom, Write}, ops::{Deref, DerefMut}, @@ -27,10 +29,11 @@ pub const NEWLINE_ESCAPE: &str = "<\\n>"; #[derive(Debug)] pub struct FileBackedHistory { capacity: usize, - entries: VecDeque, + entries: IndexMap, file: Option, - len_on_disk: usize, // Keep track what was previously written to disk + last_on_disk: Option, session: Option, + rng: SmallRng, } impl Default for FileBackedHistory { @@ -42,44 +45,80 @@ impl Default for FileBackedHistory { } } -fn encode_entry(s: &str) -> String { - s.replace('\n', NEWLINE_ESCAPE) +static ID_SEP: &str = ":"; + +fn encode_entry(id: HistoryItemId, s: &str) -> String { + format!("{id}{ID_SEP}{}", s.replace('\n', NEWLINE_ESCAPE)) } -fn decode_entry(s: &str) -> String { - s.replace(NEWLINE_ESCAPE, "\n") +/// Decode an entry +/// +/// Legacy format: ls / +/// New format : 182535:ls / +/// +/// If a line can't be parsed using the new format, it will fallback to the legacy one. +/// +/// This allows this function to support decoding for both legacy and new histories, +/// as well as mixing both of them. +fn decode_entry(s: &str, counter: &mut i64) -> (HistoryItemId, String) { + let mut parsed = None; + + if let Some(sep) = s.find(ID_SEP) { + if let Ok(parsed_id) = s[..sep].parse() { + parsed = Some((parsed_id, &s[sep + ID_SEP.len()..])); + } + } + + let (id, content) = parsed.unwrap_or_else(|| { + *counter += 1; + (*counter - 1, s) + }); + + (HistoryItemId(id), content.replace(NEWLINE_ESCAPE, "\n")) } impl History for FileBackedHistory { + fn generate_id(&mut self) -> HistoryItemId { + HistoryItemId(self.rng.gen()) + } + /// only saves a value if it's different than the last value - fn save(&mut self, h: HistoryItem) -> Result { - let entry = h.command_line; + fn save(&mut self, h: &HistoryItem) -> Result<()> { + let entry = h.command_line.clone(); + // Don't append if the preceding value is identical or the string empty - let entry_id = if self + if self .entries - .back() - .map_or(true, |previous| previous != &entry) + .last() + .map_or(true, |(_, previous)| previous != &entry) && !entry.is_empty() { - if self.entries.len() == self.capacity { + if self.entries.len() >= self.capacity { // History is "full", so we delete the oldest entry first, // before adding a new one. 
- self.entries.pop_front(); - self.len_on_disk = self.len_on_disk.saturating_sub(1); + let first_id = *(self.entries.first().unwrap().0); + let prev = self.entries.shift_remove(&first_id); + assert!(prev.is_some()); } - self.entries.push_back(entry.to_string()); - Some(HistoryItemId::new((self.entries.len() - 1) as i64)) - } else { - None - }; - Ok(FileBackedHistory::construct_entry(entry_id, entry)) + + self.entries.insert(h.id, entry.to_string()); + } + + Ok(()) } - fn load(&self, id: HistoryItemId) -> Result { + /// this history doesn't replace entries + fn replace(&mut self, h: &HistoryItem) -> Result<()> { + self.save(h) + } + + fn load(&self, id: HistoryItemId) -> Result { + println!("{:?}", self.entries); + Ok(FileBackedHistory::construct_entry( - Some(id), + id, self.entries - .get(id.0 as usize) + .get(&id) .ok_or(ReedlineError(ReedlineErrorVariants::OtherHistoryError( "Item does not exist", )))? @@ -87,13 +126,25 @@ impl History for FileBackedHistory { )) } - fn count(&self, query: SearchQuery) -> Result { + fn count(&self, query: SearchQuery) -> Result { // todo: this could be done cheaper - Ok(self.search(query)?.len() as i64) + Ok(self.search(query)?.len() as u64) } fn search(&self, query: SearchQuery) -> Result> { - if query.start_time.is_some() || query.end_time.is_some() { + // Destructure the query - this ensures that if another element is added to this type later on, + // we won't forget to update this function as the destructuring will then be incomplete. + let SearchQuery { + direction, + start_time, + end_time, + start_id, + end_id, + limit, + filter, + } = query; + + if start_time.is_some() || end_time.is_some() { return Err(ReedlineError( ReedlineErrorVariants::HistoryFeatureUnsupported { history: "FileBackedHistory", @@ -102,10 +153,10 @@ impl History for FileBackedHistory { )); } - if query.filter.hostname.is_some() - || query.filter.cwd_exact.is_some() - || query.filter.cwd_prefix.is_some() - || query.filter.exit_successful.is_some() + if filter.hostname.is_some() + || filter.cwd_exact.is_some() + || filter.cwd_prefix.is_some() + || filter.exit_successful.is_some() { return Err(ReedlineError( ReedlineErrorVariants::HistoryFeatureUnsupported { @@ -114,61 +165,69 @@ impl History for FileBackedHistory { }, )); } - let (min_id, max_id) = { - let start = query.start_id.map(|e| e.0); - let end = query.end_id.map(|e| e.0); - if let SearchDirection::Backward = query.direction { - (end, start) + + let (start_id, end_id) = { + if let SearchDirection::Backward = direction { + (end_id, start_id) } else { - (start, end) + (start_id, end_id) } }; - // add one to make it inclusive - let min_id = min_id.map(|e| e + 1).unwrap_or(0); - // subtract one to make it inclusive - let max_id = max_id - .map(|e| e - 1) - .unwrap_or(self.entries.len() as i64 - 1); - if max_id < 0 || min_id > self.entries.len() as i64 - 1 { - return Ok(vec![]); - } - let intrinsic_limit = max_id - min_id + 1; - let limit = if let Some(given_limit) = query.limit { - std::cmp::min(intrinsic_limit, given_limit) as usize - } else { - intrinsic_limit as usize + + let start_idx = match start_id { + Some(from_id) => self.entries.get_index_of(&from_id).ok_or(ReedlineError( + ReedlineErrorVariants::OtherHistoryError("provided 'start_id' item was not found"), + ))?, + None => 0, }; - let filter = |(idx, cmd): (usize, &String)| { - if !match &query.filter.command_line { + + let end_idx = match end_id { + Some(to_id) => self.entries.get_index_of(&to_id).ok_or(ReedlineError( + 
ReedlineErrorVariants::OtherHistoryError("provided 'end_id' item was not found"), + ))?, + None => self.entries.len().saturating_sub(1), + }; + + assert!(start_idx <= end_idx); + + let iter = self + .entries + .iter() + .skip(start_idx) + .take(1 + end_idx - start_idx); + + let limit = limit + .and_then(|limit| usize::try_from(limit).ok()) + .unwrap_or(usize::MAX); + + let filter = |(id, cmd): (&HistoryItemId, &String)| { + let str_matches = match &filter.command_line { Some(CommandLineSearch::Prefix(p)) => cmd.starts_with(p), Some(CommandLineSearch::Substring(p)) => cmd.contains(p), Some(CommandLineSearch::Exact(p)) => cmd == p, None => true, - } { + }; + + if !str_matches { return None; } - if let Some(str) = &query.filter.not_command_line { + + if let Some(str) = &filter.not_command_line { if cmd == str { return None; } } + Some(FileBackedHistory::construct_entry( - Some(HistoryItemId::new(idx as i64)), - cmd.to_string(), // todo: this copy might be a perf bottleneck + *id, + cmd.clone(), // todo: this cloning might be a perf bottleneck )) }; - let iter = self - .entries - .iter() - .enumerate() - .skip(min_id as usize) - .take(intrinsic_limit as usize); - if let SearchDirection::Backward = query.direction { - Ok(iter.rev().filter_map(filter).take(limit).collect()) - } else { - Ok(iter.filter_map(filter).take(limit).collect()) - } + Ok(match query.direction { + SearchDirection::Backward => iter.rev().filter_map(filter).take(limit).collect(), + SearchDirection::Forward => iter.filter_map(filter).take(limit).collect(), + }) } fn update( @@ -186,7 +245,7 @@ impl History for FileBackedHistory { fn clear(&mut self) -> Result<()> { self.entries.clear(); - self.len_on_disk = 0; + self.last_on_disk = None; if let Some(file) = &self.file { if let Err(err) = std::fs::remove_file(file) { @@ -210,68 +269,103 @@ impl History for FileBackedHistory { /// /// If file would exceed `capacity` truncates the oldest entries. 
fn sync(&mut self) -> std::io::Result<()> { - if let Some(fname) = &self.file { - // The unwritten entries - let own_entries = self.entries.range(self.len_on_disk..); + let Some(fname) = &self.file else { + return Ok(()); + }; + + // The unwritten entries + let last_index_on_disk = self + .last_on_disk + .map(|id| self.entries.get_index_of(&id).unwrap()); + + let range_start = match last_index_on_disk { + Some(index) => index + 1, + None => 0, + }; + + let own_entries = self.entries.get_range(range_start..).unwrap(); + + if let Some(base_dir) = fname.parent() { + std::fs::create_dir_all(base_dir)?; + } - if let Some(base_dir) = fname.parent() { - std::fs::create_dir_all(base_dir)?; + let mut f_lock = fd_lock::RwLock::new( + OpenOptions::new() + .create(true) + .write(true) + .read(true) + .open(fname)?, + ); + + let mut writer_guard = f_lock.write()?; + + let (mut foreign_entries, truncate) = { + let reader = BufReader::new(writer_guard.deref()); + + let mut counter = 0; + + let mut from_file = reader + .lines() + .map(|o| o.map(|i| decode_entry(&i, &mut counter))) + .collect::>>()?; + + if from_file.len() + own_entries.len() > self.capacity { + let start = from_file.len() + own_entries.len() - self.capacity; + + (from_file.split_off(start), true) + } else { + (from_file, false) } + }; - let mut f_lock = fd_lock::RwLock::new( - OpenOptions::new() - .create(true) - .write(true) - .read(true) - .open(fname)?, - ); - let mut writer_guard = f_lock.write()?; - let (mut foreign_entries, truncate) = { - let reader = BufReader::new(writer_guard.deref()); - let mut from_file = reader - .lines() - .map(|o| o.map(|i| decode_entry(&i))) - .collect::>>()?; - if from_file.len() + own_entries.len() > self.capacity { - ( - from_file.split_off(from_file.len() - (self.capacity - own_entries.len())), - true, - ) - } else { - (from_file, false) - } - }; + { + let mut writer = BufWriter::new(writer_guard.deref_mut()); - { - let mut writer = BufWriter::new(writer_guard.deref_mut()); - if truncate { - writer.rewind()?; - - for line in &foreign_entries { - writer.write_all(encode_entry(line).as_bytes())?; - writer.write_all("\n".as_bytes())?; - } - } else { - writer.seek(SeekFrom::End(0))?; - } - for line in own_entries { - writer.write_all(encode_entry(line).as_bytes())?; + // In case of truncation, we first write every foreign entry (replacing existing content) + if truncate { + writer.rewind()?; + + for (id, line) in &foreign_entries { + writer.write_all(encode_entry(*id, line).as_bytes())?; writer.write_all("\n".as_bytes())?; } - writer.flush()?; + } else { + // Otherwise we directly jump at the end of the file + writer.seek(SeekFrom::End(0))?; } - if truncate { - let file = writer_guard.deref_mut(); - let file_len = file.stream_position()?; - file.set_len(file_len)?; + + // Then we write new entries (that haven't been synced to the file yet) + for (id, line) in own_entries { + writer.write_all(encode_entry(*id, line).as_bytes())?; + writer.write_all("\n".as_bytes())?; } - let own_entries = self.entries.drain(self.len_on_disk..); - foreign_entries.extend(own_entries); - self.entries = foreign_entries; + writer.flush()?; + } + + // If truncation is needed, we then remove everything after the cursor's current location + if truncate { + let file = writer_guard.deref_mut(); + let file_len = file.stream_position()?; + file.set_len(file_len)?; + } + + match last_index_on_disk { + Some(last_index_on_disk) => { + if last_index_on_disk + 1 < self.entries.len() { + 
foreign_entries.extend(self.entries.drain(last_index_on_disk + 1..)); + } + } - self.len_on_disk = self.entries.len(); + None => { + foreign_entries.extend(self.entries.drain(..)); + } } + + self.entries = foreign_entries; + + self.last_on_disk = self.entries.last().map(|(id, _)| *id); + Ok(()) } @@ -292,10 +386,11 @@ impl FileBackedHistory { } FileBackedHistory { capacity, - entries: VecDeque::new(), + entries: IndexMap::new(), file: None, - len_on_disk: 0, + last_on_disk: None, session: None, + rng: SmallRng::from_entropy(), } } @@ -309,16 +404,19 @@ impl FileBackedHistory { /// pub fn with_file(capacity: usize, file: PathBuf) -> std::io::Result { let mut hist = Self::new(capacity); + if let Some(base_dir) = file.parent() { std::fs::create_dir_all(base_dir)?; } + hist.file = Some(file); hist.sync()?; + Ok(hist) } // this history doesn't store any info except command line - fn construct_entry(id: Option, command_line: String) -> HistoryItem { + fn construct_entry(id: HistoryItemId, command_line: String) -> HistoryItem { HistoryItem { id, start_timestamp: None, diff --git a/src/history/item.rs b/src/history/item.rs index 0c36fba3..c6c3d4ca 100644 --- a/src/history/item.rs +++ b/src/history/item.rs @@ -81,7 +81,7 @@ impl HistoryItemExtraInfo for IgnoreAllExtraInfo {} #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct HistoryItem { /// primary key, unique across one history - pub id: Option, + pub id: HistoryItemId, /// date-time when this command was started pub start_timestamp: Option>, /// the full command line as text @@ -106,9 +106,9 @@ pub struct HistoryItem { impl HistoryItem { /// create a history item purely from the command line with everything else set to None - pub fn from_command_line(cmd: impl Into) -> HistoryItem { + pub fn from_command_line(cmd: impl Into, id: HistoryItemId) -> HistoryItem { HistoryItem { - id: None, + id, start_timestamp: None, command_line: cmd.into(), session_id: None, diff --git a/src/history/sqlite_backed.rs b/src/history/sqlite_backed.rs index 7ab56e96..8eea55a6 100644 --- a/src/history/sqlite_backed.rs +++ b/src/history/sqlite_backed.rs @@ -7,6 +7,7 @@ use crate::{ Result, }; use chrono::{TimeZone, Utc}; +use rand::{rngs::SmallRng, Rng, SeedableRng}; use rusqlite::{named_params, params, Connection, ToSql}; use std::{path::PathBuf, time::Duration}; const SQLITE_APPLICATION_ID: i32 = 1151497937; @@ -18,12 +19,14 @@ pub struct SqliteBackedHistory { db: rusqlite::Connection, session: Option, session_timestamp: Option>, + rng: SmallRng, } fn deserialize_history_item(row: &rusqlite::Row) -> rusqlite::Result { let x: Option = row.get("more_info")?; + Ok(HistoryItem { - id: Some(HistoryItemId::new(row.get("id")?)), + id: HistoryItemId::new(row.get("id")?), start_timestamp: row.get::<&str, Option>("start_timestamp")?.map(|e| { match Utc.timestamp_millis_opt(e) { chrono::LocalResult::Single(e) => e, @@ -56,12 +59,16 @@ fn deserialize_history_item(row: &rusqlite::Row) -> rusqlite::Result Result { - let ret: i64 = self + fn generate_id(&mut self) -> HistoryItemId { + HistoryItemId(self.rng.gen()) + } + + fn save(&mut self, entry: &HistoryItem) -> Result<()> { + self .db .prepare( "insert into history - (id, start_timestamp, command_line, session_id, hostname, cwd, duration_ms, exit_status, more_info) + (id, start_timestamp, command_line, session_id, hostname, cwd, duration_ms, exit_status, more_info) values (:id, :start_timestamp, :command_line, :session_id, :hostname, :cwd, :duration_ms, :exit_status, :more_info) on conflict (history.id) do 
update set start_timestamp = excluded.start_timestamp, @@ -71,13 +78,12 @@ impl History for SqliteBackedHistory { cwd = excluded.cwd, duration_ms = excluded.duration_ms, exit_status = excluded.exit_status, - more_info = excluded.more_info - returning id", + more_info = excluded.more_info", ) .map_err(map_sqlite_err)? - .query_row( + .execute( named_params! { - ":id": entry.id.map(|id| id.0), + ":id": entry.id.0, ":start_timestamp": entry.start_timestamp.map(|e| e.timestamp_millis()), ":command_line": entry.command_line, ":session_id": entry.session_id.map(|e| e.0), @@ -87,11 +93,14 @@ impl History for SqliteBackedHistory { ":exit_status": entry.exit_status, ":more_info": entry.more_info.as_ref().map(|e| serde_json::to_string(e).unwrap()) }, - |row| row.get(0), ) - .map_err(map_sqlite_err)?; - entry.id = Some(HistoryItemId::new(ret)); - Ok(entry) + .map(|_| ()) + .map_err(map_sqlite_err) + } + + /// this history doesn't replace entries + fn replace(&mut self, h: &HistoryItem) -> Result<()> { + self.save(h) } fn load(&self, id: HistoryItemId) -> Result { @@ -101,24 +110,30 @@ impl History for SqliteBackedHistory { .map_err(map_sqlite_err)? .query_row(named_params! { ":id": id.0 }, deserialize_history_item) .map_err(map_sqlite_err)?; + Ok(entry) } - fn count(&self, query: SearchQuery) -> Result { + fn count(&self, query: SearchQuery) -> Result { let (query, params) = self.construct_query(&query, "coalesce(count(*), 0)"); + let params_borrow: Vec<(&str, &dyn ToSql)> = params.iter().map(|e| (e.0, &*e.1)).collect(); - let result: i64 = self + + let result: u64 = self .db .prepare(&query) .unwrap() .query_row(¶ms_borrow[..], |r| r.get(0)) .map_err(map_sqlite_err)?; + Ok(result) } fn search(&self, query: SearchQuery) -> Result> { let (query, params) = self.construct_query(&query, "*"); + let params_borrow: Vec<(&str, &dyn ToSql)> = params.iter().map(|e| (e.0, &*e.1)).collect(); + let results: Vec = self .db .prepare(&query) @@ -137,7 +152,7 @@ impl History for SqliteBackedHistory { ) -> Result<()> { // in theory this should run in a transaction let item = self.load(id)?; - self.save(updater(item))?; + self.save(&updater(item))?; Ok(()) } @@ -215,60 +230,125 @@ impl SqliteBackedHistory { } /// initialize a new database / migrate an existing one fn from_connection( - db: Connection, + mut db: Connection, session: Option, session_timestamp: Option>, ) -> Result { - // https://phiresky.github.io/blog/2020/sqlite-performance-tuning/ - db.pragma_update(None, "journal_mode", "wal") - .map_err(map_sqlite_err)?; - db.pragma_update(None, "synchronous", "normal") - .map_err(map_sqlite_err)?; - db.pragma_update(None, "mmap_size", "1000000000") - .map_err(map_sqlite_err)?; - db.pragma_update(None, "foreign_keys", "on") - .map_err(map_sqlite_err)?; - db.pragma_update(None, "application_id", SQLITE_APPLICATION_ID) - .map_err(map_sqlite_err)?; - let db_version: i32 = db - .query_row( + let inner = || -> rusqlite::Result<(i32, Self)> { + // https://phiresky.github.io/blog/2020/sqlite-performance-tuning/ + db.pragma_update(None, "journal_mode", "wal")?; + db.pragma_update(None, "synchronous", "normal")?; + db.pragma_update(None, "mmap_size", "1000000000")?; + db.pragma_update(None, "foreign_keys", "on")?; + db.pragma_update(None, "application_id", SQLITE_APPLICATION_ID)?; + + // Get the user version + // By default, it is set to 0 + let mut db_version: i32 = db.query_row( "SELECT user_version FROM pragma_user_version", params![], |r| r.get(0), - ) - .map_err(map_sqlite_err)?; - if db_version != 0 { + )?; + + // An 
up-to-date database should have its version set to the latest number (currently 1) + // 0 means the database is either uninitialized or it is using the old history format + if db_version == 0 { + // Check if an history already exists + let existing_history = db.query_row( + "select count(*) from pragma_table_list() where name = 'history';", + (), + |result| Ok(result.get::<_, usize>("count(*)")? > 0), + )?; + + let mut statements = vec![]; + + // If so, rename it and delete related indexes + if existing_history { + statements.push( + " + alter table history rename to history_old; + + drop index if exists idx_history_time; + drop index if exists idx_history_cwd; + drop index if exists idx_history_exit_status; + drop index if exists idx_history_cmd; + ", + ); + } + + // Create the history table using the v1 schema + statements.push( + " + create table history ( + idx integer primary key autoincrement, + id integer unique not null, + command_line text not null, + start_timestamp integer, + session_id integer, + hostname text, + cwd text, + duration_ms integer, + exit_status integer, + more_info text + ) strict; + + create index if not exists idx_history_time on history(start_timestamp); + create index if not exists idx_history_cwd on history(cwd); -- suboptimal for many hosts + create index if not exists idx_history_exit_status on history(exit_status); + create index if not exists idx_history_cmd on history(command_line); + create index if not exists idx_history_session_id on history(session_id); + ", + ); + + // If there was an history previously, migrate it to the new table + // Then delete it + if existing_history { + statements.push( + " + insert into history (id, command_line, start_timestamp, session_id, hostname, cwd, duration_ms, exit_status, more_info) + select id as idx, command_line, start_timestamp, session_id, hostname, cwd, duration_ms, exit_status, more_info + from history_old; + + drop table history_old; + ", + ); + } + + // Update the version to indicate the DB is up-to-date + statements.push("pragma user_version = 1;"); + db_version = 1; + + // We use a transaction to ensure consistency, given we're doing multiple operations + let transaction = db.transaction()?; + transaction.execute_batch(&statements.join("\n"))?; + transaction.commit()?; + } + + Ok(( + db_version, + SqliteBackedHistory { + db, + session, + session_timestamp, + rng: SmallRng::from_entropy(), + }, + )) + }; + + // Connect to the database, check it and (if required) initialize it + let (db_version, history) = inner().map_err(map_sqlite_err)?; + + // Ensure the database version is the currently supported one + // If this isn't the case, then something is wrong + // (either the previous versions migration is buggy, or the database is using a format deployed on a + // later reedline version than this one) + if db_version != 1 { return Err(ReedlineError(ReedlineErrorVariants::HistoryDatabaseError( format!("Unknown database version {db_version}"), ))); } - db.execute_batch( - " - create table if not exists history ( - id integer primary key autoincrement, - command_line text not null, - start_timestamp integer, - session_id integer, - hostname text, - cwd text, - duration_ms integer, - exit_status integer, - more_info text - ) strict; - create index if not exists idx_history_time on history(start_timestamp); - create index if not exists idx_history_cwd on history(cwd); -- suboptimal for many hosts - create index if not exists idx_history_exit_status on history(exit_status); - create index if not exists 
idx_history_cmd on history(command_line); - create index if not exists idx_history_cmd on history(session_id); - -- todo: better indexes - ", - ) - .map_err(map_sqlite_err)?; - Ok(SqliteBackedHistory { - db, - session, - session_timestamp, - }) + + Ok(history) } fn construct_query<'a>( @@ -276,111 +356,152 @@ impl SqliteBackedHistory { query: &'a SearchQuery, select_expression: &str, ) -> (String, BoxedNamedParams<'a>) { - // TODO: this whole function could be done with less allocs - let (is_asc, asc) = match query.direction { + // Destructure the query - this ensures that if another element is added to this type later on, + // we won't forget to update this function as the destructuring will then be incomplete. + let SearchQuery { + direction, + start_time, + end_time, + start_id, + end_id, + limit, + filter, + } = query; + + let (is_asc, asc) = match direction { SearchDirection::Forward => (true, "asc"), SearchDirection::Backward => (false, "desc"), }; + + // TODO: find a way to avoid too many allocations + // Current version is an acceptable compromise given most of the performance + // will be eaten by SQLite anyway let mut wheres = Vec::new(); let mut params: BoxedNamedParams = Vec::new(); - if let Some(start) = query.start_time { - wheres.push(if is_asc { - "timestamp_start > :start_time" - } else { - "timestamp_start < :start_time" - }); + + if let Some(start) = start_time { + let cmp_op = if is_asc { '>' } else { '<' }; + wheres.push(format!("timestamp_start {cmp_op} :start_time")); params.push((":start_time", Box::new(start.timestamp_millis()))); } - if let Some(end) = query.end_time { - wheres.push(if is_asc { - ":end_time >= timestamp_start" - } else { - ":end_time <= timestamp_start" - }); + + if let Some(end) = end_time { + let cmp_op = if is_asc { ">=" } else { "<=" }; + wheres.push(format!(":end_time {cmp_op} timestamp_start")); params.push((":end_time", Box::new(end.timestamp_millis()))); } - if let Some(start) = query.start_id { - wheres.push(if is_asc { - "id > :start_id" - } else { - "id < :start_id" - }); + + if let Some(start) = start_id { + let cmp_op = if is_asc { '>' } else { '<' }; + wheres.push(format!( + "idx {cmp_op} (SELECT idx FROM history WHERE id = :start_id)" + )); params.push((":start_id", Box::new(start.0))); } - if let Some(end) = query.end_id { - wheres.push(if is_asc { - ":end_id >= id" - } else { - ":end_id <= id" - }); + + if let Some(end) = end_id { + let cmp_op = if is_asc { ">=" } else { "<=" }; + wheres.push(format!( + "idx {cmp_op} (SELECT idx FROM history WHERE id = :start_id)" + )); params.push((":end_id", Box::new(end.0))); } - let limit = match query.limit { + + let limit = match limit { Some(l) => { params.push((":limit", Box::new(l))); "limit :limit" } None => "", }; - if let Some(command_line) = &query.filter.command_line { - // TODO: escape % + + if let Some(command_line) = &filter.command_line { let command_line_like = match command_line { - CommandLineSearch::Exact(e) => e.to_string(), - CommandLineSearch::Prefix(prefix) => format!("{prefix}%"), - CommandLineSearch::Substring(cont) => format!("%{cont}%"), + CommandLineSearch::Exact(e) => escape_like_with_backslashes(e, ESCAPE_CHAR), + CommandLineSearch::Prefix(prefix) => { + format!("{}%", escape_like_with_backslashes(prefix, ESCAPE_CHAR)) + } + CommandLineSearch::Substring(cont) => { + format!("%{}%", escape_like_with_backslashes(cont, ESCAPE_CHAR)) + } }; - wheres.push("command_line like :command_line"); + + wheres.push(format!( + "command_line like :command_line escape 
'{ESCAPE_CHAR}'" + )); params.push((":command_line", Box::new(command_line_like))); } - if let Some(str) = &query.filter.not_command_line { - wheres.push("command_line != :not_cmd"); + if let Some(str) = &filter.not_command_line { + wheres.push("command_line != :not_cmd".to_owned()); params.push((":not_cmd", Box::new(str))); } - if let Some(hostname) = &query.filter.hostname { - wheres.push("hostname = :hostname"); + + if let Some(hostname) = &filter.hostname { + wheres.push("hostname = :hostname".to_owned()); params.push((":hostname", Box::new(hostname))); } - if let Some(cwd_exact) = &query.filter.cwd_exact { - wheres.push("cwd = :cwd"); + + if let Some(cwd_exact) = &filter.cwd_exact { + wheres.push("cwd = :cwd".to_owned()); params.push((":cwd", Box::new(cwd_exact))); } - if let Some(cwd_prefix) = &query.filter.cwd_prefix { - wheres.push("cwd like :cwd_like"); + + if let Some(cwd_prefix) = &filter.cwd_prefix { + wheres.push("cwd like :cwd_like".to_owned()); let cwd_like = format!("{cwd_prefix}%"); params.push((":cwd_like", Box::new(cwd_like))); } - if let Some(exit_successful) = query.filter.exit_successful { - if exit_successful { - wheres.push("exit_status = 0"); - } else { - wheres.push("exit_status != 0"); - } + + if let Some(exit_successful) = filter.exit_successful { + let cmp_op = if exit_successful { "=" } else { "!=" }; + wheres.push(format!("exit_status {cmp_op} 0")); } + if let (Some(session_id), Some(session_timestamp)) = - (query.filter.session, self.session_timestamp) + (filter.session, self.session_timestamp) { // Filter so that we get rows: // - that have the same session_id, or // - were executed before our session started - wheres.push("(session_id = :session_id OR start_timestamp < :session_timestamp)"); + wheres.push( + "(session_id = :session_id OR start_timestamp < :session_timestamp)".to_owned(), + ); params.push((":session_id", Box::new(session_id))); params.push(( ":session_timestamp", Box::new(session_timestamp.timestamp_millis()), )); } + let mut wheres = wheres.join(" and "); + if wheres.is_empty() { wheres = "true".to_string(); } + let query = format!( "SELECT {select_expression} \ FROM history \ WHERE ({wheres}) \ - ORDER BY id {asc} \ + ORDER BY idx {asc} \ {limit}" ); + (query, params) } } + +/// Escape special symbols for an SQL LIKE clause +/// (!) Requires LIKE clause to specify an `ESCAPE ''` clause +fn escape_like_with_backslashes(str: &str, escape_char: char) -> String { + let mut str = str.replace(escape_char, &format!("{escape_char}{escape_char}")); + + for forbidden in ['%', '\'', '\n'] { + str = str.replace(forbidden, &format!("{escape_char}{forbidden}")); + } + + str +} + +static ESCAPE_CHAR: char = '\\';