Early implementation of bash-like completions with 'complete' and 'compgen' builtins

This commit is contained in:
2026-02-27 01:10:52 -05:00
parent 4fbc25090d
commit 5f3610c298
17 changed files with 879 additions and 227 deletions

248
src/builtin/complete.rs Normal file
View File

@@ -0,0 +1,248 @@
use bitflags::bitflags;
use nix::{libc::STDOUT_FILENO, unistd::write};
use crate::{builtin::setup_builtin, getopt::{Opt, OptSpec, get_opts_from_tokens}, jobs::JobBldr, libsh::error::{ShErr, ShErrKind, ShResult}, parse::{NdRule, Node}, procio::{IoStack, borrow_fd}, readline::complete::{BashCompSpec, CompContext, CompSpec}, state::{self, read_meta, write_meta}};
/// Option table for the `compgen` builtin, consumed by the getopt parser.
/// `-F` and `-W` take an argument; the remaining options are bare flags
/// (see `get_comp_opts` for how each maps onto `CompOpts`/`CompFlags`).
pub const COMPGEN_OPTS: [OptSpec;7] = [
    // -F <func>: generate candidates via a shell function
    OptSpec {
        opt: Opt::Short('F'),
        takes_arg: true
    },
    // -W <wordlist>: candidates from a whitespace-split word list
    OptSpec {
        opt: Opt::Short('W'),
        takes_arg: true
    },
    // -f: file-name candidates (CompFlags::FILES)
    OptSpec {
        opt: Opt::Short('f'),
        takes_arg: false
    },
    // -d: directory candidates (CompFlags::DIRS)
    OptSpec {
        opt: Opt::Short('d'),
        takes_arg: false
    },
    // -c: command-name candidates (CompFlags::CMDS)
    OptSpec {
        opt: Opt::Short('c'),
        takes_arg: false
    },
    // -u: user-name candidates (CompFlags::USERS)
    OptSpec {
        opt: Opt::Short('u'),
        takes_arg: false
    },
    // -v: variable-name candidates (CompFlags::VARS)
    OptSpec {
        opt: Opt::Short('v'),
        takes_arg: false
    }
];
/// Option table for the `complete` builtin. Superset of `COMPGEN_OPTS`:
/// adds `-A <action>` plus the `-p` (print) and `-r` (remove) modes.
pub const COMP_OPTS: [OptSpec;10] = [
    // -F <func>: generate candidates via a shell function
    OptSpec {
        opt: Opt::Short('F'),
        takes_arg: true
    },
    // -W <wordlist>: candidates from a whitespace-split word list
    OptSpec {
        opt: Opt::Short('W'),
        takes_arg: true
    },
    // -A <action>: named action (parsed, but not yet applied by BashCompSpec)
    OptSpec {
        opt: Opt::Short('A'),
        takes_arg: true
    },
    // -p: print registered completion specs (CompFlags::PRINT)
    OptSpec {
        opt: Opt::Short('p'),
        takes_arg: false
    },
    // -r: remove completion specs for the named commands (CompFlags::REMOVE)
    OptSpec {
        opt: Opt::Short('r'),
        takes_arg: false
    },
    // -f: file-name candidates (CompFlags::FILES)
    OptSpec {
        opt: Opt::Short('f'),
        takes_arg: false
    },
    // -d: directory candidates (CompFlags::DIRS)
    OptSpec {
        opt: Opt::Short('d'),
        takes_arg: false
    },
    // -c: command-name candidates (CompFlags::CMDS)
    OptSpec {
        opt: Opt::Short('c'),
        takes_arg: false
    },
    // -u: user-name candidates (CompFlags::USERS)
    OptSpec {
        opt: Opt::Short('u'),
        takes_arg: false
    },
    // -v: variable-name candidates (CompFlags::VARS)
    OptSpec {
        opt: Opt::Short('v'),
        takes_arg: false
    }
];
bitflags! {
    /// Boolean options gathered from `complete`/`compgen` flags.
    /// Candidate-source bits (FILES..VARS) select what `BashCompSpec`
    /// completes; PRINT/REMOVE select a mode of the `complete` builtin.
    #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct CompFlags: u32 {
        const FILES  = 0b0000000001; // -f
        const DIRS   = 0b0000000010; // -d
        const CMDS   = 0b0000000100; // -c
        const USERS  = 0b0000001000; // -u
        const VARS   = 0b0000010000; // -v
        const PRINT  = 0b0000100000; // -p
        const REMOVE = 0b0001000000; // -r
    }
}
/// Aggregated, parsed options for the `complete`/`compgen` builtins,
/// produced by `get_comp_opts` and consumed by `BashCompSpec::from_comp_opts`.
#[derive(Default, Debug, Clone)]
pub struct CompOpts {
    // -F: shell function that generates completions
    pub func: Option<String>,
    // -W: explicit word list, split on whitespace at parse time
    pub wordlist: Option<Vec<String>>,
    // -A: action name (currently ignored when building a BashCompSpec)
    pub action: Option<String>,
    // Bare flags: -f/-d/-c/-u/-v plus the -p/-r modes
    pub flags: CompFlags
}
/// The `complete` builtin: registers, prints, or removes programmable
/// completion specs, bash-style.
///
/// Modes, selected by parsed flags:
/// * `-p` — print the stored source of every registered spec, or only the
///   specs for the commands named in `argv`;
/// * `-r` — remove the specs for the named commands;
/// * otherwise — build a `BashCompSpec` from the options and register it
///   for every named command. With no command named, this is an error.
pub fn complete_builtin(node: Node, io_stack: &mut IoStack, job: &mut JobBldr) -> ShResult<()> {
    let blame = node.get_span().clone();
    let NdRule::Command {
        assignments: _,
        argv,
    } = node.class
    else {
        unreachable!()
    };
    assert!(!argv.is_empty());
    // Reconstruct the expanded command line; stored on the spec so that
    // `complete -p` can replay it later.
    let src = argv.clone()
        .into_iter()
        .map(|tk| tk.expand().map(|tk| tk.get_words().join(" ")))
        .collect::<ShResult<Vec<String>>>()?
        .join(" ");
    let (argv, opts) = get_opts_from_tokens(argv, &COMP_OPTS)?;
    let comp_opts = get_comp_opts(opts)?;
    // Bind the redirection guard so it stays alive for the rest of the
    // builtin. The previous `let (argv, _)` dropped it immediately, undoing
    // any redirections before the `-p` output below was printed.
    let (argv, _guard) = setup_builtin(argv, job, Some((io_stack, node.redirs)))?;
    if comp_opts.flags.contains(CompFlags::PRINT) {
        if argv.is_empty() {
            // No commands named: print every registered spec
            read_meta(|m| {
                let specs = m.comp_specs().values();
                for spec in specs {
                    println!("{}", spec.source());
                }
            })
        } else {
            // Print only the specs for the named commands
            read_meta(|m| {
                for (cmd,_) in &argv {
                    if let Some(spec) = m.comp_specs().get(cmd) {
                        println!("{}", spec.source());
                    }
                }
            })
        }
        state::set_status(0);
        return Ok(());
    }
    if comp_opts.flags.contains(CompFlags::REMOVE) {
        write_meta(|m| {
            for (cmd,_) in &argv {
                m.remove_comp_spec(cmd);
            }
        });
        state::set_status(0);
        return Ok(());
    }
    if argv.is_empty() {
        state::set_status(1);
        return Err(ShErr::full(ShErrKind::ExecFail, "complete: no command specified", blame));
    }
    let comp_spec = BashCompSpec::from_comp_opts(comp_opts)
        .with_source(src);
    // Register the spec for every named command under a single meta lock,
    // mirroring the REMOVE branch above.
    write_meta(|m| {
        for (cmd,_) in argv {
            m.set_comp_spec(cmd, Box::new(comp_spec.clone()));
        }
    });
    state::set_status(0);
    Ok(())
}
/// The `compgen` builtin: builds a one-shot completion spec from its
/// options and writes every candidate for the given prefix word to
/// stdout, one per line.
pub fn compgen_builtin(node: Node, io_stack: &mut IoStack, job: &mut JobBldr) -> ShResult<()> {
    let NdRule::Command {
        assignments: _,
        argv,
    } = node.class
    else {
        unreachable!()
    };
    assert!(!argv.is_empty());
    // Reconstruct the expanded command line; stored on the spec as its source.
    let src = argv.clone()
        .into_iter()
        .map(|tk| tk.expand().map(|tk| tk.get_words().join(" ")))
        .collect::<ShResult<Vec<String>>>()?
        .join(" ");
    let (argv, opts) = get_opts_from_tokens(argv, &COMPGEN_OPTS)?;
    // First positional argument (after the builtin name) is the word to
    // complete. `get(1).cloned()` avoids the previous clone of the whole
    // argv vector just to reach one element.
    let prefix = argv.get(1).cloned().unwrap_or_default();
    let comp_opts = get_comp_opts(opts)?;
    // Keep the redirection guard alive while candidates are written out.
    let (_, _guard) = setup_builtin(argv, job, Some((io_stack, node.redirs)))?;
    let comp_spec = BashCompSpec::from_comp_opts(comp_opts)
        .with_source(src);
    log::debug!("compgen: prefix='{}', spec={:?}", prefix.as_str(), comp_spec);
    // Fake a single-word completion context so the spec can run standalone.
    let dummy_ctx = CompContext {
        words: vec![prefix.clone()],
        cword: 0,
        line: prefix.to_string(),
        cursor_pos: prefix.as_str().len()
    };
    let results = comp_spec.complete(&dummy_ctx)?;
    log::debug!("compgen: {} results: {:?}", results.len(), results);
    let stdout = borrow_fd(STDOUT_FILENO);
    for result in &results {
        write(stdout, result.as_bytes())?;
        write(stdout, b"\n")?;
    }
    state::set_status(0);
    Ok(())
}
/// Folds a list of parsed getopt results into a single `CompOpts` bundle.
///
/// `-F`, `-W`, and `-A` carry arguments (`-W`'s argument is split on
/// whitespace into a word list); every other recognized short option sets
/// the corresponding `CompFlags` bit. An option outside the spec tables is
/// a bug in the caller and panics.
pub fn get_comp_opts(opts: Vec<Opt>) -> ShResult<CompOpts> {
    let mut result = CompOpts::default();
    for opt in opts {
        match opt {
            Opt::ShortWithArg('F', func) => result.func = Some(func),
            Opt::ShortWithArg('W', list) => {
                let words = list
                    .split_whitespace()
                    .map(str::to_string)
                    .collect::<Vec<String>>();
                result.wordlist = Some(words);
            }
            Opt::ShortWithArg('A', action) => result.action = Some(action),
            Opt::Short(flag) => {
                result.flags |= match flag {
                    'r' => CompFlags::REMOVE,
                    'p' => CompFlags::PRINT,
                    'f' => CompFlags::FILES,
                    'd' => CompFlags::DIRS,
                    'c' => CompFlags::CMDS,
                    'u' => CompFlags::USERS,
                    'v' => CompFlags::VARS,
                    // Anything else was filtered out by the option specs
                    _ => unreachable!(),
                };
            }
            _ => unreachable!(),
        }
    }
    Ok(result)
}

View File

@@ -1,5 +1,3 @@
use std::sync::LazyLock;
use crate::{
builtin::setup_builtin,
expand::expand_prompt,

View File

@@ -1,5 +1,3 @@
use nix::{errno::Errno, unistd::execvpe};
use crate::{
builtin::setup_builtin,
jobs::JobBldr,

View File

@@ -1,7 +1,6 @@
use crate::{
libsh::error::{ShErr, ShErrKind, ShResult},
parse::{execute::prepare_argv, NdRule, Node},
prelude::*,
};
pub fn flowctl(node: Node, kind: ShErrKind) -> ShResult<()> {

View File

@@ -8,7 +8,7 @@ use crate::{
execute::prepare_argv,
lex::{Span, Tk},
},
procio::{IoFrame, IoStack, RedirGuard}, state,
procio::{IoStack, RedirGuard}, state,
};
pub mod alias;
@@ -28,11 +28,12 @@ pub mod zoltraak;
pub mod dirstack;
pub mod exec;
pub mod eval;
pub mod complete;
pub const BUILTINS: [&str; 33] = [
pub const BUILTINS: [&str; 35] = [
"echo", "cd", "read", "export", "local", "pwd", "source", "shift", "jobs", "fg", "bg", "disown", "alias", "unalias",
"return", "break", "continue", "exit", "zoltraak", "shopt", "builtin", "command", "trap",
"pushd", "popd", "dirs", "exec", "eval", "true", "false", ":", "readonly", "unset"
"pushd", "popd", "dirs", "exec", "eval", "true", "false", ":", "readonly", "unset", "complete", "compgen"
];
/// Sets up a builtin command

View File

@@ -150,7 +150,7 @@ pub fn read_builtin(node: Node, _io_stack: &mut IoStack, job: &mut JobBldr) -> S
state::set_status(1);
break; // EOF
}
Ok(n) => {
Ok(_) => {
if buf[0] == read_opts.delim {
state::set_status(0);
break; // Delimiter reached, stop reading

View File

@@ -9,7 +9,6 @@ use regex::Regex;
use crate::{
libsh::error::{ShErr, ShErrKind, ShResult},
parse::{ConjunctOp, NdRule, Node, TestCase, TEST_UNARY_OPS},
prelude::*,
};
#[derive(Debug, Clone)]

View File

@@ -60,10 +60,10 @@ impl FromStr for TrapTarget {
"PWR" => Ok(TrapTarget::Signal(Signal::SIGPWR)),
"SYS" => Ok(TrapTarget::Signal(Signal::SIGSYS)),
_ => {
return Err(ShErr::simple(
Err(ShErr::simple(
ShErrKind::ExecFail,
format!("invalid trap target '{}'", s),
));
))
}
}
}
@@ -117,7 +117,6 @@ impl Display for TrapTarget {
}
pub fn trap(node: Node, io_stack: &mut IoStack, job: &mut JobBldr) -> ShResult<()> {
let span = node.get_span();
let NdRule::Command {
assignments: _,
argv,

View File

@@ -1,7 +1,7 @@
use std::{os::unix::fs::OpenOptionsExt, sync::LazyLock};
use std::os::unix::fs::OpenOptionsExt;
use crate::{
getopt::{get_opts_from_tokens, Opt, OptSet, OptSpec},
getopt::{get_opts_from_tokens, Opt, OptSpec},
jobs::JobBldr,
libsh::error::{Note, ShErr, ShErrKind, ShResult, ShResultExt},
parse::{NdRule, Node},
@@ -121,9 +121,7 @@ pub fn zoltraak(node: Node, io_stack: &mut IoStack, job: &mut JobBldr) -> ShResu
),
);
}
if let Err(e) = annihilate(&arg, flags).blame(span) {
return Err(e);
}
annihilate(&arg, flags).blame(span)?
}
Ok(())

View File

@@ -12,7 +12,7 @@ use crate::parse::{Redir, RedirType};
use crate::procio::{IoBuf, IoFrame, IoMode, IoStack};
use crate::readline::markers;
use crate::state::{
LogTab, VarFlags, VarKind, read_jobs, read_logic, read_vars, write_jobs, write_meta, write_vars
ArrIndex, LogTab, VarFlags, VarKind, read_jobs, read_logic, read_vars, write_jobs, write_meta, write_vars
};
use crate::{jobs, prelude::*};
@@ -80,6 +80,7 @@ impl Expander {
let mut chars = self.raw.chars();
let mut cur_word = String::new();
let mut was_quoted = false;
let ifs = env::var("IFS").unwrap_or_else(|_| " \t\n".to_string());
'outer: while let Some(ch) = chars.next() {
match ch {
@@ -97,7 +98,7 @@ impl Expander {
}
}
}
_ if is_field_sep(ch) || ch == markers::ARG_SEP => {
_ if ifs.contains(ch) || ch == markers::ARG_SEP => {
if cur_word.is_empty() && !was_quoted {
cur_word.clear();
} else {
@@ -549,9 +550,25 @@ pub fn expand_var(chars: &mut Peekable<Chars<'_>>) -> ShResult<String> {
}
'}' if brace_depth > 0 && bracket_depth == 0 && inner_brace_depth == 0 => {
chars.next(); // consume the brace
log::debug!("expand_var closing brace, var_name: {:?}", var_name);
let val = if let Some(idx) = idx {
read_vars(|v| v.index_var(&var_name, idx))?
match idx {
ArrIndex::AllSplit => {
let arg_sep = markers::ARG_SEP.to_string();
read_vars(|v| v.get_arr_elems(&var_name))?.join(&arg_sep)
}
ArrIndex::AllJoined => {
let ifs = read_vars(|v| v.try_get_var("IFS"))
.unwrap_or_else(|| " \t\n".to_string())
.chars()
.next()
.unwrap_or(' ')
.to_string();
read_vars(|v| v.get_arr_elems(&var_name))?.join(&ifs)
},
_ => read_vars(|v| v.index_var(&var_name, idx))?
}
} else {
perform_param_expansion(&var_name)?
};
@@ -566,7 +583,7 @@ pub fn expand_var(chars: &mut Peekable<Chars<'_>>) -> ShResult<String> {
chars.next(); // consume the bracket
if bracket_depth == 0 {
let expanded_idx = expand_raw(&mut idx_raw.chars().peekable())?;
idx = Some(expanded_idx.parse::<isize>().map_err(|_| ShErr::simple(ShErrKind::ParseErr, format!("Array index must be a number, got '{expanded_idx}'")))?);
idx = Some(expanded_idx.parse::<ArrIndex>().map_err(|_| ShErr::simple(ShErrKind::ParseErr, format!("Array index must be a number, got '{expanded_idx}'")))?);
}
}
ch if bracket_depth > 0 => {
@@ -1389,76 +1406,73 @@ pub fn perform_param_expansion(raw: &str) -> ShResult<String> {
match expansion {
ParamExp::Len => unreachable!(),
ParamExp::DefaultUnsetOrNull(default) => {
if !vars.var_exists(&var_name) || vars.get_var(&var_name).is_empty() {
log::debug!("DefaultUnsetOrNull default: {:?}", default);
let result = expand_raw(&mut default.chars().peekable());
log::debug!("DefaultUnsetOrNull expanded: {:?}", result);
result
} else {
Ok(vars.get_var(&var_name))
match vars.try_get_var(&var_name).filter(|v| !v.is_empty()) {
Some(val) => Ok(val),
None => expand_raw(&mut default.chars().peekable()),
}
}
ParamExp::DefaultUnset(default) => {
if !vars.var_exists(&var_name) {
expand_raw(&mut default.chars().peekable())
} else {
Ok(vars.get_var(&var_name))
match vars.try_get_var(&var_name) {
Some(val) => Ok(val),
None => expand_raw(&mut default.chars().peekable()),
}
}
ParamExp::SetDefaultUnsetOrNull(default) => {
if !vars.var_exists(&var_name) || vars.get_var(&var_name).is_empty() {
let expanded = expand_raw(&mut default.chars().peekable())?;
write_vars(|v| v.set_var(&var_name, VarKind::Str(expanded.clone()), VarFlags::NONE));
Ok(expanded)
} else {
Ok(vars.get_var(&var_name))
match vars.try_get_var(&var_name).filter(|v| !v.is_empty()) {
Some(val) => Ok(val),
None => {
let expanded = expand_raw(&mut default.chars().peekable())?;
write_vars(|v| v.set_var(&var_name, VarKind::Str(expanded.clone()), VarFlags::NONE))?;
Ok(expanded)
}
}
}
ParamExp::SetDefaultUnset(default) => {
if !vars.var_exists(&var_name) {
let expanded = expand_raw(&mut default.chars().peekable())?;
write_vars(|v| v.set_var(&var_name, VarKind::Str(expanded.clone()), VarFlags::NONE));
Ok(expanded)
} else {
Ok(vars.get_var(&var_name))
match vars.try_get_var(&var_name) {
Some(val) => Ok(val),
None => {
let expanded = expand_raw(&mut default.chars().peekable())?;
write_vars(|v| v.set_var(&var_name, VarKind::Str(expanded.clone()), VarFlags::NONE))?;
Ok(expanded)
}
}
}
ParamExp::AltSetNotNull(alt) => {
if vars.var_exists(&var_name) && !vars.get_var(&var_name).is_empty() {
expand_raw(&mut alt.chars().peekable())
} else {
Ok("".into())
match vars.try_get_var(&var_name).filter(|v| !v.is_empty()) {
Some(_) => expand_raw(&mut alt.chars().peekable()),
None => Ok("".into()),
}
}
ParamExp::AltNotNull(alt) => {
if vars.var_exists(&var_name) {
expand_raw(&mut alt.chars().peekable())
} else {
Ok("".into())
match vars.try_get_var(&var_name) {
Some(_) => expand_raw(&mut alt.chars().peekable()),
None => Ok("".into()),
}
}
ParamExp::ErrUnsetOrNull(err) => {
if !vars.var_exists(&var_name) || vars.get_var(&var_name).is_empty() {
let expanded = expand_raw(&mut err.chars().peekable())?;
Err(ShErr::Simple {
kind: ShErrKind::ExecFail,
msg: expanded,
notes: vec![],
})
} else {
Ok(vars.get_var(&var_name))
match vars.try_get_var(&var_name).filter(|v| !v.is_empty()) {
Some(val) => Ok(val),
None => {
let expanded = expand_raw(&mut err.chars().peekable())?;
Err(ShErr::Simple {
kind: ShErrKind::ExecFail,
msg: expanded,
notes: vec![],
})
}
}
}
ParamExp::ErrUnset(err) => {
if !vars.var_exists(&var_name) {
let expanded = expand_raw(&mut err.chars().peekable())?;
Err(ShErr::Simple {
kind: ShErrKind::ExecFail,
msg: expanded,
notes: vec![],
})
} else {
Ok(vars.get_var(&var_name))
match vars.try_get_var(&var_name) {
Some(val) => Ok(val),
None => {
let expanded = expand_raw(&mut err.chars().peekable())?;
Err(ShErr::Simple {
kind: ShErrKind::ExecFail,
msg: expanded,
notes: vec![],
})
}
}
}
ParamExp::Substr(pos) => {

View File

@@ -1,6 +1,6 @@
use std::collections::VecDeque;
use crate::parse::lex::{Span, Tk};
use crate::parse::lex::{Span, Tk, TkRule};
use crate::parse::{Redir, RedirType};
use crate::prelude::*;
@@ -17,6 +17,7 @@ pub trait CharDequeUtils {
pub trait TkVecUtils<Tk> {
fn get_span(&self) -> Option<Span>;
fn debug_tokens(&self);
fn split_at_separators(&self) -> Vec<Vec<Tk>>;
}
pub trait RedirVecUtils<Redir> {
@@ -85,6 +86,29 @@ impl TkVecUtils<Tk> for Vec<Tk> {
fn debug_tokens(&self) {
for token in self {}
}
/// Splits the token stream into command segments, cutting at every pipe,
/// conjunction, background, or separator token. The separator tokens
/// themselves are discarded; everything else is cloned into the segments.
///
/// NOTE(review): a leading separator, or two adjacent separators, pushes an
/// empty segment into the result — confirm callers tolerate empty segments.
fn split_at_separators(&self) -> Vec<Vec<Tk>> {
    let mut splits = vec![];
    let mut cur_split = vec![];
    for tk in self {
        match tk.class {
            TkRule::Pipe |
            TkRule::ErrPipe |
            TkRule::And |
            TkRule::Or |
            TkRule::Bg |
            TkRule::Sep => {
                // Close off the current segment; the separator is dropped
                splits.push(std::mem::take(&mut cur_split));
            }
            _ => cur_split.push(tk.clone()),
        }
    }
    // Flush the trailing segment, if any
    if !cur_split.is_empty() {
        splits.push(cur_split);
    }
    splits
}
}
impl RedirVecUtils<Redir> for Vec<Redir> {

View File

@@ -2,7 +2,7 @@ use std::{collections::{HashSet, VecDeque}, os::unix::fs::PermissionsExt};
use crate::{
builtin::{
alias::{alias, unalias}, cd::cd, dirstack::{dirs, popd, pushd}, echo::echo, eval, exec, flowctl::flowctl, jobctl::{JobBehavior, continue_job, disown, jobs}, pwd::pwd, read::read_builtin, shift::shift, shopt::shopt, source::source, test::double_bracket_test, trap::{TrapTarget, trap}, true_builtin, varcmds::{export, local, readonly, unset}, zoltraak::zoltraak
alias::{alias, unalias}, cd::cd, complete::{compgen_builtin, complete_builtin}, dirstack::{dirs, popd, pushd}, echo::echo, eval, exec, flowctl::flowctl, jobctl::{JobBehavior, continue_job, disown, jobs}, pwd::pwd, read::read_builtin, shift::shift, shopt::shopt, source::source, test::double_bracket_test, trap::{TrapTarget, trap}, true_builtin, varcmds::{export, local, readonly, unset}, zoltraak::zoltraak
},
expand::{expand_aliases, glob_to_regex},
jobs::{ChildProc, JobStack, dispatch_job},
@@ -78,11 +78,11 @@ impl Drop for ScopeGuard {
/// Used to throw away variables that exist in temporary contexts
/// such as 'VAR=value <command> <args>'
/// or for-loop variables
struct VarCtxGuard {
pub struct VarCtxGuard {
vars: HashSet<String>,
}
impl VarCtxGuard {
fn new(vars: HashSet<String>) -> Self {
pub fn new(vars: HashSet<String>) -> Self {
Self { vars }
}
}
@@ -780,6 +780,8 @@ impl Dispatcher {
"eval" => eval::eval(cmd, io_stack_mut, curr_job_mut),
"readonly" => readonly(cmd, io_stack_mut, curr_job_mut),
"unset" => unset(cmd, io_stack_mut, curr_job_mut),
"complete" => complete_builtin(cmd, io_stack_mut, curr_job_mut),
"compgen" => compgen_builtin(cmd, io_stack_mut, curr_job_mut),
"true" | ":" => {
state::set_status(0);
Ok(())

View File

@@ -899,7 +899,7 @@ pub fn is_field_sep(ch: char) -> bool {
}
pub fn is_keyword(slice: &str) -> bool {
KEYWORDS.contains(&slice) || ends_with_unescaped(slice, "()")
KEYWORDS.contains(&slice) || (ends_with_unescaped(slice, "()") && !ends_with_unescaped(slice, "=()"))
}
pub fn is_cmd_sub(slice: &str) -> bool {

View File

@@ -1,14 +1,14 @@
use std::{env, fmt::Debug, os::unix::fs::PermissionsExt, path::PathBuf, sync::Arc};
use std::{collections::HashSet, env, fmt::Debug, os::unix::fs::PermissionsExt, path::PathBuf, sync::Arc};
use crate::{
builtin::BUILTINS,
libsh::error::{ShErr, ShErrKind, ShResult},
parse::lex::{self, LexFlags, Tk, TkFlags},
builtin::{BUILTINS, complete::{CompFlags, CompOpts}},
libsh::{error::{ShErr, ShErrKind, ShResult}, utils::TkVecUtils},
parse::{execute::{VarCtxGuard, exec_input}, lex::{self, LexFlags, Tk, TkFlags, TkRule}},
readline::{
Marker, annotate_input, annotate_input_recursive, get_insertions,
markers::{self, is_marker},
},
state::{read_logic, read_vars},
state::{VarFlags, VarKind, read_logic, read_meta, read_vars, write_vars},
};
pub fn complete_users(start: &str) -> Vec<String> {
@@ -91,63 +91,15 @@ pub fn extract_var_name(text: &str) -> Option<(String, usize, usize)> {
}
fn complete_commands(start: &str) -> Vec<String> {
let mut candidates = vec![];
let path = env::var("PATH").unwrap_or_default();
let paths = path.split(':').map(PathBuf::from).collect::<Vec<_>>();
for path in paths {
// Skip directories that don't exist (common in PATH)
let Ok(entries) = std::fs::read_dir(path) else {
continue;
};
for entry in entries {
let Ok(entry) = entry else {
continue;
};
let Ok(meta) = entry.metadata() else {
continue;
};
let file_name = entry.file_name().to_string_lossy().to_string();
if meta.is_file()
&& (meta.permissions().mode() & 0o111) != 0
&& file_name.starts_with(start)
{
candidates.push(file_name);
}
}
}
let builtin_candidates = BUILTINS
.iter()
.filter(|b| b.starts_with(start))
.map(|s| s.to_string());
candidates.extend(builtin_candidates);
read_logic(|l| {
let func_table = l.funcs();
let matches = func_table
.keys()
.filter(|k| k.starts_with(start))
.map(|k| k.to_string());
candidates.extend(matches);
let aliases = l.aliases();
let matches = aliases
.keys()
.filter(|k| k.starts_with(start))
.map(|k| k.to_string());
candidates.extend(matches);
let mut candidates: Vec<String> = read_meta(|m| {
m.cached_cmds()
.iter()
.filter(|c| c.starts_with(start))
.cloned()
.collect()
});
// Deduplicate (same command may appear in multiple PATH dirs)
candidates.sort();
candidates.dedup();
candidates
}
@@ -231,7 +183,10 @@ pub struct BashCompSpec {
/// -v complete variable names
pub vars: bool,
/// -A signal: complete signal names
pub signals: bool
pub signals: bool,
/// The original command
pub source: String
}
impl BashCompSpec {
@@ -246,6 +201,10 @@ impl BashCompSpec {
self.wordlist = Some(wordlist);
self
}
pub fn with_source(mut self, source: String) -> Self {
self.source = source;
self
}
pub fn files(mut self, enable: bool) -> Self {
self.files = enable;
self
@@ -270,61 +229,130 @@ impl BashCompSpec {
self.signals = enable;
self
}
pub fn exec_comp_func(&self) -> Vec<String> {
/// Builds a `BashCompSpec` directly from parsed `complete`/`compgen`
/// options. The `-A` action is discarded here and signal completion is not
/// wired up yet; `source` starts empty and is filled in via `with_source`.
pub fn from_comp_opts(opts: CompOpts) -> Self {
    let CompOpts { func, wordlist, action: _, flags } = opts;
    Self {
        function: func,
        wordlist,
        files: flags.contains(CompFlags::FILES),
        dirs: flags.contains(CompFlags::DIRS),
        commands: flags.contains(CompFlags::CMDS),
        users: flags.contains(CompFlags::USERS),
        vars: flags.contains(CompFlags::VARS),
        signals: false, // TODO: implement signal completion
        source: String::new()
    }
}
pub fn exec_comp_func(&self, ctx: &CompContext) -> ShResult<Vec<String>> {
let mut vars_to_unset = HashSet::new();
for var in [ "COMP_WORDS", "COMP_CWORD", "COMP_LINE", "COMP_POINT", "COMPREPLY" ] {
vars_to_unset.insert(var.to_string());
}
let _guard = VarCtxGuard::new(vars_to_unset);
todo!()
let CompContext { words, cword, line, cursor_pos } = ctx;
let raw_words = words.to_vec().into_iter().map(|tk| tk.to_string()).collect();
write_vars(|v| v.set_var("COMP_WORDS", VarKind::arr_from_vec(raw_words), VarFlags::NONE))?;
write_vars(|v| v.set_var("COMP_CWORD", VarKind::Str(cword.to_string()), VarFlags::NONE))?;
write_vars(|v| v.set_var("COMP_LINE", VarKind::Str(line.to_string()), VarFlags::NONE))?;
write_vars(|v| v.set_var("COMP_POINT", VarKind::Str(cursor_pos.to_string()), VarFlags::NONE))?;
let cmd_name = words
.first()
.map(|s| s.to_string())
.unwrap_or_default();
let cword_str = words.get(*cword)
.map(|s| s.to_string())
.unwrap_or_default();
let pword_str = if *cword > 0 {
words.get(cword - 1).map(|s| s.to_string()).unwrap_or_default()
} else {
String::new()
};
let input = format!("{} {cmd_name} {cword_str} {pword_str}", self.function.as_ref().unwrap());
exec_input(input, None, false)?;
Ok(read_vars(|v| v.get_arr_elems("COMPREPLY")).unwrap_or_default())
}
}
impl CompSpec for BashCompSpec {
fn complete(&self, ctx: &CompContext) -> Vec<String> {
fn complete(&self, ctx: &CompContext) -> ShResult<Vec<String>> {
let mut candidates = vec![];
let prefix = &ctx.words[ctx.cword];
let expanded = prefix.clone().expand()?.get_words().join(" ");
if self.files {
candidates.extend(complete_filename(prefix));
candidates.extend(complete_filename(&expanded));
}
if self.dirs {
candidates.extend(complete_dirs(prefix));
candidates.extend(complete_dirs(&expanded));
}
if self.commands {
candidates.extend(complete_commands(prefix));
candidates.extend(complete_commands(&expanded));
}
if self.vars {
candidates.extend(complete_vars(prefix));
candidates.extend(complete_vars(&expanded));
}
if self.users {
candidates.extend(complete_users(prefix));
candidates.extend(complete_users(&expanded));
}
if let Some(words) = &self.wordlist {
candidates.extend(
words
.iter()
.filter(|w| w.starts_with(prefix))
.filter(|w| w.starts_with(&expanded))
.cloned(),
);
}
if let Some(func) = &self.function {
if self.function.is_some() {
candidates.extend(self.exec_comp_func(ctx)?);
}
candidates
Ok(candidates)
}
fn source(&self) -> &str {
&self.source
}
}
pub trait CompSpec: Debug {
fn complete(&self, ctx: &CompContext) -> Vec<String>;
pub trait CompSpec: Debug + CloneCompSpec {
fn complete(&self, ctx: &CompContext) -> ShResult<Vec<String>>;
fn source(&self) -> &str;
}
/// Object-safe cloning helper for `Box<dyn CompSpec>`: the blanket impl
/// below gives `clone_box` to every concrete `CompSpec` that is `Clone`.
pub trait CloneCompSpec {
    fn clone_box(&self) -> Box<dyn CompSpec>;
}
impl<T: CompSpec + Clone + 'static> CloneCompSpec for T {
    fn clone_box(&self) -> Box<dyn CompSpec> {
        Box::new(self.clone())
    }
}
// Forward `Clone` on the trait object to `clone_box`, so boxed specs can
// be stored and duplicated (e.g. when registering one spec for several commands).
impl Clone for Box<dyn CompSpec> {
    fn clone(&self) -> Self {
        self.clone_box()
    }
}
pub struct CompContext {
pub words: Vec<String>,
pub words: Vec<Tk>,
pub cword: usize,
pub line: String,
pub cursor_pos: usize
}
pub enum CompCtx {
CmdName,
FileName,
impl CompContext {
    /// The command word (first token) of the segment being completed, if any.
    pub fn cmd(&self) -> Option<&str> {
        self.words.first().map(|s| s.as_str())
    }
}
pub enum CompResult {
@@ -497,17 +525,120 @@ impl Completer {
)
}
/// Builds a `CompContext` for the token under the cursor.
///
/// Tokens are split into command segments (`split_at_separators`), the
/// segment containing `cursor_pos` is selected, and `cword` is set to the
/// token the cursor falls inside. When the cursor sits in whitespace, an
/// empty placeholder token is inserted at the cursor position so there is
/// always a current word to complete.
pub fn build_comp_ctx(&self, tks: &[Tk], line: &str, cursor_pos: usize) -> ShResult<CompContext> {
    log::debug!("build_comp_ctx: cursor_pos={}, tokens={}", cursor_pos, tks.len());
    let mut ctx = CompContext {
        words: vec![],
        cword: 0,
        line: line.to_string(),
        cursor_pos,
    };
    // Drop the synthetic start/end-of-input tokens before segmenting
    let segments = tks
        .iter()
        .filter(|&tk| !matches!(tk.class, TkRule::SOI | TkRule::EOI))
        .cloned()
        .collect::<Vec<_>>()
        .split_at_separators();
    log::debug!("build_comp_ctx: {} segments after split", segments.len());
    if segments.is_empty() {
        log::debug!("build_comp_ctx: no segments found");
        return Ok(ctx);
    }
    // Pick the segment the cursor is in: the one just before the first
    // segment that starts past the cursor, or the last segment otherwise.
    let relevant_pos = segments
        .iter()
        .position(|tks| tks.iter().next().is_some_and(|tk|{ log::debug!("checking span: {}", tk.span.start); tk.span.start > cursor_pos }))
        .map(|i| i.saturating_sub(1)) // take the pos before it
        .unwrap_or(segments.len().saturating_sub(1));
    let mut relevant = segments[relevant_pos].to_vec();
    log::debug!("build_comp_ctx: relevant segment has {} tokens: {:?}",
        relevant.len(),
        relevant.iter().map(|tk| tk.as_str()).collect::<Vec<_>>()
    );
    let cword = if let Some(pos) = relevant.iter().position(|tk| {
        cursor_pos >= tk.span.start && cursor_pos <= tk.span.end
    }) {
        // Cursor is inside or at the end of an existing token
        pos
    } else {
        // Cursor is in whitespace — find where to insert an empty token
        let insert_pos = relevant.iter()
            .position(|tk| tk.span.start > cursor_pos)
            .unwrap_or(relevant.len());
        let mut new_tk = Tk::default();
        // Borrow an existing token's span so the placeholder points at the
        // same source, then collapse it to a zero-width range at the cursor
        if let Some(tk) = relevant.last() {
            let mut span = tk.span.clone();
            span.set_range(cursor_pos..cursor_pos);
            new_tk.span = span;
        }
        relevant.insert(insert_pos, new_tk);
        insert_pos
    };
    log::debug!("build_comp_ctx: cword={} ('{}')", cword, relevant[cword].as_str());
    ctx.words = relevant;
    ctx.cword = cword;
    Ok(ctx)
}
/// Looks up a registered completion spec for the context's command word and
/// runs it. Returns `NoMatch` when there is no command, no registered spec,
/// or the spec yields no candidates.
pub fn try_comp_spec(&self, ctx: &CompContext) -> ShResult<CompResult> {
    // Display-only name for the log line; shadowed by the real lookup below
    let cmd = ctx.cmd().unwrap_or("<empty>");
    log::debug!("try_comp_spec: looking up spec for '{}'", cmd);
    let Some(cmd) = ctx.cmd() else {
        log::debug!("try_comp_spec: no command in context");
        return Ok(CompResult::NoMatch);
    };
    let Some(spec) = read_meta(|m| m.get_comp_spec(cmd)) else {
        log::debug!("try_comp_spec: no spec registered for '{}'", cmd);
        return Ok(CompResult::NoMatch);
    };
    log::debug!("try_comp_spec: found spec for '{}', executing", cmd);
    let candidates = spec.complete(ctx)?;
    log::debug!("try_comp_spec: got {} candidates: {:?}", candidates.len(), candidates);
    if candidates.is_empty() {
        Ok(CompResult::NoMatch)
    } else {
        Ok(CompResult::from_candidates(candidates))
    }
}
pub fn get_candidates(&mut self, line: String, cursor_pos: usize) -> ShResult<CompResult> {
log::debug!("get_candidates: line='{}', cursor_pos={}", line, cursor_pos);
let source = Arc::new(line.clone());
let tokens =
lex::LexStream::new(source, LexFlags::LEX_UNFINISHED).collect::<ShResult<Vec<Tk>>>()?;
let Some(mut cur_token) = tokens.into_iter().find(|tk| {
let start = tk.span.start;
let end = tk.span.end;
(start..=end).contains(&cursor_pos)
}) else {
let candidates = complete_filename("./"); // Default to filename completion if no token is found
let ctx = self.build_comp_ctx(&tokens, &line, cursor_pos)?;
// Set token_span from CompContext's current word
if let Some(cur) = ctx.words.get(ctx.cword) {
self.token_span = (cur.span.start, cur.span.end);
} else {
self.token_span = (cursor_pos, cursor_pos);
}
// Try programmable completion first
let res = self.try_comp_spec(&ctx)?;
if !matches!(res, CompResult::NoMatch) {
log::debug!("get_candidates: comp_spec matched, returning");
return Ok(res);
}
// Get the current token from CompContext
let Some(mut cur_token) = ctx.words.get(ctx.cword).cloned() else {
log::debug!("get_candidates: no current token, falling back to filename completion");
let candidates = complete_filename("./");
let end_pos = line.len();
self.token_span = (end_pos, end_pos);
return Ok(CompResult::from_candidates(candidates));
@@ -515,50 +646,37 @@ impl Completer {
self.token_span = (cur_token.span.start, cur_token.span.end);
// Look for marker at the START of what we're completing, not at cursor
let (mut ctx, token_start) = self.get_completion_context(&line, cursor_pos);
self.token_span.0 = token_start; // Update start of token span based on context
cur_token
.span
.set_range(self.token_span.0..self.token_span.1); // Update token span to reflect context
// If token contains '=', only complete after the '='
let token_str = cur_token.span.as_str();
if let Some(eq_pos) = token_str.rfind('=') {
// Adjust span to only replace the part after '='
log::debug!("get_candidates: assignment token, completing after '='");
self.token_span.0 = cur_token.span.start + eq_pos + 1;
cur_token
.span
.set_range(self.token_span.0..self.token_span.1);
}
if ctx.last().is_some_and(|m| *m == markers::VAR_SUB) {
let var_sub = &cur_token.as_str();
}
let raw_tk = cur_token.as_str().to_string();
let is_cmd = cur_token.flags.contains(TkFlags::IS_CMD)
|| cur_token.flags.contains(TkFlags::BUILTIN)
|| ctx.cword == 0;
let expanded_tk = cur_token.expand()?;
let expanded_words = expanded_tk.get_words().into_iter().collect::<Vec<_>>();
let expanded = expanded_words.join("\\ ");
let mut candidates = match ctx.pop() {
Some(markers::COMMAND) => complete_commands(&expanded),
Some(markers::ARG) => complete_filename(&expanded),
Some(_) => {
return Ok(CompResult::NoMatch);
}
None => {
return Ok(CompResult::NoMatch);
}
};
log::debug!("get_candidates: is_cmd={}, raw='{}', expanded='{}'", is_cmd, raw_tk, expanded);
// Now we are just going to graft the completed text
// onto the original token. This prevents something like
// $SOME_PATH/
// from being completed into
// /path/to/some_path/file.txt
// and instead returns
// $SOME_PATH/file.txt
let mut candidates = if is_cmd {
complete_commands(&expanded)
} else {
complete_filename(&expanded)
};
log::debug!("get_candidates: {} candidates from default completion", candidates.len());
// Graft the completed text onto the original token.
// This prevents something like $SOME_PATH/ from being
// completed into /path/to/some_path/file.txt
// and instead returns $SOME_PATH/file.txt
candidates = candidates
.into_iter()
.map(|c| match c.strip_prefix(&expanded) {

View File

@@ -132,7 +132,7 @@ impl Highlighter {
}
}
self.output.push_str(&var_name);
self.output.push_str(&Self::strip_markers(&var_name));
self.push_style(Style::Blue);
self.output.push('=');
self.pop_style();

View File

@@ -36,6 +36,11 @@ pub mod vimode;
pub mod markers {
use super::Marker;
/*
* These are invisible Unicode characters used to annotate
* strings with various contextual metadata.
*/
/* Highlight Markers */
// token-level (derived from token class)
@@ -114,7 +119,7 @@ pub mod markers {
pub const MISC: [Marker; 3] = [ESCAPE, VISUAL_MODE_START, VISUAL_MODE_END];
pub fn is_marker(c: Marker) -> bool {
c >= '\u{e000}' && c <= '\u{efff}'
('\u{e000}'..'\u{efff}').contains(&c)
}
}
type Marker = char;
@@ -1103,7 +1108,7 @@ pub fn annotate_token(token: Tk) -> Vec<(usize, Marker)> {
in_sng_qt = !in_sng_qt;
token_chars.next(); // consume the quote
}
'[' if !in_dub_qt && !in_sng_qt => {
'[' if !in_dub_qt && !in_sng_qt && !token.flags.contains(TkFlags::ASSIGN) => {
token_chars.next(); // consume the opening bracket
let start_pos = span_start + index;
let mut is_glob_pat = false;

View File

@@ -8,7 +8,7 @@ use crate::{
builtin::{BUILTINS, trap::TrapTarget}, exec_input, jobs::JobTab, libsh::{
error::{ShErr, ShErrKind, ShResult},
utils::VecDequeExt,
}, parse::{ConjunctNode, NdRule, Node, ParsedSrc, lex::{LexFlags, LexStream, Tk}}, prelude::*, readline::markers, shopt::ShOpts
}, parse::{ConjunctNode, NdRule, Node, ParsedSrc, lex::{LexFlags, LexStream, Tk}}, prelude::*, readline::{complete::{BashCompSpec, CompSpec}, markers}, shopt::ShOpts
};
pub struct Shed {
@@ -191,6 +191,82 @@ impl ScopeStack {
flat_vars
}
/// Parses a name of the form `var[idx]` into `(var, index)`.
///
/// Returns `Ok(None)` when the name has no bracketed index, an empty
/// index (`var[]`), an empty variable name, or the index expands to
/// nothing. Errors when the named variable does not exist or the expanded
/// index does not parse as an `ArrIndex`. Backslash-escaped characters are
/// skipped; nested brackets inside the index are preserved verbatim.
fn parse_arr_index(&self, var_name: &str) -> ShResult<Option<(String,ArrIndex)>> {
    let mut chars = var_name.chars();
    let mut var_name = String::new();
    let mut idx_raw = String::new();
    let mut bracket_depth = 0;
    while let Some(ch) = chars.next() {
        match ch {
            '\\' => {
                // Skip the next character, as it's escaped
                chars.next();
            }
            '[' => {
                bracket_depth += 1;
                // Only nested (inner) brackets are kept in the index text
                if bracket_depth > 1 {
                    idx_raw.push(ch);
                }
            }
            ']' => {
                if bracket_depth > 0 {
                    bracket_depth -= 1;
                    if bracket_depth == 0 {
                        if idx_raw.is_empty() {
                            return Ok(None);
                        }
                        break;
                    }
                }
                // NOTE(review): a ']' at depth 0 (no matching '[') falls
                // through and is appended to idx_raw — confirm this is the
                // intended handling of stray closing brackets.
                idx_raw.push(ch);
            }
            _ if bracket_depth > 0 => {
                idx_raw.push(ch);
            }
            _ => {
                var_name.push(ch);
            }
        }
    }
    if idx_raw.is_empty() {
        Ok(None)
    } else {
        if var_name.is_empty() {
            return Ok(None);
        }
        if !self.var_exists(&var_name) {
            return Err(ShErr::simple(
                ShErrKind::ExecFail,
                format!("Variable '{}' not found", var_name)
            ));
        }
        // Lex + expand the raw index text, flatten the resulting words, and
        // take the first word as the index expression
        let expanded = LexStream::new(Arc::new(idx_raw), LexFlags::empty())
            .map(|tk| tk.and_then(|tk| tk.expand()).map(|tk| tk.get_words()))
            .try_fold(vec![], |mut acc, wrds| {
                match wrds {
                    Ok(wrds) => acc.extend(wrds),
                    Err(e) => return Err(e),
                }
                Ok(acc)
            })?
            .into_iter()
            .next();
        let Some(exp) = expanded else {
            return Ok(None)
        };
        let idx = exp.parse::<ArrIndex>().map_err(|_| ShErr::simple(
            ShErrKind::ParseErr,
            format!("Invalid array index: {}", exp)
        ))?;
        Ok(Some((var_name, idx)))
    }
}
pub fn set_var(&mut self, var_name: &str, val: VarKind, flags: VarFlags) -> ShResult<()> {
let is_local = self.is_local_var(var_name);
if flags.contains(VarFlags::LOCAL) || is_local {
@@ -200,41 +276,86 @@ impl ScopeStack {
}
}
fn set_var_global(&mut self, var_name: &str, val: VarKind, flags: VarFlags) -> ShResult<()> {
if let Some(scope) = self.scopes.first_mut() {
scope.set_var(var_name, val, flags)
} else {
Ok(())
}
let idx_result = self.parse_arr_index(var_name);
let Some(scope) = self.scopes.first_mut() else {
return Ok(())
};
if let Ok(Some((var,idx))) = idx_result {
scope.set_index(&var, idx, val.to_string())
} else {
scope.set_var(var_name, val, flags)
}
}
fn set_var_local(&mut self, var_name: &str, val: VarKind, flags: VarFlags) -> ShResult<()> {
if let Some(scope) = self.scopes.last_mut() {
scope.set_var(var_name, val, flags)
} else {
Ok(())
}
let idx_result = self.parse_arr_index(var_name);
let Some(scope) = self.scopes.last_mut() else {
return Ok(())
};
if let Ok(Some((var,idx))) = idx_result {
scope.set_index(&var, idx, val.to_string())
} else {
scope.set_var(var_name, val, flags)
}
}
pub fn index_var(&self, var_name: &str, idx: isize) -> ShResult<String> {
pub fn get_arr_elems(&self, var_name: &str) -> ShResult<Vec<String>> {
for scope in self.scopes.iter().rev() {
if scope.var_exists(var_name)
&& let Some(var) = scope.vars().get(var_name) {
match var.kind() {
VarKind::Arr(items) => {
let idx = match idx.cmp(&0) {
Ordering::Less => {
if items.len() >= idx.unsigned_abs() {
items.len() - idx.unsigned_abs()
let mut item_vec = items.clone()
.into_iter()
.collect::<Vec<(usize, String)>>();
item_vec.sort_by_key(|(idx, _)| *idx); // sort by index
return Ok(item_vec.into_iter()
.map(|(_,s)| s)
.collect())
}
_ => {
return Err(ShErr::simple(
ShErrKind::ExecFail,
format!("Variable '{}' is not an array", var_name)
));
}
}
}
}
Err(ShErr::simple(
ShErrKind::ExecFail,
format!("Variable '{}' not found", var_name)
))
}
pub fn index_var(&self, var_name: &str, idx: ArrIndex) -> ShResult<String> {
for scope in self.scopes.iter().rev() {
if scope.var_exists(var_name)
&& let Some(var) = scope.vars().get(var_name) {
match var.kind() {
VarKind::Arr(items) => {
let idx = match idx {
ArrIndex::Literal(n) => {
n
}
ArrIndex::FromBack(n) => {
if items.len() >= n {
items.len() - n
} else {
return Err(ShErr::simple(
ShErrKind::ExecFail,
format!("Index {} out of bounds for array '{}'", idx, var_name)
format!("Index {} out of bounds for array '{}'", n, var_name)
));
}
}
Ordering::Equal => idx as usize,
Ordering::Greater => idx as usize
_ => return Err(ShErr::simple(
ShErrKind::ExecFail,
format!("Cannot index all elements of array '{}'", var_name)
)),
};
if let Some(item) = items.get(idx) {
if let Some(item) = items.get(&idx) {
return Ok(item.clone());
} else {
return Err(ShErr::simple(
@@ -254,6 +375,26 @@ impl ScopeStack {
}
Ok("".into())
}
/// Option-returning variant of `get_var()`, used internally so callers can
/// chain `Option` combinators. Special shell parameters resolve first; an
/// empty parameter value is treated as "not set".
pub fn try_get_var(&self, var_name: &str) -> Option<String> {
	if let Ok(param) = var_name.parse::<ShellParam>() {
		let val = self.get_param(param);
		return if val.is_empty() { None } else { Some(val) };
	}
	// Fall back to ordinary variables, innermost scope first
	self.scopes
		.iter()
		.rev()
		.find(|scope| scope.var_exists(var_name))
		.map(|scope| scope.get_var(var_name))
}
pub fn get_var(&self, var_name: &str) -> String {
if let Ok(param) = var_name.parse::<ShellParam>() {
return self.get_param(param);
@@ -469,12 +610,52 @@ impl VarFlags {
}
}
#[derive(Clone, Debug)]
/// A parsed array subscript, e.g. the `3` in `arr[3]`.
pub enum ArrIndex {
	/// A plain non-negative index: `arr[3]`
	Literal(usize),
	/// An index counted from the end: `arr[-1]`
	FromBack(usize),
	/// `arr[*]` — all elements, joined into one word
	AllJoined,
	/// `arr[@]` — all elements, as separate words
	AllSplit
}

impl FromStr for ArrIndex {
	type Err = ShErr;
	fn from_str(s: &str) -> Result<Self, Self::Err> {
		match s {
			"@" => Ok(Self::AllSplit),
			"*" => Ok(Self::AllJoined),
			// BUGFIX: was `c.is_digit(1)` — radix 1 accepts only '0', so
			// every negative index except "-0" was rejected; and a bare "-"
			// made the `all()` vacuously true, panicking on `"".parse()`.
			_ if s.starts_with('-') && s.len() > 1 && s[1..].chars().all(|c| c.is_ascii_digit()) => {
				let idx = s[1..].parse::<usize>().unwrap();
				Ok(Self::FromBack(idx))
			}
			_ if !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()) => {
				let idx = s.parse::<usize>().unwrap();
				Ok(Self::Literal(idx))
			}
			_ => Err(ShErr::simple(
				ShErrKind::ParseErr,
				format!("Invalid array index: {}", s)
			))
		}
	}
}
/// Flattens a sparse `index -> value` array map into a `Vec` of values,
/// ordered by ascending index.
pub fn hashmap_to_vec(map: HashMap<usize, String>) -> Vec<String> {
	let mut entries: Vec<(usize, String)> = map.into_iter().collect();
	// Keys are unique, so an unstable sort yields the same order
	entries.sort_unstable_by_key(|&(key, _)| key);
	entries.into_iter().map(|(_, value)| value).collect()
}
#[derive(Clone, Debug)]
/// The typed value of a shell variable.
/// (Diff residue removed: the old `Arr(Vec<String>)` variants were left
/// interleaved with the new sparse-map representation.)
pub enum VarKind {
	Str(String),
	Int(i32),
	/// Sparse array stored as `index -> value`
	Arr(HashMap<usize,String>),
	AssocArr(Vec<(String, String)>),
}
impl VarKind {
@@ -482,22 +663,34 @@ impl VarKind {
let raw = tk.as_str();
if !raw.starts_with('(') || !raw.ends_with(')') {
return Err(ShErr::simple(
ShErrKind::ParseErr,
format!("Invalid array syntax: {}", raw),
ShErrKind::ParseErr,
format!("Invalid array syntax: {}", raw),
));
}
let raw = raw[1..raw.len() - 1].to_string();
let mut words = vec![];
let tokens = LexStream::new(Arc::new(raw), LexFlags::empty())
.collect::<ShResult<Vec<Tk>>>()?;
let tokens: HashMap<usize,String> = LexStream::new(Arc::new(raw), LexFlags::empty())
.map(|tk| tk.and_then(|tk| tk.expand()).map(|tk| tk.get_words()))
.try_fold(vec![], |mut acc, wrds| {
match wrds {
Ok(wrds) => acc.extend(wrds),
Err(e) => return Err(e),
}
Ok(acc)
})?
.into_iter()
.enumerate()
.collect();
for token in tokens {
let tk_words = token.expand()?.get_words();
words.extend(tk_words);
}
Ok(Self::Arr(tokens))
}
Ok(Self::Arr(words))
/// Builds an array `VarKind` from an ordered list of values; each value's
/// position becomes its index in the sparse map.
pub fn arr_from_vec(vec: Vec<String>) -> Self {
	let indexed = vec
		.into_iter()
		.enumerate()
		.collect::<HashMap<usize, String>>();
	Self::Arr(indexed)
}
}
@@ -507,6 +700,7 @@ impl Display for VarKind {
VarKind::Str(s) => write!(f, "{s}"),
VarKind::Int(i) => write!(f, "{i}"),
VarKind::Arr(items) => {
let items = hashmap_to_vec(items.clone());
let mut item_iter = items.iter().peekable();
while let Some(item) = item_iter.next() {
write!(f, "{item}")?;
@@ -741,6 +935,44 @@ impl VarTab {
unsafe { env::remove_var(var_name) };
Ok(())
}
/// Assigns `val` to a single element of the array variable `var_name`.
///
/// Errors when the variable exists but is not an array, when a from-the-back
/// index reaches past the start, or when the index names all elements
/// (`@`/`*`). Silently succeeds (`Ok`) when the variable does not exist.
pub fn set_index(&mut self, var_name: &str, idx: ArrIndex, val: String) -> ShResult<()> {
	if self.var_exists(var_name)
	&& let Some(var) = self.vars_mut().get_mut(var_name) {
		match var.kind_mut() {
			VarKind::Arr(items) => {
				// Resolve the abstract index to a concrete map key
				let idx = match idx {
					ArrIndex::Literal(n) => {
						n
					}
					ArrIndex::FromBack(n) => {
						// NOTE(review): `items` is a sparse map, so len()
						// counts stored elements, not (max index + 1) —
						// confirm this is the intended semantics for
						// negative indexing on sparse arrays
						if items.len() >= n {
							items.len() - n
						} else {
							return Err(ShErr::simple(
								ShErrKind::ExecFail,
								format!("Index {} out of bounds for array '{}'", n, var_name)
							));
						}
					}
					// `@` / `*` cannot name a single element
					_ => return Err(ShErr::simple(
						ShErrKind::ExecFail,
						format!("Cannot index all elements of array '{}'", var_name)
					)),
				};
				items.insert(idx, val);
				return Ok(());
			}
			_ => {
				return Err(ShErr::simple(
					ShErrKind::ExecFail,
					format!("Variable '{}' is not an array", var_name)
				));
			}
		}
	}
	Ok(())
}
pub fn set_var(&mut self, var_name: &str, val: VarKind, flags: VarFlags) -> ShResult<()> {
if let Some(var) = self.vars.get_mut(var_name) {
if var.flags.contains(VarFlags::READONLY) && !flags.contains(VarFlags::READONLY) {
@@ -814,7 +1046,9 @@ pub struct MetaTab {
old_pwd: Option<String>,
// valid command cache
path_cache: HashSet<String>,
cwd_cache: HashSet<String>
cwd_cache: HashSet<String>,
// programmable completion specs
comp_specs: HashMap<String, Box<dyn CompSpec>>,
}
impl MetaTab {
@@ -827,6 +1061,21 @@ impl MetaTab {
/// Borrows the command-name cache for the current working directory.
pub fn cwd_cache(&self) -> &HashSet<String> {
	&self.cwd_cache
}
/// Borrows the table of programmable completion specs, keyed by command name.
pub fn comp_specs(&self) -> &HashMap<String, Box<dyn CompSpec>> {
	&self.comp_specs
}
/// Mutably borrows the table of programmable completion specs.
pub fn comp_specs_mut(&mut self) -> &mut HashMap<String, Box<dyn CompSpec>> {
	&mut self.comp_specs
}
/// Looks up the completion spec registered for `cmd`, returning an owned
/// clone so the caller holds no borrow on the table.
pub fn get_comp_spec(&self, cmd: &str) -> Option<Box<dyn CompSpec>> {
	// `.cloned()` is the idiomatic form of `.map(|spec| spec.clone())`
	self.comp_specs.get(cmd).cloned()
}
/// Registers (or replaces) the completion spec for `cmd`.
pub fn set_comp_spec(&mut self, cmd: String, spec: Box<dyn CompSpec>) {
	self.comp_specs.insert(cmd, spec);
}
/// Removes the completion spec for `cmd`; returns true if one was registered.
pub fn remove_comp_spec(&mut self, cmd: &str) -> bool {
	self.comp_specs.remove(cmd).is_some()
}
pub fn try_rehash_commands(&mut self) {
let path = env::var("PATH").unwrap_or_default();
let cwd = env::var("PWD").unwrap_or_default();