Early implementation of scripting elements

This commit is contained in:
2025-03-05 01:36:58 -05:00
parent 5dd9ee96ad
commit 1b3e2c0887
28 changed files with 1384 additions and 371 deletions

View File

@@ -6,12 +6,16 @@ pub fn alias(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let argv = argv.drop_first();
let mut argv_iter = argv.into_iter();
while let Some(arg) = argv_iter.next() {
let arg_raw = arg.to_string();
let arg_raw = shenv.input_slice(arg.span()).to_string();
if let Some((alias,body)) = arg_raw.split_once('=') {
log!(DEBUG, "{:?}",arg.span());
log!(DEBUG, arg_raw);
log!(DEBUG, body);
let clean_body = trim_quotes(&body);
log!(DEBUG, clean_body);
shenv.logic_mut().set_alias(alias, &clean_body);
} else {
return Err(ShErr::full(ShErrKind::SyntaxErr, "Expected an assignment in alias args", arg.span().clone()))
return Err(ShErr::full(ShErrKind::SyntaxErr, "Expected an assignment in alias args", shenv.get_input(), arg.span().clone()))
}
}
} else { unreachable!() }

View File

@@ -5,7 +5,7 @@ pub fn cd(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
if let NdRule::Command { argv, redirs: _ } = rule {
let mut argv_iter = argv.into_iter();
argv_iter.next(); // Ignore 'cd'
let dir_raw = argv_iter.next().map(|arg| arg.to_string()).unwrap_or(std::env::var("HOME")?);
let dir_raw = argv_iter.next().map(|arg| shenv.input_slice(arg.span()).into()).unwrap_or(std::env::var("HOME")?);
let dir = PathBuf::from(&dir_raw);
std::env::set_current_dir(dir)?;
shenv.vars_mut().export("PWD",&dir_raw);

View File

@@ -0,0 +1,20 @@
use crate::prelude::*;
pub fn sh_flow(node: Node, shenv: &mut ShEnv, kind: ShErrKind) -> ShResult<()> {
let rule = node.into_rule();
let mut code: i32 = 0;
if let NdRule::Command { argv, redirs } = rule {
let mut argv_iter = argv.into_iter();
while let Some(arg) = argv_iter.next() {
if let Ok(code_arg) = shenv.input_slice(arg.span()).parse() {
code = code_arg
}
}
} else { unreachable!() }
shenv.set_code(code);
// Our control flow keywords are used as ShErrKinds
// This design will halt the execution flow and start heading straight back upward
// Function returns and loop breaks/continues will be caught in the proper context to allow
// Execution to continue at the proper return point.
Err(ShErr::simple(kind, ""))
}

View File

@@ -6,7 +6,7 @@ pub fn export(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let mut argv_iter = argv.into_iter();
argv_iter.next(); // Ignore 'export'
while let Some(arg) = argv_iter.next() {
let arg_raw = arg.to_string();
let arg_raw = shenv.input_slice(arg.span()).to_string();
if let Some((var,val)) = arg_raw.split_once('=') {
shenv.vars_mut().export(var, val);
} else {

View File

@@ -14,6 +14,7 @@ pub fn continue_job(node: Node, shenv: &mut ShEnv, fg: bool) -> ShResult<()> {
ShErr::full(
ShErrKind::InternalErr,
format!("Somehow called {} with an existing foreground job",cmd),
shenv.get_input(),
blame
)
)
@@ -22,11 +23,11 @@ pub fn continue_job(node: Node, shenv: &mut ShEnv, fg: bool) -> ShResult<()> {
let curr_job_id = if let Some(id) = read_jobs(|j| j.curr_job()) {
id
} else {
return Err(ShErr::full(ShErrKind::ExecFail, "No jobs found", blame))
return Err(ShErr::full(ShErrKind::ExecFail, "No jobs found", shenv.get_input(), blame))
};
let tabid = match argv_s.next() {
Some(arg) => parse_job_id(&arg, blame.clone())?,
Some(arg) => parse_job_id(&arg, blame.clone(),shenv)?,
None => curr_job_id
};
@@ -36,7 +37,14 @@ pub fn continue_job(node: Node, shenv: &mut ShEnv, fg: bool) -> ShResult<()> {
if query_result.is_some() {
Ok(j.remove_job(id.clone()).unwrap())
} else {
Err(ShErr::full(ShErrKind::ExecFail, format!("Job id `{}' not found", tabid), blame))
Err(
ShErr::full(
ShErrKind::ExecFail,
format!("Job id `{}' not found", tabid),
shenv.get_input(),
blame
)
)
}
})?;
@@ -54,7 +62,7 @@ pub fn continue_job(node: Node, shenv: &mut ShEnv, fg: bool) -> ShResult<()> {
Ok(())
}
fn parse_job_id(arg: &str, blame: Span) -> ShResult<usize> {
fn parse_job_id(arg: &str, blame: Rc<RefCell<Span>>, shenv: &mut ShEnv) -> ShResult<usize> {
if arg.starts_with('%') {
let arg = arg.strip_prefix('%').unwrap();
if arg.chars().all(|ch| ch.is_ascii_digit()) {
@@ -66,7 +74,14 @@ fn parse_job_id(arg: &str, blame: Span) -> ShResult<usize> {
});
match result {
Some(id) => Ok(id),
None => Err(ShErr::full(ShErrKind::InternalErr,"Found a job but no table id in parse_job_id()",blame))
None => Err(
ShErr::full(
ShErrKind::InternalErr,
"Found a job but no table id in parse_job_id()",
shenv.get_input(),
blame
)
)
}
}
} else if arg.chars().all(|ch| ch.is_ascii_digit()) {
@@ -86,10 +101,24 @@ fn parse_job_id(arg: &str, blame: Span) -> ShResult<usize> {
match result {
Some(id) => Ok(id),
None => Err(ShErr::full(ShErrKind::InternalErr,"Found a job but no table id in parse_job_id()",blame))
None => Err(
ShErr::full(
ShErrKind::InternalErr,
"Found a job but no table id in parse_job_id()",
shenv.get_input(),
blame
)
)
}
} else {
Err(ShErr::full(ShErrKind::SyntaxErr,format!("Invalid fd arg: {}", arg),blame))
Err(
ShErr::full(
ShErrKind::SyntaxErr,
format!("Invalid fd arg: {}", arg),
shenv.get_input(),
blame
)
)
}
}
@@ -100,10 +129,17 @@ pub fn jobs(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let mut flags = JobCmdFlags::empty();
while let Some(arg) = argv.next() {
let arg_s = arg.to_string();
let arg_s = shenv.input_slice(arg.span());
let mut chars = arg_s.chars().peekable();
if chars.peek().is_none_or(|ch| *ch != '-') {
return Err(ShErr::full(ShErrKind::SyntaxErr, "Invalid flag in jobs call", arg.span().clone()))
return Err(
ShErr::full(
ShErrKind::SyntaxErr,
"Invalid flag in jobs call",
shenv.get_input(),
arg.span()
)
)
}
chars.next();
while let Some(ch) = chars.next() {
@@ -113,7 +149,14 @@ pub fn jobs(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
'n' => JobCmdFlags::NEW_ONLY,
'r' => JobCmdFlags::RUNNING,
's' => JobCmdFlags::STOPPED,
_ => return Err(ShErr::full(ShErrKind::SyntaxErr, "Invalid flag in jobs call", arg.span().clone()))
_ => return Err(
ShErr::full(
ShErrKind::SyntaxErr,
"Invalid flag in jobs call",
shenv.get_input(),
arg.span()
)
)
};
flags |= flag

View File

@@ -5,8 +5,9 @@ pub mod export;
pub mod jobctl;
pub mod read;
pub mod alias;
pub mod control_flow;
pub const BUILTINS: [&str;9] = [
pub const BUILTINS: [&str;13] = [
"echo",
"cd",
"pwd",
@@ -15,5 +16,9 @@ pub const BUILTINS: [&str;9] = [
"bg",
"jobs",
"read",
"alias"
"alias",
"exit",
"continue",
"return",
"break",
];

View File

@@ -27,7 +27,8 @@ pub fn read_builtin(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
shenv.vars_mut().set_var(&first_var.to_string(), &read_input);
}
*/
shenv.vars_mut().set_var(&var.to_string(), &read_input);
let var_name = shenv.input_slice(var.span()).to_string();
shenv.vars_mut().set_var(&var_name, &read_input);
}
} else {
unreachable!()

28
src/execute/ifthen.rs Normal file
View File

@@ -0,0 +1,28 @@
use crate::{parse::parse::SynTree, prelude::*};
pub fn exec_if(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let rule = node.into_rule();
if let NdRule::IfThen { cond_blocks, else_block } = rule {
if shenv.ctx().flags().contains(ExecFlags::NO_FORK) {
shenv.ctx_mut().unset_flag(ExecFlags::NO_FORK);
}
let mut cond_blocks = cond_blocks.into_iter();
while let Some(block) = cond_blocks.next() {
let cond = block.0;
let body = block.1;
let ast = SynTree::from_vec(cond);
Executor::new(ast,shenv).walk()?;
if shenv.get_code() == 0 {
let ast = SynTree::from_vec(body);
return Executor::new(ast,shenv).walk()
}
}
if let Some(block) = else_block {
let ast = SynTree::from_vec(block);
Executor::new(ast,shenv).walk()?;
}
} else { unreachable!() }
Ok(())
}

View File

@@ -4,6 +4,33 @@ use shellenv::jobs::{ChildProc, JobBldr};
use crate::{builtin::export::export, libsh::{error::Blame, sys::{execvpe, get_bin_path}, utils::{ArgVec, StrOps}}, parse::{lex::Token, parse::{CmdGuard, NdFlag, Node, NdRule, SynTree}}, prelude::*};
pub mod ifthen;
pub fn exec_input<S: Into<String>>(input: S, shenv: &mut ShEnv) -> ShResult<()> {
let input = input.into();
shenv.new_input(&input);
log!(INFO, "New input: {:?}", input);
let token_stream = Lexer::new(input,shenv).lex();
let token_stream = expand_aliases(token_stream, shenv);
for token in &token_stream {
log!(DEBUG, token);
log!(DEBUG, "{}",token.as_raw(shenv));
}
let syn_tree = Parser::new(token_stream,shenv).parse()?;
if let Err(e) = Executor::new(syn_tree, shenv).walk() {
if let ShErrKind::CleanExit = e.kind() {
let code = shenv.get_code();
sh_quit(code);
} else {
return Err(e.into())
}
}
Ok(())
}
pub struct Executor<'a> {
ast: SynTree,
shenv: &'a mut ShEnv
@@ -16,10 +43,9 @@ impl<'a> Executor<'a> {
pub fn walk(&mut self) -> ShResult<()> {
log!(DEBUG, "Starting walk");
while let Some(node) = self.ast.next_node() {
let span = node.span();
if let NdRule::CmdList { cmds } = node.clone().into_rule() {
log!(TRACE, "{:?}", cmds);
exec_list(cmds, self.shenv).try_blame(span)?
exec_list(cmds, self.shenv).try_blame(node.as_raw(self.shenv),node.span())?
} else { unreachable!() }
}
Ok(())
@@ -33,6 +59,7 @@ fn exec_list(list: Vec<(Option<CmdGuard>, Node)>, shenv: &mut ShEnv) -> ShResult
let guard = cmd_info.0;
let cmd = cmd_info.1;
let span = cmd.span();
let cmd_raw = cmd.as_raw(shenv);
if let Some(guard) = guard {
let code = shenv.get_code();
@@ -47,11 +74,12 @@ fn exec_list(list: Vec<(Option<CmdGuard>, Node)>, shenv: &mut ShEnv) -> ShResult
}
log!(TRACE, "{:?}", *cmd.rule());
match *cmd.rule() {
NdRule::Command {..} => dispatch_command(cmd, shenv).try_blame(span)?,
NdRule::Subshell {..} => exec_subshell(cmd,shenv).try_blame(span)?,
NdRule::FuncDef {..} => exec_funcdef(cmd,shenv).try_blame(span)?,
NdRule::Assignment {..} => exec_assignment(cmd,shenv).try_blame(span)?,
NdRule::Pipeline {..} => exec_pipeline(cmd, shenv).try_blame(span)?,
NdRule::Command {..} => dispatch_command(cmd, shenv).try_blame(cmd_raw, span)?,
NdRule::Subshell {..} => exec_subshell(cmd,shenv).try_blame(cmd_raw, span)?,
NdRule::IfThen {..} => ifthen::exec_if(cmd, shenv).try_blame(cmd_raw, span)?,
NdRule::FuncDef {..} => exec_funcdef(cmd,shenv).try_blame(cmd_raw, span)?,
NdRule::Assignment {..} => exec_assignment(cmd,shenv).try_blame(cmd_raw, span)?,
NdRule::Pipeline {..} => exec_pipeline(cmd, shenv).try_blame(cmd_raw, span)?,
_ => unimplemented!()
}
}
@@ -64,7 +92,7 @@ fn dispatch_command(mut node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let mut is_subsh = false;
if let NdRule::Command { ref mut argv, redirs: _ } = node.rule_mut() {
*argv = expand_argv(argv.to_vec(), shenv);
let cmd = argv.first().unwrap().to_string();
let cmd = argv.first().unwrap().as_raw(shenv);
if shenv.logic().get_function(&cmd).is_some() {
is_func = true;
} else if node.flags().contains(NdFlag::BUILTIN) {
@@ -91,33 +119,32 @@ fn exec_func(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let rule = node.into_rule();
if let NdRule::Command { argv, redirs } = rule {
let mut argv_iter = argv.into_iter();
let func_name = argv_iter.next().unwrap().to_string();
let func_name = argv_iter.next().unwrap().as_raw(shenv);
let body = shenv.logic().get_function(&func_name).unwrap().to_string();
let snapshot = shenv.clone();
shenv.vars_mut().reset_params();
while let Some(arg) = argv_iter.next() {
shenv.vars_mut().bpush_arg(&arg.to_string());
let arg_raw = shenv.input_slice(arg.span()).to_string();
shenv.vars_mut().bpush_arg(&arg_raw);
}
shenv.collect_redirs(redirs);
let lex_input = Rc::new(body);
let tokens = Lexer::new(lex_input).lex();
match Parser::new(tokens).parse() {
Ok(syn_tree) => {
match Executor::new(syn_tree, shenv).walk() {
Ok(_) => { /* yippee */ }
Err(e) => {
*shenv = snapshot;
return Err(e.into())
}
}
match exec_input(body, shenv) {
Ok(()) => {
*shenv = snapshot;
return Ok(())
}
Err(e) if e.kind() == ShErrKind::FuncReturn => {
let code = shenv.get_code();
*shenv = snapshot;
shenv.set_code(code);
return Ok(())
}
Err(e) => {
*shenv = snapshot;
return Err(e.into())
}
}
*shenv = snapshot;
}
Ok(())
}
@@ -125,9 +152,9 @@ fn exec_func(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
fn exec_funcdef(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let rule = node.into_rule();
if let NdRule::FuncDef { name, body } = rule {
let name_raw = name.to_string();
let name_raw = name.as_raw(shenv);
let name = name_raw.trim_end_matches("()");
let body_raw = body.to_string();
let body_raw = body.as_raw(shenv);
let body = body_raw[1..body_raw.len() - 1].trim();
shenv.logic_mut().set_function(name, body);
@@ -148,26 +175,18 @@ fn exec_subshell(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
exit(1);
}
for arg in argv {
shenv.vars_mut().bpush_arg(&arg.to_string());
let arg_raw = &arg.as_raw(shenv);
shenv.vars_mut().bpush_arg(arg_raw);
}
let body_raw = body.to_string();
let lexer_input = Rc::new(
body_raw[1..body_raw.len() - 1].to_string()
);
let token_stream = Lexer::new(lexer_input).lex();
match Parser::new(token_stream).parse() {
Ok(syn_tree) => {
if let Err(e) = Executor::new(syn_tree, shenv).walk() {
write_err(e)?;
exit(1);
}
}
let body_raw = body.as_raw(shenv);
match exec_input(body_raw, shenv) {
Ok(()) => sh_quit(0),
Err(e) => {
write_err(e)?;
exit(1);
eprintln!("{}",e);
sh_quit(1);
}
}
exit(0);
} else {
match unsafe { fork()? } {
Child => {
@@ -177,26 +196,17 @@ fn exec_subshell(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
exit(1);
}
for arg in argv {
shenv.vars_mut().bpush_arg(&arg.to_string());
let arg_raw = &arg.as_raw(shenv);
shenv.vars_mut().bpush_arg(arg_raw);
}
let body_raw = body.to_string();
let lexer_input = Rc::new(
body_raw[1..body_raw.len() - 1].to_string()
);
let token_stream = Lexer::new(lexer_input).lex();
match Parser::new(token_stream).parse() {
Ok(syn_tree) => {
if let Err(e) = Executor::new(syn_tree, shenv).walk() {
write_err(e)?;
exit(1);
}
}
let body_raw = body.as_raw(shenv);
match exec_input(body_raw, shenv) {
Ok(()) => sh_quit(0),
Err(e) => {
write_err(e)?;
exit(1);
eprintln!("{}",e);
sh_quit(1);
}
}
exit(0);
}
Parent { child } => {
*shenv = snapshot;
@@ -218,7 +228,7 @@ fn exec_subshell(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
fn exec_builtin(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
log!(DEBUG, "Executing builtin");
let command = if let NdRule::Command { argv, redirs: _ } = node.rule() {
argv.first().unwrap().to_string()
argv.first().unwrap().as_raw(shenv)
} else { unreachable!() };
log!(TRACE, "{}", command.as_str());
@@ -232,6 +242,8 @@ fn exec_builtin(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
"bg" => continue_job(node, shenv, false)?,
"read" => read_builtin(node, shenv)?,
"alias" => alias(node, shenv)?,
"exit" => sh_flow(node, shenv, ShErrKind::CleanExit)?,
"return" => sh_flow(node, shenv, ShErrKind::FuncReturn)?,
_ => unimplemented!("Have not yet implemented support for builtin `{}'",command)
}
log!(TRACE, "done");
@@ -247,7 +259,7 @@ fn exec_assignment(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let mut assigns = assignments.into_iter();
if let Some(cmd) = cmd {
while let Some(assign) = assigns.next() {
let assign_raw = assign.to_string();
let assign_raw = assign.as_raw(shenv);
if let Some((var,val)) = assign_raw.split_once('=') {
shenv.vars_mut().export(var, val);
}
@@ -259,7 +271,7 @@ fn exec_assignment(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
}
} else {
while let Some(assign) = assigns.next() {
let assign_raw = assign.to_string();
let assign_raw = assign.as_raw(shenv);
if let Some((var,val)) = assign_raw.split_once('=') {
shenv.vars_mut().set_var(var, val);
}
@@ -288,7 +300,7 @@ fn exec_pipeline(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
(Some(r_pipe),Some(w_pipe))
};
if let NdRule::Command { argv, redirs: _ } = cmd.rule() {
let cmd_name = argv.first().unwrap().span().get_slice().to_string();
let cmd_name = argv.first().unwrap().as_raw(shenv);
cmd_names.push(cmd_name);
} else if let NdRule::Subshell {..} = cmd.rule() {
cmd_names.push("subshell".to_string());
@@ -305,12 +317,12 @@ fn exec_pipeline(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
// Create some redirections
if let Some(w_pipe) = w_pipe {
let wpipe_redir = Redir::new(1, RedirType::Output, RedirTarget::Fd(w_pipe.as_raw_fd()));
let wpipe_redir = Redir::output(1, w_pipe);
shenv.ctx_mut().push_rdr(wpipe_redir);
}
// Use the r_pipe created in the last iteration
if let Some(prev_rpipe) = prev_rpipe {
let rpipe_redir = Redir::new(0, RedirType::Input, RedirTarget::Fd(prev_rpipe.as_raw_fd()));
let rpipe_redir = Redir::input(0, prev_rpipe);
shenv.ctx_mut().push_rdr(rpipe_redir);
}
@@ -348,6 +360,7 @@ fn exec_pipeline(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
fn exec_cmd(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
log!(DEBUG, "Executing command");
let blame = node.span();
let blame_raw = node.as_raw(shenv);
let rule = node.into_rule();
if let NdRule::Command { argv, redirs } = rule {
@@ -395,7 +408,7 @@ fn exec_cmd(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
}
}
} else {
return Err(ShErr::full(ShErrKind::CmdNotFound, format!("{}", command), blame))
return Err(ShErr::full(ShErrKind::CmdNotFound, format!("{}", command), shenv.get_input(), blame))
}
} else { unreachable!("Found this rule in exec_cmd: {:?}", rule) }
Ok(())

View File

@@ -1,75 +1,62 @@
use crate::{parse::lex::SEPARATORS, prelude::*};
pub fn expand_aliases(input: &str, shenv: &mut ShEnv) -> Option<String> {
let mut result = input.to_string();
let mut expanded_aliases = Vec::new();
let mut found_in_iteration = true;
pub fn expand_alias(candidate: Token, shenv: &mut ShEnv) -> Vec<Token> {
let mut tokens = vec![];
let mut work_stack = VecDeque::new();
let mut expanded_aliases = vec![];
let logic = shenv.logic().clone();
// Loop until no new alias expansion happens.
while found_in_iteration {
found_in_iteration = false;
let mut new_result = String::new();
let mut chars = result.chars().peekable();
let mut alias_cand = String::new();
let mut is_cmd = true;
// Start with the candidate token in the work queue
work_stack.bpush(candidate);
while let Some(ch) = chars.next() {
match ch {
';' | '\n' => {
new_result.push(ch);
is_cmd = true;
// Consume any extra whitespace or delimiters.
while let Some(&next_ch) = chars.peek() {
if matches!(next_ch, ' ' | '\t' | ';' | '\n') {
new_result.push(next_ch);
chars.next();
} else {
break;
}
// Process until there are no more tokens in the queue
while let Some(token) = work_stack.fpop() {
if token.rule() == TkRule::Ident {
let candidate_str = token.as_raw(shenv);
if let Some(alias) = logic.get_alias(&candidate_str) {
// Expand the alias only if it hasn't been expanded yet
if !expanded_aliases.contains(&candidate_str) {
expanded_aliases.push(candidate_str);
let mut new_tokens = shenv.expand_input(alias, token.span());
for token in new_tokens.iter_mut() {
work_stack.bpush(token.clone());
}
} else {
// If already expanded, just add the token to the output
tokens.push(token);
}
' ' | '\t' => {
is_cmd = false;
new_result.push(ch);
}
_ if is_cmd => {
// Accumulate token characters.
alias_cand.push(ch);
while let Some(&next_ch) = chars.peek() {
if matches!(next_ch, ' ' | '\t' | ';' | '\n') {
break;
} else {
alias_cand.push(next_ch);
chars.next();
}
}
// Check for an alias expansion.
if let Some(alias) = shenv.logic().get_alias(&alias_cand) {
// Only expand if we haven't already done so.
if !expanded_aliases.contains(&alias_cand) {
new_result.push_str(alias);
expanded_aliases.push(alias_cand.clone());
found_in_iteration = true;
} else {
new_result.push_str(&alias_cand);
}
} else {
new_result.push_str(&alias_cand);
}
alias_cand.clear();
}
_ => {
new_result.push(ch);
} else {
tokens.push(token);
}
} else {
tokens.push(token);
}
}
tokens
}
pub fn expand_aliases(tokens: Vec<Token>, shenv: &mut ShEnv) -> Vec<Token> {
let mut stream = tokens.iter();
let mut processed = vec![];
let mut is_command = true;
while let Some(token) = stream.next() {
match token.rule() {
_ if SEPARATORS.contains(&token.rule()) => {
is_command = true;
processed.push(token.clone());
}
TkRule::Ident if is_command => {
is_command = false;
let mut alias_tokens = expand_alias(token.clone(), shenv);
log!(DEBUG, alias_tokens);
if !alias_tokens.is_empty() {
processed.append(&mut alias_tokens);
} else {
processed.push(token.clone());
}
}
_ => processed.push(token.clone()),
}
result = new_result;
log!(DEBUG, result);
}
if expanded_aliases.is_empty() {
None
} else {
Some(result)
}
processed
}

30
src/expand/cmdsub.rs Normal file
View File

@@ -0,0 +1,30 @@
use crate::prelude::*;
pub fn expand_cmdsub(token: Token, shenv: &mut ShEnv) -> ShResult<Vec<Token>> {
let mut new_tokens = vec![];
let cmdsub_raw = token.as_raw(shenv);
let body = &cmdsub_raw[2..cmdsub_raw.len() - 1].to_string(); // From '$(this)' to 'this'
let (r_pipe,w_pipe) = c_pipe()?;
let pipe_redir = Redir::output(1, w_pipe);
let mut sub_shenv = shenv.clone();
sub_shenv.ctx_mut().set_flag(ExecFlags::NO_FORK);
sub_shenv.collect_redirs(vec![pipe_redir]);
match unsafe { fork()? } {
Child => {
close(r_pipe).ok();
exec_input(body, shenv).abort_if_err();
sh_quit(0);
}
Parent { child } => {
close(w_pipe).ok();
}
}
let output = read_to_string(r_pipe)?;
if !output.is_empty() {
let lex_input = Rc::new(output);
}
Ok(new_tokens)
}

View File

@@ -1,9 +1,10 @@
pub mod expand_vars;
pub mod vars;
pub mod tilde;
pub mod alias;
pub mod cmdsub;
use alias::expand_aliases;
use expand_vars::{expand_dquote, expand_var};
use vars::{expand_dquote, expand_var};
use tilde::expand_tilde;
use crate::prelude::*;
@@ -11,7 +12,7 @@ use crate::prelude::*;
pub fn expand_argv(argv: Vec<Token>, shenv: &mut ShEnv) -> Vec<Token> {
let mut processed = vec![];
for arg in argv {
log!(DEBUG, arg);
log!(DEBUG, "{}",arg.as_raw(shenv));
log!(DEBUG, processed);
match arg.rule() {
TkRule::DQuote => {
@@ -23,7 +24,7 @@ pub fn expand_argv(argv: Vec<Token>, shenv: &mut ShEnv) -> Vec<Token> {
processed.append(&mut varsub_exp);
}
TkRule::TildeSub => {
let tilde_exp = expand_tilde(arg.clone());
let tilde_exp = expand_tilde(arg.clone(), shenv);
processed.push(tilde_exp);
}
_ => {

View File

@@ -1,12 +1,11 @@
use crate::prelude::*;
pub fn expand_tilde(tilde_sub: Token) -> Token {
let tilde_sub_raw = tilde_sub.to_string();
pub fn expand_tilde(tilde_sub: Token, shenv: &mut ShEnv) -> Token {
let mut tilde_sub_raw = tilde_sub.as_raw(shenv);
if tilde_sub_raw.starts_with('~') {
let home = std::env::var("HOME").unwrap_or_default();
tilde_sub_raw.replacen('~', &home, 1);
let lex_input = Rc::new(tilde_sub_raw);
let mut tokens = Lexer::new(lex_input).lex();
tilde_sub_raw = tilde_sub_raw.replacen('~', &home, 1);
let mut tokens = Lexer::new(tilde_sub_raw,shenv).lex();
tokens.pop().unwrap_or(tilde_sub)
} else {
tilde_sub

View File

@@ -1,18 +1,15 @@
use crate::{parse::lex::Token, prelude::*};
pub fn expand_var(var_sub: Token, shenv: &mut ShEnv) -> Vec<Token> {
let var_name = var_sub.to_string();
let var_name = var_sub.as_raw(shenv);
let var_name = var_name.trim_start_matches('$').trim_matches(['{','}']);
let value = Rc::new(
shenv.vars()
.get_var(var_name)
.to_string()
);
Lexer::new(value).lex() // Automatically handles word splitting for us
let value = shenv.vars().get_var(var_name).to_string();
shenv.expand_input(&value, var_sub.span())
}
pub fn expand_dquote(dquote: Token, shenv: &mut ShEnv) -> Token {
let dquote_raw = dquote.to_string();
let dquote_raw = dquote.as_raw(shenv);
let mut result = String::new();
let mut var_name = String::new();
let mut chars = dquote_raw.chars().peekable();
@@ -63,5 +60,8 @@ pub fn expand_dquote(dquote: Token, shenv: &mut ShEnv) -> Token {
log!(DEBUG, result);
Lexer::new(Rc::new(result)).lex().pop().unwrap_or(dquote)
log!(DEBUG, "{:?}",dquote.span());
let token = shenv.expand_input(&result, dquote.span()).pop().unwrap_or(dquote);
log!(DEBUG, "{}",token.as_raw(shenv));
token
}

View File

@@ -5,16 +5,62 @@ use crate::prelude::*;
pub type ShResult<T> = Result<T,ShErr>;
pub trait ResultExt {
fn eprint(self) -> Self;
fn abort_if_err(&self);
}
#[derive(Clone,Debug)]
pub struct BlamePair {
input: String,
span: Rc<RefCell<Span>>
}
impl BlamePair {
pub fn new(input: String, span: Rc<RefCell<Span>>) -> Self {
Self { input, span }
}
pub fn start(&self) -> usize {
self.span.borrow().start()
}
pub fn end(&self) -> usize {
self.span.borrow().end()
}
pub fn len(&self) -> usize {
self.input.len()
}
}
impl Into<String> for BlamePair {
fn into(self) -> String {
self.input
}
}
impl<T, E: Display> ResultExt for Result<T, E> {
fn eprint(self) -> Self {
if let Err(err) = &self {
eprintln!("{}", err);
}
self
}
fn abort_if_err(&self) {
if let Err(err) = &self {
eprintln!("{}", err);
sh_quit(1)
}
}
}
pub trait Blame {
/// Blame a span for a propagated error. This will convert a ShErr::Simple into a ShErr::Full
/// This will also set the span on a ShErr::Builder
fn blame(self, span: Span) -> Self;
fn blame(self, input: String, span: Rc<RefCell<Span>>) -> Self;
/// If an error is propagated to this point, then attempt to blame a span.
/// If the error in question has already blamed a span, don't overwrite it.
/// Used as a last resort in higher level contexts in case an error somehow goes unblamed
fn try_blame(self, span: Span) -> Self;
fn try_blame(self, input: String, span: Rc<RefCell<Span>>) -> Self;
}
impl From<std::io::Error> for ShErr {
@@ -42,17 +88,17 @@ impl From<Errno> for ShErr {
}
impl<T> Blame for Result<T,ShErr> {
fn blame(self, span: Span) -> Self {
fn blame(self, input: String, span: Rc<RefCell<Span>>) -> Self {
if let Err(mut e) = self {
e.blame(span);
e.blame(input,span);
Err(e)
} else {
self
}
}
fn try_blame(self, span: Span) -> Self {
fn try_blame(self, input: String, span: Rc<RefCell<Span>>) -> Self {
if let Err(mut e) = self {
e.try_blame(span);
e.try_blame(input,span);
Err(e)
} else {
self
@@ -60,7 +106,7 @@ impl<T> Blame for Result<T,ShErr> {
}
}
#[derive(Debug,Clone)]
#[derive(Debug,Copy,Clone,PartialEq,Eq)]
pub enum ShErrKind {
IoErr,
SyntaxErr,
@@ -85,7 +131,7 @@ impl Default for ShErrKind {
#[derive(Clone,Debug)]
pub enum ShErr {
Simple { kind: ShErrKind, message: String },
Full { kind: ShErrKind, message: String, span: Span },
Full { kind: ShErrKind, message: String, blame: BlamePair },
}
impl ShErr {
@@ -95,32 +141,35 @@ impl ShErr {
pub fn io() -> Self {
io::Error::last_os_error().into()
}
pub fn full<S: Into<String>>(kind: ShErrKind, message: S, span: Span) -> Self {
Self::Full { kind, message: message.into(), span }
pub fn full<S: Into<String>>(kind: ShErrKind, message: S, input: String, span: Rc<RefCell<Span>>) -> Self {
let blame = BlamePair::new(input.to_string(), span);
Self::Full { kind, message: message.into(), blame }
}
pub fn try_blame(&mut self, blame: Span) {
pub fn try_blame(&mut self, input: String, span: Rc<RefCell<Span>>) {
let blame_pair = BlamePair::new(input, span);
match self {
Self::Full {..} => {
/* Do not overwrite */
}
Self::Simple { kind, message } => {
*self = Self::Full { kind: core::mem::take(kind), message: core::mem::take(message), span: blame }
*self = Self::Full { kind: core::mem::take(kind), message: core::mem::take(message), blame: blame_pair }
}
}
}
pub fn blame(&mut self, blame: Span) {
pub fn blame(&mut self, input: String, span: Rc<RefCell<Span>>) {
let blame_pair = BlamePair::new(input, span);
match self {
Self::Full { kind: _, message: _, span } => {
*span = blame;
Self::Full { kind: _, message: _, blame } => {
*blame = blame_pair;
}
Self::Simple { kind, message } => {
*self = Self::Full { kind: core::mem::take(kind), message: core::mem::take(message), span: blame }
*self = Self::Full { kind: core::mem::take(kind), message: core::mem::take(message), blame: blame_pair }
}
}
}
pub fn with_msg(&mut self, new_message: String) {
match self {
Self::Full { kind: _, message, span: _ } => {
Self::Full { kind: _, message, blame: _ } => {
*message = new_message
}
Self::Simple { kind: _, message } => {
@@ -128,9 +177,19 @@ impl ShErr {
}
}
}
pub fn kind(&self) -> ShErrKind {
match self {
ShErr::Simple { kind, message: _ } => {
*kind
}
ShErr::Full { kind, message: _, blame: _ } => {
*kind
}
}
}
pub fn with_kind(&mut self, new_kind: ShErrKind) {
match self {
Self::Full { kind, message: _, span: _ } => {
Self::Full { kind, message: _, blame: _ } => {
*kind = new_kind
}
Self::Simple { kind, message: _ } => {
@@ -141,7 +200,7 @@ impl ShErr {
pub fn display_kind(&self) -> String {
match self {
ShErr::Simple { kind, message: _ } |
ShErr::Full { kind, message: _, span: _ } => {
ShErr::Full { kind, message: _, blame: _ } => {
match kind {
ShErrKind::IoErr => "I/O Error: ".into(),
ShErrKind::SyntaxErr => "Syntax Error: ".into(),
@@ -159,9 +218,31 @@ impl ShErr {
}
}
}
pub fn get_line(&self) -> (usize,usize,String) {
if let ShErr::Full { kind, message, blame } = self {
unsafe {
let mut dist = 0;
let mut line_no = 0;
let window = self.get_window();
let mut lines = window.lines();
while let Some(line) = lines.next() {
line_no += 1;
dist += line.len();
if dist > blame.start() {
dist -= line.len();
let offset = blame.start() - dist;
return (offset,line_no,line.to_string())
}
}
}
(0,0,String::new())
} else {
(0,0,String::new())
}
}
pub fn get_window(&self) -> String {
if let ShErr::Full { kind: _, message: _, span } = self {
let window = span.get_slice();
if let ShErr::Full { kind: _, message: _, blame } = self.clone() {
let window: String = blame.into();
window.split_once('\n').unwrap_or((&window,"")).0.to_string()
} else {
String::new()
@@ -173,9 +254,10 @@ impl Display for ShErr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let error_display = match self {
ShErr::Simple { kind: _, message } => format!("{}{}",self.display_kind(),message),
ShErr::Full { kind: _, message, span } => {
let (offset,line_no,line_text) = span.get_line();
let dist = span.end() - span.start();
ShErr::Full { kind: _, message, blame } => {
let (offset,line_no,line_text) = self.get_line();
log!(DEBUG, blame);
let dist = blame.len();
let padding = " ".repeat(offset);
let line_inner = "~".repeat(dist.saturating_sub(2));
let err_kind = &self.display_kind().styled(Style::Red | Style::Bold);

View File

@@ -40,6 +40,31 @@ pub fn c_pipe() -> Result<(RawFd,RawFd),Errno> {
Ok((pipes[0],pipes[1]))
}
pub fn sh_quit(code: i32) -> ! {
write_jobs(|j| {
for job in j.jobs_mut().iter_mut().flatten() {
job.killpg(Signal::SIGTERM).ok();
}
});
exit(code);
}
pub fn read_to_string(fd: i32) -> ShResult<String> {
let mut buf = Vec::with_capacity(4096);
let mut temp_buf = [0u8;1024];
loop {
match read(fd, &mut temp_buf) {
Ok(0) => break, // EOF
Ok(n) => buf.extend_from_slice(&temp_buf[..n]),
Err(Errno::EINTR) => continue, // Retry on EINTR
Err(e) => return Err(e.into()), // Return other errors
}
}
Ok(String::from_utf8_lossy(&buf).to_string())
}
pub fn execvpe(cmd: String, argv: Vec<String>, envp: Vec<String>) -> Result<(),Errno> {
let cmd_raw = CString::new(cmd).unwrap();

View File

@@ -3,10 +3,26 @@ use std::{os::fd::{AsFd, AsRawFd, BorrowedFd, FromRawFd, OwnedFd}, str::FromStr}
use nix::libc::getpgrp;
use crate::{expand::{expand_vars::{expand_dquote, expand_var}, tilde::expand_tilde}, prelude::*};
use crate::prelude::*;
use super::term::StyleSet;
pub trait RedirTargetType {
fn as_tgt(self) -> RedirTarget;
}
impl RedirTargetType for PathBuf {
fn as_tgt(self) -> RedirTarget {
RedirTarget::File(self)
}
}
impl RedirTargetType for i32 {
fn as_tgt(self) -> RedirTarget {
RedirTarget::Fd(self)
}
}
pub trait StrOps {
/// This function operates on anything that implements `AsRef<str>` and `Display`, which is mainly strings.
/// It takes a 'Style' which can be passed as a single Style object like `Style::Cyan` or a Bit OR of many styles,
@@ -31,7 +47,7 @@ impl ArgVec for Vec<Token> {
let mut argv_iter = self.into_iter();
let mut argv_processed = vec![];
while let Some(arg) = argv_iter.next() {
let cleaned = trim_quotes(&arg);
let cleaned = trim_quotes(&arg.as_raw(shenv));
argv_processed.push(cleaned);
}
argv_processed
@@ -248,6 +264,12 @@ impl Redir {
pub fn new(src: i32, op: RedirType, tgt: RedirTarget) -> Self {
Self { src, op, tgt }
}
pub fn output(src: i32, tgt: impl RedirTargetType) -> Self {
Self::new(src, RedirType::Output, tgt.as_tgt())
}
pub fn input(src: i32, tgt: impl RedirTargetType) -> Self {
Self::new(src, RedirType::Input, tgt.as_tgt())
}
}
#[derive(Debug,Clone)]

View File

@@ -10,8 +10,6 @@ pub mod prompt;
pub mod builtin;
pub mod expand;
use libc::PIPE_BUF;
use nix::unistd::setpgid;
use signal::sig_setup;
use crate::prelude::*;
@@ -22,32 +20,13 @@ pub fn main() {
loop {
log!(TRACE, "Entered loop");
let mut line = match prompt::read_line(&mut shenv) {
let line = match prompt::read_line(&mut shenv) {
Ok(line) => line,
Err(e) => {
eprintln!("{}",e);
continue;
}
};
if let Some(line_exp) = expand_aliases(&line, &mut shenv) {
line = line_exp;
}
let input = Rc::new(line);
log!(INFO, "New input: {:?}", input);
let token_stream = Lexer::new(input).lex();
log!(DEBUG, token_stream);
log!(DEBUG, token_stream);
log!(TRACE, "Token stream: {:?}", token_stream);
match Parser::new(token_stream).parse() {
Err(e) => {
eprintln!("{}",e);
}
Ok(syn_tree) => {
if let Err(e) = Executor::new(syn_tree, &mut shenv).walk() {
eprintln!("{}",e);
}
}
}
log!(TRACE, "Finished iteration");
let _ = exec_input(line, &mut shenv).eprint();
}
}

View File

@@ -1,66 +1,66 @@
use std::fmt::{Debug, Display};
use std::{cell::Ref, fmt::{Debug, Display}};
use crate::prelude::*;
pub const KEYWORDS: [&str;14] = [
"if",
"then",
"elif",
"else",
"fi",
"while",
"until",
"for",
"in",
"select",
"do",
"done",
"case",
"esac"
pub const KEYWORDS: [TkRule;14] = [
TkRule::If,
TkRule::Then,
TkRule::Elif,
TkRule::Else,
TkRule::Fi,
TkRule::While,
TkRule::Until,
TkRule::For,
TkRule::In,
TkRule::Select,
TkRule::Do,
TkRule::Done,
TkRule::Case,
TkRule::Esac
];
pub const SEPARATORS: [TkRule; 7] = [
pub const SEPARATORS: [TkRule; 6] = [
TkRule::Sep,
TkRule::AndOp,
TkRule::OrOp,
TkRule::PipeOp,
TkRule::ErrPipeOp,
TkRule::BgOp,
TkRule::Keyword // Keywords don't count as command names
];
/// A lexer rule: given the remaining input, report how many leading bytes this
/// rule consumes, or `None` if it does not match at the current position.
pub trait LexRule {
	fn try_match(input: &str) -> Option<usize>;
}
pub struct Lexer {
input: Rc<String>,
pub struct Lexer<'a> {
input: String,
tokens: Vec<Token>,
is_command: bool,
shenv: &'a mut ShEnv,
consumed: usize
}
impl Lexer {
pub fn new(input: Rc<String>) -> Self {
Self { input, tokens: vec![], is_command: true, consumed: 0 }
impl<'a> Lexer<'a> {
pub fn new(input: String, shenv: &'a mut ShEnv) -> Self {
Self { input, tokens: vec![], is_command: true, shenv, consumed: 0 }
}
pub fn lex(mut self) -> Vec<Token> {
unsafe {
let mut input = self.input.as_str();
while let Some((mut rule,len)) = TkRule::try_match(input) {
// If we see a keyword in an argument position, it's actually an ident
if !self.is_command && rule == TkRule::Keyword {
if !self.is_command && KEYWORDS.contains(&rule) {
rule = TkRule::Ident
// If we are in a command right now, after this we are in arguments
} else if self.is_command && !matches!(rule,TkRule::Keyword | TkRule::Whitespace) {
} else if self.is_command && rule != TkRule::Whitespace && !KEYWORDS.contains(&rule) {
self.is_command = false;
}
// If we see a separator like && or ;, we are now in a command again
if SEPARATORS.contains(&rule) {
self.is_command = true;
}
let span = Span::new(self.input.clone(),self.consumed,self.consumed + len);
let span = self.shenv.inputman_mut().new_span(self.consumed, self.consumed + len);
let token = Token::new(rule, span);
self.consumed += len;
input = &input[len..];
@@ -77,73 +77,43 @@ impl Lexer {
#[derive(Clone)]
pub struct Token {
rule: TkRule,
span: Span
span: Rc<RefCell<Span>>
}
impl Token {
pub fn new(rule: TkRule, span: Span) -> Self {
pub fn new(rule: TkRule, span: Rc<RefCell<Span>>) -> Self {
Self { rule, span }
}
pub fn span(&self) -> &Span {
&self.span
}
pub fn span_mut(&mut self) -> &mut Span {
&mut self.span
pub fn span(&self) -> Rc<RefCell<Span>> {
self.span.clone()
}
pub fn rule(&self) -> TkRule {
self.rule
}
/// Resolve this token's span against the shell's current input buffer and
/// return the raw source text it covers.
pub fn as_raw(&self, shenv: &mut ShEnv) -> String {
	shenv.input_slice(self.span()).to_string()
}
}
impl Debug for Token {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let info = (self.rule(),self.to_string(),self.span.start,self.span.end);
let info = (self.rule(),self.span.borrow().start,self.span.borrow().end);
write!(f,"{:?}",info)
}
}
impl Display for Token {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let slice = self.span.get_slice();
write!(f,"{}",slice)
}
}
#[derive(Debug,Clone)]
pub struct Span {
input: Rc<String>,
start: usize,
end: usize
}
impl Span {
pub fn new(input: Rc<String>, start: usize, end: usize) -> Self {
Self { input, start, end } }
pub fn get_slice(&self) -> String {
unsafe {
let slice = &self.input[self.start..self.end];
slice.to_string()
}
}
pub fn get_line(&self) -> (usize,usize,String) {
unsafe {
let mut dist = 0;
let mut line_no = 0;
let mut lines = self.input.lines();
while let Some(line) = lines.next() {
line_no += 1;
dist += line.len();
if dist > self.start {
dist -= line.len();
let offset = self.start - dist;
return (offset,line_no,line.to_string())
}
}
}
(0,0,String::new())
pub fn new(start: usize, end: usize) -> Self {
Self { start, end }
}
pub fn start(&self) -> usize {
self.start
@@ -151,8 +121,19 @@ impl Span {
pub fn end(&self) -> usize {
self.end
}
pub fn get_input(&self) -> Rc<String> {
self.input.clone()
/// Pull the span's start back to `start` if it currently begins after it.
/// A start already at or before `start` is left unchanged.
pub fn clamp_start(&mut self, start: usize) {
	if self.start > start {
		self.start = start
	}
}
/// Truncate the span's end to `end` if it currently extends past it.
/// An end already at or before `end` is left unchanged.
pub fn clamp_end(&mut self, end: usize) {
	if self.end > end {
		self.end = end
	}
}
/// Shift both endpoints by `delta` (may be negative), saturating at zero so
/// the span can never underflow.
pub fn shift(&mut self, delta: isize) {
	self.start = self.start.saturating_add_signed(delta);
	self.end = self.end.saturating_add_signed(delta);
}
}
@@ -204,7 +185,20 @@ pub enum TkRule {
CmdSub,
DQuote,
SQuote,
Keyword,
If,
Then,
Elif,
Else,
Fi,
While,
Until,
For,
In,
Select,
Do,
Done,
Case,
Esac,
Assign,
Ident,
Sep,
@@ -232,8 +226,21 @@ impl TkRule {
try_match!(TildeSub,input);
try_match!(Subshell,input);
try_match!(Sep,input);
try_match!(Keyword,input);
try_match!(Assign,input);
try_match!(If,input);
try_match!(Then,input);
try_match!(Elif,input);
try_match!(Else,input);
try_match!(Fi,input);
try_match!(While,input);
try_match!(Until,input);
try_match!(For,input);
try_match!(In,input);
try_match!(Select,input);
try_match!(Do,input);
try_match!(Done,input);
try_match!(Case,input);
try_match!(Esac,input);
try_match!(Ident,input);
None
}
@@ -371,17 +378,159 @@ tkrule_def!(OrOp, |input: &str| {
}
});
tkrule_def!(Keyword, |input: &str| {
for &kw in KEYWORDS.iter() {
if input.starts_with(kw) {
let len = kw.len();
if input.chars().nth(len).map_or(true, |ch| ch.is_whitespace() || ch == ';') {
return Some(len);
}
// Matches the `if` keyword when it stands alone — followed by whitespace,
// a ';', or the end of the input. A longer word such as "ifconfig" must
// not match, so the character right after the keyword is inspected.
tkrule_def!(If, |input: &str| {
	if !input.starts_with("if") {
		return None;
	}
	match input.chars().nth(2) {
		Some(ch) if ch.is_whitespace() || ch == ';' => Some(2),
		Some(_) => None,
		None => Some(2), // "if" is the entire input
	}
});
// Matches the `then` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Then, |input: &str| {
	if input.starts_with("then") {
		match input.chars().nth(4) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(4),
			Some(_) => None,
			None => Some(4), // "then" is the entire input
		}
	} else {
		None
	}
});
// Matches the `elif` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Elif, |input: &str| {
	if input.starts_with("elif") {
		match input.chars().nth(4) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(4),
			Some(_) => None,
			None => Some(4), // "elif" is the entire input
		}
	} else {
		None
	}
});
// Matches the `else` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Else, |input: &str| {
	if input.starts_with("else") {
		match input.chars().nth(4) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(4),
			Some(_) => None,
			None => Some(4), // "else" is the entire input
		}
	} else {
		None
	}
});
// Matches the `fi` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Fi, |input: &str| {
	if input.starts_with("fi") {
		match input.chars().nth(2) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(2),
			Some(_) => None,
			None => Some(2), // "fi" is the entire input
		}
	} else {
		None
	}
});
// Matches the `while` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(While, |input: &str| {
	if input.starts_with("while") {
		match input.chars().nth(5) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(5),
			Some(_) => None,
			None => Some(5), // "while" is the entire input
		}
	} else {
		None
	}
});
// Matches the `until` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Until, |input: &str| {
	if input.starts_with("until") {
		match input.chars().nth(5) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(5),
			Some(_) => None,
			None => Some(5), // "until" is the entire input
		}
	} else {
		None
	}
});
// Matches the `for` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(For, |input: &str| {
	if input.starts_with("for") {
		match input.chars().nth(3) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(3),
			Some(_) => None,
			None => Some(3), // "for" is the entire input
		}
	} else {
		None
	}
});
// Matches the `in` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(In, |input: &str| {
	if input.starts_with("in") {
		match input.chars().nth(2) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(2),
			Some(_) => None,
			None => Some(2), // "in" is the entire input
		}
	} else {
		None
	}
});
// Matches the `select` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Select, |input: &str| {
	if input.starts_with("select") {
		match input.chars().nth(6) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(6),
			Some(_) => None,
			None => Some(6), // "select" is the entire input
		}
	} else {
		None
	}
});
// Matches the `do` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Do, |input: &str| {
	if input.starts_with("do") {
		match input.chars().nth(2) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(2),
			Some(_) => None,
			None => Some(2), // "do" is the entire input
		}
	} else {
		None
	}
});
// Matches the `done` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Done, |input: &str| {
	if input.starts_with("done") {
		match input.chars().nth(4) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(4),
			Some(_) => None,
			None => Some(4), // "done" is the entire input
		}
	} else {
		None
	}
});
// Matches the `case` keyword when followed by whitespace, ';', or end of input.
tkrule_def!(Case, |input: &str| {
	if input.starts_with("case") {
		match input.chars().nth(4) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(4),
			Some(_) => None,
			None => Some(4), // "case" is the entire input
		}
	} else {
		None
	}
});
// Matches the `esac` keyword when followed by whitespace, ';', or end of input.
//
// Fix: the original had a stray trailing `None` after the if/else expression,
// which discarded the match result (the if/else sat in statement position, so
// the rule could never report a match). The closure now ends with the if/else
// expression itself, mirroring every other keyword rule.
tkrule_def!(Esac, |input: &str| {
	if input.starts_with("esac") {
		match input.chars().nth(4) {
			Some(ch) if ch.is_whitespace() || ch == ';' => Some(4),
			Some(_) => None,
			None => Some(4), // "esac" is the entire input
		}
	} else {
		None
	}
});
tkrule_def!(Ident, |input: &str| {
@@ -717,6 +866,14 @@ tkrule_def!(BraceGrp, |input: &str| {
});
tkrule_def!(RedirOp, |input: &str| {
if let Some(ch) = input.chars().next() {
match ch {
'>' |
'<' |
'&' => { /* Continue */ }
_ => return None
}
}
// Order matters here
// For instance, if '>' is checked before '>>', '>' will always match first, and '>>' will never be checked
try_match_inner!(RedirCombineAppend,input); // Ex: &>>

View File

@@ -1,9 +1,9 @@
use core::fmt::Display;
use std::str::FromStr;
use std::{cell::Ref, str::FromStr};
use crate::prelude::*;
use super::lex::{TkRule, Span, Token};
use super::lex::{Span, TkRule, Token, KEYWORDS};
bitflags! {
#[derive(Debug,Clone,Copy,PartialEq,Eq)]
@@ -17,12 +17,12 @@ bitflags! {
pub trait ParseRule {
/// Used for cases where a rule is optional
fn try_match(input: &[Token]) -> ShResult<Option<Node>>;
fn try_match(input: &[Token], shenv: &mut ShEnv) -> ShResult<Option<Node>>;
/// Used for cases where a rule is assumed based on context
/// For instance, if the "for" keyword is encountered, then it *must* be a for loop
/// And if it isn't, return a parse error
fn assert_match(input: &[Token]) -> ShResult<Node> {
Self::try_match(input)?.ok_or_else(||
fn assert_match(input: &[Token], shenv: &mut ShEnv) -> ShResult<Node> {
Self::try_match(input,shenv)?.ok_or_else(||
ShErr::simple(ShErrKind::ParseErr, "Parse Error")
)
}
@@ -40,7 +40,7 @@ pub enum CmdGuard {
pub struct Node {
node_rule: NdRule,
tokens: Vec<Token>,
span: Span,
span: Rc<RefCell<Span>>,
flags: NdFlag,
}
@@ -60,9 +60,12 @@ impl Node {
pub fn into_rule(self) -> NdRule {
self.node_rule
}
pub fn span(&self) -> Span {
pub fn span(&self) -> Rc<RefCell<Span>> {
self.span.clone()
}
/// Resolve this node's span against the shell's current input buffer and
/// return the raw source text it covers.
pub fn as_raw(&self, shenv: &mut ShEnv) -> String {
	shenv.input_slice(self.span()).to_string()
}
pub fn flags(&self) -> NdFlag {
self.flags
}
@@ -71,11 +74,10 @@ impl Node {
}
}
impl Display for Node {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let raw = self.span().get_slice();
write!(f, "{}", raw)
}
#[derive(Clone,Debug)]
pub enum LoopKind {
While,
Until
}
#[derive(Clone,Debug)]
@@ -84,6 +86,9 @@ pub enum NdRule {
Command { argv: Vec<Token>, redirs: Vec<Redir> },
Assignment { assignments: Vec<Token>, cmd: Option<Box<Node>> },
FuncDef { name: Token, body: Token },
IfThen { cond_blocks: Vec<(Vec<Node>,Vec<Node>)>, else_block: Option<Vec<Node>> },
Loop { kind: LoopKind, cond: Vec<Node>, body: Vec<Node> },
ForLoop { vars: Vec<Token>, arr: Vec<Token>, body: Vec<Node> },
Subshell { body: Token, argv: Vec<Token>, redirs: Vec<Redir> },
CmdList { cmds: Vec<(Option<CmdGuard>,Node)> },
Pipeline { cmds: Vec<Node> }
@@ -91,12 +96,12 @@ pub enum NdRule {
/// Define a Node rule. The body of this macro becomes the implementation for the try_match() method for the rule.
macro_rules! ndrule_def {
($name:ident,$try:expr) => {
($name:ident,$shenv:ident,$try:expr) => {
#[derive(Debug)]
pub struct $name;
impl ParseRule for $name {
fn try_match(input: &[Token]) -> ShResult<Option<Node>> {
$try(input)
fn try_match(input: &[Token],shenv: &mut ShEnv) -> ShResult<Option<Node>> {
$try(input,shenv)
}
}
};
@@ -105,9 +110,9 @@ macro_rules! ndrule_def {
/// This macro attempts to match all of the given Rules. It returns upon finding the first match, so the order matters
/// Place the most specialized/specific rules first, and the most general rules last
macro_rules! try_rules {
($tokens:expr, $($name:ident),+) => {
($tokens:expr,$shenv:expr,$($name:ident),+) => {
$(
let result = $name::try_match($tokens)?;
let result = $name::try_match($tokens,$shenv)?;
if let Some(node) = result {
return Ok(Some(node))
}
@@ -125,6 +130,9 @@ impl SynTree {
pub fn new() -> Self {
Self { tree: VecDeque::new() }
}
/// Build a syntax tree directly from an ordered list of nodes.
pub fn from_vec(nodes: Vec<Node>) -> Self {
	Self { tree: VecDeque::from(nodes) }
}
pub fn push_node(&mut self, node: Node) {
self.tree.bpush(node)
}
@@ -133,16 +141,17 @@ impl SynTree {
}
}
pub struct Parser {
pub struct Parser<'a> {
token_stream: Vec<Token>,
shenv: &'a mut ShEnv,
ast: SynTree
}
impl Parser {
pub fn new(mut token_stream: Vec<Token>) -> Self {
impl<'a> Parser<'a> {
pub fn new(mut token_stream: Vec<Token>, shenv: &'a mut ShEnv) -> Self {
log!(TRACE, "New parser");
token_stream.retain(|tk| !matches!(tk.rule(), TkRule::Whitespace | TkRule::Comment));
Self { token_stream, ast: SynTree::new() }
Self { token_stream, shenv, ast: SynTree::new() }
}
pub fn parse(mut self) -> ShResult<SynTree> {
@@ -150,11 +159,10 @@ impl Parser {
let mut lists = VecDeque::new();
let token_slice = &*self.token_stream;
// Get the Main rule
if let Some(mut node) = Main::try_match(token_slice)? {
if let Some(mut node) = Main::try_match(token_slice,self.shenv)? {
// Extract the inner lists
if let NdRule::Main { ref mut cmd_lists } = node.rule_mut() {
while let Some(node) = cmd_lists.pop() {
log!(DEBUG, node);
lists.bpush(node)
}
}
@@ -167,26 +175,42 @@ impl Parser {
}
}
fn get_span(toks: &Vec<Token>) -> ShResult<Span> {
fn get_span(toks: &Vec<Token>, shenv: &mut ShEnv) -> ShResult<Rc<RefCell<Span>>> {
if toks.is_empty() {
Err(ShErr::simple(ShErrKind::InternalErr, "Get_span was given an empty token list"))
} else {
let start = toks.first().unwrap().span().start();
let end = toks.iter().last().unwrap().span().end();
let input = toks.iter().last().unwrap().span().get_input();
Ok(Span::new(input,start,end))
let start = toks.first().unwrap().span().borrow().start();
let end = toks.iter().last().unwrap().span().borrow().end();
let span = shenv.inputman_mut().new_span(start, end);
Ok(span)
}
}
/// Greedily parse as many command lists as possible from the front of `tokens`.
///
/// Returns `(tokens_consumed, lists)`. Parsing stops at the first position
/// where `CmdList` does not match; a parse error is deliberately treated the
/// same as a non-match (best-effort), because the caller decides what the
/// remaining tokens (e.g. `then`, `done`, `fi`) mean.
fn get_lists(mut tokens: &[Token], shenv: &mut ShEnv) -> (usize,Vec<Node>) {
	let mut lists = vec![];
	let mut tokens_eaten = 0;
	while !tokens.is_empty() {
		match CmdList::try_match(tokens, shenv) {
			Ok(Some(list)) => {
				// Defensive: a zero-length match would never advance the
				// slice and this loop would spin forever. Bail out instead.
				if list.len() == 0 {
					break
				}
				tokens_eaten += list.len();
				tokens = &tokens[list.len()..];
				lists.push(list);
			}
			// Non-match or parse error ends the run of lists; whatever
			// tokens remain are interpreted by the caller.
			Ok(None) | Err(_) => break
		}
	}
	(tokens_eaten,lists)
}
// TODO: Redirs with FD sources appear to be looping endlessly for some reason
ndrule_def!(Main, |tokens: &[Token]| {
ndrule_def!(Main, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
log!(TRACE, "Parsing main");
let mut cmd_lists = vec![];
let mut node_toks = vec![];
let mut token_slice = &*tokens;
while let Some(node) = CmdList::try_match(token_slice)? {
while let Some(node) = CmdList::try_match(token_slice,shenv)? {
node_toks.extend(node.tokens().clone());
token_slice = &token_slice[node.len()..];
cmd_lists.push(node);
@@ -195,7 +219,7 @@ ndrule_def!(Main, |tokens: &[Token]| {
if cmd_lists.is_empty() {
return Ok(None)
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::Main { cmd_lists },
tokens: node_toks,
@@ -205,14 +229,14 @@ ndrule_def!(Main, |tokens: &[Token]| {
Ok(Some(node))
});
ndrule_def!(CmdList, |tokens: &[Token]| {
ndrule_def!(CmdList, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
log!(TRACE, "Parsing cmdlist");
let mut commands: Vec<(Option<CmdGuard>,Node)> = vec![];
let mut node_toks = vec![];
let mut token_slice = &*tokens;
let mut cmd_guard = None; // Operators like '&&' and '||'
while let Some(mut node) = Expr::try_match(token_slice)? {
while let Some(mut node) = Expr::try_match(token_slice,shenv)? {
// Add sub-node tokens to our tokens
node_toks.extend(node.tokens().clone());
// Reflect changes in the token slice
@@ -221,8 +245,11 @@ ndrule_def!(CmdList, |tokens: &[Token]| {
log!(DEBUG, token_slice);
// Push sub-node
if let NdRule::Command { argv, redirs: _ } = node.rule() {
if argv.first().is_some_and(|arg| BUILTINS.contains(&arg.to_string().as_str())) {
*node.flags_mut() |= NdFlag::BUILTIN;
if let Some(arg) = argv.first() {
let slice = shenv.input_slice(arg.span().clone());
if BUILTINS.contains(&slice) {
*node.flags_mut() |= NdFlag::BUILTIN;
}
}
}
commands.push((cmd_guard.take(),node));
@@ -244,7 +271,7 @@ ndrule_def!(CmdList, |tokens: &[Token]| {
if node_toks.is_empty() {
return Ok(None)
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::CmdList { cmds: commands },
tokens: node_toks,
@@ -254,8 +281,8 @@ ndrule_def!(CmdList, |tokens: &[Token]| {
Ok(Some(node))
});
ndrule_def!(Expr, |tokens: &[Token]| {
try_rules!(tokens,
ndrule_def!(Expr, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
try_rules!(tokens, shenv,
ShellCmd,
Pipeline,
Subshell,
@@ -264,8 +291,8 @@ ndrule_def!(Expr, |tokens: &[Token]| {
);
});
// Used in pipelines to avoid recursion
ndrule_def!(ExprNoPipeline, |tokens: &[Token]| {
try_rules!(tokens,
ndrule_def!(ExprNoPipeline, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
try_rules!(tokens, shenv,
ShellCmd,
Subshell,
Assignment,
@@ -273,13 +300,352 @@ ndrule_def!(ExprNoPipeline, |tokens: &[Token]| {
);
});
ndrule_def!(ShellCmd, |tokens: &[Token]| {
try_rules!(tokens,
ndrule_def!(ShellCmd, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
try_rules!(tokens, shenv,
IfThen,
Loop,
FuncDef
);
});
ndrule_def!(FuncDef, |tokens: &[Token]| {
// Parses `for <vars..> in <arr..> [;] do <body..> done`.
//
// Fix: in the original, the separator check (`token.rule() == TkRule::Sep`)
// sat *inside* the `TkRule::Ident` arm of the array-collection loop, so it
// could never be true — a `;` ending the array always fell into the error
// branch instead of terminating the array. The loop now matches on the rule
// so a separator cleanly ends the array before `do`.
ndrule_def!(ForLoop, shenv, |mut tokens: &[Token], shenv: &mut ShEnv| {
	// Helper for building a blamed parse error from the tokens seen so far
	let err = |msg: &str, span: Rc<RefCell<Span>>, shenv: &mut ShEnv | {
		ShErr::full(ShErrKind::ParseErr, msg, shenv.get_input(), span)
	};
	let mut tokens_iter = tokens.iter().peekable();
	let mut node_toks = vec![];
	let mut vars = vec![];
	let mut arr = vec![];
	let body: Vec<Node>;
	// Leading `for` keyword — anything else means this rule doesn't apply
	if let Some(token) = tokens_iter.next() {
		if let TkRule::For = token.rule() {
			node_toks.push(token.clone());
			tokens = &tokens[1..];
		} else { return Ok(None) }
	} else { return Ok(None) }
	// Loop variables, up to the literal word `in`.
	// (The lexer emits `in` as an Ident here since it's not in command
	// position, so it is recognized by its raw text.)
	while let Some(token) = tokens_iter.next() {
		if let TkRule::Ident = token.rule() {
			node_toks.push(token.clone());
			tokens = &tokens[1..];
			if token.as_raw(shenv) == "in" { break }
			vars.push(token.clone());
		} else {
			let span = get_span(&node_toks, shenv)?;
			return Err(err("Expected an ident in for loop vars",span,shenv))
		}
	}
	if vars.is_empty() {
		let span = get_span(&node_toks, shenv)?;
		return Err(err("Expected an ident in for loop vars",span,shenv))
	}
	// Array words, terminated by a separator (e.g. `;`) before `do`
	while let Some(token) = tokens_iter.next() {
		match token.rule() {
			TkRule::Ident => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				arr.push(token.clone());
			}
			TkRule::Sep => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				break
			}
			_ => {
				let span = get_span(&node_toks, shenv)?;
				return Err(err("Expected an ident in for loop array",span,shenv))
			}
		}
	}
	if arr.is_empty() {
		let span = get_span(&node_toks, shenv)?;
		return Err(err("Expected an ident in for loop array",span,shenv))
	}
	// `do` keyword opening the loop body
	if let Some(token) = tokens_iter.next() {
		node_toks.push(token.clone());
		tokens = &tokens[1..];
		if token.rule() != TkRule::Do {
			let span = get_span(&node_toks, shenv)?;
			return Err(err("Expected `do` after for loop array",span,shenv))
		}
	} else {
		let span = get_span(&node_toks, shenv)?;
		return Err(err("Expected `do` after for loop array",span,shenv))
	}
	// Body: as many command lists as will parse
	let (used,lists) = get_lists(tokens, shenv);
	for list in &lists {
		node_toks.extend(list.tokens().clone());
	}
	tokens = &tokens[used..];
	body = lists;
	tokens_iter = tokens.iter().peekable();
	// Closing `done`.
	// NOTE(review): unlike the while/until rule, this expects `done` to be the
	// very next token with no separator handling — presumably the body's
	// CmdList consumes trailing separators; confirm against CmdList.
	if let Some(token) = tokens_iter.next() {
		node_toks.push(token.clone());
		if token.rule() != TkRule::Done {
			let span = get_span(&node_toks, shenv)?;
			return Err(err("Expected `done` after for loop",span,shenv))
		}
	} else {
		let span = get_span(&node_toks, shenv)?;
		return Err(err("Expected `done` after for loop",span,shenv))
	}
	let span = get_span(&node_toks, shenv)?;
	let node = Node {
		node_rule: NdRule::ForLoop { vars, arr, body },
		tokens: node_toks,
		span,
		flags: NdFlag::empty()
	};
	Ok(Some(node))
});
// Parses `if <cond> then <body> [elif <cond> then <body>]* [else <body>] fi`.
// Each (cond, body) pair is pushed into `cond_blocks`; the optional else body
// goes in `else_block`. The `tokens` slice and `tokens_iter` are advanced in
// lockstep; after every call to `get_lists` the iterator is rebuilt from the
// re-sliced `tokens` so the two never drift apart.
ndrule_def!(IfThen, shenv, |mut tokens: &[Token], shenv: &mut ShEnv| {
	// Helper for building a blamed parse error from the tokens seen so far
	let err = |msg: &str, span: Rc<RefCell<Span>>, shenv: &mut ShEnv | {
		ShErr::full(ShErrKind::ParseErr, msg, shenv.get_input(), span)
	};
	let mut tokens_iter = tokens.iter().peekable();
	let mut node_toks = vec![];
	let mut cond_blocks = vec![];
	let mut else_block: Option<Vec<Node>> = None;
	// Leading `if` keyword — anything else means this rule doesn't apply
	if let Some(token) = tokens_iter.next() {
		if let TkRule::If = token.rule() {
			node_toks.push(token.clone());
			tokens = &tokens[1..];
		} else { return Ok(None) }
	} else { return Ok(None) }
	// Condition: as many command lists as will parse before `then`
	let (used,lists) = get_lists(tokens, shenv);
	for list in &lists {
		node_toks.extend(list.tokens().clone());
	}
	tokens = &tokens[used..];
	let cond = lists;
	tokens_iter = tokens.iter().peekable();
	// Skip separators/whitespace until the `then` keyword
	while let Some(token) = tokens_iter.next() {
		match token.rule() {
			TkRule::Then => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				break
			}
			TkRule::Sep | TkRule::Whitespace => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
			}
			_ => {
				let span = get_span(&node_toks,shenv)?;
				return Err(err("Expected `then` after if statement condition",span,shenv))
			}
		}
	}
	// Nothing after `then` — the statement cannot be completed
	if tokens_iter.peek().is_none() {
		let span = get_span(&node_toks,shenv)?;
		return Err(err("Failed to parse this if statement",span,shenv))
	}
	// Body of the `then` branch
	let (used,lists) = get_lists(tokens, shenv);
	for list in &lists {
		node_toks.extend(list.tokens().clone());
	}
	tokens = &tokens[used..];
	let body = lists;
	tokens_iter = tokens.iter().peekable();
	cond_blocks.push((cond,body));
	// Tail: any number of elif/else branches, closed by `fi`
	let mut closed = false;
	while let Some(token) = tokens_iter.next() {
		match token.rule() {
			TkRule::Elif => {
				// Same shape as the `if` head: condition, `then`, body
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				let (used,lists) = get_lists(tokens, shenv);
				for list in &lists {
					node_toks.extend(list.tokens().clone());
				}
				tokens = &tokens[used..];
				let cond = lists;
				tokens_iter = tokens.iter().peekable();
				while let Some(token) = tokens_iter.next() {
					match token.rule() {
						TkRule::Then => {
							node_toks.push(token.clone());
							tokens = &tokens[1..];
							break
						}
						TkRule::Sep | TkRule::Whitespace => {
							node_toks.push(token.clone());
							tokens = &tokens[1..];
						}
						_ => {
							let span = get_span(&node_toks,shenv)?;
							return Err(err("Expected `then` after if statement condition",span,shenv))
						}
					}
				}
				if tokens_iter.peek().is_none() {
					let span = get_span(&node_toks,shenv)?;
					return Err(err("Failed to parse this if statement",span,shenv))
				}
				let (used,lists) = get_lists(tokens, shenv);
				for list in &lists {
					node_toks.extend(list.tokens().clone());
				}
				tokens = &tokens[used..];
				let body = lists;
				// Rebuilding the iterator here also resets the outer loop's view
				tokens_iter = tokens.iter().peekable();
				cond_blocks.push((cond,body));
			}
			TkRule::Else => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				let (used,lists) = get_lists(tokens, shenv);
				for list in &lists {
					node_toks.extend(list.tokens().clone());
				}
				tokens = &tokens[used..];
				else_block = Some(lists);
				tokens_iter = tokens.iter().peekable();
			}
			TkRule::Fi => {
				// Mark closed; the loop exits on the next separator or
				// naturally when `fi` was the last token
				closed = true;
				node_toks.push(token.clone());
				tokens = &tokens[1..];
			}
			TkRule::Sep | TkRule::Whitespace => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				if closed { break }
			}
			_ => {
				let span = get_span(&node_toks, shenv)?;
				return Err(err("Unexpected token in if statement",span,shenv))
			}
		}
	}
	if !closed {
		let span = get_span(&node_toks, shenv)?;
		return Err(err("Expected `fi` to close if statement",span,shenv))
	}
	let span = get_span(&node_toks, shenv)?;
	let node = Node {
		node_rule: NdRule::IfThen { cond_blocks, else_block },
		tokens: node_toks,
		span,
		flags: NdFlag::empty()
	};
	Ok(Some(node))
});
// Parses `while|until <cond> [;] do <body> done` into an NdRule::Loop.
// The `tokens` slice and `tokens_iter` advance in lockstep; after each
// `get_lists` call the iterator is rebuilt from the re-sliced `tokens`.
ndrule_def!(Loop, shenv, |mut tokens: &[Token], shenv: &mut ShEnv| {
	// Helper for building a blamed parse error from the tokens seen so far
	let err = |msg: &str, span: Rc<RefCell<Span>>, shenv: &mut ShEnv | {
		ShErr::full(ShErrKind::ParseErr, msg, shenv.get_input(), span)
	};
	let mut tokens_iter = tokens.iter().peekable();
	let mut node_toks = vec![];
	let kind: LoopKind;
	let cond: Vec<Node>;
	let body: Vec<Node>;
	// Leading `while`/`until` picks the loop kind; anything else means
	// this rule doesn't apply
	if let Some(token) = tokens_iter.next() {
		node_toks.push(token.clone());
		match token.rule() {
			TkRule::While => {
				kind = LoopKind::While
			}
			TkRule::Until => {
				kind = LoopKind::Until
			}
			_ => return Ok(None)
		}
	} else { return Ok(None) }
	tokens = &tokens[1..];
	// Condition: as many command lists as will parse before `do`
	let (used,lists) = get_lists(tokens, shenv);
	for list in &lists {
		node_toks.extend(list.tokens().clone());
	}
	tokens = &tokens[used..];
	cond = lists;
	tokens_iter = tokens.iter().peekable();
	// Skip separators/whitespace until the `do` keyword
	while let Some(token) = tokens_iter.next() {
		match token.rule() {
			TkRule::Sep | TkRule::Whitespace => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
			}
			TkRule::Do => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				break
			}
			_ => {
				let span = get_span(&node_toks,shenv)?;
				return Err(err("Expected `do` after loop condition",span,shenv))
			}
		}
	}
	// NOTE(review): an exhausted input after `do` yields Ok(None) (no match)
	// here, whereas the analogous spot in IfThen returns a parse error —
	// confirm whether this asymmetry is intentional.
	if tokens_iter.peek().is_none() {
		return Ok(None)
	}
	// Loop body
	let (used,lists) = get_lists(tokens, shenv);
	for list in &lists {
		node_toks.extend(list.tokens().clone());
	}
	tokens = &tokens[used..];
	body = lists;
	tokens_iter = tokens.iter().peekable();
	// Consume separators until the closing `done`
	let mut closed = false;
	while let Some(token) = tokens_iter.next() {
		match token.rule() {
			TkRule::Sep | TkRule::Whitespace => {
				node_toks.push(token.clone());
				tokens = &tokens[1..];
				if closed { break }
			}
			TkRule::Done => {
				// Mark closed; exit on the next separator or when `done`
				// was the last token
				closed = true;
				node_toks.push(token.clone());
				tokens = &tokens[1..];
			}
			_ => {
				let span = get_span(&node_toks,shenv)?;
				return Err(err("Unexpected token in loop",span,shenv))
			}
		}
	}
	if !closed {
		let span = get_span(&node_toks,shenv)?;
		return Err(err("Expected `done` to close loop",span,shenv))
	}
	let span = get_span(&node_toks, shenv)?;
	let node = Node {
		node_rule: NdRule::Loop { kind, cond, body },
		tokens: node_toks,
		span,
		flags: NdFlag::empty()
	};
	log!(DEBUG, node);
	Ok(Some(node))
});
ndrule_def!(FuncDef, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
let mut tokens_iter = tokens.iter();
let mut node_toks = vec![];
let name: Token;
@@ -307,7 +673,7 @@ ndrule_def!(FuncDef, |tokens: &[Token]| {
return Ok(None)
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::FuncDef { name, body },
tokens: node_toks,
@@ -317,7 +683,7 @@ ndrule_def!(FuncDef, |tokens: &[Token]| {
Ok(Some(node))
});
ndrule_def!(Subshell, |tokens: &[Token]| {
ndrule_def!(Subshell, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
let mut tokens_iter = tokens.iter();
let mut node_toks = vec![];
let mut argv = vec![];
@@ -350,27 +716,29 @@ ndrule_def!(Subshell, |tokens: &[Token]| {
TkRule::RedirOp => {
node_toks.push(token.clone());
// Get the raw redirection text, e.g. "1>&2" or "2>" or ">>" or something
let redir_raw = token.span().get_slice();
let redir_raw = shenv.input_slice(token.span());
let mut redir_bldr = RedirBldr::from_str(&redir_raw).unwrap();
// If there isn't an FD target, get the next token and use it as the filename
if redir_bldr.tgt().is_none() {
if let Some(filename) = tokens_iter.next() {
// Make sure it's a word and not an operator or something
if !matches!(filename.rule(), TkRule::SQuote | TkRule::DQuote | TkRule::Ident | TkRule::Keyword) {
if !matches!(filename.rule(), TkRule::SQuote | TkRule::DQuote | TkRule::Ident) || KEYWORDS.contains(&filename.rule()) {
let mut err = ShErr::simple(ShErrKind::ParseErr, "Did not find a target for this redirection");
err.blame(token.span().clone());
let input = shenv.input_slice(token.span()).to_string();
err.blame(input, token.span());
return Err(err)
}
node_toks.push(filename.clone());
// Construct the Path object
let filename_raw = filename.span().get_slice();
let filename_raw = shenv.input_slice(filename.span()).to_string();
let filename_path = PathBuf::from(filename_raw);
let tgt = RedirTarget::File(filename_path);
// Update the builder
redir_bldr = redir_bldr.with_tgt(tgt);
} else {
let mut err = ShErr::simple(ShErrKind::ParseErr, "Did not find a target for this redirection");
err.blame(token.span().clone());
let input = shenv.input_slice(token.span()).to_string();
err.blame(input, token.span());
return Err(err)
}
}
@@ -379,7 +747,7 @@ ndrule_def!(Subshell, |tokens: &[Token]| {
_ => break
}
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::Subshell { body, argv, redirs },
tokens: node_toks,
@@ -394,7 +762,7 @@ ndrule_def!(Subshell, |tokens: &[Token]| {
Ok(None)
});
ndrule_def!(Pipeline, |mut tokens: &[Token]| {
ndrule_def!(Pipeline, shenv, |mut tokens: &[Token], shenv: &mut ShEnv| {
log!(TRACE, "Parsing pipeline");
let mut tokens_iter = tokens.iter().peekable();
let mut node_toks = vec![];
@@ -411,7 +779,7 @@ ndrule_def!(Pipeline, |mut tokens: &[Token]| {
}
_ => { /* Keep going */ }
}
if let Some(mut cmd) = ExprNoPipeline::try_match(tokens)? {
if let Some(mut cmd) = ExprNoPipeline::try_match(tokens,shenv)? {
// Add sub-node's tokens to our tokens
node_toks.extend(cmd.tokens().clone());
@@ -422,8 +790,11 @@ ndrule_def!(Pipeline, |mut tokens: &[Token]| {
}
if let NdRule::Command { argv, redirs: _ } = cmd.rule() {
if argv.first().is_some_and(|arg| BUILTINS.contains(&arg.to_string().as_str())) {
*cmd.flags_mut() |= NdFlag::BUILTIN;
if let Some(arg) = argv.first() {
let slice = shenv.input_slice(arg.span().clone());
if BUILTINS.contains(&slice) {
*cmd.flags_mut() |= NdFlag::BUILTIN;
}
}
}
// Push sub-node
@@ -458,7 +829,7 @@ ndrule_def!(Pipeline, |mut tokens: &[Token]| {
if node_toks.is_empty() {
return Ok(None)
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::Pipeline { cmds },
tokens: node_toks,
@@ -468,7 +839,7 @@ ndrule_def!(Pipeline, |mut tokens: &[Token]| {
Ok(Some(node))
});
ndrule_def!(Command, |tokens: &[Token]| {
ndrule_def!(Command, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
log!(TRACE, "Parsing command");
let mut tokens = tokens.iter().peekable();
let mut node_toks = vec![];
@@ -495,40 +866,49 @@ ndrule_def!(Command, |tokens: &[Token]| {
}
TkRule::RedirOp => {
// Get the raw redirection text, e.g. "1>&2" or "2>" or ">>" or something
let redir_raw = token.span().get_slice();
let redir_raw = shenv.input_slice(token.span()).to_string();
let mut redir_bldr = RedirBldr::from_str(&redir_raw).unwrap();
// If there isn't an FD target, get the next token and use it as the filename
if redir_bldr.tgt().is_none() {
if let Some(filename) = tokens.next() {
// Make sure it's a word and not an operator or something
if !matches!(filename.rule(), TkRule::SQuote | TkRule::DQuote | TkRule::Ident | TkRule::Keyword) {
if !matches!(filename.rule(), TkRule::SQuote | TkRule::DQuote | TkRule::Ident) || KEYWORDS.contains(&filename.rule()) {
let mut err = ShErr::simple(ShErrKind::ParseErr, "Did not find a target for this redirection");
err.blame(token.span().clone());
let input = shenv.input_slice(token.span()).to_string();
err.blame(input, token.span());
return Err(err)
}
node_toks.push(filename.clone());
// Construct the Path object
let filename_raw = filename.span().get_slice();
let filename_raw = shenv.input_slice(filename.span()).to_string();
let filename_path = PathBuf::from(filename_raw);
let tgt = RedirTarget::File(filename_path);
// Update the builder
redir_bldr = redir_bldr.with_tgt(tgt);
} else {
let mut err = ShErr::simple(ShErrKind::ParseErr, "Did not find a target for this redirection");
err.blame(token.span().clone());
let input = shenv.input_slice(token.span()).to_string();
err.blame(input, token.span());
return Err(err)
}
}
redirs.push(redir_bldr.build());
}
TkRule::Sep => break,
_ => unreachable!("Found this rule: {:?}", token.rule())
_ => return Err(
ShErr::full(
ShErrKind::ParseErr,
format!("Unexpected token in command rule: {:?}", token.rule()),
shenv.get_input(),
get_span(&node_toks,shenv)?
)
)
}
}
if node_toks.is_empty() {
return Ok(None)
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
if !argv.is_empty() {
let node = Node {
node_rule: NdRule::Command { argv, redirs },
@@ -542,7 +922,7 @@ ndrule_def!(Command, |tokens: &[Token]| {
}
});
ndrule_def!(Assignment, |tokens: &[Token]| {
ndrule_def!(Assignment, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
log!(TRACE, "Parsing assignment");
let mut tokens = tokens.into_iter().peekable();
let mut node_toks = vec![];
@@ -559,11 +939,11 @@ ndrule_def!(Assignment, |tokens: &[Token]| {
if tokens.peek().is_some() {
let tokens_vec: Vec<Token> = tokens.into_iter().map(|token| token.clone()).collect();
let tokens_slice = &tokens_vec;
let cmd = Command::try_match(tokens_slice)?.map(|cmd| Box::new(cmd));
let cmd = Command::try_match(tokens_slice,shenv)?.map(|cmd| Box::new(cmd));
if let Some(ref cmd) = cmd {
node_toks.extend(cmd.tokens().clone());
}
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::Assignment { assignments, cmd },
tokens: node_toks,
@@ -572,7 +952,7 @@ ndrule_def!(Assignment, |tokens: &[Token]| {
};
return Ok(Some(node))
} else {
let span = get_span(&node_toks)?;
let span = get_span(&node_toks,shenv)?;
let node = Node {
node_rule: NdRule::Assignment { assignments, cmd: None },
tokens: node_toks,

View File

@@ -97,12 +97,15 @@ pub use crate::{
},
sys::{
self,
sh_quit,
read_to_string,
write_err,
write_out,
c_pipe,
execvpe
},
error::{
ResultExt,
ShErrKind,
ShErr,
ShResult
@@ -114,6 +117,7 @@ pub use crate::{
pwd::pwd,
read::read_builtin,
alias::alias,
control_flow::sh_flow,
jobctl::{
continue_job,
jobs
@@ -140,7 +144,10 @@ pub use crate::{
exec_ctx::ExecFlags,
shenv::ShEnv
},
execute::Executor,
execute::{
exec_input,
Executor,
},
parse::{
parse::{
Node,

View File

@@ -1,19 +1,22 @@
use rustyline::highlight::Highlighter;
use sys::get_bin_path;
use crate::prelude::*;
use crate::{parse::lex::KEYWORDS, prelude::*};
use super::readline::SynHelper;
impl<'a> Highlighter for SynHelper<'a> {
fn highlight<'l>(&self, line: &'l str, pos: usize) -> std::borrow::Cow<'l, str> {
let mut shenv_clone = self.shenv.clone();
shenv_clone.new_input(line);
let mut result = String::new();
let mut tokens = Lexer::new(Rc::new(line.to_string())).lex().into_iter();
let mut tokens = Lexer::new(line.to_string(),&mut shenv_clone).lex().into_iter();
let mut is_command = true;
let mut in_array = false;
while let Some(token) = tokens.next() {
let raw = token.to_string();
let raw = token.as_raw(&mut shenv_clone);
match token.rule() {
TkRule::Comment => {
let styled = &raw.styled(Style::BrightBlack);
@@ -35,7 +38,7 @@ impl<'a> Highlighter for SynHelper<'a> {
let rebuilt = format!("{styled}()");
result.push_str(&rebuilt);
}
TkRule::Keyword => {
_ if KEYWORDS.contains(&token.rule()) => {
if &raw == "for" {
in_array = true;
}
@@ -76,7 +79,7 @@ impl<'a> Highlighter for SynHelper<'a> {
result.push_str(&raw);
} else if is_command {
if get_bin_path(&token.to_string(), self.shenv).is_some() ||
if get_bin_path(&token.as_raw(&mut shenv_clone), self.shenv).is_some() ||
self.shenv.logic().get_alias(&raw).is_some() ||
self.shenv.logic().get_function(&raw).is_some() ||
BUILTINS.contains(&raw.as_str()) {

View File

@@ -4,6 +4,7 @@ use rustyline::{config::Configurer, history::{DefaultHistory, History}, ColorMod
pub mod readline;
pub mod highlight;
pub mod validate;
fn init_rl<'a>(shenv: &'a mut ShEnv) -> Editor<SynHelper<'a>, DefaultHistory> {
let hist_path = std::env::var("FERN_HIST").unwrap_or_default();
@@ -25,7 +26,7 @@ fn init_rl<'a>(shenv: &'a mut ShEnv) -> Editor<SynHelper<'a>, DefaultHistory> {
editor
}
pub fn read_line<'a>(shenv: &'a mut ShEnv) -> ShResult<String> {
pub fn read_line(shenv: &mut ShEnv) -> ShResult<String> {
log!(TRACE, "Entering prompt");
let prompt = "$ ".styled(Style::Green | Style::Bold);
let mut editor = init_rl(shenv);

View File

@@ -33,11 +33,6 @@ impl<'a> SynHelper<'a> {
}
}
impl<'a> Validator for SynHelper<'a> {
fn validate(&self, ctx: &mut rustyline::validate::ValidationContext) -> rustyline::Result<rustyline::validate::ValidationResult> {
Ok(ValidationResult::Valid(None))
}
}
impl<'a> Completer for SynHelper<'a> {

115
src/prompt/validate.rs Normal file
View File

@@ -0,0 +1,115 @@
use rustyline::validate::{ValidationResult, Validator};
use crate::prelude::*;
use super::readline::SynHelper;
/// Returns `true` when every bracket/paren/brace and quote in `line` is balanced.
///
/// Used by the line validator to decide whether the input is a complete
/// command or the prompt should keep reading continuation lines. Quoted
/// regions are opaque: brackets inside quotes are ignored. A backslash
/// escapes the following character everywhere EXCEPT inside single quotes,
/// where POSIX shells treat it as a literal character.
pub fn check_delims(line: &str) -> bool {
    let mut delim_stack = vec![];
    let mut in_quote: Option<char> = None; // Currently-open quote (`'` or `"`), if any
    let mut chars = line.chars();
    while let Some(ch) = chars.next() {
        match in_quote {
            Some(quote) => {
                if ch == quote {
                    // Matching quote character closes the quoted region
                    in_quote = None;
                } else if ch == '\\' && quote == '"' {
                    // Backslash escapes the next char inside double quotes only;
                    // inside single quotes it is a literal character (POSIX)
                    chars.next();
                }
            }
            None => match ch {
                '{' | '(' | '[' => delim_stack.push(ch),
                // A closer must match the most recently opened delimiter
                '}' => if delim_stack.pop() != Some('{') { return false },
                ')' => if delim_stack.pop() != Some('(') { return false },
                ']' => if delim_stack.pop() != Some('[') { return false },
                '"' | '\'' => in_quote = Some(ch),
                '\\' => { chars.next(); } // Escape: skip the next character
                _ => {}
            }
        }
    }
    // Complete only if nothing is left open
    delim_stack.is_empty() && in_quote.is_none()
}
/// Returns `true` when every keyword construct opened in `line` is closed in
/// the right order (if/then/fi, while-until-for-select/do/done, case/esac).
///
/// Lexes `line` and walks the tokens with a stack of "expectation frames":
/// each opener pushes the set of tokens that may legally close or continue
/// it; each closer/continuation must match the innermost frame. The line is
/// complete when the stack is empty at the end.
pub fn check_keywords(line: &str, shenv: &mut ShEnv) -> bool {
    use TkRule::*;
    // Pops the innermost expectation frame and checks that `found` was one of
    // the tokens it allowed. `false` means a misplaced or unexpected keyword.
    fn close_frame(expecting: &mut Vec<Vec<TkRule>>, found: TkRule) -> bool {
        expecting.pop().map_or(false, |frame| frame.contains(&found))
    }
    let mut expecting: Vec<Vec<TkRule>> = vec![];
    for token in Lexer::new(line.to_string(), shenv).lex() {
        match token.rule() {
            If => expecting.push(vec![Then]),
            Then => {
                if !close_frame(&mut expecting, Then) { return false }
                // A `then` body may be continued by elif/else or closed by fi
                expecting.push(vec![Elif, Else, Fi]);
            }
            Elif => {
                if !close_frame(&mut expecting, Elif) { return false }
                expecting.push(vec![Then]);
            }
            Else => {
                if !close_frame(&mut expecting, Else) { return false }
                expecting.push(vec![Fi]);
            }
            Fi => if !close_frame(&mut expecting, Fi) { return false },
            While | Until | For | Select => expecting.push(vec![Do]),
            Do => {
                if !close_frame(&mut expecting, Do) { return false }
                expecting.push(vec![Done]);
            }
            Done => if !close_frame(&mut expecting, Done) { return false },
            Case => expecting.push(vec![Esac]),
            Esac => if !close_frame(&mut expecting, Esac) { return false },
            _ => { /* Non-keyword token — no effect on structure */ }
        }
    }
    expecting.is_empty()
}
impl<'a> Validator for SynHelper<'a> {
    /// Decides whether the buffered line is a complete command.
    ///
    /// The line is complete when all delimiters/quotes are balanced AND
    /// every opened keyword construct is properly closed; otherwise the
    /// editor keeps prompting for continuation input.
    fn validate(&self, ctx: &mut rustyline::validate::ValidationContext) -> rustyline::Result<rustyline::validate::ValidationResult> {
        let input = ctx.input();
        // check_keywords lexes via a ShEnv, so work on a clone rather than
        // mutating the live environment
        let mut shenv_clone = self.shenv.clone();
        let complete = check_delims(input) && check_keywords(input, &mut shenv_clone);
        if complete {
            Ok(ValidationResult::Valid(None))
        } else {
            Ok(ValidationResult::Incomplete)
        }
    }
}

61
src/shellenv/input.rs Normal file
View File

@@ -0,0 +1,61 @@
use std::cell::Ref;
use crate::prelude::*;
#[derive(Clone,Debug)]
pub struct InputMan {
    // The current input line/script being processed, if any
    input: Option<String>,
    // Every span handed out against `input`; tracked so edits to the input
    // can shift/clamp all outstanding spans in one pass
    spans: Vec<Rc<RefCell<Span>>>,
}

impl InputMan {
    /// Creates an empty manager with no input and no tracked spans.
    pub fn new() -> Self {
        Self { input: None, spans: vec![] }
    }

    /// Resets the manager to its freshly-constructed state.
    pub fn clear(&mut self) {
        *self = Self::new();
    }

    /// Stores `input` as the current input.
    ///
    /// NOTE(review): previously tracked spans are kept; callers are expected
    /// to call `clear()` first (as `ShEnv::new_input` does) — confirm.
    pub fn new_input(&mut self, input: &str) {
        self.input = Some(input.to_string())
    }

    /// The current input, if one has been set.
    pub fn get_input(&self) -> Option<&String> {
        self.input.as_ref()
    }

    /// Mutable access to the current input, if one has been set.
    pub fn get_input_mut(&mut self) -> Option<&mut String> {
        self.input.as_mut()
    }

    /// Creates and tracks a new span over `start..end` of the current input.
    /// When no input is set, returns an untracked empty span instead.
    pub fn new_span(&mut self, start: usize, end: usize) -> Rc<RefCell<Span>> {
        if let Some(_input) = &self.input {
            let span = Rc::new(RefCell::new(Span::new(start, end)));
            self.spans.push(span.clone());
            span
        } else {
            Rc::new(RefCell::new(Span::new(0,0)))
        }
    }

    /// Mutable access to the list of tracked spans.
    pub fn spans_mut(&mut self) -> &mut Vec<Rc<RefCell<Span>>> {
        &mut self.spans
    }

    /// Clamps `span`'s start and end to the current input's length.
    pub fn clamp(&self, span: Rc<RefCell<Span>>) {
        let mut span = span.borrow_mut();
        if let Some(input) = &self.input {
            span.clamp_start(input.len());
            span.clamp_end(input.len());
        }
    }

    /// Clamps every tracked span to the current input's length.
    pub fn clamp_all(&self) {
        for span in &self.spans {
            self.clamp(span.clone());
        }
    }

    /// Returns the slice of the current input covered by `span`.
    ///
    /// Returns `None` when no input is set, or when the span is out of
    /// bounds / not on a char boundary — `str::get` performs those checks,
    /// so a stale span can never panic here (the old `&s[start..end]`
    /// indexing could). A degenerate span (start > end) yields the empty
    /// slice at `end`.
    pub fn get_slice(&self, span: Rc<RefCell<Span>>) -> Option<&str> {
        let span = span.borrow();
        let mut start = span.start();
        let end = span.end();
        if start > end {
            start = end;
        }
        self.input.as_ref().and_then(|s| s.get(start..end))
    }
}

View File

@@ -10,6 +10,7 @@ pub mod exec_ctx;
pub mod meta;
pub mod shenv;
pub mod vars;
pub mod input;
/// Calls attach_tty() on the shell's process group to retake control of the terminal
pub fn take_term() -> ShResult<()> {

View File

@@ -5,6 +5,7 @@ pub struct ShEnv {
vars: shellenv::vars::VarTab,
logic: shellenv::logic::LogTab,
meta: shellenv::meta::MetaTab,
input_man: shellenv::input::InputMan,
ctx: shellenv::exec_ctx::ExecCtx
}
@@ -14,6 +15,7 @@ impl ShEnv {
vars: shellenv::vars::VarTab::new(),
logic: shellenv::logic::LogTab::new(),
meta: shellenv::meta::MetaTab::new(),
input_man: shellenv::input::InputMan::new(),
ctx: shellenv::exec_ctx::ExecCtx::new(),
}
}
@@ -26,6 +28,58 @@ impl ShEnv {
pub fn meta(&self) -> &shellenv::meta::MetaTab {
&self.meta
}
/// Returns the slice of the current input covered by `span`, or the empty
/// string when there is no input or the span cannot be resolved.
pub fn input_slice(&self, span: Rc<RefCell<Span>>) -> &str {
    // No leading `&`: `unwrap_or_default()` already yields `&str`; the old
    // extra borrow produced `&&str` and relied on deref coercion
    self.input_man.get_slice(span).unwrap_or_default()
}
/// Splices `new` (e.g. the body of an alias or variable expansion) into the
/// stored input in place of the region covered by `repl_span`, re-lexes the
/// new text, and returns the fresh tokens with their spans shifted to their
/// final position in the edited input.
///
/// Previously issued spans that start after the replaced region are shifted
/// by the length delta so they keep pointing at the same text; all spans are
/// clamped to the new input length afterwards.
pub fn expand_input(&mut self, new: &str, repl_span: Rc<RefCell<Span>>) -> Vec<Token> {
    // Lexing `new` registers spans with the InputMan as a side effect; save
    // and restore the span list so only the real input's spans stay tracked.
    let saved_spans = self.input_man.spans_mut().clone();
    let mut new_tokens = Lexer::new(new.to_string(), self).lex();
    *self.input_man.spans_mut() = saved_spans;
    // Move the new tokens from offset 0 to where the replacement begins
    let offset = repl_span.borrow().start();
    for token in new_tokens.iter_mut() {
        token.span().borrow_mut().shift(offset as isize);
    }
    let repl_start = repl_span.borrow().start();
    let repl_end = repl_span.borrow().end();
    let range = repl_start..repl_end;
    if let Some(ref mut input) = self.input_man.get_input_mut() {
        let old = &input[range.clone()];
        // How much the text grew (positive) or shrank (negative)
        let delta: isize = new.len() as isize - old.len() as isize;
        input.replace_range(range, new);
        let expanded = input.clone();
        log!(DEBUG, expanded);
        // Shift every pre-existing span that starts after the replaced
        // region so it still refers to the same text in the edited input.
        // NOTE(review): spans starting exactly at `repl_start` are NOT
        // shifted — confirm that is intended for zero-width replacements.
        for span in self.input_man.spans_mut() {
            let mut span_mut = span.borrow_mut();
            if span_mut.start() > repl_start {
                span_mut.shift(delta);
            }
        }
        // Track the newly lexed tokens' spans alongside the existing ones
        for token in &new_tokens {
            self.input_man.spans_mut().push(token.span());
        }
    }
    // Keep every span within the bounds of the edited input
    self.input_man.clamp_all();
    new_tokens
}
/// Replaces the tracked input with `input`, dropping every previously
/// registered span.
pub fn new_input(&mut self, input: &str) {
    let man = &mut self.input_man;
    man.clear();
    man.new_input(input);
}
/// Returns an owned copy of the current input, or an empty string when no
/// input has been set.
pub fn get_input(&self) -> String {
    let input = match self.input_man.get_input() {
        Some(s) => s.clone(),
        None => String::new(),
    };
    log!(DEBUG, input);
    input
}
/// Shared access to the input/span manager.
pub fn inputman(&self) -> &shellenv::input::InputMan {
    &self.input_man
}
/// Mutable access to the input/span manager.
pub fn inputman_mut(&mut self) -> &mut shellenv::input::InputMan {
    &mut self.input_man
}
/// Mutable access to the shell's metadata table.
pub fn meta_mut(&mut self) -> &mut shellenv::meta::MetaTab {
    &mut self.meta
}