More edge case work

This commit is contained in:
2025-03-07 21:02:26 -05:00
parent 972e2ceefa
commit 67977f96eb
9 changed files with 113 additions and 68 deletions

View File

@@ -1,6 +1,6 @@
use std::os::fd::AsRawFd; use std::os::fd::AsRawFd;
use crate::{expand::vars::expand_string, prelude::*}; use crate::{expand::{arithmetic::expand_arith_string, tilde::expand_tilde_string, vars::{expand_string, expand_var}}, prelude::*};
use shellenv::jobs::{ChildProc, JobBldr}; use shellenv::jobs::{ChildProc, JobBldr};
pub mod shellcmd; pub mod shellcmd;
@@ -11,6 +11,7 @@ pub fn exec_input<S: Into<String>>(input: S, shenv: &mut ShEnv) -> ShResult<()>
shenv.new_input(&input); shenv.new_input(&input);
let token_stream = Lexer::new(input,shenv).lex(); let token_stream = Lexer::new(input,shenv).lex();
log!(INFO, token_stream);
let token_stream = expand_aliases(token_stream, shenv); let token_stream = expand_aliases(token_stream, shenv);
for token in &token_stream { for token in &token_stream {
@@ -85,14 +86,14 @@ fn dispatch_node(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let node_raw = node.as_raw(shenv); let node_raw = node.as_raw(shenv);
let span = node.span(); let span = node.span();
match *node.rule() { match *node.rule() {
NdRule::Command {..} => dispatch_command(node, shenv).try_blame(node_raw, span)?, NdRule::Command {..} |
NdRule::Subshell {..} => exec_subshell(node,shenv).try_blame(node_raw, span)?, NdRule::Subshell {..} |
NdRule::Assignment {..} => dispatch_command(node, shenv).try_blame(node_raw, span)?,
NdRule::IfThen {..} => shellcmd::exec_if(node, shenv).try_blame(node_raw, span)?, NdRule::IfThen {..} => shellcmd::exec_if(node, shenv).try_blame(node_raw, span)?,
NdRule::Loop {..} => shellcmd::exec_loop(node, shenv).try_blame(node_raw, span)?, NdRule::Loop {..} => shellcmd::exec_loop(node, shenv).try_blame(node_raw, span)?,
NdRule::ForLoop {..} => shellcmd::exec_for(node, shenv).try_blame(node_raw, span)?, NdRule::ForLoop {..} => shellcmd::exec_for(node, shenv).try_blame(node_raw, span)?,
NdRule::Case {..} => shellcmd::exec_case(node, shenv).try_blame(node_raw, span)?, NdRule::Case {..} => shellcmd::exec_case(node, shenv).try_blame(node_raw, span)?,
NdRule::FuncDef {..} => exec_funcdef(node,shenv).try_blame(node_raw, span)?, NdRule::FuncDef {..} => exec_funcdef(node,shenv).try_blame(node_raw, span)?,
NdRule::Assignment {..} => exec_assignment(node,shenv).try_blame(node_raw, span)?,
NdRule::Pipeline {..} => exec_pipeline(node, shenv).try_blame(node_raw, span)?, NdRule::Pipeline {..} => exec_pipeline(node, shenv).try_blame(node_raw, span)?,
_ => unimplemented!("No support for NdRule::{:?} yet", node.rule()) _ => unimplemented!("No support for NdRule::{:?} yet", node.rule())
} }
@@ -103,6 +104,7 @@ fn dispatch_command(mut node: Node, shenv: &mut ShEnv) -> ShResult<()> {
let mut is_builtin = false; let mut is_builtin = false;
let mut is_func = false; let mut is_func = false;
let mut is_subsh = false; let mut is_subsh = false;
let mut is_assign = false;
if let NdRule::Command { ref mut argv, redirs: _ } = node.rule_mut() { if let NdRule::Command { ref mut argv, redirs: _ } = node.rule_mut() {
*argv = expand_argv(argv.to_vec(), shenv)?; *argv = expand_argv(argv.to_vec(), shenv)?;
let cmd = argv.first().unwrap().as_raw(shenv); let cmd = argv.first().unwrap().as_raw(shenv);
@@ -114,6 +116,8 @@ fn dispatch_command(mut node: Node, shenv: &mut ShEnv) -> ShResult<()> {
} else if let NdRule::Subshell { body: _, ref mut argv, redirs: _ } = node.rule_mut() { } else if let NdRule::Subshell { body: _, ref mut argv, redirs: _ } = node.rule_mut() {
*argv = expand_argv(argv.to_vec(), shenv)?; *argv = expand_argv(argv.to_vec(), shenv)?;
is_subsh = true; is_subsh = true;
} else if let NdRule::Assignment { assignments: _, cmd: _ } = node.rule() {
is_assign = true;
} else { unreachable!() } } else { unreachable!() }
if is_builtin { if is_builtin {
@@ -122,6 +126,8 @@ fn dispatch_command(mut node: Node, shenv: &mut ShEnv) -> ShResult<()> {
exec_func(node, shenv)?; exec_func(node, shenv)?;
} else if is_subsh { } else if is_subsh {
exec_subshell(node, shenv)?; exec_subshell(node, shenv)?;
} else if is_assign {
exec_assignment(node, shenv)?;
} else { } else {
exec_cmd(node, shenv)?; exec_cmd(node, shenv)?;
} }
@@ -274,30 +280,50 @@ fn exec_assignment(node: Node, shenv: &mut ShEnv) -> ShResult<()> {
log!(TRACE, "Command: {:?}", cmd); log!(TRACE, "Command: {:?}", cmd);
let mut assigns = assignments.into_iter(); let mut assigns = assignments.into_iter();
if let Some(cmd) = cmd { if let Some(cmd) = cmd {
while let Some((var,val)) = assigns.next() { let saved_env = shenv.vars().env().clone();
let var_raw = var.as_raw(shenv); while let Some(token) = assigns.next() {
let val_raw = val.as_raw(shenv); let raw = token.as_raw(shenv);
if check_expansion(&val_raw).is_some() { if let Some((var,val)) = raw.split_once('=') {
let exp = expand_token(val.clone(), shenv)?; let val_rule = Lexer::get_rule(&val);
let val_exp = exp.into_iter().next().unwrap_or(val); if EXPANSIONS.contains(&val_rule) {
let val_exp_raw = val_exp.as_raw(shenv); let exp = match val_rule {
shenv.vars_mut().export(&var_raw, &val_exp_raw); TkRule::ArithSub => expand_arith_string(val,shenv)?,
} else { TkRule::DQuote => expand_string(val, shenv),
shenv.vars_mut().export(&var_raw, &val_raw); TkRule::TildeSub => expand_tilde_string(val),
TkRule::VarSub => {
let val = shenv.vars().get_var(var);
val.to_string()
}
_ => unimplemented!()
};
shenv.vars_mut().set_var(var, &exp);
} else {
shenv.vars_mut().set_var(var, val);
}
} }
} }
dispatch_command(*cmd, shenv)?; dispatch_command(*cmd, shenv)?;
*shenv.vars_mut().env_mut() = saved_env;
} else { } else {
while let Some((var,val)) = assigns.next() { while let Some(token) = assigns.next() {
let var_raw = var.as_raw(shenv); let raw = token.as_raw(shenv);
let val_raw = val.as_raw(shenv); if let Some((var,val)) = raw.split_once('=') {
if check_expansion(&val_raw).is_some() { let val_rule = Lexer::get_rule(&val);
let exp = expand_token(val.clone(), shenv)?; if EXPANSIONS.contains(&val_rule) {
let val_exp = exp.into_iter().next().unwrap_or(val); let exp = match val_rule {
let val_exp_raw = val_exp.as_raw(shenv); TkRule::ArithSub => expand_arith_string(val,shenv)?,
shenv.vars_mut().export(&var_raw, &val_exp_raw); TkRule::DQuote => expand_string(val, shenv),
} else { TkRule::TildeSub => expand_tilde_string(val),
shenv.vars_mut().export(&var_raw, &val_raw); TkRule::VarSub => {
let val = shenv.vars().get_var(var);
val.to_string()
}
_ => unimplemented!()
};
shenv.vars_mut().set_var(var, &exp);
} else {
shenv.vars_mut().set_var(var, val);
}
} }
} }
} }

View File

@@ -153,22 +153,29 @@ pub fn eval_rpn(tokens: Vec<ExprToken>) -> ShResult<f64> {
Ok(stack.pop().unwrap()) Ok(stack.pop().unwrap())
} }
pub fn expand_arithmetic(token: Token, shenv: &mut ShEnv) -> ShResult<Token> { pub fn expand_arith_token(token: Token, shenv: &mut ShEnv) -> ShResult<Token> {
log!(INFO, "{}", token.as_raw(shenv)); log!(INFO, "{}", token.as_raw(shenv));
// I mean hey it works // I mean hey it works
let token_raw = token.as_raw(shenv); let token_raw = token.as_raw(shenv);
log!(INFO, token_raw); log!(INFO, token_raw);
let expanded = expand_string(token_raw, shenv);
token.span().borrow_mut().expanded = false;
log!(INFO, expanded); let arith_raw = token_raw.trim_matches('`');
let arith_raw = expanded.trim_matches('`');
let expr_tokens = shunting_yard(tokenize_expr(arith_raw)?)?; let result = expand_arith_string(arith_raw,shenv)?;
log!(DEBUG,expr_tokens);
let result = eval_rpn(expr_tokens)?.to_string();
let mut final_expansion = shenv.expand_input(&result, token.span()); let mut final_expansion = shenv.expand_input(&result, token.span());
Ok(final_expansion.pop().unwrap_or(token)) Ok(final_expansion.pop().unwrap_or(token))
} }
/// Expand and evaluate an arithmetic expression string, returning the numeric
/// result rendered as a `String`.
///
/// Variable expansion runs first (via `expand_string`) so forms like
/// `` `x + 1` `` see current shell values; a matched pair of surrounding
/// backticks is then stripped before the expression is tokenized,
/// shunting-yarded into RPN, and evaluated.
///
/// # Errors
/// Propagates any error from `tokenize_expr`, `shunting_yard`, or `eval_rpn`
/// (e.g. malformed expressions).
pub fn expand_arith_string(s: &str, shenv: &mut ShEnv) -> ShResult<String> {
	let mut exp = expand_string(s, shenv);
	// Strip surrounding backticks. Check `exp` on BOTH ends — the original
	// mixed `exp.starts_with` with `s.ends_with`, which desyncs once
	// expansion rewrites the string's tail and would then slice off a
	// non-backtick character. The `len() > 1` guard prevents a lone '`'
	// from producing an invalid/empty-wrap slice (same guard this code
	// base applies in `clean_string`).
	if exp.len() > 1 && exp.starts_with('`') && exp.ends_with('`') {
		exp = exp[1..exp.len() - 1].to_string();
	}
	log!(INFO, exp);
	let expr_tokens = shunting_yard(tokenize_expr(&exp)?)?;
	log!(DEBUG, expr_tokens);
	let result = eval_rpn(expr_tokens)?.to_string();
	Ok(result)
}

View File

@@ -4,9 +4,9 @@ pub mod alias;
pub mod cmdsub; pub mod cmdsub;
pub mod arithmetic; pub mod arithmetic;
use arithmetic::expand_arithmetic; use arithmetic::expand_arith_token;
use vars::{expand_string, expand_var}; use vars::{expand_string, expand_var};
use tilde::expand_tilde; use tilde::expand_tilde_token;
use crate::prelude::*; use crate::prelude::*;
@@ -23,9 +23,11 @@ pub fn expand_argv(argv: Vec<Token>, shenv: &mut ShEnv) -> ShResult<Vec<Token>>
pub fn expand_token(token: Token, shenv: &mut ShEnv) -> ShResult<Vec<Token>> { pub fn expand_token(token: Token, shenv: &mut ShEnv) -> ShResult<Vec<Token>> {
let mut processed = vec![]; let mut processed = vec![];
log!(INFO, "expanding argv");
log!(INFO, "rule: {:?}", token.rule());
match token.rule() { match token.rule() {
TkRule::DQuote => { TkRule::DQuote => {
let dquote_exp = expand_string(token.as_raw(shenv), shenv); let dquote_exp = expand_string(&token.as_raw(shenv), shenv);
let mut expanded = shenv.expand_input(&dquote_exp, token.span()); let mut expanded = shenv.expand_input(&dquote_exp, token.span());
processed.append(&mut expanded); processed.append(&mut expanded);
} }
@@ -34,11 +36,11 @@ pub fn expand_token(token: Token, shenv: &mut ShEnv) -> ShResult<Vec<Token>> {
processed.append(&mut varsub_exp); processed.append(&mut varsub_exp);
} }
TkRule::TildeSub => { TkRule::TildeSub => {
let tilde_exp = expand_tilde(token.clone(), shenv); let tilde_exp = expand_tilde_token(token.clone(), shenv);
processed.push(tilde_exp); processed.push(tilde_exp);
} }
TkRule::ArithSub => { TkRule::ArithSub => {
let arith_exp = expand_arithmetic(token.clone(), shenv)?; let arith_exp = expand_arith_token(token.clone(), shenv)?;
processed.push(arith_exp); processed.push(arith_exp);
} }
_ => { _ => {

View File

@@ -1,13 +1,20 @@
use crate::prelude::*; use crate::prelude::*;
pub fn expand_tilde(tilde_sub: Token, shenv: &mut ShEnv) -> Token { pub fn expand_tilde_token(tilde_sub: Token, shenv: &mut ShEnv) -> Token {
let mut tilde_sub_raw = tilde_sub.as_raw(shenv); let tilde_sub_raw = tilde_sub.as_raw(shenv);
if tilde_sub_raw.starts_with('~') { let result = expand_tilde_string(&tilde_sub_raw);
if result == tilde_sub_raw {
return tilde_sub
}
let mut tokens = Lexer::new(result,shenv).lex();
tokens.pop().unwrap_or(tilde_sub)
}
pub fn expand_tilde_string(s: &str) -> String {
if s.starts_with('~') {
let home = std::env::var("HOME").unwrap_or_default(); let home = std::env::var("HOME").unwrap_or_default();
tilde_sub_raw = tilde_sub_raw.replacen('~', &home, 1); s.replacen('~', &home, 1)
let mut tokens = Lexer::new(tilde_sub_raw,shenv).lex();
tokens.pop().unwrap_or(tilde_sub)
} else { } else {
tilde_sub s.to_string()
} }
} }

View File

@@ -8,7 +8,7 @@ pub fn expand_var(var_sub: Token, shenv: &mut ShEnv) -> Vec<Token> {
shenv.expand_input(&value, var_sub.span()) shenv.expand_input(&value, var_sub.span())
} }
pub fn expand_string(s: String, shenv: &mut ShEnv) -> String { pub fn expand_string(s: &str, shenv: &mut ShEnv) -> String {
let mut result = String::new(); let mut result = String::new();
let mut var_name = String::new(); let mut var_name = String::new();
let mut chars = s.chars().peekable(); let mut chars = s.chars().peekable();
@@ -24,7 +24,7 @@ pub fn expand_string(s: String, shenv: &mut ShEnv) -> String {
'$' => { '$' => {
let mut expanded = false; let mut expanded = false;
while let Some(ch) = chars.peek() { while let Some(ch) = chars.peek() {
if *ch == '"' { if *ch == '"' || *ch == '`' {
break break
} }
let ch = chars.next().unwrap(); let ch = chars.next().unwrap();
@@ -63,9 +63,11 @@ pub fn expand_string(s: String, shenv: &mut ShEnv) -> String {
} }
} }
if !expanded { if !expanded {
log!(INFO, var_name);
let value = shenv.vars().get_var(&var_name); let value = shenv.vars().get_var(&var_name);
result.push_str(value); result.push_str(value);
} }
var_name.clear();
} }
_ => result.push(ch) _ => result.push(ch)
} }

View File

@@ -357,7 +357,11 @@ pub fn clean_string(s: impl ToString) -> String {
(s.starts_with('\'') && s.ends_with('\'')) || (s.starts_with('\'') && s.ends_with('\'')) ||
(s.starts_with('`') && s.ends_with('`')) (s.starts_with('`') && s.ends_with('`'))
{ {
s[1..s.len() - 1].to_string() if s.len() > 1 {
s[1..s.len() - 1].to_string()
} else {
s
}
} else if s.starts_with("$(") && s.ends_with(')') { } else if s.starts_with("$(") && s.ends_with(')') {
s[2..s.len() - 1].to_string() s[2..s.len() - 1].to_string()
} else { } else {

View File

@@ -394,6 +394,9 @@ tkrule_def!(ArithSub, |input: &str| {
_ => { /* Continue */ } _ => { /* Continue */ }
} }
} }
if is_arith_sub {
break
}
} }
' ' | '\t' | ';' | '\n' => return None, ' ' | '\t' | ';' | '\n' => return None,
_ => { /* Continue */ } _ => { /* Continue */ }

View File

@@ -85,7 +85,7 @@ pub enum LoopKind {
pub enum NdRule { pub enum NdRule {
Main { cmd_lists: Vec<Node> }, Main { cmd_lists: Vec<Node> },
Command { argv: Vec<Token>, redirs: Vec<Redir> }, Command { argv: Vec<Token>, redirs: Vec<Redir> },
Assignment { assignments: Vec<(Token,Token)>, cmd: Option<Box<Node>> }, Assignment { assignments: Vec<Token>, cmd: Option<Box<Node>> },
FuncDef { name: Token, body: Token }, FuncDef { name: Token, body: Token },
Case { pat: Token, blocks: Vec<(Token,Vec<Node>)>, redirs: Vec<Redir> }, Case { pat: Token, blocks: Vec<(Token,Vec<Node>)>, redirs: Vec<Redir> },
IfThen { cond_blocks: Vec<(Vec<Node>,Vec<Node>)>, else_block: Option<Vec<Node>>, redirs: Vec<Redir> }, IfThen { cond_blocks: Vec<(Vec<Node>,Vec<Node>)>, else_block: Option<Vec<Node>>, redirs: Vec<Redir> },
@@ -364,7 +364,7 @@ ndrule_def!(Case, shenv, |mut tokens: &[Token], shenv: &mut ShEnv| {
tokens = &tokens[1..]; tokens = &tokens[1..];
match token.rule() { match token.rule() {
TkRule::Whitespace => continue, TkRule::Whitespace => continue,
TkRule::Ident | TkRule::VarSub => { TkRule::Ident | TkRule::VarSub | TkRule::ArithSub => {
pat = Some(token.clone()); pat = Some(token.clone());
break break
} }
@@ -426,6 +426,15 @@ ndrule_def!(Case, shenv, |mut tokens: &[Token], shenv: &mut ShEnv| {
TkRule::CasePat => { TkRule::CasePat => {
node_toks.push(token.clone()); node_toks.push(token.clone());
tokens = &tokens[1..]; tokens = &tokens[1..];
while let Some(token) = tokens_iter.peek() {
if token.rule() == TkRule::Sep {
let token = tokens_iter.next().unwrap();
node_toks.push(token.clone());
tokens = &tokens[1..];
} else {
break
}
}
let block_pat = token.clone(); let block_pat = token.clone();
let (used,lists) = get_lists(tokens, shenv); let (used,lists) = get_lists(tokens, shenv);
let mut lists_iter = lists.iter().peekable(); let mut lists_iter = lists.iter().peekable();
@@ -1189,29 +1198,13 @@ ndrule_def!(Assignment, shenv, |tokens: &[Token], shenv: &mut ShEnv| {
while let Some(token) = tokens.peek() { while let Some(token) = tokens.peek() {
if matches!(token.rule(), TkRule::Ident | TkRule::ArithSub | TkRule::CmdSub | TkRule::DQuote) { if matches!(token.rule(), TkRule::Ident | TkRule::ArithSub | TkRule::CmdSub | TkRule::DQuote) {
let raw = token.as_raw(shenv); let raw = token.as_raw(shenv);
log!(INFO, raw);
// We are going to deconstruct this Ident into two separate tokens // We are going to deconstruct this Ident into two separate tokens
// This makes expanding it easier later // This makes expanding it easier later
if let Some((var,val)) = raw.split_once('=') { if raw.split_once('=').is_some() {
const LEN_DELTA: usize = 1; // The distance covered by the '=' that we just split at
let token = tokens.next().unwrap(); let token = tokens.next().unwrap();
let var_span = shenv.inputman_mut().new_span(
token.span().borrow().start(),
var.len()
);
let val_span = shenv.inputman_mut().new_span(
var.len() + LEN_DELTA,
token.span().borrow().end()
);
let var_rule = TkRule::Ident;
let val_rule = if let Some(rule) = check_expansion(&val) {
rule
} else {
TkRule::Ident
};
let var_token = Token::new(var_rule,var_span);
let val_token = Token::new(val_rule,val_span);
node_toks.push(token.clone()); node_toks.push(token.clone());
assignments.push((var_token.clone(),val_token.clone())); assignments.push(token.clone());
} else { } else {
break break
} }

View File

@@ -164,6 +164,7 @@ pub use crate::{
Parser, Parser,
ParseRule, ParseRule,
lex::{ lex::{
EXPANSIONS,
Span, Span,
Token, Token,
TkRule, TkRule,