Re-implemented aliases

This commit is contained in:
2025-03-18 21:50:53 -04:00
parent 62c76e70a6
commit 653a5fe344
12 changed files with 314 additions and 55 deletions

43
src/builtin/alias.rs Normal file
View File

@@ -0,0 +1,43 @@
use crate::{jobs::JobBldr, libsh::error::{ShErr, ShErrKind, ShResult}, parse::{NdRule, Node}, prelude::*, procio::{borrow_fd, IoStack}, state::{self, read_logic, write_logic}};
use super::setup_builtin;
/// The `alias` builtin.
///
/// With no arguments, prints every defined alias (sorted by name) to stdout
/// as `name = "body"` lines. With arguments, each argument must be a
/// `name=body` assignment, which defines (or redefines) an alias.
///
/// # Errors
/// Returns a `SyntaxErr` when an argument is not a `name=body` assignment.
/// The builtin's io frame is restored before any error is propagated, so
/// redirections are always unwound.
pub fn alias(node: Node, io_stack: &mut IoStack, job: &mut JobBldr) -> ShResult<()> {
	let NdRule::Command { assignments: _, argv } = node.class else {
		unreachable!()
	};
	let (argv, io_frame) = setup_builtin(argv, job, Some((io_stack, node.redirs)))?;
	// Defer `?` on the body's result until after the io frame is restored,
	// so an error return does not leak the redirect frame
	let result: ShResult<()> = if argv.is_empty() {
		// No args: display the defined aliases
		let mut alias_output = read_logic(|l| {
			l.aliases()
				.iter()
				.map(|ent| format!("{} = \"{}\"", ent.0, ent.1))
				.collect::<Vec<_>>()
		});
		alias_output.sort(); // Sort them alphabetically
		if alias_output.is_empty() {
			Ok(()) // No aliases defined: print nothing rather than a bare newline
		} else {
			let mut alias_output = alias_output.join("\n"); // Join them with newlines
			alias_output.push('\n'); // Push a final newline
			let stdout = borrow_fd(STDOUT_FILENO);
			write(stdout, alias_output.as_bytes())
				.map(|_| ())
				.map_err(ShErr::from)
		}
	} else {
		// Args: each must be a `name=body` assignment
		let mut res = Ok(());
		for (arg, span) in argv {
			let Some((name, body)) = arg.split_once('=') else {
				res = Err(ShErr::full(
					ShErrKind::SyntaxErr,
					"alias: Expected an assignment in alias args",
					span.into()
				));
				break
			};
			write_logic(|l| l.insert_alias(name, body));
		}
		res
	};
	io_frame.unwrap().restore()?;
	result?;
	state::set_status(0);
	Ok(())
}

View File

@@ -9,8 +9,9 @@ pub mod pwd;
pub mod source;
pub mod shift;
pub mod jobctl;
pub mod alias;
pub const BUILTINS: [&str;9] = [
pub const BUILTINS: [&str;10] = [
"echo",
"cd",
"export",
@@ -19,7 +20,8 @@ pub const BUILTINS: [&str;9] = [
"shift",
"jobs",
"fg",
"bg"
"bg",
"alias"
];
/// Sets up a builtin command

View File

@@ -1,4 +1,6 @@
use crate::{libsh::error::ShResult, parse::lex::{is_field_sep, is_hard_sep, LexFlags, LexStream, Span, Tk, TkFlags, TkRule}, prelude::*, state::{read_meta, read_vars, write_meta}};
use std::collections::{HashSet, VecDeque};
use crate::{libsh::error::ShResult, parse::lex::{is_field_sep, is_hard_sep, is_keyword, LexFlags, LexStream, Span, Tk, TkFlags, TkRule}, prelude::*, state::{read_logic, read_meta, read_vars, write_meta}};
/// Variable substitution marker
pub const VAR_SUB: char = '\u{fdd0}';
@@ -479,9 +481,7 @@ pub fn expand_prompt(raw: &str) -> ShResult<String> {
PromptTk::Text(txt) => result.push_str(&txt),
PromptTk::AnsiSeq(params) => result.push_str(&params),
PromptTk::Runtime => {
flog!(INFO, "getting runtime");
if let Some(runtime) = write_meta(|m| m.stop_timer()) {
flog!(DEBUG, runtime);
let runtime_fmt = format_cmd_runtime(runtime);
result.push_str(&runtime_fmt);
}
@@ -543,3 +543,104 @@ pub fn expand_prompt(raw: &str) -> ShResult<String> {
Ok(result)
}
/// Expand aliases in the given input string
///
/// Words in command position that name an alias are replaced with the alias
/// body. Expansion is repeated on the result until a pass performs no
/// substitutions; `already_expanded` carries the names substituted on prior
/// passes so that self-referential aliases (e.g. `alias foo='foo'`) terminate.
pub fn expand_aliases(input: String, mut already_expanded: HashSet<String>) -> String {
	/// Flush `cur_word` into `result`, substituting the alias body when the
	/// word is in command position, is not a keyword, and was not expanded on
	/// a previous pass. A substituted name is recorded in `expanded_this_iter`.
	/// `cur_word` is left empty in every case — in particular, a word whose
	/// alias was already expanded is emitted verbatim rather than dropped.
	fn flush_word(
		cur_word: &mut String,
		result: &mut String,
		is_cmd: bool,
		already_expanded: &HashSet<String>,
		expanded_this_iter: &mut HashSet<String>,
	) {
		if is_cmd
			&& !is_keyword(cur_word.as_str())
			&& !already_expanded.contains(cur_word.as_str())
		{
			if let Some(alias) = read_logic(|l| l.get_alias(cur_word.as_str())) {
				result.push_str(&alias);
				expanded_this_iter.insert(mem::take(cur_word));
				return
			}
		}
		// Not eligible for expansion, or no alias defined: emit verbatim
		result.push_str(&mem::take(cur_word));
	}

	let mut result = String::new();
	let mut cur_word = String::new();
	let mut chars = input.chars().peekable();
	let mut is_cmd = true; // true while the next word is in command position
	let mut expanded_this_iter = HashSet::new();
	while let Some(ch) = chars.next() {
		match ch {
			';' | '\n' => {
				flush_word(&mut cur_word, &mut result, is_cmd, &already_expanded, &mut expanded_this_iter);
				result.push(ch);
				is_cmd = true; // a command separator starts a new command
				// Copy any following run of hard separators through verbatim
				while let Some(next_ch) = chars.peek() {
					if is_hard_sep(*next_ch) {
						result.push(chars.next().unwrap());
					} else {
						break
					}
				}
			}
			' ' | '\t' => {
				// Keywords (`if`, `then`, ...) keep command position for the
				// following word; a regular word consumes it. Leading/extra
				// whitespace (empty cur_word) leaves command position intact
				// instead of suppressing expansion of the first real word.
				let consumed_cmd_pos = !cur_word.is_empty() && !is_keyword(cur_word.as_str());
				flush_word(&mut cur_word, &mut result, is_cmd, &already_expanded, &mut expanded_this_iter);
				if consumed_cmd_pos {
					is_cmd = false;
				}
				result.push(ch);
				// Copy any following run of field separators through verbatim
				while let Some(next_ch) = chars.peek() {
					if is_field_sep(*next_ch) {
						result.push(chars.next().unwrap());
					} else {
						break
					}
				}
			}
			_ => cur_word.push(ch)
		}
	}
	// Flush the final word, if any
	if !cur_word.is_empty() {
		flush_word(&mut cur_word, &mut result, is_cmd, &already_expanded, &mut expanded_this_iter);
	}
	if expanded_this_iter.is_empty() {
		result
	} else {
		// Something was substituted this pass; rescan the result so alias
		// bodies that themselves begin with aliases are expanded too
		already_expanded.extend(expanded_this_iter);
		expand_aliases(result, already_expanded)
	}
}

View File

@@ -8,14 +8,16 @@ pub mod state;
pub mod builtin;
pub mod jobs;
pub mod signal;
#[cfg(test)]
pub mod tests;
use std::collections::HashSet;
use expand::expand_aliases;
use libsh::error::ShResult;
use parse::{execute::Dispatcher, lex::{LexFlags, LexStream}, Ast, ParseStream, ParsedSrc};
use procio::IoFrame;
use signal::sig_setup;
use state::write_meta;
use state::{source_rc, write_logic, write_meta};
use termios::{LocalFlags, Termios};
use crate::prelude::*;
@@ -51,14 +53,12 @@ fn set_termios() {
pub fn exec_input(input: String) -> ShResult<()> {
write_meta(|m| m.start_timer());
let input = expand_aliases(input, HashSet::new());
let mut parser = ParsedSrc::new(Rc::new(input));
parser.parse_src()?;
let exec_start = Instant::now();
let mut dispatcher = Dispatcher::new(parser.extract_nodes());
dispatcher.begin_dispatch()?;
Ok(())
dispatcher.begin_dispatch()
}
fn main() {
@@ -66,6 +66,9 @@ fn main() {
set_termios();
sig_setup();
if let Err(e) = source_rc() {
eprintln!("{e}");
}
loop {
let input = match prompt::read_line() {

View File

@@ -611,9 +611,11 @@ pub fn enable_reaping() -> ShResult<()> {
/// Waits on the current foreground job and updates the shell's last status code
pub fn wait_fg(job: Job) -> ShResult<()> {
if job.children().is_empty() {
return Ok(()) // Nothing to do
}
flog!(TRACE, "Waiting on foreground job");
let mut code = 0;
flog!(DEBUG,job.pgid());
attach_tty(job.pgid())?;
disable_reaping()?;
let statuses = write_jobs(|j| j.new_fg(job))?;

View File

@@ -30,7 +30,7 @@ impl<T> ShResultExt for Result<T,ShErr> {
return self
};
match e {
ShErr::Simple { kind, msg } => Err(ShErr::full(*kind, msg, new_span)),
ShErr::Simple { kind, msg } => Err(ShErr::full(kind.clone(), msg, new_span)),
ShErr::Full { kind: _, msg: _, span: _ } => self
}
}
@@ -86,7 +86,7 @@ impl ShErr {
if ch == '\n' {
total_lines += 1;
if total_len >= span.start {
if total_len > span.start {
let line = (
total_lines,
mem::take(&mut cur_line)
@@ -96,6 +96,7 @@ impl ShErr {
if total_len >= span.end {
break
}
cur_line.clear();
}
}
@@ -161,6 +162,7 @@ impl Display for ShErr {
for (lineno,line) in window {
let lineno = lineno.to_string();
let line = line.trim();
let mut prefix = format!("{padding}|");
prefix.replace_range(0..lineno.len(), &lineno);
prefix = prefix.styled(Style::Cyan | Style::Bold);
@@ -203,7 +205,7 @@ impl From<Errno> for ShErr {
}
}
#[derive(Debug,Clone,Copy)]
#[derive(Debug,Clone)]
pub enum ShErrKind {
IoErr,
SyntaxErr,
@@ -213,8 +215,8 @@ pub enum ShErrKind {
ResourceLimitExceeded,
BadPermission,
Errno,
FileNotFound,
CmdNotFound,
FileNotFound(String),
CmdNotFound(String),
CleanExit,
FuncReturn,
LoopContinue,
@@ -233,8 +235,8 @@ impl Display for ShErrKind {
ShErrKind::ResourceLimitExceeded => "Resource Limit Exceeded",
ShErrKind::BadPermission => "Bad Permissions",
ShErrKind::Errno => "ERRNO",
ShErrKind::FileNotFound => "File Not Found",
ShErrKind::CmdNotFound => "Command Not Found",
ShErrKind::FileNotFound(file) => &format!("File not found: {file}"),
ShErrKind::CmdNotFound(cmd) => &format!("Command not found: {cmd}"),
ShErrKind::CleanExit => "",
ShErrKind::FuncReturn => "",
ShErrKind::LoopContinue => "",

View File

@@ -1,9 +1,9 @@
use std::collections::VecDeque;
use crate::{builtin::{cd::cd, echo::echo, export::export, jobctl::{continue_job, jobs, JobBehavior}, pwd::pwd, shift::shift, source::source}, exec_input, jobs::{dispatch_job, ChildProc, Job, JobBldr, JobStack}, libsh::{error::{ShErr, ShErrKind, ShResult, ShResultExt}, utils::RedirVecUtils}, prelude::*, procio::{IoFrame, IoMode, IoStack}, state::{self, read_logic, read_vars, write_logic, write_vars, ShFunc, VarTab}};
use crate::{builtin::{alias::alias, cd::cd, echo::echo, export::export, jobctl::{continue_job, jobs, JobBehavior}, pwd::pwd, shift::shift, source::source}, exec_input, jobs::{dispatch_job, ChildProc, Job, JobBldr, JobStack}, libsh::{error::{ShErr, ShErrKind, ShResult, ShResultExt}, utils::RedirVecUtils}, prelude::*, procio::{IoFrame, IoMode, IoStack}, state::{self, read_logic, read_vars, write_logic, write_vars, ShFunc, VarTab}};
use super::{lex::{LexFlags, LexStream, Span, Tk, TkFlags}, AssignKind, CondNode, ConjunctNode, ConjunctOp, LoopKind, NdFlags, NdRule, Node, ParseStream, ParsedSrc, Redir, RedirType};
use super::{lex::{LexFlags, LexStream, Span, Tk, TkFlags, KEYWORDS}, AssignKind, CondNode, ConjunctNode, ConjunctOp, LoopKind, NdFlags, NdRule, Node, ParseStream, ParsedSrc, Redir, RedirType};
pub enum AssignBehavior {
Export,
@@ -12,7 +12,7 @@ pub enum AssignBehavior {
/// Arguments to the execvpe function
pub struct ExecArgs {
pub cmd: CString,
pub cmd: (CString,Span),
pub argv: Vec<CString>,
pub envp: Vec<CString>
}
@@ -26,8 +26,8 @@ impl ExecArgs {
let envp = Self::get_envp();
Self { cmd, argv, envp }
}
pub fn get_cmd(argv: &[(String,Span)]) -> CString {
CString::new(argv[0].0.as_str()).unwrap()
pub fn get_cmd(argv: &[(String,Span)]) -> (CString,Span) {
(CString::new(argv[0].0.as_str()).unwrap(),argv[0].1.clone())
}
pub fn get_argv(argv: Vec<(String,Span)>) -> Vec<CString> {
argv.into_iter().map(|s| CString::new(s.0).unwrap()).collect()
@@ -101,6 +101,7 @@ impl Dispatcher {
Ok(())
}
pub fn exec_func_def(&mut self, func_def: Node) -> ShResult<()> {
let blame = func_def.get_span();
let NdRule::FuncDef { name, body } = func_def.class else {
unreachable!()
};
@@ -108,6 +109,16 @@ impl Dispatcher {
let body = body_span.as_str().to_string();
let name = name.span.as_str().strip_suffix("()").unwrap();
if KEYWORDS.contains(&name) {
return Err(
ShErr::full(
ShErrKind::SyntaxErr,
format!("function: Forbidden function name `{name}`"),
blame
)
)
}
let mut func_parser = ParsedSrc::new(Rc::new(body));
func_parser.parse_src()?; // Parse the function
@@ -256,7 +267,7 @@ impl Dispatcher {
Ok(())
}
pub fn exec_pipeline(&mut self, pipeline: Node) -> ShResult<()> {
let NdRule::Pipeline { cmds, pipe_err } = pipeline.class else {
let NdRule::Pipeline { cmds, pipe_err: _ } = pipeline.class else {
unreachable!()
};
self.job_stack.new_job();
@@ -281,7 +292,7 @@ impl Dispatcher {
Ok(())
}
pub fn exec_builtin(&mut self, mut cmd: Node) -> ShResult<()> {
let NdRule::Command { ref mut assignments, argv } = &mut cmd.class else {
let NdRule::Command { ref mut assignments, argv: _ } = &mut cmd.class else {
unreachable!()
};
let env_vars_to_unset = self.set_assignments(mem::take(assignments), AssignBehavior::Export);
@@ -300,6 +311,7 @@ impl Dispatcher {
"fg" => continue_job(cmd, curr_job_mut, JobBehavior::Foregound),
"bg" => continue_job(cmd, curr_job_mut, JobBehavior::Background),
"jobs" => jobs(cmd, io_stack_mut, curr_job_mut),
"alias" => alias(cmd, io_stack_mut, curr_job_mut),
_ => unimplemented!("Have not yet added support for builtin '{}'", cmd_raw.span.as_str())
};
@@ -422,7 +434,7 @@ where
}
ForkResult::Parent { child } => {
let cmd = if let Some(args) = exec_args {
Some(args.cmd.to_str().unwrap().to_string())
Some(args.cmd.0.to_str().unwrap().to_string())
} else {
None
};
@@ -437,11 +449,29 @@ pub fn def_child_action(mut io_frame: IoFrame, exec_args: Option<ExecArgs>) {
eprintln!("{e}");
}
let exec_args = exec_args.unwrap();
let cmd = &exec_args.cmd.to_str().unwrap().to_string();
let Err(e) = execvpe(&exec_args.cmd, &exec_args.argv, &exec_args.envp);
let cmd = &exec_args.cmd.0;
let span = exec_args.cmd.1;
let Err(e) = execvpe(&cmd, &exec_args.argv, &exec_args.envp);
let cmd = cmd.to_str().unwrap().to_string();
match e {
Errno::ENOENT => eprintln!("Command not found: {}", cmd),
_ => eprintln!("{e}")
Errno::ENOENT => {
let err = ShErr::full(
ShErrKind::CmdNotFound(cmd),
"",
span
);
eprintln!("{err}");
}
_ => {
let err = ShErr::full(
ShErrKind::Errno,
format!("{e}"),
span
);
eprintln!("{err}");
}
}
exit(e as i32)
}

View File

@@ -29,7 +29,9 @@ fn get_hist_path() -> ShResult<PathBuf> {
Ok(PathBuf::from(path))
} else {
let home = env::var("HOME")?;
Ok(PathBuf::from(format!("{home}/.fernhist")))
let path = PathBuf::from(format!("{home}/.fernhist"));
flog!(DEBUG, path);
Ok(path)
}
}

View File

@@ -2,7 +2,7 @@ use std::{collections::{HashMap, VecDeque}, ops::{Deref, Range}, sync::{LazyLock
use nix::unistd::{gethostname, getppid, User};
use crate::{exec_input, jobs::JobTab, libsh::{error::ShResult, utils::VecDequeExt}, parse::{lex::{get_char, Tk}, ConjunctNode, NdRule, Node, ParsedSrc}, prelude::*};
use crate::{exec_input, jobs::JobTab, libsh::{error::{ShErr, ShErrKind, ShResult}, utils::VecDequeExt}, parse::{lex::{get_char, Tk}, ConjunctNode, NdRule, Node, ParsedSrc}, prelude::*};
pub static JOB_TABLE: LazyLock<RwLock<JobTab>> = LazyLock::new(|| RwLock::new(JobTab::new()));
@@ -67,6 +67,12 @@ impl LogTab {
pub fn get_func(&self, name: &str) -> Option<ShFunc> {
self.functions.get(name).cloned()
}
/// Read-only view of the table of defined shell functions, keyed by name
pub fn funcs(&self) -> &HashMap<String,ShFunc> {
	&self.functions
}
/// Read-only view of the table of defined aliases, mapping name -> body
pub fn aliases(&self) -> &HashMap<String,String> {
	&self.aliases
}
/// Define (or silently redefine) the alias `name` with body `body`
pub fn insert_alias(&mut self, name: &str, body: &str) {
	self.aliases.insert(name.into(), body.into());
}
@@ -142,7 +148,7 @@ impl VarTab {
env::set_var("OLDPWD", pathbuf_to_string(std::env::current_dir()));
env::set_var("HOME", home.clone());
env::set_var("SHELL", pathbuf_to_string(std::env::current_exe()));
env::set_var("FERN_HIST",format!("{}/.fern_hist",home));
env::set_var("FERN_HIST",format!("{}/.fernhist",home));
env::set_var("FERN_RC",format!("{}/.fernrc",home));
}
pub fn init_sh_argv(&mut self) {
@@ -313,6 +319,21 @@ pub fn set_status(code: i32) {
write_vars(|v| v.set_param('?', &code.to_string()))
}
/// Source the user's rc file at shell startup.
///
/// Uses `$FERN_RC` when set, falling back to `$HOME/.fernrc`.
///
/// # Errors
/// Returns an error when neither `$FERN_RC` nor `$HOME` is set, when the
/// resolved rc file does not exist, or when sourcing the file fails.
pub fn source_rc() -> ShResult<()> {
	let path = if let Ok(path) = env::var("FERN_RC") {
		PathBuf::from(&path)
	} else {
		// Propagate a missing HOME as a shell error instead of panicking
		let home = env::var("HOME")?;
		PathBuf::from(format!("{home}/.fernrc"))
	};
	if !path.exists() {
		return Err(
			ShErr::simple(ShErrKind::InternalErr, ".fernrc not found")
		)
	}
	source_file(path)
}
pub fn source_file(path: PathBuf) -> ShResult<()> {
let mut file = OpenOptions::new()
.read(true)

View File

@@ -1,6 +1,8 @@
use expand::unescape_str;
use std::collections::HashSet;
use expand::{expand_aliases, unescape_str};
use parse::lex::{Tk, TkFlags, TkRule};
use state::write_vars;
use state::{write_logic, write_vars};
use super::super::*;
#[test]
@@ -8,7 +10,7 @@ fn simple_expansion() {
let varsub = "$foo";
write_vars(|v| v.new_var("foo", "this is the value of the variable".into()));
let mut tokens: Vec<Tk> = LexStream::new(varsub, LexFlags::empty())
let mut tokens: Vec<Tk> = LexStream::new(Rc::new(varsub.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.filter(|tk| !matches!(tk.class, TkRule::EOI | TkRule::SOI))
.collect();
@@ -27,3 +29,54 @@ fn unescape_string() {
insta::assert_snapshot!(unescaped)
}
/// A lone alias at command position expands to its body
#[test]
fn expand_alias_simple() {
	write_logic(|l| l.insert_alias("foo", "echo foo"));
	let expanded = expand_aliases("foo".to_string(), HashSet::new());
	assert_eq!(expanded, "echo foo")
}
/// Keywords keep command position, so an alias following `if` still expands
#[test]
fn expand_alias_in_if() {
	write_logic(|l| l.insert_alias("foo", "echo foo"));
	let expanded = expand_aliases("if foo; then echo bar; fi".to_string(), HashSet::new());
	assert_eq!(expanded, "if echo foo; then echo bar; fi")
}
/// Each command position in a `;`-separated list expands independently
#[test]
fn expand_multiple_aliases() {
	write_logic(|l| {
		l.insert_alias("foo", "echo foo");
		l.insert_alias("bar", "echo bar");
		l.insert_alias("biz", "echo biz");
	});
	let expanded = expand_aliases("foo; bar; biz".to_string(), HashSet::new());
	assert_eq!(expanded, "echo foo; echo bar; echo biz")
}
/// An alias whose body begins with another alias is expanded on a second pass
#[test]
fn expand_recursive_alias() {
	write_logic(|l| {
		l.insert_alias("foo", "echo foo");
		l.insert_alias("bar", "foo bar");
	});
	let expanded = expand_aliases("bar".to_string(), HashSet::new());
	assert_eq!(expanded, "echo foo bar")
}
/// A self-referential alias must not loop forever; the name survives as-is
#[test]
fn test_infinite_recursive_alias() {
	write_logic(|l| l.insert_alias("foo", "foo"));
	let expanded = expand_aliases("foo".to_string(), HashSet::new());
	assert_eq!(expanded, "foo")
}

View File

@@ -2,35 +2,35 @@ use super::super::*;
#[test]
fn lex_simple() {
let input = "echo hello world";
let tokens: Vec<_> = LexStream::new(input, LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_redir() {
let input = "echo foo > bar.txt";
let tokens: Vec<_> = LexStream::new(input, LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_redir_fds() {
let input = "echo foo 1>&2";
let tokens: Vec<_> = LexStream::new(input, LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_quote_str() {
let input = "echo \"foo bar\" biz baz";
let tokens: Vec<_> = LexStream::new(input, LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_with_keywords() {
let input = "if true; then echo foo; fi";
let tokens: Vec<_> = LexStream::new(input, LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
@@ -38,7 +38,7 @@ fn lex_with_keywords() {
#[test]
fn lex_multiline() {
let input = "echo hello world\necho foo bar\necho boo biz";
let tokens: Vec<_> = LexStream::new(input, LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}

View File

@@ -3,7 +3,7 @@ use super::super::*;
#[test]
fn parse_simple() {
let input = "echo hello world";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -14,7 +14,7 @@ fn parse_simple() {
#[test]
fn parse_pipeline() {
let input = "echo foo | sed s/foo/bar";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -25,7 +25,7 @@ fn parse_pipeline() {
#[test]
fn parse_conjunction() {
let input = "echo foo && echo bar";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -36,7 +36,7 @@ fn parse_conjunction() {
#[test]
fn parse_conjunction_and_pipeline() {
let input = "echo foo | sed s/foo/bar/ && echo bar | sed s/bar/foo/ || echo foo bar | sed s/foo bar/bar foo/";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -50,7 +50,7 @@ fn parse_multiline() {
echo hello world
echo foo bar
echo boo biz";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -61,7 +61,7 @@ echo boo biz";
#[test]
fn parse_if_simple() {
let input = "if foo; then echo bar; fi";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -71,7 +71,7 @@ fn parse_if_simple() {
#[test]
fn parse_if_with_elif() {
let input = "if foo; then echo bar; elif bar; then echo foo; fi";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -81,7 +81,7 @@ fn parse_if_with_elif() {
#[test]
fn parse_if_multiple_elif() {
let input = "if foo; then echo bar; elif bar; then echo foo; elif biz; then echo baz; fi";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -98,7 +98,7 @@ elif bar; then
elif biz; then
echo baz
fi";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -108,7 +108,7 @@ fi";
#[test]
fn parse_loop_simple() {
let input = "while foo; do bar; done";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -118,7 +118,7 @@ fn parse_loop_simple() {
#[test]
fn parse_loop_until() {
let input = "until foo; do bar; done";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -131,7 +131,7 @@ fn parse_loop_multiline() {
until foo; do
bar
done";
let tk_stream: Vec<_> = LexStream::new(input, LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();