switched to Arc instead of Rc for input strings

This commit is contained in:
2025-03-23 17:37:15 -04:00
parent 8fffe1cd71
commit 32ec62b52f
15 changed files with 61 additions and 70 deletions

View File

@@ -1,4 +1,4 @@
use crate::{jobs::{ChildProc, JobBldr}, libsh::error::{ShErr, ShErrKind, ShResult}, parse::{execute::prepare_argv, NdRule, Node}, prelude::*, state::{self, write_vars}};
use crate::{jobs::JobBldr, libsh::error::ShResult, parse::{NdRule, Node}, prelude::*, state::{self}};
use super::setup_builtin;

View File

@@ -604,7 +604,7 @@ pub fn expand_prompt(raw: &str) -> ShResult<String> {
/// Expand aliases in the given input string
pub fn expand_aliases(input: String, mut already_expanded: HashSet<String>) -> String {
let mut result = input.clone();
let tokens: Vec<_> = LexStream::new(Rc::new(input), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input), LexFlags::empty()).collect();
let mut expanded_this_iter: Vec<String> = vec![];
for token_result in tokens.into_iter().rev() {

View File

@@ -15,12 +15,10 @@ pub mod getopt;
use std::collections::HashSet;
use expand::expand_aliases;
use getopt::get_opts;
use libsh::error::ShResult;
use parse::{execute::Dispatcher, lex::{LexFlags, LexStream, Tk}, Ast, ParseStream, ParsedSrc};
use procio::IoFrame;
use parse::{execute::Dispatcher, ParsedSrc};
use signal::sig_setup;
use state::{source_rc, write_logic, write_meta};
use state::{source_rc, write_meta};
use termios::{LocalFlags, Termios};
use crate::prelude::*;
@@ -57,7 +55,7 @@ fn set_termios() {
pub fn exec_input(input: String) -> ShResult<()> {
write_meta(|m| m.start_timer());
let input = expand_aliases(input, HashSet::new());
let mut parser = ParsedSrc::new(Rc::new(input));
let mut parser = ParsedSrc::new(Arc::new(input));
parser.parse_src()?;
let mut dispatcher = Dispatcher::new(parser.extract_nodes());

View File

@@ -120,7 +120,7 @@ impl Dispatcher {
)
}
let mut func_parser = ParsedSrc::new(Rc::new(body));
let mut func_parser = ParsedSrc::new(Arc::new(body));
func_parser.parse_src()?; // Parse the function
let func = ShFunc::new(func_parser);

View File

@@ -1,4 +1,4 @@
use std::{collections::VecDeque, fmt::Display, iter::Peekable, ops::{Bound, Deref, Range, RangeBounds}, str::Chars};
use std::{collections::VecDeque, fmt::Display, iter::Peekable, ops::{Bound, Deref, Range, RangeBounds}, str::Chars, sync::Arc};
use bitflags::bitflags;
@@ -33,12 +33,12 @@ pub const OPENERS: [&'static str;6] = [
#[derive(Clone,PartialEq,Default,Debug)]
pub struct Span {
range: Range<usize>,
source: Rc<String>
source: Arc<String>
}
impl Span {
/// New `Span`. Wraps a range and a string slice that it refers to.
pub fn new(range: Range<usize>, source: Rc<String>) -> Self {
pub fn new(range: Range<usize>, source: Arc<String>) -> Self {
Span {
range,
source,
@@ -48,7 +48,7 @@ impl Span {
pub fn as_str(&self) -> &str {
&self.source[self.start..self.end]
}
pub fn get_source(&self) -> Rc<String> {
pub fn get_source(&self) -> Arc<String> {
self.source.clone()
}
pub fn range(&self) -> Range<usize> {
@@ -108,7 +108,7 @@ impl Tk {
_ => self.span.as_str().to_string()
}
}
pub fn source(&self) -> Rc<String> {
pub fn source(&self) -> Arc<String> {
self.span.source.clone()
}
/// Used to see if a separator is ';;' for case statements
@@ -145,7 +145,7 @@ bitflags! {
}
pub struct LexStream {
source: Rc<String>,
source: Arc<String>,
pub cursor: usize,
in_quote: bool,
flags: LexFlags,
@@ -175,7 +175,7 @@ bitflags! {
}
impl LexStream {
pub fn new(source: Rc<String>, flags: LexFlags) -> Self {
pub fn new(source: Arc<String>, flags: LexFlags) -> Self {
flog!(TRACE, "new lex stream");
let flags = flags | LexFlags::FRESH | LexFlags::NEXT_IS_CMD;
Self { source, cursor: 0, in_quote: false, flags }

View File

@@ -24,17 +24,17 @@ macro_rules! try_match {
/// The parsed AST along with the source input it parsed
///
/// Uses Rc<String> instead of &str because the reference has to stay alive while errors are propagated upwards
/// Uses Arc<String> instead of &str because the reference has to stay alive while errors are propagated upwards
/// The string also has to stay alive in the case of pre-parsed shell function nodes, which live in the logic table
/// Using &str for this use-case dramatically overcomplicates the code
#[derive(Clone,Debug)]
pub struct ParsedSrc {
pub src: Rc<String>,
pub src: Arc<String>,
pub ast: Ast
}
impl ParsedSrc {
pub fn new(src: Rc<String>) -> Self {
pub fn new(src: Arc<String>) -> Self {
Self { src, ast: Ast::new(vec![]) }
}
pub fn parse_src(&mut self) -> ShResult<()> {

View File

@@ -16,7 +16,7 @@ pub use std::path::{ Path, PathBuf };
pub use std::ffi::{ CStr, CString };
pub use std::process::exit;
pub use std::time::Instant;
pub use std::rc::Rc;
pub use std::sync::Arc;
pub use std::mem;
pub use std::env;
pub use std::fmt;

View File

@@ -7,10 +7,10 @@ use crate::{libsh::{error::{ShErr, ShErrKind, ShResult}, utils::RedirVecUtils},
#[derive(Clone,Debug)]
pub enum IoMode {
Fd { tgt_fd: RawFd, src_fd: Rc<OwnedFd> },
File { tgt_fd: RawFd, file: Rc<File> },
Pipe { tgt_fd: RawFd, pipe: Rc<OwnedFd> },
Buffer { buf: String, pipe: Rc<OwnedFd> }
Fd { tgt_fd: RawFd, src_fd: Arc<OwnedFd> },
File { tgt_fd: RawFd, file: Arc<File> },
Pipe { tgt_fd: RawFd, pipe: Arc<OwnedFd> },
Buffer { buf: String, pipe: Arc<OwnedFd> }
}
impl IoMode {

View File

@@ -68,7 +68,7 @@ impl Validator for FernReadline {
fn validate(&self, ctx: &mut rustyline::validate::ValidationContext) -> rustyline::Result<rustyline::validate::ValidationResult> {
return Ok(ValidationResult::Valid(None));
let mut tokens = vec![];
let tk_stream = LexStream::new(Rc::new(ctx.input().to_string()), LexFlags::empty());
let tk_stream = LexStream::new(Arc::new(ctx.input().to_string()), LexFlags::empty());
for tk in tk_stream {
if tk.is_err() {
return Ok(ValidationResult::Incomplete)

View File

@@ -10,15 +10,12 @@ pub static VAR_TABLE: LazyLock<RwLock<VarTab>> = LazyLock::new(|| RwLock::new(Va
pub static META_TABLE: LazyLock<RwLock<MetaTab>> = LazyLock::new(|| RwLock::new(MetaTab::new()));
thread_local! {
pub static LOGIC_TABLE: LazyLock<RwLock<LogTab>> = LazyLock::new(|| RwLock::new(LogTab::new()));
}
/// A shell function
///
/// Consists of the BraceGrp Node and the stored ParsedSrc that the node refers to
/// The Node must be stored with the ParsedSrc because the tokens of the node contain an Rc<String>
/// The Node must be stored with the ParsedSrc because the tokens of the node contain an Arc<String>
/// Which refers to the String held in ParsedSrc
///
/// Can be dereferenced to pull out the wrapped Node
@@ -298,18 +295,14 @@ pub fn write_meta<T, F: FnOnce(&mut RwLockWriteGuard<MetaTab>) -> T>(f: F) -> T
/// Read from the logic table
pub fn read_logic<T, F: FnOnce(RwLockReadGuard<LogTab>) -> T>(f: F) -> T {
LOGIC_TABLE.with(|log| {
let lock = log.read().unwrap();
let lock = LOGIC_TABLE.read().unwrap();
f(lock)
})
}
/// Write to the logic table
pub fn write_logic<T, F: FnOnce(&mut RwLockWriteGuard<LogTab>) -> T>(f: F) -> T {
LOGIC_TABLE.with(|log| {
let lock = &mut log.write().unwrap();
let lock = &mut LOGIC_TABLE.write().unwrap();
f(lock)
})
}
pub fn get_status() -> i32 {

View File

@@ -5,7 +5,7 @@ use super::super::*;
#[test]
fn cmd_not_found() {
let input = "foo";
let token = LexStream::new(Rc::new(input.into()), LexFlags::empty()).next().unwrap().unwrap();
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty()).next().unwrap().unwrap();
let err = ShErr::full(ShErrKind::CmdNotFound("foo".into()), "", token.span);
let err_fmt = format!("{err}");
@@ -15,7 +15,7 @@ fn cmd_not_found() {
#[test]
fn if_no_fi() {
let input = "if foo; then bar;";
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
@@ -29,7 +29,7 @@ fn if_no_fi() {
#[test]
fn if_no_then() {
let input = "if foo; bar; fi";
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
@@ -43,7 +43,7 @@ fn if_no_then() {
#[test]
fn loop_no_done() {
let input = "while true; do echo foo;";
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
@@ -57,7 +57,7 @@ fn loop_no_done() {
#[test]
fn loop_no_do() {
let input = "while true; echo foo; done";
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
@@ -71,7 +71,7 @@ fn loop_no_do() {
#[test]
fn case_no_esac() {
let input = "case foo in foo) bar;; bar) foo;;";
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
@@ -85,7 +85,7 @@ fn case_no_esac() {
#[test]
fn case_no_in() {
let input = "case foo foo) bar;; bar) foo;; esac";
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();

View File

@@ -10,7 +10,7 @@ fn simple_expansion() {
let varsub = "$foo";
write_vars(|v| v.new_var("foo", "this is the value of the variable".into()));
let mut tokens: Vec<Tk> = LexStream::new(Rc::new(varsub.to_string()), LexFlags::empty())
let mut tokens: Vec<Tk> = LexStream::new(Arc::new(varsub.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.filter(|tk| !matches!(tk.class, TkRule::EOI | TkRule::SOI))
.collect();

View File

@@ -2,35 +2,35 @@ use super::super::*;
#[test]
fn lex_simple() {
let input = "echo hello world";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_redir() {
let input = "echo foo > bar.txt";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_redir_fds() {
let input = "echo foo 1>&2";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_quote_str() {
let input = "echo \"foo bar\" biz baz";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_with_keywords() {
let input = "if true; then echo foo; fi";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
@@ -38,7 +38,7 @@ fn lex_with_keywords() {
#[test]
fn lex_multiline() {
let input = "echo hello world\necho foo bar\necho boo biz";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}
@@ -46,7 +46,7 @@ fn lex_multiline() {
#[test]
fn lex_case() {
let input = "case $foo in foo) bar;; bar) foo;; biz) baz;; esac";
let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect();
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
}

View File

@@ -1,4 +1,4 @@
use std::rc::Rc;
use std::sync::Arc;
use crate::parse::{lex::{LexFlags, LexStream}, node_operation, Node, ParseStream};
@@ -15,7 +15,7 @@ pub fn get_nodes<F1>(input: &str, filter: F1) -> Vec<Node>
F1: Fn(&Node) -> bool
{
let mut nodes = vec![];
let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty())
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let mut parsed_nodes = ParseStream::new(tokens)

View File

@@ -5,7 +5,7 @@ use super::super::*;
#[test]
fn parse_simple() {
let input = "echo hello world";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -16,7 +16,7 @@ fn parse_simple() {
#[test]
fn parse_pipeline() {
let input = "echo foo | sed s/foo/bar";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -27,7 +27,7 @@ fn parse_pipeline() {
#[test]
fn parse_conjunction() {
let input = "echo foo && echo bar";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -38,7 +38,7 @@ fn parse_conjunction() {
#[test]
fn parse_conjunction_and_pipeline() {
let input = "echo foo | sed s/foo/bar/ && echo bar | sed s/bar/foo/ || echo foo bar | sed s/foo bar/bar foo/";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -52,7 +52,7 @@ fn parse_multiline() {
echo hello world
echo foo bar
echo boo biz";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -63,7 +63,7 @@ echo boo biz";
#[test]
fn parse_if_simple() {
let input = "if foo; then echo bar; fi";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -73,7 +73,7 @@ fn parse_if_simple() {
#[test]
fn parse_if_with_elif() {
let input = "if foo; then echo bar; elif bar; then echo foo; fi";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -83,7 +83,7 @@ fn parse_if_with_elif() {
#[test]
fn parse_if_multiple_elif() {
let input = "if foo; then echo bar; elif bar; then echo foo; elif biz; then echo baz; fi";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -100,7 +100,7 @@ elif bar; then
elif biz; then
echo baz
fi";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -110,7 +110,7 @@ fi";
#[test]
fn parse_loop_simple() {
let input = "while foo; do bar; done";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -120,7 +120,7 @@ fn parse_loop_simple() {
#[test]
fn parse_loop_until() {
let input = "until foo; do bar; done";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -133,7 +133,7 @@ fn parse_loop_multiline() {
until foo; do
bar
done";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -143,7 +143,7 @@ done";
#[test]
fn parse_case_simple() {
let input = "case foo in foo) bar;; bar) foo;; biz) baz;; esac";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -160,7 +160,7 @@ fn parse_case_multiline() {
biz) baz
;;
esac";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -179,7 +179,7 @@ fn parse_case_nested() {
fi
;;
esac";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
@@ -190,7 +190,7 @@ esac";
fn parse_cursed() {
let input = "if if if if case foo in foo) if true; then true; fi;; esac; then case foo in foo) until true; do true; done;; esac; fi; then until if case foo in foo) true;; esac; then if true; then true; fi; fi; do until until true; do true; done; do case foo in foo) true;; esac; done; done; fi; then until until case foo in foo) true;; esac; do if true; then true; fi; done; do until true; do true; done; done; fi; then until case foo in foo) case foo in foo) true;; esac;; esac; do if if true; then true; fi; then until true; do true; done; fi; done; elif until until case foo in foo) true;; esac; do if true; then true; fi; done; do case foo in foo) until true; do true; done;; esac; done; then case foo in foo) if case foo in foo) true;; esac; then if true; then true; fi; fi;; esac; else case foo in foo) until until true; do true; done; do case foo in foo) true;; esac; done;; esac; fi";
let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty())
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();