From 32ec62b52f99701c9aed043a7508e7e89761bc38 Mon Sep 17 00:00:00 2001 From: pagedmov Date: Sun, 23 Mar 2025 17:37:15 -0400 Subject: [PATCH] switched to Arc instead of Rc for input strings --- src/builtin/cd.rs | 2 +- src/expand.rs | 2 +- src/fern.rs | 8 +++----- src/parse/execute.rs | 2 +- src/parse/lex.rs | 14 +++++++------- src/parse/mod.rs | 6 +++--- src/prelude.rs | 2 +- src/procio.rs | 8 ++++---- src/prompt/readline.rs | 2 +- src/state.rs | 19 ++++++------------- src/tests/error.rs | 14 +++++++------- src/tests/expand.rs | 2 +- src/tests/lexer.rs | 14 +++++++------- src/tests/mod.rs | 4 ++-- src/tests/parser.rs | 32 ++++++++++++++++---------------- 15 files changed, 61 insertions(+), 70 deletions(-) diff --git a/src/builtin/cd.rs b/src/builtin/cd.rs index 35cffac..9239fd7 100644 --- a/src/builtin/cd.rs +++ b/src/builtin/cd.rs @@ -1,4 +1,4 @@ -use crate::{jobs::{ChildProc, JobBldr}, libsh::error::{ShErr, ShErrKind, ShResult}, parse::{execute::prepare_argv, NdRule, Node}, prelude::*, state::{self, write_vars}}; +use crate::{jobs::JobBldr, libsh::error::ShResult, parse::{NdRule, Node}, prelude::*, state::{self}}; use super::setup_builtin; diff --git a/src/expand.rs b/src/expand.rs index 5ce6aa0..7bbe291 100644 --- a/src/expand.rs +++ b/src/expand.rs @@ -604,7 +604,7 @@ pub fn expand_prompt(raw: &str) -> ShResult { /// Expand aliases in the given input string pub fn expand_aliases(input: String, mut already_expanded: HashSet) -> String { let mut result = input.clone(); - let tokens: Vec<_> = LexStream::new(Rc::new(input), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input), LexFlags::empty()).collect(); let mut expanded_this_iter: Vec = vec![]; for token_result in tokens.into_iter().rev() { diff --git a/src/fern.rs b/src/fern.rs index 509ea8c..418047a 100644 --- a/src/fern.rs +++ b/src/fern.rs @@ -15,12 +15,10 @@ pub mod getopt; use std::collections::HashSet; use expand::expand_aliases; -use getopt::get_opts; use 
libsh::error::ShResult; -use parse::{execute::Dispatcher, lex::{LexFlags, LexStream, Tk}, Ast, ParseStream, ParsedSrc}; -use procio::IoFrame; +use parse::{execute::Dispatcher, ParsedSrc}; use signal::sig_setup; -use state::{source_rc, write_logic, write_meta}; +use state::{source_rc, write_meta}; use termios::{LocalFlags, Termios}; use crate::prelude::*; @@ -57,7 +55,7 @@ fn set_termios() { pub fn exec_input(input: String) -> ShResult<()> { write_meta(|m| m.start_timer()); let input = expand_aliases(input, HashSet::new()); - let mut parser = ParsedSrc::new(Rc::new(input)); + let mut parser = ParsedSrc::new(Arc::new(input)); parser.parse_src()?; let mut dispatcher = Dispatcher::new(parser.extract_nodes()); diff --git a/src/parse/execute.rs b/src/parse/execute.rs index d4feab1..05a936e 100644 --- a/src/parse/execute.rs +++ b/src/parse/execute.rs @@ -120,7 +120,7 @@ impl Dispatcher { ) } - let mut func_parser = ParsedSrc::new(Rc::new(body)); + let mut func_parser = ParsedSrc::new(Arc::new(body)); func_parser.parse_src()?; // Parse the function let func = ShFunc::new(func_parser); diff --git a/src/parse/lex.rs b/src/parse/lex.rs index 32ee57b..f1a8841 100644 --- a/src/parse/lex.rs +++ b/src/parse/lex.rs @@ -1,4 +1,4 @@ -use std::{collections::VecDeque, fmt::Display, iter::Peekable, ops::{Bound, Deref, Range, RangeBounds}, str::Chars}; +use std::{collections::VecDeque, fmt::Display, iter::Peekable, ops::{Bound, Deref, Range, RangeBounds}, str::Chars, sync::Arc}; use bitflags::bitflags; @@ -33,12 +33,12 @@ pub const OPENERS: [&'static str;6] = [ #[derive(Clone,PartialEq,Default,Debug)] pub struct Span { range: Range, - source: Rc + source: Arc } impl Span { /// New `Span`. Wraps a range and a string slice that it refers to. 
- pub fn new(range: Range, source: Rc) -> Self { + pub fn new(range: Range, source: Arc) -> Self { Span { range, source, @@ -48,7 +48,7 @@ impl Span { pub fn as_str(&self) -> &str { &self.source[self.start..self.end] } - pub fn get_source(&self) -> Rc { + pub fn get_source(&self) -> Arc { self.source.clone() } pub fn range(&self) -> Range { @@ -108,7 +108,7 @@ impl Tk { _ => self.span.as_str().to_string() } } - pub fn source(&self) -> Rc { + pub fn source(&self) -> Arc { self.span.source.clone() } /// Used to see if a separator is ';;' for case statements @@ -145,7 +145,7 @@ bitflags! { } pub struct LexStream { - source: Rc, + source: Arc, pub cursor: usize, in_quote: bool, flags: LexFlags, @@ -175,7 +175,7 @@ bitflags! { } impl LexStream { - pub fn new(source: Rc, flags: LexFlags) -> Self { + pub fn new(source: Arc, flags: LexFlags) -> Self { flog!(TRACE, "new lex stream"); let flags = flags | LexFlags::FRESH | LexFlags::NEXT_IS_CMD; Self { source, cursor: 0, in_quote: false, flags } diff --git a/src/parse/mod.rs b/src/parse/mod.rs index 127ea4d..e902698 100644 --- a/src/parse/mod.rs +++ b/src/parse/mod.rs @@ -24,17 +24,17 @@ macro_rules! 
try_match { /// The parsed AST along with the source input it parsed /// -/// Uses Rc instead of &str because the reference has to stay alive while errors are propagated upwards +/// Uses Arc instead of &str because the reference has to stay alive while errors are propagated upwards /// The string also has to stay alive in the case of pre-parsed shell function nodes, which live in the logic table /// Using &str for this use-case dramatically overcomplicates the code #[derive(Clone,Debug)] pub struct ParsedSrc { - pub src: Rc, + pub src: Arc, pub ast: Ast } impl ParsedSrc { - pub fn new(src: Rc) -> Self { + pub fn new(src: Arc) -> Self { Self { src, ast: Ast::new(vec![]) } } pub fn parse_src(&mut self) -> ShResult<()> { diff --git a/src/prelude.rs b/src/prelude.rs index 98a1486..a743eef 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -16,7 +16,7 @@ pub use std::path::{ Path, PathBuf }; pub use std::ffi::{ CStr, CString }; pub use std::process::exit; pub use std::time::Instant; -pub use std::rc::Rc; +pub use std::sync::Arc; pub use std::mem; pub use std::env; pub use std::fmt; diff --git a/src/procio.rs b/src/procio.rs index 9251239..1e10df1 100644 --- a/src/procio.rs +++ b/src/procio.rs @@ -7,10 +7,10 @@ use crate::{libsh::{error::{ShErr, ShErrKind, ShResult}, utils::RedirVecUtils}, #[derive(Clone,Debug)] pub enum IoMode { - Fd { tgt_fd: RawFd, src_fd: Rc }, - File { tgt_fd: RawFd, file: Rc }, - Pipe { tgt_fd: RawFd, pipe: Rc }, - Buffer { buf: String, pipe: Rc } + Fd { tgt_fd: RawFd, src_fd: Arc }, + File { tgt_fd: RawFd, file: Arc }, + Pipe { tgt_fd: RawFd, pipe: Arc }, + Buffer { buf: String, pipe: Arc } } impl IoMode { diff --git a/src/prompt/readline.rs b/src/prompt/readline.rs index fee404f..8c9798b 100644 --- a/src/prompt/readline.rs +++ b/src/prompt/readline.rs @@ -68,7 +68,7 @@ impl Validator for FernReadline { fn validate(&self, ctx: &mut rustyline::validate::ValidationContext) -> rustyline::Result { return Ok(ValidationResult::Valid(None)); let mut 
tokens = vec![]; - let tk_stream = LexStream::new(Rc::new(ctx.input().to_string()), LexFlags::empty()); + let tk_stream = LexStream::new(Arc::new(ctx.input().to_string()), LexFlags::empty()); for tk in tk_stream { if tk.is_err() { return Ok(ValidationResult::Incomplete) diff --git a/src/state.rs b/src/state.rs index b8a0bfa..6026cae 100644 --- a/src/state.rs +++ b/src/state.rs @@ -10,15 +10,12 @@ pub static VAR_TABLE: LazyLock> = LazyLock::new(|| RwLock::new(Va pub static META_TABLE: LazyLock> = LazyLock::new(|| RwLock::new(MetaTab::new())); - -thread_local! { - pub static LOGIC_TABLE: LazyLock> = LazyLock::new(|| RwLock::new(LogTab::new())); -} +pub static LOGIC_TABLE: LazyLock> = LazyLock::new(|| RwLock::new(LogTab::new())); /// A shell function /// /// Consists of the BraceGrp Node and the stored ParsedSrc that the node refers to -/// The Node must be stored with the ParsedSrc because the tokens of the node contain an Rc +/// The Node must be stored with the ParsedSrc because the tokens of the node contain an Arc /// Which refers to the String held in ParsedSrc /// /// Can be dereferenced to pull out the wrapped Node @@ -298,18 +295,14 @@ pub fn write_meta) -> T>(f: F) -> T /// Read from the logic table pub fn read_logic) -> T>(f: F) -> T { - LOGIC_TABLE.with(|log| { - let lock = log.read().unwrap(); - f(lock) - }) + let lock = LOGIC_TABLE.read().unwrap(); + f(lock) } /// Write to the logic table pub fn write_logic) -> T>(f: F) -> T { - LOGIC_TABLE.with(|log| { - let lock = &mut log.write().unwrap(); - f(lock) - }) + let lock = &mut LOGIC_TABLE.write().unwrap(); + f(lock) } pub fn get_status() -> i32 { diff --git a/src/tests/error.rs b/src/tests/error.rs index 4e2f933..9719602 100644 --- a/src/tests/error.rs +++ b/src/tests/error.rs @@ -5,7 +5,7 @@ use super::super::*; #[test] fn cmd_not_found() { let input = "foo"; - let token = LexStream::new(Rc::new(input.into()), LexFlags::empty()).next().unwrap().unwrap(); + let token = 
LexStream::new(Arc::new(input.into()), LexFlags::empty()).next().unwrap().unwrap(); let err = ShErr::full(ShErrKind::CmdNotFound("foo".into()), "", token.span); let err_fmt = format!("{err}"); @@ -15,7 +15,7 @@ fn cmd_not_found() { #[test] fn if_no_fi() { let input = "if foo; then bar;"; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); @@ -29,7 +29,7 @@ fn if_no_fi() { #[test] fn if_no_then() { let input = "if foo; bar; fi"; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); @@ -43,7 +43,7 @@ fn if_no_then() { #[test] fn loop_no_done() { let input = "while true; do echo foo;"; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); @@ -57,7 +57,7 @@ fn loop_no_done() { #[test] fn loop_no_do() { let input = "while true; echo foo; done"; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); @@ -71,7 +71,7 @@ fn loop_no_do() { #[test] fn case_no_esac() { let input = "case foo in foo) bar;; bar) foo;;"; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); @@ -85,7 +85,7 @@ fn case_no_esac() { #[test] fn case_no_in() { let input = "case foo foo) bar;; bar) foo;; esac"; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); diff --git a/src/tests/expand.rs b/src/tests/expand.rs index 0d873d6..0ec8628 
100644 --- a/src/tests/expand.rs +++ b/src/tests/expand.rs @@ -10,7 +10,7 @@ fn simple_expansion() { let varsub = "$foo"; write_vars(|v| v.new_var("foo", "this is the value of the variable".into())); - let mut tokens: Vec = LexStream::new(Rc::new(varsub.to_string()), LexFlags::empty()) + let mut tokens: Vec = LexStream::new(Arc::new(varsub.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .filter(|tk| !matches!(tk.class, TkRule::EOI | TkRule::SOI)) .collect(); diff --git a/src/tests/lexer.rs b/src/tests/lexer.rs index e5dfec3..488cfc5 100644 --- a/src/tests/lexer.rs +++ b/src/tests/lexer.rs @@ -2,35 +2,35 @@ use super::super::*; #[test] fn lex_simple() { let input = "echo hello world"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } #[test] fn lex_redir() { let input = "echo foo > bar.txt"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } #[test] fn lex_redir_fds() { let input = "echo foo 1>&2"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } #[test] fn lex_quote_str() { let input = "echo \"foo bar\" biz baz"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } #[test] fn lex_with_keywords() { let input = "if true; then echo foo; fi"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let 
tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } @@ -38,7 +38,7 @@ fn lex_with_keywords() { #[test] fn lex_multiline() { let input = "echo hello world\necho foo bar\necho boo biz"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } @@ -46,7 +46,7 @@ fn lex_multiline() { #[test] fn lex_case() { let input = "case $foo in foo) bar;; bar) foo;; biz) baz;; esac"; - let tokens: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()).collect(); + let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect(); insta::assert_debug_snapshot!(tokens) } diff --git a/src/tests/mod.rs index 5f2184f..9d06df7 100644 --- a/src/tests/mod.rs +++ b/src/tests/mod.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::sync::Arc; use crate::parse::{lex::{LexFlags, LexStream}, node_operation, Node, ParseStream}; @@ -15,7 +15,7 @@ pub fn get_nodes(input: &str, filter: F1) -> Vec F1: Fn(&Node) -> bool { let mut nodes = vec![]; - let tokens = LexStream::new(Rc::new(input.into()), LexFlags::empty()) + let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect::>(); let mut parsed_nodes = ParseStream::new(tokens) diff --git a/src/tests/parser.rs index 7d880ef..3861b38 100644 --- a/src/tests/parser.rs +++ b/src/tests/parser.rs @@ -5,7 +5,7 @@ use super::super::*; #[test] fn parse_simple() { let input = "echo hello world"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -16,7 +16,7 @@ fn
parse_simple() { #[test] fn parse_pipeline() { let input = "echo foo | sed s/foo/bar"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -27,7 +27,7 @@ fn parse_pipeline() { #[test] fn parse_conjunction() { let input = "echo foo && echo bar"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -38,7 +38,7 @@ fn parse_conjunction() { #[test] fn parse_conjunction_and_pipeline() { let input = "echo foo | sed s/foo/bar/ && echo bar | sed s/bar/foo/ || echo foo bar | sed s/foo bar/bar foo/"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -52,7 +52,7 @@ fn parse_multiline() { echo hello world echo foo bar echo boo biz"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -63,7 +63,7 @@ echo boo biz"; #[test] fn parse_if_simple() { let input = "if foo; then echo bar; fi"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -73,7 +73,7 @@ fn parse_if_simple() { #[test] fn 
parse_if_with_elif() { let input = "if foo; then echo bar; elif bar; then echo foo; fi"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -83,7 +83,7 @@ fn parse_if_with_elif() { #[test] fn parse_if_multiple_elif() { let input = "if foo; then echo bar; elif bar; then echo foo; elif biz; then echo baz; fi"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -100,7 +100,7 @@ elif bar; then elif biz; then echo baz fi"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -110,7 +110,7 @@ fi"; #[test] fn parse_loop_simple() { let input = "while foo; do bar; done"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -120,7 +120,7 @@ fn parse_loop_simple() { #[test] fn parse_loop_until() { let input = "until foo; do bar; done"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -133,7 +133,7 @@ fn parse_loop_multiline() { until foo; do bar done"; - let tk_stream: Vec<_> = 
LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -143,7 +143,7 @@ done"; #[test] fn parse_case_simple() { let input = "case foo in foo) bar;; bar) foo;; biz) baz;; esac"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -160,7 +160,7 @@ fn parse_case_multiline() { biz) baz ;; esac"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -179,7 +179,7 @@ fn parse_case_nested() { fi ;; esac"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect(); @@ -190,7 +190,7 @@ esac"; fn parse_cursed() { let input = "if if if if case foo in foo) if true; then true; fi;; esac; then case foo in foo) until true; do true; done;; esac; fi; then until if case foo in foo) true;; esac; then if true; then true; fi; fi; do until until true; do true; done; do case foo in foo) true;; esac; done; done; fi; then until until case foo in foo) true;; esac; do if true; then true; fi; done; do until true; do true; done; done; fi; then until case foo in foo) case foo in foo) true;; esac;; esac; do if if true; then true; fi; then until true; do true; done; fi; done; elif until until case foo in foo) true;; esac; do if true; then true; fi; done; do case foo 
in foo) until true; do true; done;; esac; done; then case foo in foo) if case foo in foo) true;; esac; then if true; then true; fi; fi;; esac; else case foo in foo) until until true; do true; done; do case foo in foo) true;; esac; done;; esac; fi"; - let tk_stream: Vec<_> = LexStream::new(Rc::new(input.to_string()), LexFlags::empty()) + let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()) .map(|tk| tk.unwrap()) .collect(); let nodes: Vec<_> = ParseStream::new(tk_stream).collect();