Added rustfmt.toml, formatted codebase

This commit is contained in:
2025-08-12 13:58:25 -04:00
parent 23fb67aba8
commit 8ad53f09b3
52 changed files with 15188 additions and 14451 deletions

View File

@@ -2,173 +2,175 @@ use super::*;
#[test]
fn cmd_not_found() {
let input = "foo";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty()).next().unwrap().unwrap();
let err = ShErr::full(ShErrKind::CmdNotFound("foo".into()), "", token.span);
let input = "foo";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.next()
.unwrap()
.unwrap();
let err = ShErr::full(ShErrKind::CmdNotFound("foo".into()), "", token.span);
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn unclosed_subsh() {
let input = "(foo";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty()).nth(1).unwrap();
let Err(err) = token else {
panic!("{:?}",token);
};
let input = "(foo";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.nth(1)
.unwrap();
let Err(err) = token else {
panic!("{:?}", token);
};
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn unclosed_dquote() {
let input = "\"foo bar";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty()).nth(1).unwrap();
let Err(err) = token else {
panic!();
};
let input = "\"foo bar";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.nth(1)
.unwrap();
let Err(err) = token else {
panic!();
};
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn unclosed_squote() {
let input = "'foo bar";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty()).nth(1).unwrap();
let Err(err) = token else {
panic!();
};
let input = "'foo bar";
let token = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.nth(1)
.unwrap();
let Err(err) = token else {
panic!();
};
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn unclosed_brc_grp() {
let input = "{ foo bar";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "{ foo bar";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(err) = node else {
panic!();
};
let node = ParseStream::new(tokens).next().unwrap();
let Err(err) = node else {
panic!();
};
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn if_no_fi() {
let input = "if foo; then bar;";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "if foo; then bar;";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn if_no_then() {
let input = "if foo; bar; fi";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "if foo; bar; fi";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn loop_no_done() {
let input = "while true; do echo foo;";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "while true; do echo foo;";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn loop_no_do() {
let input = "while true; echo foo; done";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "while true; echo foo; done";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn case_no_esac() {
let input = "case foo in foo) bar;; bar) foo;;";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "case foo in foo) bar;; bar) foo;;";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn case_no_in() {
let input = "case foo foo) bar;; bar) foo;; esac";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let input = "case foo foo) bar;; bar) foo;; esac";
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let node = ParseStream::new(tokens).next().unwrap();
let Err(e) = node else { panic!() };
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{e}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn error_with_notes() {
let err = ShErr::simple(ShErrKind::ExecFail, "Execution failed")
.with_note(Note::new("Execution failed for this reason"))
.with_note(Note::new("Here is how to fix it: blah blah blah"));
let err = ShErr::simple(ShErrKind::ExecFail, "Execution failed")
.with_note(Note::new("Execution failed for this reason"))
.with_note(Note::new("Here is how to fix it: blah blah blah"));
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}
#[test]
fn error_with_notes_and_sub_notes() {
let err = ShErr::simple(ShErrKind::ExecFail, "Execution failed")
.with_note(Note::new("Execution failed for this reason"))
.with_note(
Note::new("Here is how to fix it:")
.with_sub_notes(vec![
"blah",
"blah",
"blah"
])
);
let err = ShErr::simple(ShErrKind::ExecFail, "Execution failed")
.with_note(Note::new("Execution failed for this reason"))
.with_note(Note::new("Here is how to fix it:").with_sub_notes(vec!["blah", "blah", "blah"]));
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
let err_fmt = format!("{err}");
insta::assert_snapshot!(err_fmt)
}

View File

@@ -6,297 +6,300 @@ use super::*;
#[test]
fn simple_expansion() {
let varsub = "$foo";
write_vars(|v| v.set_var("foo", "this is the value of the variable", false));
let varsub = "$foo";
write_vars(|v| v.set_var("foo", "this is the value of the variable", false));
let mut tokens: Vec<Tk> = LexStream::new(Arc::new(varsub.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.filter(|tk| !matches!(tk.class, TkRule::EOI | TkRule::SOI))
.collect();
let var_tk = tokens.pop().unwrap();
let mut tokens: Vec<Tk> = LexStream::new(Arc::new(varsub.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.filter(|tk| !matches!(tk.class, TkRule::EOI | TkRule::SOI))
.collect();
let var_tk = tokens.pop().unwrap();
let exp_tk = var_tk.expand().unwrap();
write_vars(|v| v.vars_mut().clear());
insta::assert_debug_snapshot!(exp_tk.get_words())
let exp_tk = var_tk.expand().unwrap();
write_vars(|v| v.vars_mut().clear());
insta::assert_debug_snapshot!(exp_tk.get_words())
}
#[test]
fn unescape_string() {
let string = "echo $foo \\$bar";
let unescaped = unescape_str(string);
let string = "echo $foo \\$bar";
let unescaped = unescape_str(string);
insta::assert_snapshot!(unescaped)
insta::assert_snapshot!(unescaped)
}
#[test]
fn expand_alias_simple() {
write_logic(|l| {
l.insert_alias("foo", "echo foo");
let input = String::from("foo");
write_logic(|l| {
l.insert_alias("foo", "echo foo");
let input = String::from("foo");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(),"echo foo");
l.clear_aliases();
});
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(), "echo foo");
l.clear_aliases();
});
}
#[test]
fn expand_alias_in_if() {
write_logic(|l| {
l.insert_alias("foo", "echo foo");
let input = String::from("if foo; then echo bar; fi");
write_logic(|l| {
l.insert_alias("foo", "echo foo");
let input = String::from("if foo; then echo bar; fi");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(),"if echo foo; then echo bar; fi");
l.clear_aliases();
});
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(), "if echo foo; then echo bar; fi");
l.clear_aliases();
});
}
#[test]
fn expand_alias_multiline() {
write_logic(|l| {
l.insert_alias("foo", "echo foo");
l.insert_alias("bar", "echo bar");
let input = String::from("
write_logic(|l| {
l.insert_alias("foo", "echo foo");
l.insert_alias("bar", "echo bar");
let input = String::from(
"
foo
if true; then
bar
fi
");
let expected = String::from("
",
);
let expected = String::from(
"
echo foo
if true; then
echo bar
fi
");
",
);
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result,expected)
});
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result, expected)
});
}
#[test]
fn expand_multiple_aliases() {
write_logic(|l| {
l.insert_alias("foo", "echo foo");
l.insert_alias("bar", "echo bar");
l.insert_alias("biz", "echo biz");
let input = String::from("foo; bar; biz");
write_logic(|l| {
l.insert_alias("foo", "echo foo");
l.insert_alias("bar", "echo bar");
l.insert_alias("biz", "echo biz");
let input = String::from("foo; bar; biz");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(),"echo foo; echo bar; echo biz");
});
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(), "echo foo; echo bar; echo biz");
});
}
#[test]
fn alias_in_arg_position() {
write_logic(|l| {
l.insert_alias("foo", "echo foo");
let input = String::from("echo foo");
write_logic(|l| {
l.insert_alias("foo", "echo foo");
let input = String::from("echo foo");
let result = expand_aliases(input.clone(), HashSet::new(), l);
assert_eq!(input,result);
l.clear_aliases();
});
let result = expand_aliases(input.clone(), HashSet::new(), l);
assert_eq!(input, result);
l.clear_aliases();
});
}
#[test]
fn expand_recursive_alias() {
write_logic(|l| {
l.insert_alias("foo", "echo foo");
l.insert_alias("bar", "foo bar");
write_logic(|l| {
l.insert_alias("foo", "echo foo");
l.insert_alias("bar", "foo bar");
let input = String::from("bar");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(),"echo foo bar");
});
let input = String::from("bar");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(), "echo foo bar");
});
}
#[test]
fn test_infinite_recursive_alias() {
write_logic(|l| {
l.insert_alias("foo", "foo bar");
let input = String::from("foo");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(),"foo bar");
l.clear_aliases();
});
write_logic(|l| {
l.insert_alias("foo", "foo bar");
let input = String::from("foo");
let result = expand_aliases(input, HashSet::new(), l);
assert_eq!(result.as_str(), "foo bar");
l.clear_aliases();
});
}
#[test]
fn param_expansion_defaultunsetornull() {
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset:-default").unwrap();
assert_eq!(result, "default");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset:-default").unwrap();
assert_eq!(result, "default");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_defaultunset() {
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset-default").unwrap();
assert_eq!(result, "default");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset-default").unwrap();
assert_eq!(result, "default");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_setdefaultunsetornull() {
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset:=assigned").unwrap();
assert_eq!(result, "assigned");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset:=assigned").unwrap();
assert_eq!(result, "assigned");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_setdefaultunset() {
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset=assigned").unwrap();
assert_eq!(result, "assigned");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("unset=assigned").unwrap();
assert_eq!(result, "assigned");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_altsetnotnull() {
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("set_var:+alt").unwrap();
assert_eq!(result, "alt");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("set_var:+alt").unwrap();
assert_eq!(result, "alt");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_altnotnull() {
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("set_var+alt").unwrap();
assert_eq!(result, "alt");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
v.set_var("set_var", "value", false);
});
let result = perform_param_expansion("set_var+alt").unwrap();
assert_eq!(result, "alt");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_len() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("#foo").unwrap();
assert_eq!(result, "3");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("#foo").unwrap();
assert_eq!(result, "3");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_substr() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo:1").unwrap();
assert_eq!(result, "oo");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo:1").unwrap();
assert_eq!(result, "oo");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_substrlen() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo:0:2").unwrap();
assert_eq!(result, "fo");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo:0:2").unwrap();
assert_eq!(result, "fo");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_remshortestprefix() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo#f*").unwrap();
assert_eq!(result, "oo");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo#f*").unwrap();
assert_eq!(result, "oo");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_remlongestprefix() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo##f*").unwrap();
assert_eq!(result, "");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo##f*").unwrap();
assert_eq!(result, "");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_remshortestsuffix() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo%*o").unwrap();
assert_eq!(result, "fo");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo%*o").unwrap();
assert_eq!(result, "fo");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_remlongestsuffix() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo%%*o").unwrap();
assert_eq!(result, "");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo%%*o").unwrap();
assert_eq!(result, "");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_replacefirstmatch() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo/foo/X").unwrap();
assert_eq!(result, "X");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo/foo/X").unwrap();
assert_eq!(result, "X");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_replaceallmatches() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo//o/X").unwrap();
assert_eq!(result, "fXX");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo//o/X").unwrap();
assert_eq!(result, "fXX");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_replaceprefix() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo/#f/X").unwrap();
assert_eq!(result, "Xoo");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo/#f/X").unwrap();
assert_eq!(result, "Xoo");
write_vars(|v| v.vars_mut().clear());
}
#[test]
fn param_expansion_replacesuffix() {
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo/%o/X").unwrap();
assert_eq!(result, "foX");
write_vars(|v| v.vars_mut().clear());
write_vars(|v| {
v.set_var("foo", "foo", false);
});
let result = perform_param_expansion("foo/%o/X").unwrap();
assert_eq!(result, "foX");
write_vars(|v| v.vars_mut().clear());
}

View File

@@ -6,32 +6,44 @@ use super::super::*;
#[test]
fn getopt_from_argv() {
let node = get_nodes("echo -n -e foo", |node| matches!(node.class, NdRule::Command {..}))
.pop()
.unwrap();
let NdRule::Command { assignments: _, argv } = node.class else {
panic!()
};
let node = get_nodes("echo -n -e foo", |node| {
matches!(node.class, NdRule::Command { .. })
})
.pop()
.unwrap();
let NdRule::Command {
assignments: _,
argv,
} = node.class
else {
panic!()
};
let (words,opts) = get_opts_from_tokens(argv);
insta::assert_debug_snapshot!(words);
insta::assert_debug_snapshot!(opts)
let (words, opts) = get_opts_from_tokens(argv);
insta::assert_debug_snapshot!(words);
insta::assert_debug_snapshot!(opts)
}
#[test]
fn getopt_simple() {
let raw = "echo -n foo".split_whitespace().map(|s| s.to_string()).collect::<Vec<_>>();
let raw = "echo -n foo"
.split_whitespace()
.map(|s| s.to_string())
.collect::<Vec<_>>();
let (words,opts) = get_opts(raw);
insta::assert_debug_snapshot!(words);
insta::assert_debug_snapshot!(opts);
let (words, opts) = get_opts(raw);
insta::assert_debug_snapshot!(words);
insta::assert_debug_snapshot!(opts);
}
#[test]
fn getopt_multiple_short() {
let raw = "echo -nre foo".split_whitespace().map(|s| s.to_string()).collect::<Vec<_>>();
let raw = "echo -nre foo"
.split_whitespace()
.map(|s| s.to_string())
.collect::<Vec<_>>();
let (words,opts) = get_opts(raw);
insta::assert_debug_snapshot!(words);
insta::assert_debug_snapshot!(opts);
let (words, opts) = get_opts(raw);
insta::assert_debug_snapshot!(words);
insta::assert_debug_snapshot!(opts);
}

View File

@@ -1,52 +1,52 @@
use super::*;
#[test]
fn lex_simple() {
let input = "echo hello world";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "echo hello world";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_redir() {
let input = "echo foo > bar.txt";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "echo foo > bar.txt";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_redir_fds() {
let input = "echo foo 1>&2";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "echo foo 1>&2";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_quote_str() {
let input = "echo \"foo bar\" biz baz";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "echo \"foo bar\" biz baz";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_with_keywords() {
let input = "if true; then echo foo; fi";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "if true; then echo foo; fi";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_multiline() {
let input = "echo hello world\necho foo bar\necho boo biz";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "echo hello world\necho foo bar\necho boo biz";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}
#[test]
fn lex_case() {
let input = "case $foo in foo) bar;; bar) foo;; biz) baz;; esac";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
let input = "case $foo in foo) bar;; bar) foo;; biz) baz;; esac";
let tokens: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty()).collect();
insta::assert_debug_snapshot!(tokens)
insta::assert_debug_snapshot!(tokens)
}

View File

@@ -1,51 +1,41 @@
use std::sync::Arc;
use super::*;
use crate::libsh::error::{
Note, ShErr, ShErrKind
};
use crate::expand::{expand_aliases, unescape_str};
use crate::libsh::error::{Note, ShErr, ShErrKind};
use crate::parse::{
node_operation, Node, NdRule, ParseStream,
lex::{
Tk, TkRule, LexFlags, LexStream
}
};
use crate::expand::{
expand_aliases, unescape_str
};
use crate::state::{
write_logic, write_vars
lex::{LexFlags, LexStream, Tk, TkRule},
node_operation, NdRule, Node, ParseStream,
};
use crate::state::{write_logic, write_vars};
pub mod error;
pub mod expand;
pub mod getopt;
pub mod highlight;
pub mod lexer;
pub mod parser;
pub mod expand;
pub mod term;
pub mod error;
pub mod getopt;
pub mod script;
pub mod highlight;
pub mod readline;
pub mod script;
pub mod term;
/// Unsafe to use outside of tests
pub fn get_nodes<F1>(input: &str, filter: F1) -> Vec<Node>
where
F1: Fn(&Node) -> bool
where
F1: Fn(&Node) -> bool,
{
let mut nodes = vec![];
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let mut parsed_nodes = ParseStream::new(tokens)
.map(|nd| nd.unwrap())
.collect::<Vec<_>>();
let mut nodes = vec![];
let tokens = LexStream::new(Arc::new(input.into()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect::<Vec<_>>();
let mut parsed_nodes = ParseStream::new(tokens)
.map(|nd| nd.unwrap())
.collect::<Vec<_>>();
for node in parsed_nodes.iter_mut() {
node_operation(node,
&filter,
&mut |node: &mut Node| nodes.push(node.clone())
);
}
nodes
for node in parsed_nodes.iter_mut() {
node_operation(node, &filter, &mut |node: &mut Node| {
nodes.push(node.clone())
});
}
nodes
}

View File

@@ -2,95 +2,95 @@ use super::*;
#[test]
fn parse_simple() {
let input = "echo hello world";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "echo hello world";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_pipeline() {
let input = "echo foo | sed s/foo/bar";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "echo foo | sed s/foo/bar";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_conjunction() {
let input = "echo foo && echo bar";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "echo foo && echo bar";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_conjunction_and_pipeline() {
let input = "echo foo | sed s/foo/bar/ && echo bar | sed s/bar/foo/ || echo foo bar | sed s/foo bar/bar foo/";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "echo foo | sed s/foo/bar/ && echo bar | sed s/bar/foo/ || echo foo bar | sed s/foo bar/bar foo/";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_multiline() {
let input = "
let input = "
echo hello world
echo foo bar
echo boo biz";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_if_simple() {
let input = "if foo; then echo bar; fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "if foo; then echo bar; fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_if_with_elif() {
let input = "if foo; then echo bar; elif bar; then echo foo; fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "if foo; then echo bar; elif bar; then echo foo; fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_if_multiple_elif() {
let input = "if foo; then echo bar; elif bar; then echo foo; elif biz; then echo baz; fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "if foo; then echo bar; elif bar; then echo foo; elif biz; then echo baz; fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_if_multiline() {
let input = "
let input = "
if foo; then
echo bar
elif bar; then
@@ -98,59 +98,59 @@ elif bar; then
elif biz; then
echo baz
fi";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_loop_simple() {
let input = "while foo; do bar; done";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let input = "while foo; do bar; done";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
/// Parses a one-line `until` loop; snapshot-tests the resulting AST.
#[test]
fn parse_loop_until() {
	let input = "until foo; do bar; done";
	let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
		.map(|tk| tk.unwrap())
		.collect();
	let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
	insta::assert_debug_snapshot!(nodes)
}
/// Parses an `until` loop spread over several lines; snapshot-tests the AST.
#[test]
fn parse_loop_multiline() {
	let input = "
until foo; do
	bar
done";
	let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
		.map(|tk| tk.unwrap())
		.collect();
	let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
	insta::assert_debug_snapshot!(nodes)
}
/// Parses a one-line `case` statement with three arms; snapshot-tests the AST.
#[test]
fn parse_case_simple() {
	let input = "case foo in foo) bar;; bar) foo;; biz) baz;; esac";
	let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
		.map(|tk| tk.unwrap())
		.collect();
	let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
	insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_case_multiline() {
let input = "case foo in
let input = "case foo in
foo) bar
;;
bar) foo
@@ -158,16 +158,16 @@ fn parse_case_multiline() {
biz) baz
;;
esac";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
#[test]
fn parse_case_nested() {
let input = "case foo in
let input = "case foo in
foo)
if true; then
while true; do
@@ -194,41 +194,41 @@ fn parse_case_nested() {
fi
;;
esac";
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
.map(|tk| tk.unwrap())
.collect();
let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
insta::assert_debug_snapshot!(nodes)
insta::assert_debug_snapshot!(nodes)
}
/// Stress test: deeply nested `if`/`until`/`case` constructs in one line.
/// Exists to exercise the parser's recursion, not to be readable.
#[test]
fn parse_cursed() {
	let input = "if if if if case foo in foo) if true; then true; fi;; esac; then case foo in foo) until true; do true; done;; esac; fi; then until if case foo in foo) true;; esac; then if true; then true; fi; fi; do until until true; do true; done; do case foo in foo) true;; esac; done; done; fi; then until until case foo in foo) true;; esac; do if true; then true; fi; done; do until true; do true; done; done; fi; then until case foo in foo) case foo in foo) true;; esac;; esac; do if if true; then true; fi; then until true; do true; done; fi; done; elif until until case foo in foo) true;; esac; do if true; then true; fi; done; do case foo in foo) until true; do true; done;; esac; done; then case foo in foo) if case foo in foo) true;; esac; then if true; then true; fi; fi;; esac; else case foo in foo) until until true; do true; done; do case foo in foo) true;; esac; done;; esac; fi";
	let tk_stream: Vec<_> = LexStream::new(Arc::new(input.to_string()), LexFlags::empty())
		.map(|tk| tk.unwrap())
		.collect();
	let nodes: Vec<_> = ParseStream::new(tk_stream).collect();
	// 15,000 line snapshot file btw
	insta::assert_debug_snapshot!(nodes)
}
/// Walks every parsed node with `node_operation`, collecting the nodes that
/// match the `Command` rule, and snapshot-tests the collected set.
#[test]
fn test_node_operation() {
	let input = String::from("echo hello world; echo foo bar");
	let mut check_nodes = vec![];
	let tokens: Vec<Tk> = LexStream::new(input.into(), LexFlags::empty())
		.map(|tk| tk.unwrap())
		.collect();
	let nodes = ParseStream::new(tokens).map(|nd| nd.unwrap());
	for mut node in nodes {
		// Predicate selects command nodes; the mutator just records a clone.
		node_operation(
			&mut node,
			&|node: &Node| matches!(node.class, NdRule::Command { .. }),
			&mut |node: &mut Node| check_nodes.push(node.clone()),
		);
	}
	insta::assert_debug_snapshot!(check_nodes)
}

File diff suppressed because it is too large Load Diff

View File

@@ -4,49 +4,48 @@ use pretty_assertions::assert_eq;
use super::super::*;
/// Runs the `fern` binary on the script at `name` with `args` and returns its
/// captured `Output`. The binary is located relative to the test executable,
/// falling back to the `release` target directory when the first guess misses.
fn get_script_output(name: &str, args: &[&str]) -> Output {
	// Resolve the path to the fern binary.
	// Do not question me.
	let mut fern_path = env::current_exe().expect("Failed to get test executable"); // The path to the test executable
	fern_path.pop(); // Hocus pocus
	fern_path.pop();
	fern_path.push("fern"); // Abra Kadabra
	// First guess failed (e.g. tests built under a different profile dir);
	// retry against target/release.
	if !fern_path.is_file() {
		fern_path.pop();
		fern_path.pop();
		fern_path.push("release");
		fern_path.push("fern");
	}
	if !fern_path.is_file() {
		panic!("where the hell is the binary")
	}
	process::Command::new(fern_path) // Alakazam
		.arg(name)
		.args(args)
		.output()
		.expect("Failed to run script")
}
/// `hello.sh` should exit successfully and print exactly "Hello, World!".
#[test]
fn script_hello_world() {
	let output = get_script_output("./test_scripts/hello.sh", &[]);
	assert!(output.status.success());
	let stdout = String::from_utf8_lossy(&output.stdout);
	assert_eq!(stdout.trim(), "Hello, World!")
}
/// `cmdsub.sh` exercises command substitution; expects "foo Hello bar".
#[test]
fn script_cmdsub() {
	let output = get_script_output("./test_scripts/cmdsub.sh", &[]);
	assert!(output.status.success());
	let stdout = String::from_utf8_lossy(&output.stdout);
	assert_eq!(stdout.trim(), "foo Hello bar")
}
/// `multiline.sh` exercises multi-line script execution; expects four lines.
#[test]
fn script_multiline() {
	let output = get_script_output("./test_scripts/multiline.sh", &[]);
	assert!(output.status.success());
	let stdout = String::from_utf8_lossy(&output.stdout);
	assert_eq!(stdout.trim(), "foo\nbar\nbiz\nbuzz")
}

View File

@@ -3,39 +3,41 @@ use libsh::term::{Style, StyleSet, Styled};
use super::super::*;
/// A single style applied to plain text; snapshot captures the escape codes.
#[test]
fn styled_simple() {
	let input = "hello world";
	let styled = input.styled(Style::Green);
	insta::assert_snapshot!(styled)
}
/// Several styles combined with `|`; snapshot captures the escape codes.
#[test]
fn styled_multiple() {
	let input = "styled text";
	let styled = input.styled(Style::Red | Style::Bold | Style::Underline);
	insta::assert_snapshot!(styled);
}
/// A 24-bit RGB foreground style; snapshot captures the escape codes.
#[test]
fn styled_rgb() {
	let input = "RGB styled text";
	let styled = input.styled(Style::RGB(255, 99, 71)); // Tomato color
	insta::assert_snapshot!(styled);
}
/// A background color combined with a text attribute; snapshot-tested.
#[test]
fn styled_background() {
	let input = "text with background";
	let styled = input.styled(Style::BgBlue | Style::Bold);
	insta::assert_snapshot!(styled);
}
/// Styles combined through the `StyleSet` builder API rather than bitflags.
#[test]
fn styled_set() {
	let input = "multi-style text";
	let style_set = StyleSet::new()
		.add_style(Style::Magenta)
		.add_style(Style::Italic);
	let styled = input.styled(style_set);
	insta::assert_snapshot!(styled);
}
/// `Style::Reset` combined with another style; snapshot captures the codes.
#[test]
fn styled_reset() {
	let input = "reset test";
	let styled = input.styled(Style::Bold | Style::Reset);
	insta::assert_snapshot!(styled);
}