author    Dan Davison <dandavison7@gmail.com>  2020-06-11 14:37:46 -0400
committer Dan Davison <dandavison7@gmail.com>  2020-06-11 18:01:04 -0400
commit    33249ea4ccb98ce31a371c00ffd1e6de27033293 (patch)
tree      35d367a816d9fdbdc3807cb3d936a0b97735a0a3 /src/edits.rs
parent    827db14ad81a3a17a43ea7ef441f7473c68ccab4 (diff)
Strengthen tokenization tests
Diffstat (limited to 'src/edits.rs')
-rw-r--r--  src/edits.rs | 47
1 file changed, 27 insertions(+), 20 deletions(-)
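
The diff below replaces the repeated assert_eq!(tokenize(...), vec![...]) assertions with an assert_tokenize helper that additionally checks that the expected tokens concatenate back to the original input, so a test case cannot silently omit part of the line it claims to tokenize. The following is a minimal, self-contained sketch of that pattern; simple_tokenize is a simplified stand-in for delta's actual tokenize function in src/edits.rs, not the real implementation.

// Stand-in tokenizer: splits a line into runs of word characters and
// single non-word characters, preserving every byte of the input.
fn simple_tokenize(text: &str) -> Vec<&str> {
    let mut tokens = Vec::new();
    let mut start = 0;
    for (i, c) in text.char_indices() {
        if !c.is_alphanumeric() && c != '_' {
            if start < i {
                tokens.push(&text[start..i]);
            }
            tokens.push(&text[i..i + c.len_utf8()]);
            start = i + c.len_utf8();
        }
    }
    if start < text.len() {
        tokens.push(&text[start..]);
    }
    tokens
}

// Mirrors the helper added in this commit: the expected tokens must
// reassemble to the input text, and the tokenizer must produce exactly them.
fn assert_tokenize(text: &str, expected_tokens: &[&str]) {
    assert_eq!(text, expected_tokens.concat());
    assert_eq!(simple_tokenize(text), expected_tokens);
}

fn main() {
    assert_tokenize("aaa bbb", &["aaa", " ", "bbb"]);
    assert_tokenize(
        "vec![(a, b)];",
        &["vec", "!", "[", "(", "a", ",", " ", "b", ")", "]", ";"],
    );
}

The extra concatenation check is what "strengthens" the tests: previously a malformed expected-token list could still pass as long as tokenize happened to produce the same (wrong) tokens.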
diff --git a/src/edits.rs b/src/edits.rs
index f330eb4c..1066470b 100644
--- a/src/edits.rs
+++ b/src/edits.rs
@@ -226,6 +226,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
+ use itertools::Itertools;
use unicode_segmentation::UnicodeSegmentation;
#[derive(Clone, Copy, Debug, PartialEq)]
@@ -256,14 +257,14 @@ mod tests {
#[test]
fn test_tokenize_1() {
- assert_eq!(tokenize("aaa bbb"), vec!["aaa", " ", "bbb"])
+ assert_tokenize("aaa bbb", &["aaa", " ", "bbb"])
}
#[test]
fn test_tokenize_2() {
- assert_eq!(
- tokenize("fn coalesce_edits<'a, EditOperation>("),
- vec![
+ assert_tokenize(
+ "fn coalesce_edits<'a, EditOperation>(",
+ &[
"fn",
" ",
"coalesce_edits",
@@ -274,16 +275,16 @@ mod tests {
" ",
"EditOperation",
">",
- "("
- ]
+ "(",
+ ],
);
}
#[test]
fn test_tokenize_3() {
- assert_eq!(
- tokenize("fn coalesce_edits<'a, 'b, EditOperation>("),
- vec![
+ assert_tokenize(
+ "fn coalesce_edits<'a, 'b, EditOperation>(",
+ &[
"fn",
" ",
"coalesce_edits",
@@ -298,16 +299,16 @@ mod tests {
" ",
"EditOperation",
">",
- "("
- ]
+ "(",
+ ],
);
}
#[test]
fn test_tokenize_4() {
- assert_eq!(
- tokenize("annotated_plus_lines.push(vec![(noop_insertion, plus_line)]);"),
- vec![
+ assert_tokenize(
+ "annotated_plus_lines.push(vec![(noop_insertion, plus_line)]);",
+ &[
"annotated_plus_lines",
".",
"push",
@@ -322,16 +323,16 @@ mod tests {
")",
"]",
")",
- ";"
- ]
+ ";",
+ ],
);
}
#[test]
fn test_tokenize_5() {
- assert_eq!(
- tokenize(" let col = Color::from_str(s).unwrap_or_else(|_| die());"),
- vec![
+ assert_tokenize(
+ " let col = Color::from_str(s).unwrap_or_else(|_| die());",
+ &[
" ",
" ",
" ",
@@ -366,10 +367,16 @@ mod tests {
")",
")",
";",
- ]
+ ],
)
}
+ fn assert_tokenize(text: &str, expected_tokens: &[&str]) {
+ let actual_tokens = tokenize(text);
+ assert_eq!(text, expected_tokens.iter().join(""));
+ assert_eq!(actual_tokens, expected_tokens);
+ }
+
#[test]
fn test_infer_edits_1() {
assert_paired_edits(