more concise benches, add json example
Fogapod committed Feb 6, 2024
1 parent a31fd7e commit 71c256c
Showing 14 changed files with 259 additions and 49 deletions.
3 changes: 2 additions & 1 deletion Cargo.toml
@@ -35,14 +35,15 @@ clap = { version = "4.4", optional = true, features = ["derive"] }
serde = { version = "1", features = ["derive"], optional = true }
typetag = { version = "0.2", optional = true }

# deserializing ron file format
# format deserialization
ron = { version = "0.8", optional = true }

# clone stored Replacement trait objects
dyn-clone = "1.0"

[dev-dependencies]
ron = { version = "0.8" }
serde_json = { version = "1.0" }
criterion = { version = "0.4", features = ["html_reports"] }

[features]
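For context, the optional ron dependency (and serde_json in dev-dependencies) is what lets accent definitions be deserialized from files. A minimal loader sketch, mirroring the read_accent helpers that appear in the benches and tests later in this diff:

use sayit::Accent;
use std::fs;

// Sketch of a RON loader; assumes the crate's deserialization feature is enabled.
fn load_accent(path: &str) -> Accent {
    let content = fs::read_to_string(path).expect("reading accent definition");
    ron::from_str(&content).expect("parsing accent")
}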
6 changes: 2 additions & 4 deletions benches/accents.rs
@@ -1,11 +1,10 @@
use criterion::criterion_main;
use criterion::{criterion_group, Criterion};
use criterion::{criterion_group, criterion_main, Criterion};
use sayit::Accent;
use std::fs;

pub fn read_accent(filename: &str) -> Accent {
let content = fs::read_to_string(filename).expect("reading accent definition");
ron::from_str::<Accent>(&content).expect("parsing accent")
ron::from_str::<Accent>(&content).expect(&format!("parsing accent {filename}"))
}

pub fn read_sample_file() -> String {
@@ -24,7 +23,6 @@ fn accents(c: &mut Criterion) {
let lines = read_sample_file_lines();

let mut g = c.benchmark_group("accents");
g.sample_size(2000);

for name in [
"original", "literal", "any", "weights", "upper", "lower", "concat",
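The body of the benchmark loop is collapsed in this view. A plausible reconstruction of how these helpers feed criterion; the accent names, file paths, and intensity 0 are assumed rather than shown:

use criterion::{criterion_group, criterion_main, Criterion};
use sayit::Accent;
use std::fs;

fn read_accent(filename: &str) -> Accent {
    let content = fs::read_to_string(filename).expect("reading accent definition");
    ron::from_str::<Accent>(&content).expect("parsing accent")
}

fn accents(c: &mut Criterion) {
    // Assumed: a single sample line and bench definitions under benches/.
    let line = "sample text to transform";
    let mut g = c.benchmark_group("accents");
    for name in ["original", "literal", "any"] {
        let accent = read_accent(&format!("benches/{name}.ron"));
        g.bench_function(name, |b| b.iter(|| accent.say_it(line, 0)));
    }
    g.finish();
}

criterion_group!(benches, accents);
criterion_main!(benches);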
64 changes: 57 additions & 7 deletions benches/any.ron
@@ -1,27 +1,77 @@
(
patterns: [
// highly nested
("a", {"Any": [
{"Original": ()},
{"Literal": "B"},
{"Literal": "Flat"},
]}),
("VeryLongNonExistent12344343243244234", {"Any": [{"Original": ()}]}),
("you", {"Any": [
{"Any": [
{"Any": [
{"Any": [
{"Any": [
{"Any": [
{"Any": [
{"Any": [
{"Literal": "nestednormalizeme"},
{"Any": [
{"Original": ()},
]},
]},
]},
]},
]},
]},
]},
]},
]}),
// many items
("o", {"Any": [
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
{"Any": [{"Original": ()}]},
]}),
],
)
12 changes: 9 additions & 3 deletions benches/concat.ron
@@ -1,7 +1,13 @@
(
patterns: [
("a", {"Concat": ({"Literal": "first"}, {"Original": ()})}),
("VeryLongNonExistent12344343243244234", {"Concat": ({"Original": ()}, {"Literal": "first"})}),
("you", {"Concat": ({"Literal": "left"}, {"Literal": "right"})}),
// use \w+ and \W+ to vary string lengths
(r"\w+", {"Concat": (
{"Original": ()},
{"Original": ()},
)}),
(r"\W+", {"Concat": (
{"Original": ()},
{"Original": ()},
)}),
],
)
6 changes: 3 additions & 3 deletions benches/literal.ron
@@ -1,7 +1,7 @@
(
patterns: [
("a", {"Literal": "B"}),
("VeryLongNonExistent12344343243244234", {"Literal": "wdwdffhskhfkshekfhKFE"}),
("you", {"Literal": "normalizeme"}),
("a", {"Literal": "A"}),
("o", {"Literal": "short"}),
(r"\d+", {"Literal": "veryLongStringaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa1234123412344rhrkghsghkgshal;ghaslfnkhsgkshgshgsjdgjkdngkjgh"}),
],
)
4 changes: 1 addition & 3 deletions benches/lower.ron
@@ -1,7 +1,5 @@
(
patterns: [
("a", {"Lower": {"Original": ()}}),
("VeryLongNonExistent12344343243244234", {"Lower": {"Original": ()}}),
("you", {"Lower": {"Original": ()}}),
(r"\w+", {"Lower": {"Lower": {"Lower": {"Lower": {"Lower": {"Original": ()}}}}}}),
],
)
6 changes: 3 additions & 3 deletions benches/original.ron
@@ -1,7 +1,7 @@
(
patterns: [
("a", {"Original": ()}),
("VeryLongNonExistent12344343243244234", {"Original": ()}),
("you", {"Original": ()}),
// varying string lengths
(r"\w+", {"Original": ()}),
(r"\W+", {"Original": ()}),
],
)
4 changes: 1 addition & 3 deletions benches/upper.ron
@@ -1,7 +1,5 @@
(
patterns: [
("a", {"Upper": {"Original": ()}}),
("VeryLongNonExistent12344343243244234", {"Upper": {"Original": ()}}),
("you", {"Upper": {"Original": ()}}),
(r"\w+", {"Upper": {"Upper": {"Upper": {"Upper": {"Upper": {"Original": ()}}}}}}),
],
)
17 changes: 14 additions & 3 deletions benches/weights.ron
@@ -1,8 +1,8 @@
(
patterns: [
("a", {"Weights": [(1, {"Literal": "B"})]}),
("VeryLongNonExistent12344343243244234", {"Weights": [(10000, {"Original": ()})]}),
("you", {"Weights": [
("a", {"Weights": [(1, {"Literal": "single"})]}),
("o", {"Weights": [
(0, {"Weights": [(1, {"Literal": "many variants"})]}),
(1, {"Weights": [(1, {"Literal": "1"})]}),
(2, {"Weights": [(1, {"Literal": "2"})]}),
(3, {"Weights": [(1, {"Literal": "3"})]}),
@@ -12,6 +12,17 @@
(7, {"Weights": [(1, {"Literal": "7"})]}),
(8, {"Weights": [(1, {"Literal": "8"})]}),
(9, {"Weights": [(1, {"Literal": "9"})]}),
(10, {"Weights": [(1, {"Literal": "10"})]}),
(11, {"Weights": [(1, {"Literal": "11"})]}),
(12, {"Weights": [(1, {"Literal": "12"})]}),
(13, {"Weights": [(1, {"Literal": "13"})]}),
(14, {"Weights": [(1, {"Literal": "14"})]}),
(15, {"Weights": [(1, {"Literal": "15"})]}),
(16, {"Weights": [(1, {"Literal": "16"})]}),
(17, {"Weights": [(1, {"Literal": "17"})]}),
(18, {"Weights": [(1, {"Literal": "18"})]}),
(19, {"Weights": [(1, {"Literal": "19"})]}),
(20, {"Weights": [(1, {"Literal": "20"})]}),
]}),
],
)
70 changes: 70 additions & 0 deletions examples/spurdo.json
@@ -0,0 +1,70 @@
{
"words": [
["epic", {"Literal": "ebin"}],
[":?\\]", {"Any": [
{"Literal": ":D"},
{"Literal": ":DD"},
{"Literal": ":DDD"}
]}]
],
"patterns": [
["xc", {"Literal": "gg"}],
["c", {"Literal": "g"}],
["k", {"Literal": "g"}],
["t", {"Literal": "d"}],
["p", {"Literal": "b"}],
["x", {"Literal": "gs"}],
["\\Bng\\b", {"Literal": "gn"}],
["$", {"Weights": [
[1, {"Original": null}],
[1, {"Any": [
{"Literal": " :D"},
{"Literal": " :DD"},
{"Literal": " :DDD"},
{"Literal": " :DDDD"},
{"Literal": " :DDDDD"}
]}]
]}]
],
"intensities": {
"1": {"Extend": {
"patterns": [
["$", {"Weights": [
[1, {"Original": null}],
[2, {"Any": [
{"Literal": " :DD"},
{"Literal": " :DDD"},
{"Literal": " :DDDD"},
{"Literal": " :DDDDD"},
{"Literal": " :DDDDDD"}
]}]
]}]
],
"words": [
[":?\\]", {"Any": [
{"Literal": ":DD"},
{"Literal": ":DDD"},
{"Literal": ":DDDD"}
]}]
]
}},
"2": {"Extend": {
"patterns": [
["$", {"Any": [
{"Literal": " :DDDD"},
{"Literal": " :DDDDD"},
{"Literal": " :DDDDDD"},
{"Literal": " :DDDDDDD"},
{"Literal": " :DDDDDDDD"}
]}]
],
"words": [
[":?\\]", {"Any": [
{"Literal": ":DDDD"},
{"Literal": ":DDDDD"},
{"Literal": ":DDDDDD"}
]}]
]
}}
}
}
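The JSON example defines a base accent plus intensities 1 and 2 that Extend it with progressively longer ":D" suffixes. A sketch of loading and applying it, assuming say_it and intensities behave as in the tests below:

use sayit::Accent;
use std::fs;

fn main() {
    let content = fs::read_to_string("examples/spurdo.json").expect("reading accent");
    let accent: Accent = serde_json::from_str(&content).expect("parsing accent");

    // "epic" exercises the words table; "c", "k" and "t" the letter swaps.
    for intensity in accent.intensities() {
        println!("{intensity}: {}", accent.say_it("this cake is epic", intensity));
    }
}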
19 changes: 0 additions & 19 deletions src/deserialize.rs
@@ -172,7 +172,6 @@ impl TryFrom<AccentDef> for Accent {
#[cfg(test)]
mod tests {
use regex::Regex;
use std::fs;

use crate::{
replacement::{Any, Literal, Original, Weights},
@@ -628,24 +627,6 @@ mod tests {
assert_eq!(accent.say_it("intensity", 9000 + 1), "5");
}

#[test]
fn example_accents() {
let sample_text = fs::read_to_string("tests/sample_text.txt").expect("reading sample text");

for file in fs::read_dir("examples").expect("read symlinked accents folder") {
let filename = file.expect("getting file info").path();
println!("parsing {}", filename.display());

let accent =
ron::from_str::<Accent>(&fs::read_to_string(filename).expect("reading file"))
.unwrap();

for intensity in accent.intensities() {
let _ = accent.say_it(&sample_text, intensity);
}
}
}

#[test]
fn custom_replacement_works() {
use crate::replacement::Replacement;
41 changes: 41 additions & 0 deletions tests/json.rs
@@ -0,0 +1,41 @@
mod utils;

use sayit::Accent;
use std::{fs, path::PathBuf};
use utils::read_sample_file_lines;

pub fn read_accent(filename: PathBuf) -> Accent {
let content = fs::read_to_string(&filename).unwrap();
serde_json::from_str::<Accent>(&content)
.expect(&format!("parsing accent {}", filename.display()))
}

#[test]
fn json_examples_work() {
let lines = read_sample_file_lines();

let mut tested_at_least_one = false;

for entry in fs::read_dir("examples").unwrap() {
let path = entry.unwrap().path();

if !path.is_file() {
continue;
}

if !path.extension().is_some_and(|ext| ext == "json") {
continue;
}

println!("running {}", path.display());
let accent = read_accent(path);
for line in &lines {
for intensity in accent.intensities() {
accent.say_it(&line, intensity);
}
}
tested_at_least_one = true;
}

assert!(tested_at_least_one);
}
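Both test files pull read_sample_file_lines from a shared tests/utils.rs that is not expanded in this view. A hypothetical reconstruction, based only on the signature used here and the sample-text path from the removed deserialize.rs test:

use std::fs;

// Hypothetical: the committed tests/utils.rs is not shown in this diff.
pub fn read_sample_file_lines() -> Vec<String> {
    fs::read_to_string("tests/sample_text.txt")
        .expect("reading sample text")
        .lines()
        .map(str::to_owned)
        .collect()
}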
41 changes: 41 additions & 0 deletions tests/ron.rs
@@ -0,0 +1,41 @@
mod utils;

use sayit::Accent;
use std::{fs, path::PathBuf};
use utils::read_sample_file_lines;

pub fn read_accent(filename: PathBuf) -> Accent {
let content = fs::read_to_string(&filename).unwrap();
ron::from_str::<Accent>(&content)
.expect(&format!("parsing accent {}", filename.display()))
}

#[test]
fn ron_examples_work() {
let lines = read_sample_file_lines();

let mut tested_at_least_one = false;

for entry in fs::read_dir("examples").unwrap() {
let path = entry.unwrap().path();

if !path.is_file() {
continue;
}

if !path.extension().is_some_and(|ext| ext == "ron") {
continue;
}

println!("running {}", path.display());
let accent = read_accent(path);
for line in &lines {
for intensity in accent.intensities() {
accent.say_it(&line, intensity);
}
}
tested_at_least_one = true;
}

assert!(tested_at_least_one);
}