diff --git a/CHANGES.md b/CHANGES.md
new file mode 100644
index 0000000..2d1731f
--- /dev/null
+++ b/CHANGES.md
@@ -0,0 +1,22 @@
+# Backend internal interview (rust)
+
+## CHANGES: fixed branch
+
+- Move from **HashMap** to **BTreeMap** so the loaded lines keep a stable, sorted order (see the short sketch after this list).
+- Fix the **line parser** so the **metrics** and **aggregated_metrics** maps are handled properly.
+- Replace risky **unwrap / expect** calls and add simple error messages via **map_err**, **unwrap_or_else** and **closures**.
+- Fix the order of some source lines, the output format and a few conversions.
+- Change some loops to functional-style **iterators**.
+- Split tasks into independent functions for better isolation, fewer responsibilities and easier **test** handling.
+- Add **output_path** to select how results are output.
+- The following **unit tests** are implemented:
+  - test_load_input loads the input file **input.txt**.
+  - test_invalid_line_value parses a line with an invalid metric value.
+  - test_invalid_line_time parses a line with an invalid timestamp.
+  - test_expected_metrics loads data from **input.txt** and compares the result with **output_expected.txt**.
+
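+As a minimal sketch of the ordering point above (illustrative only, not taken from the project code): a **BTreeMap** iterates its keys in sorted order, while **HashMap** iteration order is unspecified, so the switch makes the generated output deterministic.
+
+```rust
+use std::collections::{BTreeMap, HashMap};
+
+fn main() {
+    let pairs = [("mem", 2.0), ("cpu", 1.0), ("disk", 3.0)];
+
+    // BTreeMap: keys come back sorted, so repeated runs print the same order.
+    let ordered: BTreeMap<&str, f64> = pairs.into_iter().collect();
+    for (name, value) in &ordered {
+        println!("{} {}", name, value); // cpu, disk, mem
+    }
+
+    // HashMap: iteration order is unspecified and can change between runs.
+    let unordered: HashMap<&str, f64> = pairs.into_iter().collect();
+    println!("{} metrics collected", unordered.len());
+}
+```
+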
+> The code simply **works as expected**.
+> It is not yet able to **process multiple metrics in parallel**.
+> A full refactoring is still needed for better quality, maintainability and readability (structs, implementations, settings for multiple inputs, etc.).
+
+Next round: [Improved branch](/NewRelic/be-technical-interview-rust/src/branch/improved)
diff --git a/README.md b/README.md
index 76b6c56..3f2fcb9 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,10 @@
-# Backend internal interview (rust)
+# Backend internal interview (rust) - basic-fixed
+
+> The code simply **works as expected**.
+> It is not yet able to **process multiple metrics in parallel**.
+> A full refactoring is still needed for better quality, maintainability and readability (structs, implementations, settings for multiple inputs, etc.).
+
+See the [main changes](CHANGES.md).
A **Refactor metric-consumer** task
diff --git a/src/main.rs b/src/main.rs
index f48b5a1..1904eb9 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,76 +1,168 @@
-use std::collections::HashMap;
+use std::{collections::BTreeMap, process::exit};
use std::error::Error;
use std::fs::File;
use std::io::Read;
-use std::time::{Duration, UNIX_EPOCH};
+use std::time::{SystemTime, Duration, UNIX_EPOCH};
+use chrono::{DateTime,Utc};
+use regex::Regex;
fn parse(
- file: File,
-) -> Result<HashMap<String, HashMap<std::time::SystemTime, f64>>, Box<dyn Error>> {
- let mut file = file;
+ mut file: File,
+) -> Result<BTreeMap<String, BTreeMap<SystemTime, f64>>, Box<dyn Error>> {
let mut contents = String::new();
file.read_to_string(&mut contents)?;
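+ // Each valid input line has the form "<timestamp> <metric_name> <value>"; the three capture groups split it into those fields.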
+ let re = regex::Regex::new(r"(\d+) (\w+) (\d+)")?;
+ Ok(parse_content(contents, re))
+}
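+/// Parses the raw file contents and returns, for every metric name, the average value per minute bucket.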
+fn parse_content(
+ contents: String,
+ re: Regex,
+) -> BTreeMap<String, BTreeMap<SystemTime, f64>> {
+ dbg!(&contents);
+ let mut metrics: BTreeMap<String, BTreeMap<SystemTime, Vec<f64>>> = BTreeMap::new();
+ let show_invalid_line = |index: usize, line: &str| println!("invalid line: {} {}", index, line);
- let mut metrics: HashMap<String, HashMap<std::time::SystemTime, Vec<f64>>> = HashMap::new();
-
- for line in contents.lines() {
- let re = regex::Regex::new(r"(\d+) (\w+) (\d+)").unwrap();
+ for (index, line) in contents.lines().enumerate() {
if let Some(caps) = re.captures(line) {
let timestamp_raw = &caps[1];
let metric_name = &caps[2];
let metric_value_raw = &caps[3];
-
- let timestamp = timestamp_raw.parse::<i64>().unwrap();
- let metric_value = metric_value_raw.parse::<f64>().unwrap();
-
- if !metrics.contains_key(metric_name) {
- metrics.insert(metric_name.to_string(), HashMap::new());
- }
-
+ let timestamp = timestamp_raw.parse::<i64>().unwrap_or_else(|e|{
+ println!("Parse timestamp {} error {}",timestamp_raw,e);
+ 0
+ });
+ if timestamp == 0 {
+ show_invalid_line(index, line);
+ continue;
+ }
+ let metric_value = metric_value_raw.parse::<f64>().unwrap_or_else(|e|{
+ println!("Parse metric_value {} error {}",metric_value_raw,e);
+ 0 as f64
+ });
+ if metric_value == 0 as f64 {
+ show_invalid_line(index, line);
+ continue;
+ }
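+ // Truncate the timestamp to the start of its minute; every sample within that minute shares this key.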
let minute = UNIX_EPOCH + Duration::from_secs((timestamp - (timestamp % 60)) as u64);
- metrics
- .get_mut(metric_name)
- .unwrap()
- .insert(minute, vec![metric_value]);
- } else {
- println!("invalid line");
- }
- }
-
- let mut aggregated_metrics: HashMap<String, HashMap<std::time::SystemTime, f64>> =
- HashMap::new();
-
- for (metric_name, time_val_list) in metrics {
- aggregated_metrics.insert(metric_name.clone(), HashMap::new());
-
- for (time, values) in time_val_list {
- let mut sum = 0.0;
- for v in values.iter() {
- sum += *v
+ if let Some(metric) = metrics.get_mut(metric_name) {
+ if let Some(metric_data) = metric.get_mut(&minute) {
+ metric_data.push(metric_value);
+ } else {
+ metric.insert(minute, vec![metric_value]);
+ }
+ } else {
+ let metric_time: BTreeMap<SystemTime, Vec<f64>> = [(minute, vec![metric_value])].into_iter().collect();
+ metrics.entry(metric_name.to_string()).or_insert(metric_time);
}
- let average = sum / values.len() as f64;
-
- aggregated_metrics
- .get_mut(&metric_name)
- .unwrap()
- .insert(time, average);
+ } else {
+ show_invalid_line(index, line);
}
}
- Ok(aggregated_metrics)
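+ // Aggregate: replace each minute's list of samples with its average value.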
+ let mut aggregated_metrics: BTreeMap<String, BTreeMap<SystemTime, f64>> = BTreeMap::new();
+ metrics.into_iter().for_each(|(metric_name, time_val_list)| {
+ time_val_list.into_iter().for_each(|(time, values)| {
+ let average = values.iter().sum::<f64>() / values.len() as f64;
+ if let Some(metric) = aggregated_metrics.get_mut(&metric_name) {
+ if let Some(metric_data) = metric.get_mut(&time) {
+ *metric_data = average;
+ } else {
+ metric.insert(time, average);
+ }
+ } else {
+ let metric_time: BTreeMap<SystemTime, f64> = [(time, average)].into_iter().collect();
+ aggregated_metrics.entry(metric_name.to_string()).or_insert(metric_time);
+ }
+ })
+ });
+ aggregated_metrics
}
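+/// Opens the given file and parses it into aggregated (per-minute averaged) metrics.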
+fn load_input(file_path: &str) -> Result<BTreeMap<String, BTreeMap<SystemTime, f64>>, Box<dyn Error>> {
+ let file = File::open(&file_path)
+ .map_err(|err| format!("Error reading file: {} {}", &file_path, err))?;
+ let metrics = parse(file)
+ .map_err(|err| format!("Unable to parse: {} {}", &file_path, err))?;
+ Ok(metrics)
+}
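+/// Formats each aggregated metric as one output line; with output_path "vec" the lines are collected and returned, otherwise they are printed.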
+fn show_metrics(metrics: BTreeMap<String, BTreeMap<SystemTime, f64>>, output_path: &str) -> Vec<String> {
+ let mut output = Vec::new();
+ metrics.into_iter().for_each(|(metric_name, time_val)|
+ for (time, value) in time_val {
+ let output_line = format!(
+ "{} {} {:?}",
+ DateTime::<Utc>::from(time).format("%Y-%m-%dT%H:%M:%SZ"),
+ metric_name,
+ value
+ );
+ match output_path {
+ "vec" => output.push(output_line),
+ "print" | _ => println!("{}", output_line),
+ }
+ }
+ );
+ output
+}
fn main() {
- let file = File::open("input.txt").expect("Unable to open file");
- let metrics = parse(file).expect("Unable to parse file");
- for (metric_name, time_val) in metrics {
- for (time, value) in time_val {
- println!(
- "{} {:?} {:.2}",
- metric_name,
- chrono::DateTime::<chrono::Utc>::from(time),
- value
- );
+ let default_input = String::from("input.txt");
+ match load_input(&default_input) {
+ Ok(metrics) => {
+ let _ = show_metrics(metrics, "");
+ },
+ Err(err) => {
+ eprint!("Error: {}", err);
+ exit(1);
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ #[test]
+ fn test_load_input() -> Result<(), String>{
+ let default_input = String::from("input.txt");
+ match load_input(&default_input) {
+ Ok(_) => Ok(()),
+ Err(e) => Err(format!("Error: {}",e).into()),
+ }
+ }
+ #[test]
+ fn test_invalid_line_value() -> Result<(), String>{
+ let contents = String::from("1650973075 cpu A47\n");
+ let re = regex::Regex::new(r"(\d+) (\w+) (\d+)")
+ .map_err(|err| format!("Error regex: {}", err))?;
+ let result = parse_content(contents.clone(), re);
+ if result.len() == 0 {
+ Ok(())
+ } else {
+ Err(format!("Error invalid line value: {}", contents).into())
+ }
+ }
+ #[test]
+ fn test_invalid_line_time() -> Result<(), String>{
+ let contents = String::from("1650973075A cpu 47\n");
+ let re = regex::Regex::new(r"(\d+) (\w+) (\d+)")
+ .map_err(|err| format!("Error regex: {}", err))?;
+ let result = parse_content(contents.clone(), re);
+ if result.len() == 0 {
+ Ok(())
+ } else {
+ Err(format!("Error invalid line time: {}", contents).into())
+ }
+ }
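+ // Golden-file check: metrics rendered from input.txt must match output_expected.txt line by line.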
+ #[test]
+ fn test_expected_metrics() {
+ use std::io::{prelude::*, BufReader};
+ let default_input = String::from("input.txt");
+ let metrics = load_input(&default_input).unwrap_or_default();
+ let data_metrics = show_metrics(metrics, "vec");
+
+ let expected_output = String::from("output_expected.txt");
+ let file = File::open(expected_output.clone()).expect(format!("no such file: {}", expected_output).as_str());
+ let buf = BufReader::new(file);
+ let lines: Vec<String> = buf.lines().map(|l| l.expect("Could not parse line")).collect();
+
+ assert_eq!(lines.join("\n"),data_metrics.join("\n"));
+ }
+}
\ No newline at end of file