Решение на упр.06 задача 1 от Мариян Момчилов

Обратно към всички решения

Към профила на Мариян Момчилов

Резултати

  • 4 точки от тестове
  • 0 бонус точки
  • 4 точки общо
  • 4 успешни тест(а)
  • 0 неуспешни тест(а)

Код

use std::collections::HashMap;
use std::error::Error;
use std::fs::{read_dir, File};
use std::io::{self, BufReader};
use std::io::{BufRead, Read};
use std::path::Path;
/// Severity levels recognized as the first whitespace-delimited token of a log line.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum LogLevel {
Error,
Warn,
Info,
Debug,
}
/// Result of aggregating all `*.log` files in a directory.
struct AggregateInfo {
// Lines seen per log level, summed across every successfully parsed file.
log_counts: HashMap<LogLevel, usize>,
// File names that were not aggregated (wrong extension, open failure, or parse error).
skipped_files: Vec<String>,
}
impl AggregateInfo {
    /// Records `s` among the files that were skipped during aggregation.
    fn skip_file(&mut self, s: &str) {
        let name = String::from(s);
        self.skipped_files.push(name);
    }
}
/// Walks `dir` (non-recursively), parses every regular `*.log` file, and
/// sums per-level line counts into the returned [`AggregateInfo`].
///
/// Files that do not end in `.log`, cannot be opened, or fail to parse are
/// recorded in `skipped_files` instead of aborting the whole aggregation.
///
/// # Errors
///
/// Returns an error if the directory itself cannot be read, or if reading a
/// directory entry or its metadata fails.
fn aggregate_logs(dir: &Path) -> Result<AggregateInfo, Box<dyn Error>> {
    let mut info = AggregateInfo {
        log_counts: HashMap::new(),
        skipped_files: Vec::new(),
    };
    // `?` propagates the read_dir error; the old `match` only re-boxed it.
    for r_dirent in read_dir(dir)? {
        let dirent = r_dirent?;
        if !dirent.metadata()?.is_file() {
            continue; // directories, symlinks, etc. are ignored entirely
        }
        // `to_string_lossy` avoids the panic the previous `to_str().unwrap()`
        // caused on non-UTF-8 file names; a lossy name can still be reported.
        let filename = dirent.file_name().to_string_lossy().into_owned();
        if !filename.ends_with(".log") {
            info.skip_file(&filename);
            continue;
        }
        match File::open(dirent.path()) {
            Ok(f) => {
                if let Err(e) =
                    parse_log_file(BufReader::with_capacity(10000, f), &mut info.log_counts)
                {
                    println!("Skipping file {} due to errors {:?}", filename, e);
                    info.skip_file(&filename);
                }
            }
            Err(_) => info.skip_file(&filename),
        }
    }
    Ok(info)
}
/// Errors produced while parsing a single log stream.
#[derive(Debug)]
enum ParseLogError {
// Underlying I/O failure while reading a line.
Read(io::Error),
// A line is missing the space separator or starts with an unknown level token.
ParseLine,
}
fn parse_log_file<R>(file: R, map: &mut HashMap<LogLevel, usize>) -> Result<(), ParseLogError>
where
R: Read,
{
let buf_r = BufReader::with_capacity(10000, file);
let mut local_map: HashMap<LogLevel, usize> = HashMap::new();
for result_line in buf_r.lines() {
let line = result_line.map_err(|e| ParseLogError::Read(e))?;
match line.split_once(' ') {
None => return Err(ParseLogError::ParseLine),
Some((log_level, _)) => match log_level {
"ERROR" => {
local_map.insert(
LogLevel::Error,
local_map.get(&LogLevel::Error).map_or(1, |v| *v + 1),
);
}
"WARN" => {
local_map.insert(
LogLevel::Warn,
local_map.get(&LogLevel::Warn).map_or(1, |v| *v + 1),
);
}
"INFO" => {
local_map.insert(
LogLevel::Info,
local_map.get(&LogLevel::Info).map_or(1, |v| *v + 1),
);
}
"DEBUG" => {
local_map.insert(
LogLevel::Debug,
local_map.get(&LogLevel::Debug).map_or(1, |v| *v + 1),
);
}
_ => return Err(ParseLogError::ParseLine),
},
}
}
for log_level in [
LogLevel::Debug,
LogLevel::Error,
LogLevel::Info,
LogLevel::Warn,
] {
let local_value = *local_map.get(&log_level).unwrap_or(&0);
match map.get_mut(&log_level) {
Some(val) => {
*val += local_value;
}
None => {
map.insert(log_level, local_value);
}
}
}
Ok(())
}

Лог от изпълнението

Updating crates.io index
     Locking 17 packages to latest compatible versions
   Compiling proc-macro2 v1.0.103
   Compiling unicode-ident v1.0.22
   Compiling quote v1.0.42
   Compiling futures-core v0.3.31
   Compiling futures-sink v0.3.31
   Compiling futures-channel v0.3.31
   Compiling memchr v2.7.6
   Compiling syn v2.0.110
   Compiling pin-project-lite v0.2.16
   Compiling pin-utils v0.1.0
   Compiling futures-task v0.3.31
   Compiling futures-io v0.3.31
   Compiling slab v0.4.11
   Compiling solution v0.1.0 (/tmp/d20251120-1757769-c56idp/solution)
warning: enum `LogLevel` is never used
 --> src/lib.rs:9:6
  |
9 | enum LogLevel {
  |      ^^^^^^^^
  |
  = note: `#[warn(dead_code)]` on by default

warning: struct `AggregateInfo` is never constructed
  --> src/lib.rs:16:8
   |
16 | struct AggregateInfo {
   |        ^^^^^^^^^^^^^

warning: method `skip_file` is never used
  --> src/lib.rs:22:8
   |
21 | impl AggregateInfo {
   | ------------------ method in this implementation
22 |     fn skip_file(&mut self, s: &str) {
   |        ^^^^^^^^^

warning: function `aggregate_logs` is never used
  --> src/lib.rs:27:4
   |
27 | fn aggregate_logs(dir: &Path) -> Result<AggregateInfo, Box<dyn Error>> {
   |    ^^^^^^^^^^^^^^

warning: enum `ParseLogError` is never used
  --> src/lib.rs:66:6
   |
66 | enum ParseLogError {
   |      ^^^^^^^^^^^^^

warning: function `parse_log_file` is never used
  --> src/lib.rs:71:4
   |
71 | fn parse_log_file<R>(file: R, map: &mut HashMap<LogLevel, usize>) -> Result<(), ParseLogError>
   |    ^^^^^^^^^^^^^^

warning: `solution` (lib) generated 6 warnings
   Compiling futures-macro v0.3.31
   Compiling futures-util v0.3.31
   Compiling futures-executor v0.3.31
   Compiling futures v0.3.31
warning: field `0` is never read
  --> tests/../src/lib.rs:67:10
   |
67 |     Read(io::Error),
   |     ---- ^^^^^^^^^
   |     |
   |     field in this variant
   |
   = note: `ParseLogError` has a derived impl for the trait `Debug`, but this is intentionally ignored during dead code analysis
   = note: `#[warn(dead_code)]` on by default
help: consider changing the field to be of unit type to suppress this warning while preserving the field numbering, or remove the field
   |
67 |     Read(()),
   |          ~~

warning: `solution` (test "solution_test") generated 1 warning
    Finished `test` profile [unoptimized + debuginfo] target(s) in 8.80s
     Running tests/solution_test.rs (target/debug/deps/solution_test-f75e629a1d90e17c)

running 4 tests
test solution_test::test_parse_log_basic ... ok
test solution_test::test_aggregate ... ok
test solution_test::test_parse_log_big_data ... ok
test solution_test::test_parse_log_invalid ... ok

test result: ok. 4 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s

История (4 версии и 0 коментара)

Мариян качи първо решение на 19.11.2025 19:42 (преди 16 дена)

Мариян качи решение на 19.11.2025 23:07 (преди 16 дена)

use std::collections::HashMap;
use std::error::Error;
use std::fs::{File, read_dir};
use std::io::{self, BufReader};
use std::io::{BufRead, Read};
use std::path::Path;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum LogLevel {
Error,
Warn,
Info,
Debug,
}
struct AggregateInfo {
log_counts: HashMap<LogLevel, usize>,
skipped_files: Vec<String>,
}
fn aggregate_logs(dir: &Path) -> Result<AggregateInfo, Box<dyn Error>> {
match read_dir(dir) {
Err(e) => Err(Box::from(e)),
Ok(rd) => {
let mut info = AggregateInfo {
log_counts: HashMap::new(),
skipped_files: Vec::new(),
};
for r_dirent in rd {
let dirent = r_dirent?;
let meta = dirent.metadata()?;
if meta.is_file() {
match dirent.file_name().to_str() {
Some(s) => {
if s.ends_with(".log") {
match File::open(dirent.path().as_path()) {
Ok(f) => {
match parse_log_file(
BufReader::with_capacity(10000, f),
&mut info.log_counts,
) {
Err(e) => {
println!(
"Skiping file {} due to errors {:?}",
s, e
);
info.skipped_files.push(String::from(s));
}
_ => {}
}
}
Err(_) => info.skipped_files.push(String::from(s)),
}
} else {
info.skipped_files.push(String::from(s));
}
}
_ => {}
}
}
}
Ok(info)
}
}
}
#[derive(Debug)]
enum ParseLogError {
Read(io::Error),
ParseLine,
}
fn parse_log_file<R>(file: R, map: &mut HashMap<LogLevel, usize>) -> Result<(), ParseLogError>
where
R: Read,
{
let mut buf_r = BufReader::with_capacity(10000, file);
let mut local_map: HashMap<LogLevel, usize> = HashMap::new();
for result_line in buf_r.lines() {
let line = result_line.map_err(|e| ParseLogError::Read(e))?;
match line.split_once(' ') {
None => return Err(ParseLogError::ParseLine),
Some((log_level, _)) => match log_level {
"ERROR" => {
local_map.insert(
LogLevel::Error,
local_map.get(&LogLevel::Error).map_or(1, |v| *v + 1),
);
}
"WARN" => {
local_map.insert(
LogLevel::Warn,
local_map.get(&LogLevel::Warn).map_or(1, |v| *v + 1),
);
}
"INFO" => {
local_map.insert(
LogLevel::Info,
local_map.get(&LogLevel::Info).map_or(1, |v| *v + 1),
);
}
"DEBUG" => {
local_map.insert(
LogLevel::Debug,
local_map.get(&LogLevel::Debug).map_or(1, |v| *v + 1),
);
}
_ => return Err(ParseLogError::ParseLine),
},
}
}
for log_level in [
LogLevel::Debug,
LogLevel::Error,
LogLevel::Info,
LogLevel::Warn,
] {
let local_value = *local_map.get(&log_level).unwrap_or(&0);
match map.get_mut(&log_level) {
Some(val) => {
*val += local_value;
}
None => {
map.insert(log_level, local_value);
}
}
}
Ok(())
-}
+}

Мариян качи решение на 19.11.2025 23:18 (преди 16 дена)

use std::collections::HashMap;
use std::error::Error;
-use std::fs::{File, read_dir};
+use std::fs::{read_dir, File};
use std::io::{self, BufReader};
use std::io::{BufRead, Read};
use std::path::Path;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum LogLevel {
Error,
Warn,
Info,
Debug,
}
struct AggregateInfo {
log_counts: HashMap<LogLevel, usize>,
skipped_files: Vec<String>,
}
+impl AggregateInfo {
+ fn skip_file(&mut self, s: &str) {
+ self.skipped_files.push(s.to_string());
+ }
+}
+
fn aggregate_logs(dir: &Path) -> Result<AggregateInfo, Box<dyn Error>> {
match read_dir(dir) {
Err(e) => Err(Box::from(e)),
Ok(rd) => {
let mut info = AggregateInfo {
log_counts: HashMap::new(),
skipped_files: Vec::new(),
};
for r_dirent in rd {
let dirent = r_dirent?;
let meta = dirent.metadata()?;
if meta.is_file() {
- match dirent.file_name().to_str() {
- Some(s) => {
- if s.ends_with(".log") {
- match File::open(dirent.path().as_path()) {
- Ok(f) => {
- match parse_log_file(
- BufReader::with_capacity(10000, f),
- &mut info.log_counts,
- ) {
- Err(e) => {
- println!(
- "Skiping file {} due to errors {:?}",
- s, e
- );
- info.skipped_files.push(String::from(s));
- }
- _ => {}
- }
- }
- Err(_) => info.skipped_files.push(String::from(s)),
+ let filename = String::from(dirent.file_name().to_str().unwrap());
+ if filename.ends_with(".log") {
+ match File::open(dirent.path().as_path()) {
+ Ok(f) => {
+ if let Err(e) = parse_log_file(
+ BufReader::with_capacity(10000, f),
+ &mut info.log_counts,
+ ) {
+ println!("Skiping file {} due to errors {:?}", filename, e);
+ info.skip_file(filename.as_str());
}
- } else {
- info.skipped_files.push(String::from(s));
}
+ Err(_) => info.skip_file(filename.as_str()),
}
- _ => {}
+ } else {
+ info.skip_file(filename.as_str());
}
}
}
Ok(info)
}
}
}
#[derive(Debug)]
enum ParseLogError {
Read(io::Error),
ParseLine,
}
fn parse_log_file<R>(file: R, map: &mut HashMap<LogLevel, usize>) -> Result<(), ParseLogError>
where
R: Read,
{
- let mut buf_r = BufReader::with_capacity(10000, file);
+ let buf_r = BufReader::with_capacity(10000, file);
let mut local_map: HashMap<LogLevel, usize> = HashMap::new();
for result_line in buf_r.lines() {
let line = result_line.map_err(|e| ParseLogError::Read(e))?;
match line.split_once(' ') {
None => return Err(ParseLogError::ParseLine),
Some((log_level, _)) => match log_level {
"ERROR" => {
local_map.insert(
LogLevel::Error,
local_map.get(&LogLevel::Error).map_or(1, |v| *v + 1),
);
}
"WARN" => {
local_map.insert(
LogLevel::Warn,
local_map.get(&LogLevel::Warn).map_or(1, |v| *v + 1),
);
}
"INFO" => {
local_map.insert(
LogLevel::Info,
local_map.get(&LogLevel::Info).map_or(1, |v| *v + 1),
);
}
"DEBUG" => {
local_map.insert(
LogLevel::Debug,
local_map.get(&LogLevel::Debug).map_or(1, |v| *v + 1),
);
}
_ => return Err(ParseLogError::ParseLine),
},
}
}
for log_level in [
LogLevel::Debug,
LogLevel::Error,
LogLevel::Info,
LogLevel::Warn,
] {
let local_value = *local_map.get(&log_level).unwrap_or(&0);
match map.get_mut(&log_level) {
Some(val) => {
*val += local_value;
}
None => {
map.insert(log_level, local_value);
}
}
}
Ok(())
-}
+}

Мариян качи решение на 19.11.2025 23:27 (преди 16 дена)

use std::collections::HashMap;
use std::error::Error;
use std::fs::{read_dir, File};
use std::io::{self, BufReader};
use std::io::{BufRead, Read};
use std::path::Path;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum LogLevel {
Error,
Warn,
Info,
Debug,
}
struct AggregateInfo {
log_counts: HashMap<LogLevel, usize>,
skipped_files: Vec<String>,
}
impl AggregateInfo {
fn skip_file(&mut self, s: &str) {
self.skipped_files.push(s.to_string());
}
}
fn aggregate_logs(dir: &Path) -> Result<AggregateInfo, Box<dyn Error>> {
match read_dir(dir) {
Err(e) => Err(Box::from(e)),
Ok(rd) => {
let mut info = AggregateInfo {
log_counts: HashMap::new(),
skipped_files: Vec::new(),
};
for r_dirent in rd {
let dirent = r_dirent?;
let meta = dirent.metadata()?;
if meta.is_file() {
- let filename = String::from(dirent.file_name().to_str().unwrap());
+ let filename_os_s = dirent.file_name();
+ let filename = filename_os_s.to_str().unwrap();
if filename.ends_with(".log") {
match File::open(dirent.path().as_path()) {
Ok(f) => {
if let Err(e) = parse_log_file(
BufReader::with_capacity(10000, f),
&mut info.log_counts,
) {
println!("Skiping file {} due to errors {:?}", filename, e);
- info.skip_file(filename.as_str());
+ info.skip_file(filename);
}
}
- Err(_) => info.skip_file(filename.as_str()),
+ Err(_) => info.skip_file(filename),
}
} else {
- info.skip_file(filename.as_str());
+ info.skip_file(filename);
}
}
}
Ok(info)
}
}
}
#[derive(Debug)]
enum ParseLogError {
Read(io::Error),
ParseLine,
}
fn parse_log_file<R>(file: R, map: &mut HashMap<LogLevel, usize>) -> Result<(), ParseLogError>
where
R: Read,
{
let buf_r = BufReader::with_capacity(10000, file);
let mut local_map: HashMap<LogLevel, usize> = HashMap::new();
for result_line in buf_r.lines() {
let line = result_line.map_err(|e| ParseLogError::Read(e))?;
match line.split_once(' ') {
None => return Err(ParseLogError::ParseLine),
Some((log_level, _)) => match log_level {
"ERROR" => {
local_map.insert(
LogLevel::Error,
local_map.get(&LogLevel::Error).map_or(1, |v| *v + 1),
);
}
"WARN" => {
local_map.insert(
LogLevel::Warn,
local_map.get(&LogLevel::Warn).map_or(1, |v| *v + 1),
);
}
"INFO" => {
local_map.insert(
LogLevel::Info,
local_map.get(&LogLevel::Info).map_or(1, |v| *v + 1),
);
}
"DEBUG" => {
local_map.insert(
LogLevel::Debug,
local_map.get(&LogLevel::Debug).map_or(1, |v| *v + 1),
);
}
_ => return Err(ParseLogError::ParseLine),
},
}
}
for log_level in [
LogLevel::Debug,
LogLevel::Error,
LogLevel::Info,
LogLevel::Warn,
] {
let local_value = *local_map.get(&log_level).unwrap_or(&0);
match map.get_mut(&log_level) {
Some(val) => {
*val += local_value;
}
None => {
map.insert(log_level, local_value);
}
}
}
Ok(())
}