Exercise 06, Task 1
- Deadline: 19.11.2025 23:59
- Points: 4
The deadline for submitting solutions has passed.
Write a program that aggregates statistics from log files and uses error handling.

1. We have several log files in a directory (for example logs/) with the .log extension. Write a function aggregate_logs that:
- walks the files in the directory and filters only the .log files;
- for each file calls parse_log_file and counts the levels (LogLevel), recording in a HashMap<LogLevel, usize> how many lines there are for each level;
- uses an appropriate error type: your own enum AggregateError, or a Box with context.
The log files have the following format: each line starts with one of the words ERROR, WARN, INFO, DEBUG, followed by an arbitrary message up to the end of the line.
INFO message1
DEBUG message2
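
For illustration, the level prefix could be recognized with a small helper like the sketch below. The helper name parse_level is a hypothetical choice, not part of the required interface; it uses the LogLevel enum from the skeleton further down.

// Minimal sketch (assumed helper, not part of the required API):
// recognize the level from the first word of a line, ignore the message.
fn parse_level(line: &str) -> Option<LogLevel> {
    match line.split_whitespace().next()? {
        "ERROR" => Some(LogLevel::Error),
        "WARN" => Some(LogLevel::Warn),
        "INFO" => Some(LogLevel::Info),
        "DEBUG" => Some(LogLevel::Debug),
        _ => None,
    }
}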
2. Complication: if a file is larger than 10 KB, process it incrementally (with BufReader and lines()). If it contains an error, skip the file, print a warning, and continue.
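
A minimal sketch of that incremental processing, building on the parse_level helper above and the ParseLogError type and imports from the skeleton below. The helper name count_levels_buffered and the warning text are assumptions, not a required API.

use std::fs::File;
use std::io::{BufRead, BufReader};

// Sketch (assumed helper): count levels in one file line by line instead of
// loading the whole file into memory at once.
fn count_levels_buffered(
    path: &Path,
    map: &mut HashMap<LogLevel, usize>,
) -> Result<(), ParseLogError> {
    let file = File::open(path).map_err(ParseLogError::Read)?;
    for line in BufReader::new(file).lines() {
        let line = line.map_err(ParseLogError::Read)?;
        let level = parse_level(&line).ok_or(ParseLogError::ParseLine)?;
        *map.entry(level).or_insert(0) += 1;
    }
    Ok(())
}

// Inside aggregate_logs, a failing file could then be skipped with a warning:
//     if let Err(e) = count_levels_buffered(&path, &mut counts) {
//         eprintln!("warning: skipping {}: {:?}", path.display(), e);
//         skipped.push(path.display().to_string());
//     }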
Requirements:
- Clearly distinguish the kinds of errors: I/O, parsing, unknown level.
- Use ?, map_err, From, Into, Box where appropriate.
use std::collections::HashMap;
use std::error::Error;
use std::io;
use std::io::Read;
use std::path::Path;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum LogLevel {
    Error,
    Warn,
    Info,
    Debug,
}

struct AggregateInfo {
    log_counts: HashMap<LogLevel, usize>,
    skipped_files: Vec<String>,
}

fn aggregate_logs(dir: &Path) -> Result<AggregateInfo, Box<dyn Error>> {
    todo!()
}

// Debug is needed so the tests can print the error with {:?}.
#[derive(Debug)]
enum ParseLogError {
    Read(io::Error),
    ParseLine,
}

fn parse_log_file<R>(file: R, map: &mut HashMap<LogLevel, usize>) -> Result<(), ParseLogError>
where
    R: Read,
{
    todo!()
}
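
As an illustration of the From / ? / map_err requirement, here is one possible way to wire up the conversions for ParseLogError; treat it as a sketch under these assumptions, not the only acceptable design.

use std::fmt;

// Sketch: with this conversion, `?` turns an io::Error into ParseLogError
// directly, instead of an explicit .map_err(ParseLogError::Read).
impl From<io::Error> for ParseLogError {
    fn from(e: io::Error) -> Self {
        ParseLogError::Read(e)
    }
}

// Sketch: Display plus the Error marker trait let `?` convert a ParseLogError
// into the Box<dyn Error> returned by aggregate_logs.
impl fmt::Display for ParseLogError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ParseLogError::Read(e) => write!(f, "read error: {}", e),
            ParseLogError::ParseLine => write!(f, "could not parse log line"),
        }
    }
}

impl Error for ParseLogError {}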
Bonus (1 point): Write a CLI program that:
- takes one argument: a path to a directory with logs;
- calls aggregate_logs;
- on error, prints a message to stderr and exits with a non-zero code;
- prints the statistics in the format shown below, followed by the list of skipped files:
Debug: 123
Info: 456
Warn: 78
Error: 9
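
A minimal sketch of such a CLI, assuming it lives in a separate binary (e.g. src/main.rs) with the library items in scope; the exact messages and the way skipped files are printed are illustrative choices.

use std::path::Path;
use std::process::ExitCode;

// Sketch of the bonus CLI; assumes aggregate_logs, AggregateInfo and LogLevel
// are in scope (e.g. via `use your_crate::*;`, a hypothetical crate name).
fn main() -> ExitCode {
    let Some(dir) = std::env::args().nth(1) else {
        eprintln!("usage: aggregate-logs <log-directory>");
        return ExitCode::FAILURE;
    };

    match aggregate_logs(Path::new(&dir)) {
        Ok(info) => {
            // Print the counts in the order shown above.
            for (name, level) in [
                ("Debug", LogLevel::Debug),
                ("Info", LogLevel::Info),
                ("Warn", LogLevel::Warn),
                ("Error", LogLevel::Error),
            ] {
                let count = info.log_counts.get(&level).copied().unwrap_or(0);
                println!("{}: {}", name, count);
            }
            for file in &info.skipped_files {
                println!("skipped: {}", file);
            }
            ExitCode::SUCCESS
        }
        Err(e) => {
            eprintln!("error: {}", e);
            ExitCode::FAILURE
        }
    }
}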
Be sure to read (or re-read) the guidelines for submitting homework.
Make sure your solution compiles with the basic test:
// Include the solution source in the same file, so we
// don't have to worry about item visibility.
// Please don't use `include!` in real code, this is a hack
// around the checking system.
include!{ "../src/lib.rs" }

#[test]
fn test_parse_log_basic() {
    {
        let data = "".as_bytes();
        let mut map = HashMap::new();
        assert!(matches!(parse_log_file(data, &mut map), Ok(())));
        assert_eq!(map.get(&LogLevel::Error).copied().unwrap_or(0), 0);
        assert_eq!(map.get(&LogLevel::Warn).copied().unwrap_or(0), 0);
        assert_eq!(map.get(&LogLevel::Info).copied().unwrap_or(0), 0);
        assert_eq!(map.get(&LogLevel::Debug).copied().unwrap_or(0), 0);
    }
    {
        let data = concat!(
            "INFO message 1\n",
            "ERROR message 2\n",
            "WARN message 3\n",
            "DEBUG message 4\n",
        )
        .as_bytes();
        let mut map = HashMap::new();
        assert!(matches!(parse_log_file(data, &mut map), Ok(())));
        assert_eq!(map.get(&LogLevel::Error).copied().unwrap_or(0), 1);
        assert_eq!(map.get(&LogLevel::Warn).copied().unwrap_or(0), 1);
        assert_eq!(map.get(&LogLevel::Info).copied().unwrap_or(0), 1);
        assert_eq!(map.get(&LogLevel::Debug).copied().unwrap_or(0), 1);
    }
    {
        let data = concat!(
            "INFO message 1\n",
            "WARN message 2\n",
            "INFO message 3\n",
            "DEBUG message 4\n",
            "INFO message 3\n",
        )
        .as_bytes();
        let mut map = HashMap::new();
        assert!(matches!(parse_log_file(data, &mut map), Ok(())));
        assert_eq!(map.get(&LogLevel::Error).copied().unwrap_or(0), 0);
        assert_eq!(map.get(&LogLevel::Warn).copied().unwrap_or(0), 1);
        assert_eq!(map.get(&LogLevel::Info).copied().unwrap_or(0), 3);
        assert_eq!(map.get(&LogLevel::Debug).copied().unwrap_or(0), 1);
    }
}

#[test]
fn test_parse_log_invalid() {
    {
        let data = concat!("INFO message 1\n", "invalid line\n", "INFO message 3\n",).as_bytes();
        let mut map = HashMap::new();
        assert!(matches!(parse_log_file(data, &mut map), Err(ParseLogError::ParseLine)));
    }
}

#[test]
fn test_parse_log_big_data() {
    let line = "INFO lorem ipsum dolor sit amet.\n".as_bytes();
    let data: std::collections::VecDeque<u8> = std::iter::repeat(line)
        .take(20_000 / line.len() + 1)
        .flatten()
        .copied()
        .collect();

    struct ReadCounter<R: Read> {
        data: R,
        called_count: usize,
    }

    impl<R: std::io::Read> std::io::Read for ReadCounter<R> {
        fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
            self.called_count += 1;
            self.data.read(buf)
        }
    }

    let mut reader = ReadCounter { data, called_count: 0 };
    let mut map = HashMap::new();

    match parse_log_file(&mut reader, &mut map) {
        Ok(()) => {}
        Err(e) => panic!("assertion failed, expected Ok(_), found Err({:?})", e),
    }

    assert!(reader.called_count < 10_000);
}

#[test]
fn test_aggregate() {
    let pid = std::process::id();
    let dirname = format!("logs-{}", pid);

    let data1 = concat!(
        "DEBUG msg 1\n",
        "INFO msg 2\n",
        "WARN msg 3\n",
        "ERROR msg 4\n",
    );
    let data2 = concat!(
        "INFO some message\n",
        "INFO some message\n",
        "INFO some message\n",
    );

    std::fs::create_dir(&dirname).unwrap();

    struct CleanupDir {
        dirname: String,
    }

    impl Drop for CleanupDir {
        fn drop(&mut self) {
            std::fs::remove_dir_all(&self.dirname).unwrap();
        }
    }

    let _cleanup = CleanupDir {
        dirname: dirname.clone(),
    };

    std::fs::write(&format!("{}/file1.log", dirname), data1).unwrap();
    std::fs::write(&format!("{}/file2.log", dirname), data2).unwrap();
    std::fs::write(&format!("{}/file3.txt", dirname), data1).unwrap();

    let output = aggregate_logs(&Path::new(&dirname));

    match output {
        Ok(info) => {
            assert_eq!(info.log_counts.get(&LogLevel::Error).copied().unwrap_or(0), 1);
            assert_eq!(info.log_counts.get(&LogLevel::Warn).copied().unwrap_or(0), 1);
            assert_eq!(info.log_counts.get(&LogLevel::Info).copied().unwrap_or(0), 4);
            assert_eq!(info.log_counts.get(&LogLevel::Debug).copied().unwrap_or(0), 1);
            assert_eq!(info.skipped_files.len(), 1);
            assert!(info.skipped_files[0].contains("file3.txt"));
        }
        Err(e) => {
            panic!("expected Ok(_), found: Err({})", e)
        }
    }
}
