diff --git a/README.md b/README.md
index ace8da61e3f..ad42c1dde6d 100644
--- a/README.md
+++ b/README.md
@@ -37,7 +37,10 @@ The **API endpoint** can be used to:
 - Add one or more network interfaces to the microVM. Firecracker is mapping
   an existing host file as a VirtIO block device into the microVM.
 - Add one or more read/write disks (file-backed block devices) to the microVM.
-- Configure the logging system (i.e. path on host for log file, log level, etc).
+- Configure the logging system by:
+  - Specifying two named pipes (one for human-readable logs and one for metrics).
+  - Enabling or disabling the output of the log level and of the file and line where the log originated.
+  - Setting the maximum level for triggering logs.
 - Configure rate limiters for VirtiIO devices which can limit the bandwidth, ops/s or both.
 - Start the microVM using a given kernel image, root file system and boot arguments.
 
@@ -47,7 +50,7 @@ The **API endpoint** can be used to:
 - Emulated keyboard (i8042) and serial console (UART). The microVM serial
   console input and output are connected to those of the Firecracker process
   (this allows direct console access to the guest OS).
-- Metrics currently logged every 60s to the configured log-file.
+- Metrics currently logged every 60s to a dedicated named pipe.
   Categories:
   - API requests related metrics
   - VCPUs related metrics
diff --git a/api_server/src/http_service.rs b/api_server/src/http_service.rs
index a75c02f859d..2e4af822648 100644
--- a/api_server/src/http_service.rs
+++ b/api_server/src/http_service.rs
@@ -892,6 +892,18 @@ mod tests {
         }
 
         // Error cases
+        // Test the case where the action already exists.
+        assert!(
+            parse_actions_req(
+                &path_tokens,
+                &path,
+                Method::Put,
+                &id_from_path,
+                &body,
+                &mut action_map
+            ).is_err()
+        );
+
         assert!(
             parse_actions_req(
                 &path_tokens,
@@ -1033,6 +1045,82 @@ mod tests {
                 &body
             ).is_err()
         );
+
+        // Test the case where the id from the path differs from the id in the body.
+        assert!(
+            parse_drives_req(
+                &"/foo/bar"[1..].split_terminator('/').collect(),
+                &"/foo/bar",
+                Method::Put,
+                &Some("barr"),
+                &body
+            ).is_err()
+        );
+
+        // Deserializing to a DriveDescription should fail when mandatory fields are missing.
+        let json = "{
+                \"drive_id\": \"bar\",
+                \"path_on_host\": \"/foo/bar\",
+                \"statee\": \"Attached\",
+                \"is_root_device\": true,
+                \"permissions\": \"ro\"
+              }";
+        let body: Chunk = Chunk::from(json);
+        assert!(
+            parse_drives_req(
+                &"/foo/bar"[1..].split_terminator('/').collect(),
+                &"/foo/bar",
+                Method::Put,
+                &Some("bar"),
+                &body
+            ).is_err()
+        );
+    }
+
+    #[test]
+    fn test_parse_logger_source_req() {
+        let path = "/foo";
+        let path_tokens: Vec<&str> = path[1..].split_terminator('/').collect();
+        let json = "{
+                \"log_fifo\": \"tmp1\",
+                \"metrics_fifo\": \"tmp2\",
+                \"level\": \"Info\",
+                \"show_level\": true,
+                \"show_log_origin\": true
+              }";
+        let body: Chunk = Chunk::from(json);
+
+        // GET
+        match parse_logger_req(&path_tokens, &path, Method::Get, &body) {
+            Ok(pr_dummy) => assert!(pr_dummy.eq(&ParsedRequest::Dummy)),
+            _ => assert!(false),
+        }
+
+        // PUT
+        let logger_body = serde_json::from_slice::<APILoggerDescription>(&body)
+            .expect("deserialization failed");
+        match parse_logger_req(&path_tokens, &path, Method::Put, &body) {
+            Ok(pr) => {
+                let (sender, receiver) = oneshot::channel();
+                assert!(pr.eq(&ParsedRequest::Sync(
+                    SyncRequest::PutLogger(logger_body, sender),
+                    receiver,
+                )));
+            }
+            _ => assert!(false),
+        }
+
+        // Error cases
+        assert!(parse_logger_req(&path_tokens, &path, Method::Put, &Chunk::from("foo")).is_err());
+
+        assert!(
+            parse_logger_req(
+                &"/foo/bar"[1..].split_terminator('/').collect(),
+                &"/foo/bar",
+                Method::Put,
+                &Chunk::from("foo")
+            ).is_err()
+        );
     }
 
     #[test]
@@ -1155,6 +1243,16 @@ mod tests {
         }
 
         // Error cases
+        assert!(
+            parse_netif_req(
+                &"/foo/bar"[1..].split_terminator('/').collect(),
+                &"/foo/bar",
+                Method::Put,
+                &Some("barr"),
+                &body
+            ).is_err()
+        );
+
         assert!(
             parse_netif_req(
                 &"/foo/bar"[1..].split_terminator('/').collect(),
diff --git a/api_server/src/request/sync/logger.rs b/api_server/src/request/sync/logger.rs
index 83b9f9cf5fe..c7bbe323370 100644
--- a/api_server/src/request/sync/logger.rs
+++ b/api_server/src/request/sync/logger.rs
@@ -18,7 +18,8 @@ pub enum APILoggerLevel {
 #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
 #[serde(deny_unknown_fields)]
 pub struct APILoggerDescription {
-    pub path: String,
+    pub log_fifo: String,
+    pub metrics_fifo: String,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub level: Option<APILoggerLevel>,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -109,7 +110,8 @@ mod tests {
     #[test]
     fn test_into_parsed_request() {
         let desc = APILoggerDescription {
-            path: String::from(""),
+            log_fifo: String::from("log"),
+            metrics_fifo: String::from("metrics"),
             level: None,
             show_level: None,
             show_log_origin: None,
diff --git a/api_server/swagger/firecracker.yaml b/api_server/swagger/firecracker.yaml
index 14332d6ed08..f933f7ea18f 100644
--- a/api_server/swagger/firecracker.yaml
+++ b/api_server/swagger/firecracker.yaml
@@ -150,7 +150,7 @@ paths:
 
   /logger:
     put:
-      summary: Initializes the logging system by specifying a log path on the host.
+      summary: Initializes the logger by specifying two named pipes (one for the logs and one for the metrics output).
       operationId: putLogger
       parameters:
         - name: body
@@ -364,11 +364,14 @@ definitions:
   Logger:
     type: object
     description:
      Describes the configuration option for the logger intitialization.
+      Describes the configuration option for the logging capability.
     properties:
-      path:
+      log_fifo:
         type: string
-        description: The path on the host for the log file.
+        description: The named pipe for the human-readable log output.
+      metrics_fifo:
+        type: string
+        description: The named pipe where the JSON-formatted metrics will be flushed.
       level:
         type: string
         description: Set the level.
diff --git a/logger/Cargo.toml b/logger/Cargo.toml
index 94cbb6b2110..950e145ca28 100644
--- a/logger/Cargo.toml
+++ b/logger/Cargo.toml
@@ -12,3 +12,6 @@ serde = ">=1.0.27"
 serde_derive = ">=1.0.27"
 serde_json = ">=1.0.9"
 time = "0.1.34"
+
+[dev-dependencies]
+tempfile = ">=3.0.2"
diff --git a/logger/src/error.rs b/logger/src/error.rs
index 5fa8c776bb1..65e0273c63b 100644
--- a/logger/src/error.rs
+++ b/logger/src/error.rs
@@ -10,12 +10,12 @@ pub enum LoggerError {
     NeverInitialized(String),
     /// The logger does not allow reinitialization.
     AlreadyInitialized,
-    /// Creating log file fails.
-    CreateLogFile(std::io::Error),
-    /// Writing to log file fails.
-    FileLogWrite(std::io::Error),
+    /// Opening named pipe fails.
+    OpenFIFO(std::io::Error),
+    /// Writing to named pipe fails.
+    LogWrite(std::io::Error),
     /// Flushing to disk fails.
-    FileLogFlush(std::io::Error),
+    LogFlush(std::io::Error),
     /// Error obtaining lock on mutex.
     MutexLockFailure(String),
     /// Error in the logging of the metrics.
@@ -31,14 +31,14 @@ impl fmt::Display for LoggerError {
             LoggerError::AlreadyInitialized => {
                 format!("{}", "Reinitialization of logger not allowed.")
             }
-            LoggerError::CreateLogFile(ref e) => {
-                format!("Failed to create log file. Error: {}", e.description())
+            LoggerError::OpenFIFO(ref e) => {
+                format!("Failed to open pipe. Error: {}", e.description())
             }
-            LoggerError::FileLogWrite(ref e) => {
-                format!("Failed to write to log file. Error: {}", e.description())
+            LoggerError::LogWrite(ref e) => {
+                format!("Failed to write logs. Error: {}", e.description())
             }
-            LoggerError::FileLogFlush(ref e) => {
-                format!("Failed to flush log file. Error: {}", e.description())
+            LoggerError::LogFlush(ref e) => {
+                format!("Failed to flush logs. Error: {}", e.description())
             }
             LoggerError::MutexLockFailure(ref e) => format!("{}", e),
             LoggerError::LogMetricFailure(ref e) => format!("{}", e),
@@ -78,29 +78,29 @@ mod tests {
         assert!(
             format!(
                 "{:?}",
-                LoggerError::FileLogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
-            ).contains("FileLogWrite")
+                LoggerError::LogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
+            ).contains("LogWrite")
         );
         assert_eq!(
             format!(
                 "{}",
-                LoggerError::FileLogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
+                LoggerError::LogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
             ),
-            "Failed to write to log file. Error: write"
+            "Failed to write logs. Error: write"
         );
 
         assert!(
             format!(
                 "{:?}",
-                LoggerError::FileLogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
-            ).contains("FileLogFlush")
+                LoggerError::LogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
+            ).contains("LogFlush")
         );
         assert_eq!(
             format!(
                 "{}",
-                LoggerError::FileLogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
+                LoggerError::LogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
             ),
-            "Failed to flush log file. Error: flush"
+            "Failed to flush logs. Error: flush"
         );
 
         assert!(
@@ -126,9 +126,9 @@ mod tests {
         assert_eq!(
             format!(
                 "{}",
-                LoggerError::LogMetricFailure("Failure in the logging of the metrics.".to_string())
+                LoggerError::LogMetricFailure("Failed to log metrics.".to_string())
            ),
-            "Failure in the logging of the metrics."
+            "Failed to log metrics."
         );
 
         assert!(format!("{:?}", LoggerError::LogMetricRateLimit).contains("LogMetricRateLimit"));
diff --git a/logger/src/lib.rs b/logger/src/lib.rs
index 22b1dfc5b36..5eda3dc55c3 100644
--- a/logger/src/lib.rs
+++ b/logger/src/lib.rs
@@ -25,6 +25,7 @@ extern crate chrono;
 // workaround to macro_reexport
 #[macro_use]
 extern crate lazy_static;
+extern crate libc;
 extern crate log;
 extern crate serde;
 #[macro_use]
@@ -78,17 +79,16 @@ lazy_static! {
 
 /// Output sources for the log subsystem.
 ///
-#[derive(Debug, PartialEq, Clone, Copy)]
+#[derive(PartialEq, Clone, Copy)]
 #[repr(usize)]
 enum Destination {
     Stderr,
     Stdout,
-    File,
+    Pipe,
 }
 
 /// Each log level also has a code and a destination output associated with it.
 ///
-#[derive(Debug)]
 pub struct LevelInfo {
     // this represents the numeric representation of the chosen log::Level variant
     code: AtomicUsize,
@@ -118,15 +118,15 @@ impl LevelInfo {
 
 // All member fields have types which are Sync, and exhibit interior mutability, so
 // we can call logging operations using a non-mut static global variable.
-#[derive(Debug)]
 pub struct Logger {
     show_level: AtomicBool,
     show_file_path: AtomicBool,
     show_line_numbers: AtomicBool,
     level_info: LevelInfo,
-
-    // used in case we want to log to a file
-    file: Mutex<Option<FileLogWriter>>,
+    // Used in case we want to send logs to a FIFO.
+    log_fifo: Mutex<Option<PipeLogWriter>>,
+    // Used in case we want to send metrics to a FIFO.
+    metrics_fifo: Mutex<Option<PipeLogWriter>>,
 }
 
 /// Auxiliary function to get the default destination for some code level.
@@ -141,6 +141,16 @@ fn get_default_destination(level: Level) -> Destination {
     }
 }
 
+/// Auxiliary function to write a message to a PipeLogWriter.
+/// This is used by the internal logger to flush either human-readable logs or metrics.
+fn log_to_fifo(mut msg: String, fifo_writer: &mut PipeLogWriter) -> Result<()> {
+    msg = format!("{}\n", msg);
+    fifo_writer.write(&msg)?;
+    // No need to call flush here since the write handles the flush on its own, given that
+    // our messages always end with a newline.
+    Ok(())
+}
+
 impl Logger {
     /// Creates a new instance of the current logger.
     ///
@@ -159,7 +169,8 @@ impl Logger {
                 code: AtomicUsize::new(DEFAULT_LEVEL as usize),
                 writer: AtomicUsize::new(Destination::Stderr as usize),
             },
-            file: Mutex::new(None),
+            log_fifo: Mutex::new(None),
+            metrics_fifo: Mutex::new(None),
         }
     }
 
@@ -248,7 +259,7 @@ impl Logger {
     /// ```
     pub fn set_level(&self, level: Level) {
         self.level_info.set_code(level);
-        if self.level_info.writer() != Destination::File as usize {
+        if self.level_info.writer() != Destination::Pipe as usize {
             self.level_info.set_writer(get_default_destination(level));
         }
     }
@@ -287,8 +298,17 @@ impl Logger {
         format!("[{}{}{}]", level_str, file_path_str, line_str)
     }
 
-    fn file_guard(&self) -> MutexGuard<Option<FileLogWriter>> {
-        match self.file.lock() {
+    fn log_fifo_guard(&self) -> MutexGuard<Option<PipeLogWriter>> {
+        match self.log_fifo.lock() {
+            Ok(guard) => guard,
+            // If a thread panics while holding this lock, the writer within should still be usable.
+            // (we might get an incomplete log line or something like that).
+            Err(poisoned) => poisoned.into_inner(),
+        }
+    }
+
+    fn metrics_fifo_guard(&self) -> MutexGuard<Option<PipeLogWriter>> {
+        match self.metrics_fifo.lock() {
             Ok(guard) => guard,
             // If a thread panics while holding this lock, the writer within should still be usable.
             // (we might get an incomplete log line or something like that).
@@ -313,28 +333,47 @@ impl Logger {
     /// }
     /// ```
-    pub fn init(&self, log_path: Option<String>) -> Result<()> {
-        // if the logger was already initialized, return error
+    pub fn init(&self, log_pipe: Option<String>, metrics_pipe: Option<String>) -> Result<()> {
+        // If the logger was already initialized, an error will be returned.
         if STATE.compare_and_swap(UNINITIALIZED, INITIALIZING, Ordering::SeqCst) != UNINITIALIZED {
+            METRICS.logger.log_fails.inc();
             return Err(LoggerError::AlreadyInitialized);
         }
 
-        // otherwise try initialization
-        if let Some(path) = log_path.as_ref() {
-            match FileLogWriter::new(path) {
+        if let Some(path) = log_pipe.as_ref() {
+            match PipeLogWriter::new(path) {
                 Ok(t) => {
-                    // the mutex shouldn't be poisoned before init; PANIC!
-                    let mut g = LOGGER.file_guard();
+                    // The mutex shouldn't be poisoned before init; otherwise, panic!
+                    let mut g = LOGGER.log_fifo_guard();
                     *g = Some(t);
-                    LOGGER.level_info.set_writer(Destination::File);
+                    LOGGER.level_info.set_writer(Destination::Pipe);
                 }
                 Err(ref e) => {
                     STATE.store(UNINITIALIZED, Ordering::SeqCst);
-                    return Err(LoggerError::NeverInitialized(format!("{}", e)));
+                    return Err(LoggerError::NeverInitialized(format!(
+                        "Could not open logging fifo: {}",
+                        e
+                    )));
                 }
             };
         }
+        if let Some(path) = metrics_pipe.as_ref() {
+            match PipeLogWriter::new(path) {
+                Ok(t) => {
+                    // The mutex shouldn't be poisoned before init; otherwise, panic!
+                    let mut g = LOGGER.metrics_fifo_guard();
+                    *g = Some(t);
+                }
+                Err(ref e) => {
+                    STATE.store(UNINITIALIZED, Ordering::SeqCst);
+                    return Err(LoggerError::NeverInitialized(format!(
+                        "Could not open metrics fifo: {}",
+                        e
+                    )));
+                }
+            };
+        }
 
         set_max_level(Level::Trace.to_level_filter());
 
         if let Err(e) = set_logger(LOGGER.deref()) {
@@ -349,20 +388,16 @@ impl Logger {
     // In a future PR we'll update the way things are written to the selected destination to avoid
     // the creation and allocation of unnecessary intermediate Strings. The log_helper method takes
     // care of the common logic involved in both writing regular log messages, and dumping metrics.
-    fn log_helper(&self, mut msg: String) {
+    fn log_helper(&self, msg: String) {
         // We have the awkward IF's for now because we can't use just "<enum variant> as usize"
         // on the left side of a match arm for some reason.
         match self.level_info.writer() {
-            x if x == Destination::File as usize => {
-                let mut g = self.file_guard();
-                // the unwrap() is safe because writer == Destination::File
-                let fw = g.as_mut().unwrap();
-                msg = format!("{}\n", msg);
-                if let Err(e) = fw.write(&msg) {
-                    eprintln!("logger: Could not write to log file {}", e);
+            x if x == Destination::Pipe as usize => {
+                // Unwrap is safe because the destination is a pipe.
+                if let Err(_) = log_to_fifo(msg, self.log_fifo_guard().as_mut().unwrap()) {
+                    // No reason to log the error to stderr here; just increment the metric.
+                    METRICS.logger.missed_log_count.inc();
                 }
-
-                let _ = fw.flush();
             }
             x if x == Destination::Stderr as usize => {
                 eprintln!("{}", msg);
@@ -370,24 +405,35 @@ impl Logger {
             x if x == Destination::Stdout as usize => {
                 println!("{}", msg);
             }
-            // major program logic error
+            // This is hit on a major program logic error.
             _ => panic!("Invalid logger.level_info.writer!"),
         }
     }
 
     pub fn log_metrics(&self) -> Result<()> {
-        // Log metrics only if the logger has been initialized.
+        // Check that the logger is initialized.
         if STATE.load(Ordering::Relaxed) == INITIALIZED {
             match serde_json::to_string(METRICS.deref()) {
                 Ok(msg) => {
-                    self.log_helper(msg);
+                    // Check that the destination is indeed a FIFO.
+                    if self.level_info.writer() == Destination::Pipe as usize {
+                        log_to_fifo(msg, self.metrics_fifo_guard().as_mut().unwrap()).map_err(
+                            |e| {
+                                METRICS.logger.missed_metrics_count.inc();
+                                e
+                            },
+                        )?;
+                    }
+                    // Metrics are not logged if the destination is not a pipe.
                     Ok(())
                 }
                 Err(e) => {
+                    METRICS.logger.metrics_fails.inc();
                     return Err(LoggerError::LogMetricFailure(e.description().to_string()));
                 }
             }
         } else {
+            METRICS.logger.metrics_fails.inc();
             return Err(LoggerError::LogMetricFailure(
                 "Failed to log metrics. Logger was not initialized.".to_string(),
             ));
@@ -398,7 +444,7 @@ impl Logger {
 /// Implements trait log from the externally used log crate
 ///
 impl Log for Logger {
-    // test whether a log level is enabled for the current module
+    // Test whether a log level is enabled for the current module.
     fn enabled(&self, metadata: &Metadata) -> bool {
         metadata.level() as usize <= self.level_info.code()
     }
@@ -423,17 +469,15 @@ impl Log for Logger {
 
 #[cfg(test)]
 mod tests {
+    extern crate tempfile;
+
     use super::*;
 
     use log::MetadataBuilder;
-    use std::fs::{remove_file, File};
+    use std::fs::File;
     use std::io::BufRead;
     use std::io::BufReader;
 
-    fn log_file_str() -> String {
-        String::from("tmp.log")
-    }
-
     fn validate_logs(
         log_path: &str,
         expected: &[(&'static str, &'static str, &'static str)],
@@ -476,20 +520,31 @@ mod tests {
         assert_eq!(l.show_level(), true);
 
         assert!(l.log_metrics().is_err());
-        assert!(l.init(Some(log_file_str())).is_ok());
+
+        let log_file_temp = tempfile::NamedTempFile::new()
+            .expect("Failed to create temporary output logging file.");
+        let metrics_file_temp = tempfile::NamedTempFile::new()
+            .expect("Failed to create temporary metrics logging file.");
+        let log_file = String::from(log_file_temp.path().to_path_buf().to_str().unwrap());
+        let metrics_file = String::from(metrics_file_temp.path().to_path_buf().to_str().unwrap());
+
+        assert!(l.init(Some(log_file.clone()), Some(metrics_file)).is_ok());
+
         info!("info");
         warn!("warning");
 
-        assert!(l.init(None).is_err());
+        assert!(l.init(None, None).is_err());
         info!("info");
         warn!("warning");
         error!("error");
 
-        // here we also test that the second initialization had no effect given that the
-        // logging system can only be initialized once per program
+        l.flush();
+
+        // Here we also test that the second initialization had no effect given that the
+        // logging system can only be initialized once per program.
         validate_logs(
-            &log_file_str(),
+            &log_file,
             &[
                 ("[INFO", "lib.rs", "info"),
                 ("[WARN", "lib.rs", "warn"),
@@ -501,11 +556,11 @@ mod tests {
 
         assert!(l.log_metrics().is_ok());
 
-        remove_file(log_file_str()).unwrap();
-
         // exercise the case when there is an error in opening file
         STATE.store(UNINITIALIZED, Ordering::SeqCst);
-        assert!(l.init(Some(String::from(""))).is_err());
+        assert!(l.init(Some(String::from("")), None).is_err());
+        let res = l.init(Some(log_file.clone()), Some(String::from("")));
+        assert!(res.is_err());
 
         l.set_include_level(true);
         l.set_include_origin(false, false);
@@ -531,15 +586,15 @@ mod tests {
         let l = Logger::new();
 
         assert_eq!(
-            format!("{:?}", l.init(None).err()),
+            format!("{:?}", l.init(None, None).err()),
             "Some(AlreadyInitialized)"
         );
 
         STATE.store(UNINITIALIZED, Ordering::SeqCst);
-        let res = format!("{:?}", l.init(Some(log_file_str())).err().unwrap());
-        remove_file(log_file_str()).unwrap();
+        let res = l.init(Some(log_file.clone()), None);
+        assert!(res.is_err());
         assert_eq!(
-            res,
+            format!("{:?}", res.err().unwrap()),
             "NeverInitialized(\"attempted to set a logger after \
              the logging system was already initialized\")"
         );
     }
 
     #[test]
     fn test_get_default_destination() {
-        assert_eq!(
-            get_default_destination(log::Level::Error),
-            Destination::Stderr
-        );
-        assert_eq!(
-            get_default_destination(log::Level::Warn),
-            Destination::Stderr
-        );
-        assert_eq!(
-            get_default_destination(log::Level::Info),
-            Destination::Stdout
-        );
-        assert_eq!(
-            get_default_destination(log::Level::Debug),
-            Destination::Stdout
-        );
-        assert_eq!(
-            get_default_destination(log::Level::Trace),
-            Destination::Stdout
-        );
+        assert!(get_default_destination(log::Level::Error) == Destination::Stderr);
+        assert!(get_default_destination(log::Level::Warn) == Destination::Stderr);
+        assert!(get_default_destination(log::Level::Info) == Destination::Stdout);
+        assert!(get_default_destination(log::Level::Debug) == Destination::Stdout);
+        assert!(get_default_destination(log::Level::Trace) == Destination::Stdout);
     }
 }
diff --git a/logger/src/metrics.rs b/logger/src/metrics.rs
index e8137b9ee1a..0c392791920 100644
--- a/logger/src/metrics.rs
+++ b/logger/src/metrics.rs
@@ -171,6 +171,14 @@ pub struct I8042DeviceMetrics {
     pub write_count: SharedMetric,
 }
 
+#[derive(Default, Serialize)]
+pub struct LoggerSystemMetrics {
+    pub missed_metrics_count: SharedMetric,
+    pub metrics_fails: SharedMetric,
+    pub missed_log_count: SharedMetric,
+    pub log_fails: SharedMetric,
+}
+
 #[derive(Default, Serialize)]
 pub struct NetDeviceMetrics {
     pub activate_fails: SharedMetric,
@@ -251,6 +259,7 @@ pub struct FirecrackerMetrics {
     pub block: BlockDeviceMetrics,
     pub get_api_requests: GetRequestsMetrics,
     pub i8042: I8042DeviceMetrics,
+    pub logger: LoggerSystemMetrics,
     pub net: NetDeviceMetrics,
     pub put_api_requests: PutRequestsMetrics,
     pub seccomp: SeccompMetrics,
diff --git a/logger/src/writers.rs b/logger/src/writers.rs
index f78e9384a2c..b04d0281e20 100644
--- a/logger/src/writers.rs
+++ b/logger/src/writers.rs
@@ -1,32 +1,34 @@
-use error::LoggerError;
-use std::fs::File;
+use libc::O_NONBLOCK;
+use std::fs::{File, OpenOptions};
 use std::io::{LineWriter, Write};
-use std::path::Path;
+use std::os::unix::fs::OpenOptionsExt;
+use std::path::PathBuf;
 use std::result;
 use std::sync::{Mutex, MutexGuard};
 
-pub type Result<T> = result::Result<T, LoggerError>;
+use error::LoggerError;
+
+type Result<T> = result::Result<T, LoggerError>;
 
-/// Structure `FileLogWriter` used for writing to a file in a thread-safe way.
+/// Structure `PipeLogWriter` used for writing to a named pipe in a thread-safe way.
 #[derive(Debug)]
-pub struct FileLogWriter {
+pub struct PipeLogWriter {
     line_writer: Mutex<LineWriter<File>>,
 }
 
-impl FileLogWriter {
-    pub fn new(log_path: &String) -> Result<FileLogWriter> {
-        let p_file = Path::new(&log_path);
-        if Path::new(log_path).exists() {
-            eprintln!(
-                "Log file {} already exists. Overwriting its contents...",
-                log_path
-            );
-        }
-        match File::create(&p_file) {
-            Ok(t) => Ok(FileLogWriter {
+impl PipeLogWriter {
+    pub fn new(fifo_path: &String) -> Result<PipeLogWriter> {
+        let fifo = PathBuf::from(fifo_path);
+        match OpenOptions::new()
+            .custom_flags(O_NONBLOCK)
+            .read(true)
+            .write(true)
+            .open(&fifo)
+        {
+            Ok(t) => Ok(PipeLogWriter {
                 line_writer: Mutex::new(LineWriter::new(t)),
             }),
-            Err(e) => return Err(LoggerError::CreateLogFile(e)),
+            Err(e) => return Err(LoggerError::OpenFIFO(e)),
         }
     }
 
@@ -34,14 +36,7 @@ impl FileLogWriter {
         let mut line_writer = self.get_line_writer()?;
         line_writer
             .write_all(msg.as_bytes())
-            .map_err(|e| LoggerError::FileLogWrite(e))
-    }
-
-    pub fn flush(&self) -> Result<()> {
-        let mut line_writer = self.get_line_writer()?;
-        line_writer
-            .flush()
-            .map_err(|e| LoggerError::FileLogFlush(e))
+            .map_err(|e| LoggerError::LogWrite(e))
     }
 
     fn get_line_writer(&self) -> Result<(MutexGuard<LineWriter<File>>)> {
@@ -53,37 +48,28 @@
 
 #[cfg(test)]
 mod tests {
+    extern crate tempfile;
+
     use super::*;
-    use std::fs::remove_file;
 
     #[test]
     fn test_new() {
-        let mut file: String = "./inexistent/tmp.log".to_string();
-        assert!(FileLogWriter::new(&file).is_err());
-        format!("{:?}", FileLogWriter::new(&file));
-        file = "tmp_writers_new.log".to_string();
-        let res = FileLogWriter::new(&file);
-        format!("{:?}", res);
-        remove_file(file).unwrap();
+        let log_file_temp = tempfile::NamedTempFile::new()
+            .expect("Failed to create temporary output logging file.");
+        let good_file = String::from(log_file_temp.path().to_path_buf().to_str().unwrap());
+        let res = PipeLogWriter::new(&good_file);
         assert!(res.is_ok())
     }
 
     #[test]
     fn test_write() {
-        let file: String = "tmp_writers_write.log".to_string();
-        let fw = FileLogWriter::new(&file).unwrap();
+        let log_file_temp = tempfile::NamedTempFile::new()
+            .expect("Failed to create temporary output logging file.");
+        let file = String::from(log_file_temp.path().to_path_buf().to_str().unwrap());
+
+        let fw = PipeLogWriter::new(&file).unwrap();
         let msg = String::from("some message");
         let res = fw.write(&msg);
-        remove_file(file).unwrap();
         assert!(res.is_ok())
     }
-
-    #[test]
-    fn test_flush() {
-        let file: String = "tmp_writers_flush.log".to_string();
-        let fw = FileLogWriter::new(&file).unwrap();
-        let res = fw.flush();
-        remove_file(file).unwrap();
-        assert!(res.is_ok())
-    }
 }
diff --git a/vmm/src/api_logger_config.rs b/vmm/src/api_logger_config.rs
index 8d9ffc577db..40f358d8ebe 100644
--- a/vmm/src/api_logger_config.rs
+++ b/vmm/src/api_logger_config.rs
@@ -20,7 +20,7 @@ pub fn init_logger(api_logger: APILoggerDescription) -> Result<()> {
         LOGGER.set_include_level(val);
     }
 
-    if let Err(ref e) = LOGGER.init(Some(api_logger.path)) {
+    if let Err(ref e) = LOGGER.init(Some(api_logger.log_fifo), Some(api_logger.metrics_fifo)) {
         return Err(APILoggerError::InitializationFailure(e.to_string()));
     } else {
         Ok(())
@@ -46,6 +46,7 @@ mod tests {
     use api_server::request::sync::{APILoggerDescription, APILoggerLevel};
     use std::fs::{self, File};
     use std::io::{BufRead, BufReader};
+    use std::path::Path;
 
     fn validate_logs(
         log_path: &str,
@@ -67,17 +68,25 @@ mod tests {
 
     #[test]
     fn test_init_logger_from_api() {
+        let log_filename = "tmp.log";
+        let metrics_filename = "metrics.log";
+
         let desc = APILoggerDescription {
-            path: String::from(""),
+            log_fifo: String::from(log_filename),
+            metrics_fifo: String::from(metrics_filename),
             level: None,
             show_level: None,
             show_log_origin: None,
         };
         assert!(init_logger(desc).is_err());
 
-        let filename = "tmp.log";
+        File::create(&Path::new(&log_filename)).expect("Failed to create temporary log file.");
+
+        File::create(&Path::new(&metrics_filename)).expect("Failed to create temporary metrics file.");
+
         let desc = APILoggerDescription {
-            path: String::from(filename),
+            log_fifo: String::from(log_filename),
+            metrics_fifo: String::from(metrics_filename),
             level: Some(APILoggerLevel::Warning),
             show_level: Some(true),
             show_log_origin: Some(true),
@@ -85,7 +94,8 @@ mod tests {
 
         let res = init_logger(desc).is_ok();
         if !res {
-            let _x = fs::remove_file(filename);
+            let _x = fs::remove_file(log_filename);
+            let _x = fs::remove_file(metrics_filename);
         }
         assert!(res);
 
@@ -96,13 +106,14 @@ mod tests {
 
         // info should not be outputted
         let res = validate_logs(
-            filename,
+            log_filename,
             &[
                 ("[WARN", "logger_config.rs", "warn"),
                 ("[ERROR", "logger_config.rs", "error"),
             ],
         );
-        let _x = fs::remove_file(filename);
+        let _x = fs::remove_file(log_filename);
+        let _x = fs::remove_file(metrics_filename);
         assert!(res);
     }
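
A minimal end-to-end sketch of the reworked two-FIFO initialization (not part of the patch above): it assumes `LOGGER` is the lazy_static instance exported by the `logger` crate, as consumed in vmm/src/api_logger_config.rs, and the FIFO paths below are illustrative placeholders created here with `libc::mkfifo`.

    extern crate libc;
    #[macro_use]
    extern crate log;
    extern crate logger;

    use std::ffi::CString;

    use logger::LOGGER;

    fn main() {
        // Create the two named pipes up front; PipeLogWriter::new() only opens an existing path.
        for path in &["/tmp/firecracker.log.fifo", "/tmp/firecracker.metrics.fifo"] {
            let c_path = CString::new(*path).expect("path contains a NUL byte");
            // mkfifo returns 0 on success; failures (e.g. the FIFO already exists) are ignored
            // in this sketch for brevity.
            unsafe {
                libc::mkfifo(c_path.as_ptr(), 0o644);
            }
        }

        // One pipe receives the human-readable log lines, the other the JSON-formatted metrics.
        LOGGER
            .init(
                Some("/tmp/firecracker.log.fifo".to_string()),
                Some("/tmp/firecracker.metrics.fifo".to_string()),
            )
            .expect("failed to initialize the logger");

        info!("this line goes to the log FIFO");

        // Flush the current metrics snapshot to the metrics FIFO.
        LOGGER.log_metrics().expect("failed to dump metrics");
    }

Over the HTTP API, the same configuration corresponds to a PUT on /logger whose body carries log_fifo, metrics_fifo and, optionally, level, show_level and show_log_origin, as exercised by the http_service.rs test added in this change.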