
Add support for specifying different files for flushing metrics #362

Merged (4 commits) on Jun 28, 2018
7 changes: 5 additions & 2 deletions README.md
@@ -37,7 +37,10 @@ The **API endpoint** can be used to:
- Add one or more network interfaces to the microVM.
- Add one or more read/write disks (file-backed block devices) to the microVM.
  Firecracker maps an existing host file as a VirtIO block device into the
  microVM.
- Configure the logging system (i.e. path on host for log file, log level, etc).
- Configure the logging system by:
  - Specifying two named pipes (one for human-readable logs and one for metrics).
  - Enabling or disabling printing of the log level, line, and file where each
    log entry originated.
  - Setting the maximum log level that triggers output.
- Configure rate limiters for VirtIO devices which can limit the bandwidth,
  ops/s, or both.
- Start the microVM using a given kernel image, root file system and boot arguments.
@@ -47,7 +50,7 @@ The **API endpoint** can be used to:
- Emulated keyboard (i8042) and serial console (UART). The microVM serial
console input and output are connected to those of the Firecracker process
(this allows direct console access to the guest OS).
- Metrics currently logged every 60s to the configured log-file.
- Metrics currently logged every 60s to a dedicated, metrics-only named pipe.
Categories:
- API requests related metrics
- VCPUs related metrics
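
Note: the README change above says the logger now takes two named pipes.
Assuming the caller is responsible for creating both FIFOs on the host before
configuring the logger (this PR's logger errors only cover opening a pipe, not
creating one), a minimal sketch of that step follows; the paths are
hypothetical and not part of this PR.

use std::process::Command;

fn main() {
    // Hypothetical FIFO paths; any writable location on the host works.
    for path in &["/tmp/firecracker.log.fifo", "/tmp/firecracker.metrics.fifo"] {
        let status = Command::new("mkfifo")
            .arg(path)
            .status()
            .expect("failed to spawn mkfifo");
        assert!(status.success(), "could not create {}", path);
    }
}
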
98 changes: 98 additions & 0 deletions api_server/src/http_service.rs
@@ -892,6 +892,18 @@ mod tests {
}

// Error cases
// Test the case where action already exists.
assert!(
parse_actions_req(
&path_tokens,
&path,
Method::Put,
&id_from_path,
&body,
&mut action_map
).is_err()
);

assert!(
parse_actions_req(
&path_tokens,
@@ -1033,6 +1045,82 @@ mod tests {
&body
).is_err()
);

// Test the case where the id from the path differs from the one in the body.
assert!(
parse_drives_req(
&"/foo/bar"[1..].split_terminator('/').collect(),
&"/foo/bar",
Method::Put,
&Some("barr"),
&body
).is_err()
);

// Deserializing to a DriveDescription should fail when mandatory fields are missing.
let json = "{
\"drive_id\": \"bar\",
\"path_on_host\": \"/foo/bar\",
\"statee\": \"Attached\",
\"is_root_device\": true,
\"permissions\": \"ro\"
}";
let body: Chunk = Chunk::from(json);
assert!(
parse_drives_req(
&"/foo/bar"[1..].split_terminator('/').collect(),
&"/foo/bar",
Method::Put,
&Some("bar"),
&body
).is_err()
);
}

#[test]
fn test_parse_logger_source_req() {
let path = "/foo";
let path_tokens: Vec<&str> = path[1..].split_terminator('/').collect();
let json = "{
\"log_fifo\": \"tmp1\",
\"metrics_fifo\": \"tmp2\",
\"level\": \"Info\",
\"show_level\": true,
\"show_log_origin\": true
}";
let body: Chunk = Chunk::from(json);

// GET
match parse_logger_req(&path_tokens, &path, Method::Get, &body) {
Ok(pr_dummy) => assert!(pr_dummy.eq(&ParsedRequest::Dummy)),
_ => assert!(false),
}

// PUT
let logger_body = serde_json::from_slice::<request::APILoggerDescription>(&body)
.expect("deserialization failed");
match parse_logger_req(&path_tokens, &path, Method::Put, &body) {
Ok(pr) => {
let (sender, receiver) = oneshot::channel();
assert!(pr.eq(&ParsedRequest::Sync(
SyncRequest::PutLogger(logger_body, sender),
receiver,
)));
}
_ => assert!(false),
}

// Error cases
assert!(parse_logger_req(&path_tokens, &path, Method::Put, &Chunk::from("foo")).is_err());

assert!(
parse_logger_req(
&"/foo/bar"[1..].split_terminator('/').collect(),
&"/foo/bar",
Method::Put,
&Chunk::from("foo")
).is_err()
);
}

#[test]
@@ -1155,6 +1243,16 @@ mod tests {
}

// Error cases
assert!(
parse_netif_req(
&"/foo/bar"[1..].split_terminator('/').collect(),
&"/foo/bar",
Method::Put,
&Some("barr"),
&body
).is_err()
);

assert!(
parse_netif_req(
&"/foo/bar"[1..].split_terminator('/').collect(),
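
Background on the PUT case asserted in the new logger test above:
parse_logger_req pairs the deserialized body with a oneshot channel inside
SyncRequest::PutLogger, presumably so the handler can report the outcome back
through the receiver. A standalone sketch of that oneshot round trip is shown
below; it assumes the futures 0.1 API that oneshot::channel() in these tests
appears to come from.

extern crate futures;

use futures::Future;
use futures::sync::oneshot;

fn main() {
    // The parser keeps `receiver`; whoever handles the request gets `sender`.
    let (sender, receiver) = oneshot::channel::<&'static str>();
    sender.send("logger configured").expect("receiver dropped");
    let outcome = receiver.wait().expect("sender dropped");
    assert_eq!(outcome, "logger configured");
}
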
6 changes: 4 additions & 2 deletions api_server/src/request/sync/logger.rs
@@ -18,7 +18,8 @@ pub enum APILoggerLevel {
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(deny_unknown_fields)]
pub struct APILoggerDescription {
pub path: String,
pub log_fifo: String,
pub metrics_fifo: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub level: Option<APILoggerLevel>,
#[serde(skip_serializing_if = "Option::is_none")]
@@ -109,7 +110,8 @@ mod tests {
#[test]
fn test_into_parsed_request() {
let desc = APILoggerDescription {
path: String::from(""),
log_fifo: String::from("log"),
metrics_fifo: String::from("metrics"),
level: None,
show_level: None,
show_log_origin: None,
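
To make the request-body change concrete: with the struct above, the two FIFO
fields are mandatory while level, show_level, and show_log_origin stay
optional, and deny_unknown_fields means a client still sending the old path
field is rejected. The sketch below uses a simplified local stand-in for
APILoggerDescription (level reduced to a plain string); it is illustrative
only, not this crate's code.

extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;

// Simplified stand-in for APILoggerDescription, for illustration only.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
struct LoggerBody {
    log_fifo: String,
    metrics_fifo: String,
    level: Option<String>,
    show_level: Option<bool>,
    show_log_origin: Option<bool>,
}

fn main() {
    // Only the two FIFO fields are required.
    let ok: LoggerBody =
        serde_json::from_str(r#"{"log_fifo": "log", "metrics_fifo": "metrics"}"#).unwrap();
    assert_eq!(ok.level, None);

    // The pre-PR field name `path` is now an unknown field and gets rejected.
    assert!(serde_json::from_str::<LoggerBody>(r#"{"path": "/tmp/fc.log"}"#).is_err());
}
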
11 changes: 7 additions & 4 deletions api_server/swagger/firecracker.yaml
@@ -150,7 +150,7 @@ paths:

/logger:
put:
summary: Initializes the logging system by specifying a log path on the host.
summary: Initializes the logger by specifying two named pipes (one for log output and one for metrics output).
operationId: putLogger
parameters:
- name: body
@@ -364,11 +364,14 @@ definitions:
Logger:
type: object
description:
Describes the configuration option for the logger intitialization.
Describes the configuration option for the logging capability.
properties:
path:
log_fifo:
type: string
description: The path on the host for the log file.
description: The named pipe for the human-readable log output.
metrics_fifo:
type: string
description: The named pipe where the JSON-formatted metrics will be flushed.
level:
type: string
description: Set the level.
3 changes: 3 additions & 0 deletions logger/Cargo.toml
@@ -12,3 +12,6 @@ serde = ">=1.0.27"
serde_derive = ">=1.0.27"
serde_json = ">=1.0.9"
time = "0.1.34"

[dev-dependencies]
tempfile = ">=3.0.2"
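
The tempfile dev-dependency added here is presumably used by the logger tests
to create throwaway locations for the log and metrics pipes. A minimal sketch
of that kind of use (not taken from this PR):

extern crate tempfile;

fn main() {
    // Scratch directory; it is deleted when `dir` goes out of scope.
    let dir = tempfile::tempdir().expect("failed to create temp dir");
    let log_path = dir.path().join("log.fifo");
    let metrics_path = dir.path().join("metrics.fifo");
    println!("log: {:?}, metrics: {:?}", log_path, metrics_path);
}
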
42 changes: 21 additions & 21 deletions logger/src/error.rs
@@ -10,12 +10,12 @@ pub enum LoggerError {
NeverInitialized(String),
/// The logger does not allow reinitialization.
AlreadyInitialized,
/// Creating log file fails.
CreateLogFile(std::io::Error),
/// Writing to log file fails.
FileLogWrite(std::io::Error),
/// Opening named pipe fails.
OpenFIFO(std::io::Error),
/// Writing to named pipe fails.
LogWrite(std::io::Error),
/// Flushing to disk fails.
FileLogFlush(std::io::Error),
LogFlush(std::io::Error),
/// Error obtaining lock on mutex.
MutexLockFailure(String),
/// Error in the logging of the metrics.
@@ -31,14 +31,14 @@ impl fmt::Display for LoggerError {
LoggerError::AlreadyInitialized => {
format!("{}", "Reinitialization of logger not allowed.")
}
LoggerError::CreateLogFile(ref e) => {
format!("Failed to create log file. Error: {}", e.description())
LoggerError::OpenFIFO(ref e) => {
format!("Failed to open pipe. Error: {}", e.description())
}
LoggerError::FileLogWrite(ref e) => {
format!("Failed to write to log file. Error: {}", e.description())
LoggerError::LogWrite(ref e) => {
format!("Failed to write logs. Error: {}", e.description())
}
LoggerError::FileLogFlush(ref e) => {
format!("Failed to flush log file. Error: {}", e.description())
LoggerError::LogFlush(ref e) => {
format!("Failed to flush logs. Error: {}", e.description())
}
LoggerError::MutexLockFailure(ref e) => format!("{}", e),
LoggerError::LogMetricFailure(ref e) => format!("{}", e),
@@ -78,29 +78,29 @@ mod tests {
assert!(
format!(
"{:?}",
LoggerError::FileLogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
).contains("FileLogWrite")
LoggerError::LogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
).contains("LogWrite")
);
assert_eq!(
format!(
"{}",
LoggerError::FileLogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
LoggerError::LogWrite(std::io::Error::new(ErrorKind::Interrupted, "write"))
),
"Failed to write to log file. Error: write"
"Failed to write logs. Error: write"
);

assert!(
format!(
"{:?}",
LoggerError::FileLogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
).contains("FileLogFlush")
LoggerError::LogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
).contains("LogFlush")
);
assert_eq!(
format!(
"{}",
LoggerError::FileLogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
LoggerError::LogFlush(std::io::Error::new(ErrorKind::Interrupted, "flush"))
),
"Failed to flush log file. Error: flush"
"Failed to flush logs. Error: flush"
);

assert!(
@@ -126,9 +126,9 @@ assert_eq!(
assert_eq!(
format!(
"{}",
LoggerError::LogMetricFailure("Failure in the logging of the metrics.".to_string())
LoggerError::LogMetricFailure("Failed to log metrics.".to_string())
),
"Failure in the logging of the metrics."
"Failed to log metrics."
);

assert!(format!("{:?}", LoggerError::LogMetricRateLimit).contains("LogMetricRateLimit"));
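
To show where the renamed variants might surface, here is a hedged sketch of
opening and writing a log FIFO; the enum only mirrors the three variants
touched by this diff, and the functions are illustrative, not the logger
crate's actual implementation.

use std::fs::{File, OpenOptions};
use std::io::Write;

// Mirrors only the renamed variants from logger/src/error.rs above.
#[derive(Debug)]
pub enum LoggerError {
    OpenFIFO(std::io::Error),
    LogWrite(std::io::Error),
    LogFlush(std::io::Error),
}

// Open the named pipe write-only; a missing pipe surfaces as OpenFIFO.
pub fn open_fifo(path: &str) -> Result<File, LoggerError> {
    OpenOptions::new()
        .write(true)
        .open(path)
        .map_err(LoggerError::OpenFIFO)
}

// Write one line and flush it, mapping each failure to its variant.
pub fn write_line(fifo: &mut File, line: &str) -> Result<(), LoggerError> {
    fifo.write_all(line.as_bytes())
        .map_err(LoggerError::LogWrite)?;
    fifo.write_all(b"\n").map_err(LoggerError::LogWrite)?;
    fifo.flush().map_err(LoggerError::LogFlush)
}

fn main() {
    // Hypothetical path; note that opening a FIFO write-only blocks until a
    // reader attaches, and fails with OpenFIFO if the pipe does not exist.
    match open_fifo("/tmp/firecracker.log.fifo") {
        Ok(mut fifo) => write_line(&mut fifo, "hello").unwrap(),
        Err(e) => println!("{:?}", e),
    }
}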