Skip to content

Commit

Permalink
Add background file logging (#2762)
Browse files Browse the repository at this point in the history
## Issue Addressed

Closes #1996 

## Proposed Changes

Run a second `Logger` via `sloggers` which logs to a file in the background with:
- separate `debug-level` for background and terminal logging
- the ability to limit log size
- rotation through a customizable number of log files
- an option to compress old log files (`.gz` format)

Add the following new CLI flags:
- `--logfile-debug-level`: The debug level of the log files
- `--logfile-max-size`: The maximum size of each log file
- `--logfile-max-number`: The number of old log files to store
- `--logfile-compress`: Whether to compress old log files

By default background logging uses the `debug` log level and saves logfiles to:
- Beacon Node:  `$HOME/.lighthouse/$network/beacon/logs/beacon.log`
- Validator Client:  `$HOME/.lighthouse/$network/validators/logs/validator.log`

Or, when using the `--datadir` flag:
`$datadir/beacon/logs/beacon.log` and `$datadir/validators/logs/validator.log`

Once rotated, old logs are stored like so: `beacon.log.1`, `beacon.log.2` etc. 
> Note: `beacon.log.1` is always newer than `beacon.log.2`.

## Additional Info

Currently the default value of `--logfile-max-size` is 200 (MB) and `--logfile-max-number` is 5.
This means that the maximum storage space that the logs will take up by default is 1.2GB. 
(200MB x 5 from old log files + <200MB the current logfile being written to)
Happy to adjust these default values to whatever people think is appropriate. 

It's also worth noting that when logging to a file, we lose our custom `slog` formatting. This means the logfile logs look like this:
```
Oct 27 16:02:50.305 INFO Lighthouse started, version: Lighthouse/v2.0.1-8edd9d4+, module: lighthouse:413
Oct 27 16:02:50.305 INFO Configured for network, name: prater, module: lighthouse:414
```
  • Loading branch information
macladson committed Nov 30, 2021
1 parent 6625aa4 commit fe75a0a
Show file tree
Hide file tree
Showing 16 changed files with 257 additions and 127 deletions.
3 changes: 2 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ rayon = "1.4.1"
serde = "1.0.116"
serde_derive = "1.0.116"
slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
slot_clock = { path = "../../common/slot_clock" }
eth2_hashing = "0.2.0"
eth2_ssz = "0.4.0"
Expand Down
2 changes: 1 addition & 1 deletion beacon_node/eth1/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ edition = "2018"
eth1_test_rig = { path = "../../testing/eth1_test_rig" }
toml = "0.5.6"
web3 = { version = "0.17.0", default-features = false, features = ["http-tls", "signing", "ws-tls-tokio"] }
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
environment = { path = "../../lighthouse/environment" }

[dependencies]
Expand Down
2 changes: 1 addition & 1 deletion beacon_node/network/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ authors = ["Sigma Prime <contact@sigmaprime.io>"]
edition = "2018"

[dev-dependencies]
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
genesis = { path = "../genesis" }
matches = "0.1.8"
exit-future = "0.2.0"
Expand Down
2 changes: 1 addition & 1 deletion beacon_node/store/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,5 @@ serde_derive = "1.0.116"
lazy_static = "1.4.0"
lighthouse_metrics = { path = "../../common/lighthouse_metrics" }
lru = "0.6.0"
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
directory = { path = "../../common/directory" }
13 changes: 13 additions & 0 deletions common/directory/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,19 @@ pub fn ensure_dir_exists<P: AsRef<Path>>(path: P) -> Result<(), String> {
Ok(())
}

/// If `arg` is present in `matches`, parses its value as a path.
///
/// Otherwise, falls back to the default directory for the `testnet` derived
/// from `matches` (the network directory under the default root dir in `$HOME`).
pub fn parse_path_or_default(matches: &ArgMatches, arg: &'static str) -> Result<PathBuf, String> {
    // Build the fallback `<root>/<network>` path first, then delegate the
    // "flag value or home-dir default" resolution to the shared clap helper.
    let default_dir = PathBuf::new()
        .join(DEFAULT_ROOT_DIR)
        .join(get_network_dir(matches));
    clap_utils::parse_path_with_default_in_home_dir(matches, arg, default_dir)
}

/// If `arg` is in `matches`, parses the value as a path.
///
/// Otherwise, attempts to find the default directory for the `testnet` from the `matches`
Expand Down
2 changes: 1 addition & 1 deletion common/logging/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@ slog = "2.5.2"
slog-term = "2.6.0"
lighthouse_metrics = { path = "../lighthouse_metrics" }
lazy_static = "1.4.0"
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
14 changes: 11 additions & 3 deletions lcli/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ mod transition_blocks;

use clap::{App, Arg, ArgMatches, SubCommand};
use clap_utils::parse_path_with_default_in_home_dir;
use environment::EnvironmentBuilder;
use environment::{EnvironmentBuilder, LoggerConfig};
use parse_ssz::run_parse_ssz;
use std::path::PathBuf;
use std::process;
Expand Down Expand Up @@ -584,8 +584,16 @@ fn run<T: EthSpec>(
let env = env_builder
.multi_threaded_tokio_runtime()
.map_err(|e| format!("should start tokio runtime: {:?}", e))?
.async_logger("trace", None)
.map_err(|e| format!("should start null logger: {:?}", e))?
.initialize_logger(LoggerConfig {
path: None,
debug_level: "trace",
logfile_debug_level: "trace",
log_format: None,
max_log_size: 0,
max_log_number: 0,
compression: false,
})
.map_err(|e| format!("should start logger: {:?}", e))?
.build()
.map_err(|e| format!("should build env: {:?}", e))?;

Expand Down
3 changes: 2 additions & 1 deletion lighthouse/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ spec-minimal = []
[dependencies]
beacon_node = { "path" = "../beacon_node" }
slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
types = { "path" = "../consensus/types" }
bls = { path = "../crypto/bls" }
eth2_hashing = "0.2.0"
Expand All @@ -41,6 +41,7 @@ serde = { version = "1.0.116", features = ["derive"] }
serde_json = "1.0.59"
task_executor = { path = "../common/task_executor" }
malloc_utils = { path = "../common/malloc_utils" }
directory = { path = "../common/directory" }

[dev-dependencies]
tempfile = "3.1.0"
Expand Down
3 changes: 1 addition & 2 deletions lighthouse/environment/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ edition = "2018"
[dependencies]
tokio = { version = "1.14.0", features = ["macros", "rt", "rt-multi-thread", "signal" ] }
slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = "2.0.2"
sloggers = { version = "2.1.1", features = ["json"] }
types = { path = "../../consensus/types" }
eth2_config = { path = "../../common/eth2_config" }
task_executor = { path = "../../common/task_executor" }
Expand All @@ -18,7 +18,6 @@ slog-async = "2.5.0"
futures = "0.3.7"
slog-json = "2.3.0"
exit-future = "0.2.0"
filesystem = {"path" = "../../common/filesystem"}

[target.'cfg(not(target_family = "unix"))'.dependencies]
ctrlc = { version = "3.1.6", features = ["termination"] }
176 changes: 85 additions & 91 deletions lighthouse/environment/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,17 +9,16 @@

use eth2_config::Eth2Config;
use eth2_network_config::Eth2NetworkConfig;
use filesystem::restrict_file_permissions;
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::{future, StreamExt};

use slog::{error, info, o, warn, Drain, Level, Logger};
use sloggers::{null::NullLoggerBuilder, Build};
use std::ffi::OsStr;
use std::fs::{rename as FsRename, OpenOptions};
use slog::{error, info, o, warn, Drain, Duplicate, Level, Logger};
use sloggers::{
file::FileLoggerBuilder, null::NullLoggerBuilder, types::Format, types::Severity, Build,
};
use std::fs::create_dir_all;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
use task_executor::{ShutdownReason, TaskExecutor};
use tokio::runtime::{Builder as RuntimeBuilder, Runtime};
use types::{EthSpec, MainnetEthSpec, MinimalEthSpec};
Expand All @@ -38,6 +37,21 @@ const LOG_CHANNEL_SIZE: usize = 2048;
/// The maximum time in seconds the client will wait for all internal tasks to shutdown.
const MAXIMUM_SHUTDOWN_TIME: u64 = 15;

/// Configuration for logging.
///
/// Drives both the terminal (stdout) logger and the optional background
/// file logger. Background file logging is disabled if one of:
/// - `path` == None,
/// - `max_log_size` == 0,
/// - `max_log_number` == 0,
pub struct LoggerConfig<'a> {
    /// Destination path of the background logfile; `None` disables file logging.
    pub path: Option<PathBuf>,
    /// Log level for terminal (stdout) output, e.g. "info", "debug", "trace".
    pub debug_level: &'a str,
    /// Log level for the background logfile; may be more verbose than `debug_level`.
    pub logfile_debug_level: &'a str,
    /// Optional output format (e.g. "JSON"); `None` selects the default text format.
    pub log_format: Option<&'a str>,
    /// Maximum size of each log file before rotation; 0 disables file logging.
    /// NOTE(review): the CLI documents this flag in MB — confirm the unit expected
    /// by the file-logger builder this value is handed to.
    pub max_log_size: u64,
    /// Number of rotated (old) log files to keep; 0 disables file logging.
    pub max_log_number: usize,
    /// Whether rotated log files are compressed (`.gz` format).
    pub compression: bool,
}

/// Builds an `Environment`.
pub struct EnvironmentBuilder<E: EthSpec> {
runtime: Option<Arc<Runtime>>,
Expand Down Expand Up @@ -93,118 +107,98 @@ impl<E: EthSpec> EnvironmentBuilder<E> {
Ok(self)
}

/// Specifies that the `slog` asynchronous logger should be used. Ideal for production.
///
/// Initializes the logger using the specified configuration.
/// The logger is "async" because it has a dedicated thread that accepts logs and then
/// asynchronously flushes them to stdout/files/etc. This means the thread that raised the log
/// does not have to wait for the logs to be flushed.
pub fn async_logger(
mut self,
debug_level: &str,
log_format: Option<&str>,
) -> Result<Self, String> {
// Setting up the initial logger format and building it.
let drain = if let Some(format) = log_format {
/// The logger can be duplicated and more detailed logs can be output to `logfile`.
/// Note that background file logging will spawn a new thread.
pub fn initialize_logger(mut self, config: LoggerConfig) -> Result<Self, String> {
// Setting up the initial logger format and building it.
let stdout_drain = if let Some(format) = config.log_format {
match format.to_uppercase().as_str() {
"JSON" => {
let drain = slog_json::Json::default(std::io::stdout()).fuse();
slog_async::Async::new(drain)
let stdout_drain = slog_json::Json::default(std::io::stdout()).fuse();
slog_async::Async::new(stdout_drain)
.chan_size(LOG_CHANNEL_SIZE)
.build()
}
_ => return Err("Logging format provided is not supported".to_string()),
}
} else {
let decorator = slog_term::TermDecorator::new().build();
let decorator =
logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH);
let drain = slog_term::FullFormat::new(decorator).build().fuse();
slog_async::Async::new(drain)
let stdout_decorator = slog_term::TermDecorator::new().build();
let stdout_decorator =
logging::AlignedTermDecorator::new(stdout_decorator, logging::MAX_MESSAGE_WIDTH);
let stdout_drain = slog_term::FullFormat::new(stdout_decorator).build().fuse();
slog_async::Async::new(stdout_drain)
.chan_size(LOG_CHANNEL_SIZE)
.build()
};

let drain = match debug_level {
"info" => drain.filter_level(Level::Info),
"debug" => drain.filter_level(Level::Debug),
"trace" => drain.filter_level(Level::Trace),
"warn" => drain.filter_level(Level::Warning),
"error" => drain.filter_level(Level::Error),
"crit" => drain.filter_level(Level::Critical),
let stdout_drain = match config.debug_level {
"info" => stdout_drain.filter_level(Level::Info),
"debug" => stdout_drain.filter_level(Level::Debug),
"trace" => stdout_drain.filter_level(Level::Trace),
"warn" => stdout_drain.filter_level(Level::Warning),
"error" => stdout_drain.filter_level(Level::Error),
"crit" => stdout_drain.filter_level(Level::Critical),
unknown => return Err(format!("Unknown debug-level: {}", unknown)),
};

self.log = Some(Logger::root(drain.fuse(), o!()));
Ok(self)
}
let stdout_logger = Logger::root(stdout_drain.fuse(), o!());

/// Sets the logger (and all child loggers) to log to a file.
pub fn log_to_file(
mut self,
path: PathBuf,
debug_level: &str,
log_format: Option<&str>,
) -> Result<Self, String> {
// Creating a backup if the logfile already exists.
if path.exists() {
let start = SystemTime::now();
let timestamp = start
.duration_since(UNIX_EPOCH)
.map_err(|e| e.to_string())?
.as_secs();
let file_stem = path
.file_stem()
.ok_or("Invalid file name")?
.to_str()
.ok_or("Failed to create str from filename")?;
let file_ext = path.extension().unwrap_or_else(|| OsStr::new(""));
let backup_name = format!("{}_backup_{}", file_stem, timestamp);
let backup_path = path.with_file_name(backup_name).with_extension(file_ext);
FsRename(&path, &backup_path).map_err(|e| e.to_string())?;
// Disable file logging if values set to 0.
if config.max_log_size == 0 || config.max_log_number == 0 {
self.log = Some(stdout_logger);
return Ok(self);
}

let file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open(&path)
.map_err(|e| format!("Unable to open logfile: {:?}", e))?;
// Disable file logging if no path is specified.
let path = match config.path {
Some(path) => path,
None => {
self.log = Some(stdout_logger);
return Ok(self);
}
};

restrict_file_permissions(&path)
.map_err(|e| format!("Unable to set file permissions for {:?}: {:?}", path, e))?;
// Ensure directories are created before the logfile.
if !path.exists() {
let mut dir = path.clone();
dir.pop();

// Setting up the initial logger format and building it.
let drain = if let Some(format) = log_format {
match format.to_uppercase().as_str() {
"JSON" => {
let drain = slog_json::Json::default(file).fuse();
slog_async::Async::new(drain)
.chan_size(LOG_CHANNEL_SIZE)
.build()
}
_ => return Err("Logging format provided is not supported".to_string()),
// Create the necessary directories for the correct service and network.
if !dir.exists() {
create_dir_all(dir).map_err(|e| format!("Unable to create directory: {:?}", e))?;
}
} else {
let decorator = slog_term::PlainDecorator::new(file);
let decorator =
logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH);
let drain = slog_term::FullFormat::new(decorator).build().fuse();
slog_async::Async::new(drain)
.chan_size(LOG_CHANNEL_SIZE)
.build()
};
}

let drain = match debug_level {
"info" => drain.filter_level(Level::Info),
"debug" => drain.filter_level(Level::Debug),
"trace" => drain.filter_level(Level::Trace),
"warn" => drain.filter_level(Level::Warning),
"error" => drain.filter_level(Level::Error),
"crit" => drain.filter_level(Level::Critical),
unknown => return Err(format!("Unknown debug-level: {}", unknown)),
let logfile_level = match config.logfile_debug_level {
"info" => Severity::Info,
"debug" => Severity::Debug,
"trace" => Severity::Trace,
"warn" => Severity::Warning,
"error" => Severity::Error,
"crit" => Severity::Critical,
unknown => return Err(format!("Unknown loglevel-debug-level: {}", unknown)),
};

let log = Logger::root(drain.fuse(), o!());
let file_logger = FileLoggerBuilder::new(&path)
.level(logfile_level)
.channel_size(LOG_CHANNEL_SIZE)
.format(match config.log_format {
Some("JSON") => Format::Json,
_ => Format::default(),
})
.rotate_size(config.max_log_size)
.rotate_keep(config.max_log_number)
.rotate_compress(config.compression)
.restrict_permissions(true)
.build()
.map_err(|e| format!("Unable to build file logger: {}", e))?;

let log = Logger::root(Duplicate::new(stdout_logger, file_logger).fuse(), o!());

info!(
log,
"Logging to file";
Expand Down
Loading

0 comments on commit fe75a0a

Please sign in to comment.