diff --git a/Cargo.lock b/Cargo.lock index 982f8733735..602bfc26199 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1383,7 +1383,6 @@ dependencies = [ "eth2_config", "eth2_network_config", "exit-future", - "filesystem", "futures", "logging", "slog", @@ -3214,6 +3213,7 @@ dependencies = [ "boot_node", "clap", "clap_utils", + "directory", "env_logger 0.9.0", "environment", "eth2_hashing 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -5414,6 +5414,7 @@ dependencies = [ "serde", "slog", "slog-async", + "slog-json", "slog-kvfilter", "slog-scope", "slog-stdlog", diff --git a/beacon_node/beacon_chain/Cargo.toml b/beacon_node/beacon_chain/Cargo.toml index 4951473f03c..0f68405db77 100644 --- a/beacon_node/beacon_chain/Cargo.toml +++ b/beacon_node/beacon_chain/Cargo.toml @@ -26,7 +26,7 @@ rayon = "1.4.1" serde = "1.0.116" serde_derive = "1.0.116" slog = { version = "2.5.2", features = ["max_level_trace"] } -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } slot_clock = { path = "../../common/slot_clock" } eth2_hashing = "0.2.0" eth2_ssz = "0.4.0" diff --git a/beacon_node/eth1/Cargo.toml b/beacon_node/eth1/Cargo.toml index 7103d1b487a..4e408aeb12f 100644 --- a/beacon_node/eth1/Cargo.toml +++ b/beacon_node/eth1/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" eth1_test_rig = { path = "../../testing/eth1_test_rig" } toml = "0.5.6" web3 = { version = "0.17.0", default-features = false, features = ["http-tls", "signing", "ws-tls-tokio"] } -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } environment = { path = "../../lighthouse/environment" } [dependencies] diff --git a/beacon_node/network/Cargo.toml b/beacon_node/network/Cargo.toml index f9d086701a0..d04668533e9 100644 --- a/beacon_node/network/Cargo.toml +++ b/beacon_node/network/Cargo.toml @@ -5,7 +5,7 @@ authors = ["Sigma Prime "] edition = "2018" [dev-dependencies] -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } genesis = { path = 
"../genesis" } matches = "0.1.8" exit-future = "0.2.0" diff --git a/beacon_node/store/Cargo.toml b/beacon_node/store/Cargo.toml index d641f87aafe..55ce2564550 100644 --- a/beacon_node/store/Cargo.toml +++ b/beacon_node/store/Cargo.toml @@ -23,5 +23,5 @@ serde_derive = "1.0.116" lazy_static = "1.4.0" lighthouse_metrics = { path = "../../common/lighthouse_metrics" } lru = "0.6.0" -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } directory = { path = "../../common/directory" } diff --git a/common/directory/src/lib.rs b/common/directory/src/lib.rs index aeb781d7a45..62b98aab94e 100644 --- a/common/directory/src/lib.rs +++ b/common/directory/src/lib.rs @@ -40,6 +40,19 @@ pub fn ensure_dir_exists>(path: P) -> Result<(), String> { Ok(()) } +/// If `arg` is in `matches`, parses the value as a path. +/// +/// Otherwise, attempts to find the default directory for the `testnet` from the `matches`. +pub fn parse_path_or_default(matches: &ArgMatches, arg: &'static str) -> Result { + clap_utils::parse_path_with_default_in_home_dir( + matches, + arg, + PathBuf::new() + .join(DEFAULT_ROOT_DIR) + .join(get_network_dir(matches)), + ) +} + /// If `arg` is in `matches`, parses the value as a path. 
/// /// Otherwise, attempts to find the default directory for the `testnet` from the `matches` diff --git a/common/logging/Cargo.toml b/common/logging/Cargo.toml index 0deb55a6b65..da1aa8b5296 100644 --- a/common/logging/Cargo.toml +++ b/common/logging/Cargo.toml @@ -12,4 +12,4 @@ slog = "2.5.2" slog-term = "2.6.0" lighthouse_metrics = { path = "../lighthouse_metrics" } lazy_static = "1.4.0" -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } diff --git a/lcli/src/main.rs b/lcli/src/main.rs index e6ebc03e16a..e9ce219cfdd 100644 --- a/lcli/src/main.rs +++ b/lcli/src/main.rs @@ -16,7 +16,7 @@ mod transition_blocks; use clap::{App, Arg, ArgMatches, SubCommand}; use clap_utils::parse_path_with_default_in_home_dir; -use environment::EnvironmentBuilder; +use environment::{EnvironmentBuilder, LoggerConfig}; use parse_ssz::run_parse_ssz; use std::path::PathBuf; use std::process; @@ -584,8 +584,16 @@ fn run( let env = env_builder .multi_threaded_tokio_runtime() .map_err(|e| format!("should start tokio runtime: {:?}", e))? - .async_logger("trace", None) - .map_err(|e| format!("should start null logger: {:?}", e))? + .initialize_logger(LoggerConfig { + path: None, + debug_level: "trace", + logfile_debug_level: "trace", + log_format: None, + max_log_size: 0, + max_log_number: 0, + compression: false, + }) + .map_err(|e| format!("should start logger: {:?}", e))? 
.build() .map_err(|e| format!("should build env: {:?}", e))?; diff --git a/lighthouse/Cargo.toml b/lighthouse/Cargo.toml index f3eec21d072..9725155e9c3 100644 --- a/lighthouse/Cargo.toml +++ b/lighthouse/Cargo.toml @@ -20,7 +20,7 @@ spec-minimal = [] [dependencies] beacon_node = { "path" = "../beacon_node" } slog = { version = "2.5.2", features = ["max_level_trace"] } -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } types = { "path" = "../consensus/types" } bls = { path = "../crypto/bls" } eth2_hashing = "0.2.0" @@ -41,6 +41,7 @@ serde = { version = "1.0.116", features = ["derive"] } serde_json = "1.0.59" task_executor = { path = "../common/task_executor" } malloc_utils = { path = "../common/malloc_utils" } +directory = { path = "../common/directory" } [dev-dependencies] tempfile = "3.1.0" diff --git a/lighthouse/environment/Cargo.toml b/lighthouse/environment/Cargo.toml index 29fb173032a..ee196e70f15 100644 --- a/lighthouse/environment/Cargo.toml +++ b/lighthouse/environment/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] tokio = { version = "1.14.0", features = ["macros", "rt", "rt-multi-thread", "signal" ] } slog = { version = "2.5.2", features = ["max_level_trace"] } -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } types = { path = "../../consensus/types" } eth2_config = { path = "../../common/eth2_config" } task_executor = { path = "../../common/task_executor" } @@ -18,7 +18,6 @@ slog-async = "2.5.0" futures = "0.3.7" slog-json = "2.3.0" exit-future = "0.2.0" -filesystem = {"path" = "../../common/filesystem"} [target.'cfg(not(target_family = "unix"))'.dependencies] ctrlc = { version = "3.1.6", features = ["termination"] } diff --git a/lighthouse/environment/src/lib.rs b/lighthouse/environment/src/lib.rs index f48433ec291..b6d2424672d 100644 --- a/lighthouse/environment/src/lib.rs +++ b/lighthouse/environment/src/lib.rs @@ -9,17 +9,16 @@ use eth2_config::Eth2Config; use 
eth2_network_config::Eth2NetworkConfig; -use filesystem::restrict_file_permissions; use futures::channel::mpsc::{channel, Receiver, Sender}; use futures::{future, StreamExt}; -use slog::{error, info, o, warn, Drain, Level, Logger}; -use sloggers::{null::NullLoggerBuilder, Build}; -use std::ffi::OsStr; -use std::fs::{rename as FsRename, OpenOptions}; +use slog::{error, info, o, warn, Drain, Duplicate, Level, Logger}; +use sloggers::{ + file::FileLoggerBuilder, null::NullLoggerBuilder, types::Format, types::Severity, Build, +}; +use std::fs::create_dir_all; use std::path::PathBuf; use std::sync::Arc; -use std::time::{SystemTime, UNIX_EPOCH}; use task_executor::{ShutdownReason, TaskExecutor}; use tokio::runtime::{Builder as RuntimeBuilder, Runtime}; use types::{EthSpec, MainnetEthSpec, MinimalEthSpec}; @@ -38,6 +37,21 @@ const LOG_CHANNEL_SIZE: usize = 2048; /// The maximum time in seconds the client will wait for all internal tasks to shutdown. const MAXIMUM_SHUTDOWN_TIME: u64 = 15; +/// Configuration for logging. +/// Background file logging is disabled if one of: +/// - `path` == None, +/// - `max_log_size` == 0, +/// - `max_log_number` == 0, +pub struct LoggerConfig<'a> { + pub path: Option, + pub debug_level: &'a str, + pub logfile_debug_level: &'a str, + pub log_format: Option<&'a str>, + pub max_log_size: u64, + pub max_log_number: usize, + pub compression: bool, +} + /// Builds an `Environment`. pub struct EnvironmentBuilder { runtime: Option>, @@ -93,118 +107,98 @@ impl EnvironmentBuilder { Ok(self) } - /// Specifies that the `slog` asynchronous logger should be used. Ideal for production. - /// + /// Initializes the logger using the specified configuration. /// The logger is "async" because it has a dedicated thread that accepts logs and then /// asynchronously flushes them to stdout/files/etc. This means the thread that raised the log /// does not have to wait for the logs to be flushed. 
- pub fn async_logger( - mut self, - debug_level: &str, - log_format: Option<&str>, - ) -> Result { - // Setting up the initial logger format and building it. - let drain = if let Some(format) = log_format { + /// The logger can be duplicated and more detailed logs can be output to `logfile`. + /// Note that background file logging will spawn a new thread. + pub fn initialize_logger(mut self, config: LoggerConfig) -> Result { + // Setting up the initial logger format and build it. + let stdout_drain = if let Some(format) = config.log_format { match format.to_uppercase().as_str() { "JSON" => { - let drain = slog_json::Json::default(std::io::stdout()).fuse(); - slog_async::Async::new(drain) + let stdout_drain = slog_json::Json::default(std::io::stdout()).fuse(); + slog_async::Async::new(stdout_drain) .chan_size(LOG_CHANNEL_SIZE) .build() } _ => return Err("Logging format provided is not supported".to_string()), } } else { - let decorator = slog_term::TermDecorator::new().build(); - let decorator = - logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH); - let drain = slog_term::FullFormat::new(decorator).build().fuse(); - slog_async::Async::new(drain) + let stdout_decorator = slog_term::TermDecorator::new().build(); + let stdout_decorator = + logging::AlignedTermDecorator::new(stdout_decorator, logging::MAX_MESSAGE_WIDTH); + let stdout_drain = slog_term::FullFormat::new(stdout_decorator).build().fuse(); + slog_async::Async::new(stdout_drain) .chan_size(LOG_CHANNEL_SIZE) .build() }; - let drain = match debug_level { - "info" => drain.filter_level(Level::Info), - "debug" => drain.filter_level(Level::Debug), - "trace" => drain.filter_level(Level::Trace), - "warn" => drain.filter_level(Level::Warning), - "error" => drain.filter_level(Level::Error), - "crit" => drain.filter_level(Level::Critical), + let stdout_drain = match config.debug_level { + "info" => stdout_drain.filter_level(Level::Info), + "debug" => stdout_drain.filter_level(Level::Debug), + 
"trace" => stdout_drain.filter_level(Level::Trace), + "warn" => stdout_drain.filter_level(Level::Warning), + "error" => stdout_drain.filter_level(Level::Error), + "crit" => stdout_drain.filter_level(Level::Critical), unknown => return Err(format!("Unknown debug-level: {}", unknown)), }; - self.log = Some(Logger::root(drain.fuse(), o!())); - Ok(self) - } + let stdout_logger = Logger::root(stdout_drain.fuse(), o!()); - /// Sets the logger (and all child loggers) to log to a file. - pub fn log_to_file( - mut self, - path: PathBuf, - debug_level: &str, - log_format: Option<&str>, - ) -> Result { - // Creating a backup if the logfile already exists. - if path.exists() { - let start = SystemTime::now(); - let timestamp = start - .duration_since(UNIX_EPOCH) - .map_err(|e| e.to_string())? - .as_secs(); - let file_stem = path - .file_stem() - .ok_or("Invalid file name")? - .to_str() - .ok_or("Failed to create str from filename")?; - let file_ext = path.extension().unwrap_or_else(|| OsStr::new("")); - let backup_name = format!("{}_backup_{}", file_stem, timestamp); - let backup_path = path.with_file_name(backup_name).with_extension(file_ext); - FsRename(&path, &backup_path).map_err(|e| e.to_string())?; + // Disable file logging if values set to 0. + if config.max_log_size == 0 || config.max_log_number == 0 { + self.log = Some(stdout_logger); + return Ok(self); } - let file = OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&path) - .map_err(|e| format!("Unable to open logfile: {:?}", e))?; + // Disable file logging if no path is specified. + let path = match config.path { + Some(path) => path, + None => { + self.log = Some(stdout_logger); + return Ok(self); + } + }; - restrict_file_permissions(&path) - .map_err(|e| format!("Unable to set file permissions for {:?}: {:?}", path, e))?; + // Ensure directories are created before the logfile. 
+ if !path.exists() { + let mut dir = path.clone(); + dir.pop(); - // Setting up the initial logger format and building it. - let drain = if let Some(format) = log_format { - match format.to_uppercase().as_str() { - "JSON" => { - let drain = slog_json::Json::default(file).fuse(); - slog_async::Async::new(drain) - .chan_size(LOG_CHANNEL_SIZE) - .build() - } - _ => return Err("Logging format provided is not supported".to_string()), + // Create the necessary directories for the correct service and network. + if !dir.exists() { + create_dir_all(dir).map_err(|e| format!("Unable to create directory: {:?}", e))?; } - } else { - let decorator = slog_term::PlainDecorator::new(file); - let decorator = - logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH); - let drain = slog_term::FullFormat::new(decorator).build().fuse(); - slog_async::Async::new(drain) - .chan_size(LOG_CHANNEL_SIZE) - .build() - }; + } - let drain = match debug_level { - "info" => drain.filter_level(Level::Info), - "debug" => drain.filter_level(Level::Debug), - "trace" => drain.filter_level(Level::Trace), - "warn" => drain.filter_level(Level::Warning), - "error" => drain.filter_level(Level::Error), - "crit" => drain.filter_level(Level::Critical), - unknown => return Err(format!("Unknown debug-level: {}", unknown)), + let logfile_level = match config.logfile_debug_level { + "info" => Severity::Info, + "debug" => Severity::Debug, + "trace" => Severity::Trace, + "warn" => Severity::Warning, + "error" => Severity::Error, + "crit" => Severity::Critical, + unknown => return Err(format!("Unknown logfile-debug-level: {}", unknown)), }; - let log = Logger::root(drain.fuse(), o!()); + let file_logger = FileLoggerBuilder::new(&path) + .level(logfile_level) + .channel_size(LOG_CHANNEL_SIZE) + .format(match config.log_format { + Some("JSON") => Format::Json, + _ => Format::default(), + }) + .rotate_size(config.max_log_size) + .rotate_keep(config.max_log_number) + 
.rotate_compress(config.compression) + .restrict_permissions(true) + .build() + .map_err(|e| format!("Unable to build file logger: {}", e))?; + + let log = Logger::root(Duplicate::new(stdout_logger, file_logger).fuse(), o!()); + info!( log, "Logging to file"; diff --git a/lighthouse/src/main.rs b/lighthouse/src/main.rs index 99775d71d54..31bfdff9d24 100644 --- a/lighthouse/src/main.rs +++ b/lighthouse/src/main.rs @@ -4,9 +4,10 @@ mod metrics; use beacon_node::{get_eth2_network_config, ProductionBeaconNode}; use clap::{App, Arg, ArgMatches}; -use clap_utils::flags::DISABLE_MALLOC_TUNING_FLAG; +use clap_utils::{flags::DISABLE_MALLOC_TUNING_FLAG, parse_optional}; +use directory::{parse_path_or_default, DEFAULT_BEACON_NODE_DIR, DEFAULT_VALIDATOR_DIR}; use env_logger::{Builder, Env}; -use environment::EnvironmentBuilder; +use environment::{EnvironmentBuilder, LoggerConfig}; use eth2_hashing::have_sha_extensions; use eth2_network_config::{Eth2NetworkConfig, DEFAULT_HARDCODED_NETWORK, HARDCODED_NET_NAMES}; use lighthouse_version::VERSION; @@ -80,23 +81,68 @@ fn main() { .long("logfile") .value_name("FILE") .help( - "File path where output will be written.", - ) - .takes_value(true), + "File path where the log file will be stored. Once it grows to the \ + value specified in `--logfile-max-size` a new log file is generated where \ + future logs are stored. 
\ + Once the number of log files exceeds the value specified in \ + `--logfile-max-number` the oldest log file will be overwritten.") + .takes_value(true) + .global(true), + ) + .arg( + Arg::with_name("logfile-debug-level") + .long("logfile-debug-level") + .value_name("LEVEL") + .help("The verbosity level used when emitting logs to the log file.") + .takes_value(true) + .possible_values(&["info", "debug", "trace", "warn", "error", "crit"]) + .default_value("debug") + .global(true), + ) + .arg( + Arg::with_name("logfile-max-size") + .long("logfile-max-size") + .value_name("SIZE") + .help( + "The maximum size (in MB) each log file can grow to before rotating. If set \ + to 0, background file logging is disabled.") + .takes_value(true) + .default_value("200") + .global(true), + ) + .arg( + Arg::with_name("logfile-max-number") + .long("logfile-max-number") + .value_name("COUNT") + .help( + "The maximum number of log files that will be stored. If set to 0, \ + background file logging is disabled.") + .takes_value(true) + .default_value("5") + .global(true), + ) + .arg( + Arg::with_name("logfile-compress") + .long("logfile-compress") + .help( + "If present, compress old log files. 
This can help reduce the space needed \ + to store old logs.") + .global(true), ) .arg( Arg::with_name("log-format") .long("log-format") .value_name("FORMAT") - .help("Specifies the format used for logging.") + .help("Specifies the log format used when emitting logs to the terminal.") .possible_values(&["JSON"]) - .takes_value(true), + .takes_value(true) + .global(true), ) .arg( Arg::with_name("debug-level") .long("debug-level") .value_name("LEVEL") - .help("The verbosity level for emitting logs.") + .help("Specifies the verbosity level used when emitting logs to the terminal.") .takes_value(true) .possible_values(&["info", "debug", "trace", "warn", "error", "crit"]) .global(true) @@ -257,15 +303,58 @@ fn run( let log_format = matches.value_of("log-format"); - let builder = if let Some(log_path) = matches.value_of("logfile") { - let path = log_path - .parse::() - .map_err(|e| format!("Failed to parse log path: {:?}", e))?; - environment_builder.log_to_file(path, debug_level, log_format)? - } else { - environment_builder.async_logger(debug_level, log_format)? + let logfile_debug_level = matches + .value_of("logfile-debug-level") + .ok_or("Expected --logfile-debug-level flag")?; + + let logfile_max_size: u64 = matches + .value_of("logfile-max-size") + .ok_or("Expected --logfile-max-size flag")? + .parse() + .map_err(|e| format!("Failed to parse `logfile-max-size`: {:?}", e))?; + + let logfile_max_number: usize = matches + .value_of("logfile-max-number") + .ok_or("Expected --logfile-max-number flag")? + .parse() + .map_err(|e| format!("Failed to parse `logfile-max-number`: {:?}", e))?; + + let logfile_compress = matches.is_present("logfile-compress"); + + // Construct the path to the log file. + let mut log_path: Option = parse_optional(matches, "logfile")?; + if log_path.is_none() { + log_path = match matches.subcommand_name() { + Some("beacon_node") => Some( + parse_path_or_default(matches, "datadir")? 
+ .join(DEFAULT_BEACON_NODE_DIR) + .join("logs") + .join("beacon") + .with_extension("log"), + ), + Some("validator_client") => Some( + parse_path_or_default(matches, "datadir")? + .join(DEFAULT_VALIDATOR_DIR) + .join("logs") + .join("validator") + .with_extension("log"), + ), + _ => None, + }; + } + + let logger_config = LoggerConfig { + path: log_path, + debug_level, + logfile_debug_level, + log_format, + max_log_size: logfile_max_size * 1_024 * 1_024, + max_log_number: logfile_max_number, + compression: logfile_compress, }; + let builder = environment_builder.initialize_logger(logger_config)?; + let mut environment = builder .multi_threaded_tokio_runtime()? .optional_eth2_network_config(Some(testnet_config))? diff --git a/slasher/Cargo.toml b/slasher/Cargo.toml index b51b38f0fd3..ee964a32323 100644 --- a/slasher/Cargo.toml +++ b/slasher/Cargo.toml @@ -21,7 +21,7 @@ safe_arith = { path = "../consensus/safe_arith" } serde = "1.0" serde_derive = "1.0" slog = "2.5.2" -sloggers = "2.0.2" +sloggers = { version = "2.1.1", features = ["json"] } tree_hash = "0.4.0" tree_hash_derive = "0.4.0" types = { path = "../consensus/types" } diff --git a/testing/simulator/src/eth1_sim.rs b/testing/simulator/src/eth1_sim.rs index 50727f42663..80fc755d52b 100644 --- a/testing/simulator/src/eth1_sim.rs +++ b/testing/simulator/src/eth1_sim.rs @@ -6,8 +6,8 @@ use eth1::{DEFAULT_CHAIN_ID, DEFAULT_NETWORK_ID}; use eth1_test_rig::GanacheEth1Instance; use futures::prelude::*; use node_test_rig::{ - environment::EnvironmentBuilder, testing_client_config, testing_validator_config, - ClientGenesis, ValidatorFiles, + environment::{EnvironmentBuilder, LoggerConfig}, + testing_client_config, testing_validator_config, ClientGenesis, ValidatorFiles, }; use rayon::prelude::*; use sensitive_url::SensitiveUrl; @@ -53,7 +53,15 @@ pub fn run_eth1_sim(matches: &ArgMatches) -> Result<(), String> { let log_format = None; let mut env = EnvironmentBuilder::minimal() - .async_logger(log_level, log_format)? 
+ .initialize_logger(LoggerConfig { + path: None, + debug_level: log_level, + logfile_debug_level: "debug", + log_format, + max_log_size: 0, + max_log_number: 0, + compression: false, + })? .multi_threaded_tokio_runtime()? .build()?; diff --git a/testing/simulator/src/no_eth1_sim.rs b/testing/simulator/src/no_eth1_sim.rs index 2eda987d497..5d2f0be72fd 100644 --- a/testing/simulator/src/no_eth1_sim.rs +++ b/testing/simulator/src/no_eth1_sim.rs @@ -2,8 +2,8 @@ use crate::{checks, LocalNetwork}; use clap::ArgMatches; use futures::prelude::*; use node_test_rig::{ - environment::EnvironmentBuilder, testing_client_config, testing_validator_config, - ClientGenesis, ValidatorFiles, + environment::{EnvironmentBuilder, LoggerConfig}, + testing_client_config, testing_validator_config, ClientGenesis, ValidatorFiles, }; use rayon::prelude::*; use std::cmp::max; @@ -45,7 +45,15 @@ pub fn run_no_eth1_sim(matches: &ArgMatches) -> Result<(), String> { let log_format = None; let mut env = EnvironmentBuilder::mainnet() - .async_logger(log_level, log_format)? + .initialize_logger(LoggerConfig { + path: None, + debug_level: log_level, + logfile_debug_level: "debug", + log_format, + max_log_size: 0, + max_log_number: 0, + compression: false, + })? .multi_threaded_tokio_runtime()? 
.build()?; diff --git a/testing/simulator/src/sync_sim.rs b/testing/simulator/src/sync_sim.rs index 9da52a35c99..e328938db1d 100644 --- a/testing/simulator/src/sync_sim.rs +++ b/testing/simulator/src/sync_sim.rs @@ -3,7 +3,8 @@ use crate::local_network::LocalNetwork; use clap::ArgMatches; use futures::prelude::*; use node_test_rig::{ - environment::EnvironmentBuilder, testing_client_config, ClientGenesis, ValidatorFiles, + environment::{EnvironmentBuilder, LoggerConfig}, + testing_client_config, ClientGenesis, ValidatorFiles, }; use node_test_rig::{testing_validator_config, ClientConfig}; use std::cmp::max; @@ -45,7 +46,15 @@ fn syncing_sim( log_format: Option<&str>, ) -> Result<(), String> { let mut env = EnvironmentBuilder::minimal() - .async_logger(log_level, log_format)? + .initialize_logger(LoggerConfig { + path: None, + debug_level: log_level, + logfile_debug_level: "debug", + log_format, + max_log_size: 0, + max_log_number: 0, + compression: false, + })? .multi_threaded_tokio_runtime()? .build()?;