Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(logger): use RUST_LOG-like EnvFilter for logging #4837

Merged
merged 5 commits into from
Jul 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -520,7 +520,7 @@ impl Iroha {
loop {
tokio::select! {
Ok(()) = log_level_update.changed() => {
let value = *log_level_update.borrow_and_update();
let value = log_level_update.borrow_and_update().clone();
if let Err(error) = logger.reload_level(value).await {
iroha_logger::error!("Failed to reload log level: {error}");
};
Expand Down Expand Up @@ -615,7 +615,7 @@ pub fn read_config_and_genesis(

validate_config(&config)?;

let logger_config = LoggerInitConfig::new(config.logger, args.terminal_colors);
let logger_config = LoggerInitConfig::new(config.logger.clone(), args.terminal_colors);

Ok((config, logger_config, genesis))
}
Expand Down
2 changes: 1 addition & 1 deletion client/benches/torii.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ fn query_requests(criterion: &mut Criterion) {
rt.block_on(builder.start_with_peer(&mut peer));
rt.block_on(async {
iroha_logger::test_logger()
.reload_level(iroha::data_model::Level::ERROR)
.reload_level(iroha::data_model::Level::ERROR.into())
.await
.unwrap()
});
Expand Down
2 changes: 1 addition & 1 deletion client/src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1030,7 +1030,7 @@ impl Client {
///
/// # Errors
/// If sending request or decoding fails
pub fn set_config(&self, dto: ConfigDTO) -> Result<()> {
pub fn set_config(&self, dto: &ConfigDTO) -> Result<()> {
let body = serde_json::to_vec(&dto).wrap_err(format!("Failed to serialize {dto:?}"))?;
let url = self
.torii_url
Expand Down
2 changes: 1 addition & 1 deletion config/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ iroha_genesis = { workspace = true }

error-stack = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = ["fmt", "ansi"] }
tracing-subscriber = { workspace = true, features = ["fmt", "ansi", "env-filter"] }
url = { workspace = true, features = ["serde"] }

serde = { workspace = true, features = ["derive"] }
Expand Down
22 changes: 14 additions & 8 deletions config/src/client_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,15 @@
// configuration-related crate, this part should be re-written in a clean way.
// Track configuration refactoring here: https://github.com/hyperledger/iroha/issues/2585

use iroha_data_model::Level;
use serde::{Deserialize, Serialize};

use crate::parameters::actual::{Logger as BaseLogger, Root as BaseConfig};
use crate::{
logger::Directives,
parameters::actual::{Logger as BaseLogger, Root as BaseConfig},
};

/// Subset of [`super::iroha`] configuration.
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ConfigDTO {
#[allow(missing_docs)]
pub logger: Logger,
Expand All @@ -30,27 +32,31 @@ impl From<&'_ BaseConfig> for ConfigDTO {
}

/// Subset of [`super::logger`] configuration.
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Logger {
#[allow(missing_docs)]
pub level: Level,
pub level: Directives,
}

impl From<&'_ BaseLogger> for Logger {
fn from(value: &'_ BaseLogger) -> Self {
Self { level: value.level }
Self {
level: value.level.clone(),
}
}
}

#[cfg(test)]
mod test {
use iroha_data_model::Level;

use super::*;

#[test]
fn snapshot_serialized_form() {
let value = ConfigDTO {
logger: Logger {
level: Level::TRACE,
level: Level::TRACE.into(),
},
};

Expand All @@ -62,7 +68,7 @@ mod test {
let expected = expect_test::expect![[r#"
{
"logger": {
"level": "TRACE"
"level": "trace"
}
}"#]];
expected.assert_eq(&actual);
Expand Down
79 changes: 68 additions & 11 deletions config/src/logger.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,13 @@
//! Configuration utils related to Logger specifically.

use std::{
fmt::{Debug, Display},
str::FromStr,
};

pub use iroha_data_model::Level;
use serde_with::{DeserializeFromStr, SerializeDisplay};

/// Convert [`Level`] into [`tracing::Level`]
pub fn into_tracing_level(level: Level) -> tracing::Level {
match level {
Level::TRACE => tracing::Level::TRACE,
Level::DEBUG => tracing::Level::DEBUG,
Level::INFO => tracing::Level::INFO,
Level::WARN => tracing::Level::WARN,
Level::ERROR => tracing::Level::ERROR,
}
}
use tracing_subscriber::filter::Directive;

/// Reflects formatters in [`tracing_subscriber::fmt::format`]
#[derive(
Expand Down Expand Up @@ -40,6 +35,68 @@ pub enum Format {
Json,
}

/// Ordered list of `tracing` filtering directives, parsed from a
/// `RUST_LOG`-style comma-separated string (e.g. `info,iroha_core=debug`).
#[derive(Clone, DeserializeFromStr, SerializeDisplay, PartialEq, Eq)]
pub struct Directives(Vec<Directive>);

impl FromStr for Directives {
type Err = tracing_subscriber::filter::ParseError;

fn from_str(dirs: &str) -> std::result::Result<Self, Self::Err> {
if dirs.is_empty() {
return Ok(Self(Vec::new()));
}
let directives = dirs
.split(',')
.filter(|s| !s.is_empty())
.map(Directive::from_str)
.collect::<std::result::Result<Vec<_>, _>>()?;
Ok(Self(directives))
}
}

impl Display for Directives {
    /// Renders the directives comma-separated, matching the `RUST_LOG` syntax
    /// accepted by [`FromStr`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for (i, directive) in self.0.iter().enumerate() {
            // Separator goes before every directive except the first.
            if i > 0 {
                f.write_str(",")?;
            }
            write!(f, "{directive}")?;
        }
        Ok(())
    }
}

impl Debug for Directives {
    /// Debug output mirrors the `Display` (comma-separated) form, as the
    /// directive list is most readable in its textual representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Pass `self` directly: `&self` here would be `&&Directives` and only
        // compiles through the blanket `impl Display for &T` — a needless
        // extra level of indirection.
        Display::fmt(self, f)
    }
}

impl From<Level> for Directives {
fn from(level: Level) -> Self {
Directives(Vec::from([into_tracing_level(level).into()]))
}
}

impl Default for Directives {
    /// Defaults to a single global `info` directive.
    fn default() -> Self {
        Level::INFO.into()
    }
}

/// Convert [`Level`] into [`tracing::Level`]
fn into_tracing_level(level: Level) -> tracing::Level {
match level {
Level::TRACE => tracing::Level::TRACE,
Level::DEBUG => tracing::Level::DEBUG,
Level::INFO => tracing::Level::INFO,
Level::WARN => tracing::Level::WARN,
Level::ERROR => tracing::Level::ERROR,
}
}

#[cfg(test)]
pub mod tests {
use crate::logger::Format;
Expand Down
8 changes: 4 additions & 4 deletions config/src/parameters/user.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,14 @@ use iroha_config_base::{
ReadConfig, WithOrigin,
};
use iroha_crypto::{PrivateKey, PublicKey};
use iroha_data_model::{peer::PeerId, ChainId, Level};
use iroha_data_model::{peer::PeerId, ChainId};
use iroha_primitives::{addr::SocketAddr, unique_vec::UniqueVec};
use serde::Deserialize;
use url::Url;

use crate::{
kura::InitMode as KuraInitMode,
logger::Format as LoggerFormat,
logger::{Directives, Format as LoggerFormat},
parameters::{actual, defaults},
snapshot::Mode as SnapshotMode,
};
Expand Down Expand Up @@ -346,14 +346,14 @@ impl Queue {
}
}

#[derive(Debug, Clone, Copy, Default, ReadConfig)]
#[derive(Debug, Clone, Default, ReadConfig)]
pub struct Logger {
/// Level of logging verbosity
// TODO: parse user provided value in a case insensitive way,
// because `format` is set in lowercase, and `LOG_LEVEL=INFO` + `LOG_FORMAT=pretty`
// looks inconsistent
#[config(env = "LOG_LEVEL", default)]
pub level: Level,
pub level: Directives,
/// Output format
#[config(env = "LOG_FORMAT", default)]
pub format: LoggerFormat,
Expand Down
2 changes: 1 addition & 1 deletion config/tests/fixtures.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ fn minimal_config_snapshot() {
idle_time: 30s,
},
logger: Logger {
level: INFO,
level: info,
format: Full,
},
queue: Queue {
Expand Down
21 changes: 11 additions & 10 deletions core/src/kiso.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@
use eyre::Result;
use iroha_config::{
client_api::{ConfigDTO, Logger as LoggerDTO},
logger::Directives,
parameters::actual::Root as Config,
};
use iroha_logger::Level;
use tokio::sync::{mpsc, oneshot, watch};

const DEFAULT_CHANNEL_SIZE: usize = 32;
Expand All @@ -29,7 +29,7 @@ impl KisoHandle {
/// Spawn a new actor
pub fn new(state: Config) -> Self {
let (actor_sender, actor_receiver) = mpsc::channel(DEFAULT_CHANNEL_SIZE);
let (log_level_update, _) = watch::channel(state.logger.level);
let (log_level_update, _) = watch::channel(state.logger.level.clone());
let mut actor = Actor {
handle: actor_receiver,
state,
Expand Down Expand Up @@ -75,7 +75,7 @@ impl KisoHandle {
///
/// # Errors
/// If communication with actor fails.
pub async fn subscribe_on_log_level(&self) -> Result<watch::Receiver<Level>, Error> {
pub async fn subscribe_on_log_level(&self) -> Result<watch::Receiver<Directives>, Error> {
let (tx, rx) = oneshot::channel();
let msg = Message::SubscribeOnLogLevel { respond_to: tx };
let _ = self.actor.send(msg).await;
Expand All @@ -93,7 +93,7 @@ enum Message {
respond_to: oneshot::Sender<Result<(), Error>>,
},
SubscribeOnLogLevel {
respond_to: oneshot::Sender<watch::Receiver<Level>>,
respond_to: oneshot::Sender<watch::Receiver<Directives>>,
},
}

Expand All @@ -111,7 +111,7 @@ struct Actor {
// future dynamic parameter, it will require its own `subscribe_on_<field>` function in [`KisoHandle`],
// new channel here, and new [`Message`] variant. If boilerplate expands, a more general solution will be
// required. However, as of now a single manually written implementation seems optimal.
log_level_update: watch::Sender<Level>,
log_level_update: watch::Sender<Directives>,
}

impl Actor {
Expand All @@ -134,7 +134,7 @@ impl Actor {
},
respond_to,
} => {
let _ = self.log_level_update.send(new_level);
let _ = self.log_level_update.send(new_level.clone());
self.state.logger.level = new_level;

let _ = respond_to.send(Ok(()));
Expand All @@ -155,6 +155,7 @@ mod tests {
client_api::{ConfigDTO, Logger as LoggerDTO},
parameters::{actual::Root, user::Root as UserConfig},
};
use iroha_logger::Level;

use super::*;

Expand All @@ -175,7 +176,7 @@ mod tests {
const WATCH_LAG_MILLIS: u64 = 30;

let mut config = test_config();
config.logger.level = INIT_LOG_LEVEL;
config.logger.level = INIT_LOG_LEVEL.into();
let kiso = KisoHandle::new(config);

let mut recv = kiso
Expand All @@ -189,7 +190,7 @@ mod tests {

kiso.update_with_dto(ConfigDTO {
logger: LoggerDTO {
level: NEW_LOG_LEVEL,
level: NEW_LOG_LEVEL.into(),
},
})
.await
Expand All @@ -200,7 +201,7 @@ mod tests {
.expect("Watcher should resolve within timeout")
.expect("Watcher should not be closed");

let value = *recv.borrow_and_update();
assert_eq!(value, NEW_LOG_LEVEL);
let value = recv.borrow_and_update().clone();
assert_eq!(value, NEW_LOG_LEVEL.into());
}
}
2 changes: 1 addition & 1 deletion genesis/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ fn build_transactions(
let parameters = build_transaction(
parameters
.into_iter()
.map(SetParameter)
.map(SetParameter::new)
.map(InstructionBox::from)
.collect(),
chain_id.clone(),
Expand Down
2 changes: 1 addition & 1 deletion logger/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ serde_json = { workspace = true }
tracing = { workspace = true }
tracing-core = "0.1.32"
tracing-futures = { version = "0.2.5", default-features = false, features = ["std-future", "std"] }
tracing-subscriber = { workspace = true, features = ["fmt", "ansi", "json"] }
tracing-subscriber = { workspace = true, features = ["fmt", "ansi", "json", "env-filter"] }
tokio = { workspace = true, features = ["sync", "rt", "macros"] }
console-subscriber = { version = "0.3.0", optional = true }
once_cell = { workspace = true }
Expand Down
Loading
Loading