zenyx-engine/subcrates/zlog/src/lib.rs

pub mod config;
pub mod query;
#[cfg(test)]
mod tests;
use std::{
fmt,
fs::OpenOptions,
io::{BufWriter, Write},
str::FromStr,
sync::{
Arc, RwLock,
mpsc::{self, Sender},
},
thread,
time::SystemTime,
};
use config::LoggerConfig;
use query::LogQuery;
use tracing::{Event, Level, Subscriber, level_filters::LevelFilter, subscriber::DefaultGuard};
use tracing_subscriber::{
layer::{Context, Layer, SubscriberExt},
registry::LookupSpan,
util::SubscriberInitExt,
};
#[cfg(feature = "json")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "json")]
use serde_json::Value;
#[cfg(feature = "json")]
use chrono::{DateTime, Utc};
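/// Message passed to the writer threads: either a log record to emit or a
/// request to shut the thread down.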
#[derive(Debug, Clone, PartialEq, Eq)]
enum LogEvent {
Log(LogEntry),
Shutdown,
}
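/// A single captured log record: when it happened, its severity, and the
/// rendered message (plus any structured fields when the `json` feature is
/// enabled).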
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LogEntry {
timestamp: SystemTime,
level: Level,
message: String,
#[cfg(feature = "json")]
additional_fields: serde_json::Map<String, Value>,
}
impl PartialOrd for LogEntry {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for LogEntry {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // `SystemTime` implements `Ord`, so compare timestamps directly instead of
        // round-tripping through `UNIX_EPOCH` (which would panic on pre-epoch times).
        self.timestamp.cmp(&other.timestamp)
    }
}
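/// A [`Layer`] that copies every event into a shared in-memory buffer and
/// forwards it to each writer thread over an mpsc channel. Dropping the layer
/// tells those threads to shut down.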
struct BufferLayer {
log_entries: Arc<RwLock<Vec<LogEntry>>>,
senders: Vec<Sender<LogEvent>>,
}
impl BufferLayer {
pub fn new(log_entries: Arc<RwLock<Vec<LogEntry>>>, senders: Vec<Sender<LogEvent>>) -> Self {
Self {
log_entries,
senders,
}
}
}
impl Drop for BufferLayer {
    fn drop(&mut self) {
        // Ask every writer thread to shut down. Avoid panicking here: a panic
        // inside `drop` while the thread is already unwinding would abort the process.
        for tx in &self.senders {
            if let Err(e) = tx.send(LogEvent::Shutdown) {
                eprintln!("zlog: failed to send shutdown to writer thread: {e}");
            }
        }
        self.senders.clear();
    }
}
impl<S> Layer<S> for BufferLayer
where
S: Subscriber + for<'a> LookupSpan<'a>,
{
fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) {
let metadata = event.metadata();
let level = *metadata.level();
let timestamp = SystemTime::now();
#[cfg(feature = "json")]
let additional_fields = serde_json::Map::new();
let mut message = String::new();
let mut visitor = LogVisitor::new(&mut message);
event.record(&mut visitor);
let log_entry = LogEvent::Log(LogEntry {
timestamp,
level,
message,
#[cfg(feature = "json")]
additional_fields
});
if let LogEvent::Log(ref entry) = log_entry {
if let Ok(mut buf) = self.log_entries.write() {
buf.push(entry.clone());
}
}
for tx in &self.senders {
let _ = tx.send(log_entry.clone());
}
}
}
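/// Field visitor that flattens a `tracing` event into a single string: the
/// `message` field is written verbatim, every other field as `name=value`.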
struct LogVisitor<'msg> {
message: &'msg mut String,
}
impl<'msg> LogVisitor<'msg> {
fn new(message: &'msg mut String) -> Self {
Self { message }
}
}
impl tracing::field::Visit for LogVisitor<'_> {
fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn fmt::Debug) {
use std::fmt::Write;
if field.name() == "message" {
write!(self.message, "{:?}", value).unwrap();
} else {
write!(self.message, "{}={:?} ", field.name(), value).unwrap();
}
}
}
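/// Verbosity level used by [`LoggerConfig`]. Converts into a [`LevelFilter`]
/// and parses from the lowercase names accepted by the [`FromStr`] impl below.
///
/// Illustrative sketch (marked `ignore` because the surrounding crate paths
/// are assumed):
///
/// ```ignore
/// let level: LogLevel = "debug".parse().unwrap();
/// assert_eq!(level, LogLevel::Debug);
/// ```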
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum LogLevel {
Error,
#[default]
Info,
Warn,
Debug,
Trace,
}
impl From<LogLevel> for LevelFilter {
fn from(level: LogLevel) -> Self {
match level {
LogLevel::Error => LevelFilter::ERROR,
LogLevel::Info => LevelFilter::INFO,
LogLevel::Debug => LevelFilter::DEBUG,
LogLevel::Trace => LevelFilter::TRACE,
LogLevel::Warn => LevelFilter::WARN,
}
}
}
impl fmt::Display for LogLevel {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl FromStr for LogLevel {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"trace" => Ok(Self::Trace),
"debug" => Ok(Self::Debug),
"info" => Ok(Self::Info),
"error" => Ok(Self::Error),
"warn" => Ok(Self::Warn),
_ => Err(()),
}
}
}
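/// Handle returned by [`Logger::new`]. It keeps the `tracing` subscriber
/// guard alive, owns the shared in-memory log buffer, and joins the writer
/// threads when dropped.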
pub struct Logger {
    subscriber_guard: Option<DefaultGuard>,
log_entries: Arc<RwLock<Vec<LogEntry>>>,
_handles: Vec<thread::JoinHandle<()>>,
}
impl Logger {
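    /// Builds a logger from `config`: spawns one writer thread per enabled
    /// sink (stdout and/or file), installs a buffering `tracing` subscriber as
    /// the thread-local default, and returns a guard-holding [`Logger`].
    ///
    /// Minimal usage sketch (marked `ignore`; it assumes `LoggerConfig`
    /// provides a `Default` implementation, which is not shown in this file):
    ///
    /// ```ignore
    /// let logger = Logger::new(LoggerConfig::default());
    /// tracing::info!("logger initialized");
    /// // Inspect what has been buffered so far.
    /// let entries = logger.get_logs(LogQuery::All);
    /// ```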
pub fn new(config: LoggerConfig) -> Self {
let log_entries = Arc::new(RwLock::new(Vec::new()));
let mut senders = Vec::new();
let mut handles = Vec::new();
if config.log_to_stdout || config.log_use_json {
let (tx, rx) = mpsc::channel();
senders.push(tx);
let config_clone = config.clone();
let handle = thread::spawn(move || {
for msg in rx {
match msg {
LogEvent::Log(mut entry) => {
println!(
"{}",
format_entry(
&mut entry,
&config_clone
)
);
}
LogEvent::Shutdown => break,
}
}
});
handles.push(handle);
}
if config.log_to_file {
let (tx, rx) = mpsc::channel();
senders.push(tx);
let config_clone = config.clone();
let path = config.log_file_path.clone();
let handle = thread::spawn(move || {
let file = OpenOptions::new()
.append(true)
.create(true)
.open(&path)
.expect("Failed to open log file");
let mut writer = BufWriter::new(file);
for msg in rx {
match msg {
LogEvent::Log(mut entry) => {
let line = format_entry(&mut entry, &config_clone);
writeln!(writer, "{}", line).expect("Failed to write to log file");
writer.flush().expect("Failed to flush log file");
}
LogEvent::Shutdown => break,
}
}
});
handles.push(handle);
}
let buffer_layer = BufferLayer::new(log_entries.clone(), senders);
let mk_logger = |guard: DefaultGuard| Logger {
subscriber_guard: Some(guard),
log_entries,
_handles: handles,
};
if let Some(level) = config.log_level {
let subscriber = tracing_subscriber::registry()
.with(buffer_layer)
.with(LevelFilter::from(level));
let guard = subscriber.set_default();
mk_logger(guard)
} else {
let subscriber = tracing_subscriber::registry().with(buffer_layer);
let guard = subscriber.set_default();
mk_logger(guard)
}
}
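    /// Returns a snapshot of the buffered entries selected by `query`:
    /// everything for [`LogQuery::All`], or only entries at or after the given
    /// timestamp for [`LogQuery::From`].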
pub fn get_logs(&self, query: LogQuery) -> Vec<LogEntry> {
let guard = self.log_entries.read().unwrap();
match query {
LogQuery::All => guard.clone(),
LogQuery::From(t) => guard.iter().filter(|e| e.timestamp >= t).cloned().collect(),
}
}
}
impl Drop for Logger {
fn drop(&mut self) {
let _ = self.subscriber_guard.take();
let handles = std::mem::take(&mut self._handles);
for handle in handles {
handle.join().unwrap_or_else(|e| {
eprintln!("Logger thread panicked: {:?}", e);
});
}
}
}
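/// Renders `entry` according to `log_config`, delegating to the JSON or
/// plain-text formatter; returns an empty string if no sink is enabled.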
fn format_entry(entry: &mut LogEntry, log_config: &LoggerConfig) -> String {
    // `format_entry_json` only exists with the "json" feature, so the JSON
    // branch is gated on that feature as well.
    #[cfg(feature = "json")]
    if log_config.log_use_json {
        return format_entry_json(entry, log_config);
    }
    if log_config.log_to_stdout || log_config.log_to_file {
        format_entry_string(entry, log_config)
    } else {
        String::new()
    }
}
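/// Formats the entry as `LEVEL message`, using ANSI colour codes when
/// `stdout_color` is set (this also affects file output, since both sinks
/// share this formatter).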
fn format_entry_string(entry: &LogEntry, log_config: &LoggerConfig) -> String {
let lvl = if log_config.stdout_color {
match entry.level {
Level::ERROR => "\x1b[31mERROR\x1b[0m",
Level::WARN => "\x1b[33mWARN\x1b[0m",
Level::INFO => "\x1b[32mINFO\x1b[0m",
Level::DEBUG => "\x1b[36mDEBUG\x1b[0m",
Level::TRACE => "\x1b[34mTRACE\x1b[0m",
}
} else {
entry.level.as_str()
};
format!("{} {}", lvl, entry.message)
}
/// Formats the log entry as a json object ([`serde_json`]) and returns it as a [`String`]
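///
/// Only the parts enabled in `log_config` are included. Illustrative output
/// (values invented for the example; key order may differ):
///
/// ```text
/// {"timestamp":"2025-04-19T17:44:43+00:00","level":"INFO","message":"engine started"}
/// ```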
#[cfg(feature = "json")]
fn format_entry_json(entry: &mut LogEntry, log_config: &LoggerConfig) -> String {
let mut json_object = serde_json::Map::new();
if log_config.log_json_show_timestamp {
json_object.insert("timestamp".to_string(), Value::String(DateTime::<Utc>::from(entry.timestamp).to_rfc3339()));
}
if log_config.log_json_show_level {
json_object.insert("level".to_string(), Value::String(entry.level.to_string()));
}
if log_config.log_json_show_message {
json_object.insert("message".to_string(), Value::String(entry.message.to_string()));
}
if log_config.log_json_show_additional_fields {
json_object.append(&mut entry.additional_fields);
}
serde_json::to_string(&json_object).unwrap()
}