Merge branch 'logging-cleanup' into 'next'

Replace println/dbg calls with corresponding macros from tracing crate

See merge request famedly/conduit!424
commit d7eaa9c5cc by Timo Kösters, 2022-12-18 06:57:23 +00:00
5 changed files with 68 additions and 88 deletions
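
The change is mechanical: ad-hoc println!/dbg! output is replaced by leveled tracing events, which respect the configured EnvFilter and can record values as structured fields instead of interpolating them into the message string. A minimal sketch of the before/after pattern (the function and names here are illustrative, not taken from this diff):

    use tracing::{debug, info};

    fn migrate_user(user: &str) {
        // Before: unconditionally printed to stdout, invisible to log filters.
        // println!("Migration: Creating user {}", user);

        // After: a leveled event; `?user` records the value as a structured
        // field via its Debug impl, so subscribers can filter and index it.
        info!(?user, "Migration: creating user");

        // A dbg!(&user) call becomes a debug-level event instead:
        debug!(?user, "Inspecting value");
    }

Unlike println!, these events produce output only when a tracing subscriber is installed and the active filter enables their level.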


@@ -135,7 +135,6 @@ type TupleOfBytes = (Vec<u8>, Vec<u8>);
 impl SqliteTable {
     fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result<Option<Vec<u8>>> {
-        //dbg!(&self.name);
         Ok(guard
             .prepare(format!("SELECT value FROM {} WHERE key = ?", self.name).as_str())?
             .query_row([key], |row| row.get(0))
@@ -143,7 +142,6 @@ impl SqliteTable {
     }

     fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Result<()> {
-        //dbg!(&self.name);
        guard.execute(
            format!(
                "INSERT OR REPLACE INTO {} (key, value) VALUES (?, ?)",
@@ -176,10 +174,7 @@ impl SqliteTable {
             statement
                 .query_map([], |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
                 .unwrap()
-                .map(move |r| {
-                    //dbg!(&name);
-                    r.unwrap()
-                }),
+                .map(move |r| r.unwrap()),
         );

         Box::new(PreparedStatementIterator {
@@ -276,10 +271,7 @@ impl KvTree for SqliteTable {
             statement
                 .query_map([from], |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
                 .unwrap()
-                .map(move |r| {
-                    //dbg!(&name);
-                    r.unwrap()
-                }),
+                .map(move |r| r.unwrap()),
         );
         Box::new(PreparedStatementIterator {
             iterator,
@@ -301,10 +293,7 @@ impl KvTree for SqliteTable {
             statement
                 .query_map([from], |row| Ok((row.get_unwrap(0), row.get_unwrap(1))))
                 .unwrap()
-                .map(move |r| {
-                    //dbg!(&name);
-                    r.unwrap()
-                }),
+                .map(move |r| r.unwrap()),
         );

         Box::new(PreparedStatementIterator {


@@ -258,7 +258,7 @@ impl KeyValueDatabase {
         };

         if config.max_request_size < 1024 {
-            eprintln!("ERROR: Max request size is less than 1KB. Please increase it.");
+            error!(?config.max_request_size, "Max request size is less than 1KB. Please increase it.");
         }

         let db_raw = Box::new(Self {
@@ -483,7 +483,7 @@ impl KeyValueDatabase {
                 for user in services().rooms.state_cache.room_members(&room?) {
                     let user = user?;
                     if user.server_name() != services().globals.server_name() {
-                        println!("Migration: Creating user {}", user);
+                        info!(?user, "Migration: creating user");
                         services().users.create(&user, None)?;
                     }
                 }
@@ -545,7 +545,6 @@ impl KeyValueDatabase {
                                   current_state: HashSet<_>,
                                   last_roomstates: &mut HashMap<_, _>| {
                 counter += 1;
-                println!("counter: {}", counter);
                 let last_roomsstatehash = last_roomstates.get(current_room);

                 let states_parents = last_roomsstatehash.map_or_else(
@@ -742,15 +741,13 @@ impl KeyValueDatabase {
                         new_key.extend_from_slice(word);
                         new_key.push(0xff);
                         new_key.extend_from_slice(pdu_id_count);
-                        println!("old {:?}", key);
-                        println!("new {:?}", new_key);
                         Some((new_key, Vec::new()))
                     })
                     .peekable();

                 while iter.peek().is_some() {
                     db.tokenids.insert_batch(&mut iter.by_ref().take(1000))?;
-                    println!("smaller batch done");
+                    debug!("Inserted smaller batch");
                 }

                 info!("Deleting starts");
@@ -760,7 +757,6 @@ impl KeyValueDatabase {
                     .iter()
                     .filter_map(|(key, _)| {
                         if key.starts_with(b"!") {
-                            println!("del {:?}", key);
                             Some(key)
                         } else {
                             None
@@ -769,7 +765,6 @@ impl KeyValueDatabase {
                     .collect();

                 for key in batch2 {
-                    println!("del");
                     db.tokenids.remove(&key)?;
                 }
@@ -945,7 +940,6 @@ impl KeyValueDatabase {
         #[cfg(unix)]
         use tokio::signal::unix::{signal, SignalKind};
-        use tracing::info;

         use std::time::{Duration, Instant};
@@ -961,23 +955,23 @@ impl KeyValueDatabase {
             #[cfg(unix)]
             tokio::select! {
                 _ = i.tick() => {
-                    info!("cleanup: Timer ticked");
+                    debug!("cleanup: Timer ticked");
                 }
                 _ = s.recv() => {
-                    info!("cleanup: Received SIGHUP");
+                    debug!("cleanup: Received SIGHUP");
                 }
             };
             #[cfg(not(unix))]
             {
                 i.tick().await;
-                info!("cleanup: Timer ticked")
+                debug!("cleanup: Timer ticked")
             }

             let start = Instant::now();
             if let Err(e) = services().globals.cleanup() {
                 error!("cleanup: Errored: {}", e);
             } else {
-                info!("cleanup: Finished in {:?}", start.elapsed());
+                debug!("cleanup: Finished in {:?}", start.elapsed());
             }
         }
     });


@@ -40,7 +40,7 @@ use tower_http::{
     trace::TraceLayer,
     ServiceBuilderExt as _,
 };
-use tracing::{info, warn};
+use tracing::{error, info, warn};
 use tracing_subscriber::{prelude::*, EnvFilter};

 pub use conduit::*; // Re-export everything from the library crate
@@ -68,27 +68,16 @@ async fn main() {
     let config = match raw_config.extract::<Config>() {
         Ok(s) => s,
         Err(e) => {
-            eprintln!("It looks like your config is invalid. The following error occured while parsing it: {}", e);
+            eprintln!(
+                "It looks like your config is invalid. The following error occurred: {}",
+                e
+            );
             std::process::exit(1);
         }
     };

     config.warn_deprecated();

-    if let Err(e) = KeyValueDatabase::load_or_create(config).await {
-        eprintln!(
-            "The database couldn't be loaded or created. The following error occured: {}",
-            e
-        );
-        std::process::exit(1);
-    };
-
-    let config = &services().globals.config;
-
-    let start = async {
-        run_server().await.unwrap();
-    };
-
     if config.allow_jaeger {
         opentelemetry::global::set_text_map_propagator(opentelemetry_jaeger::Propagator::new());
         let tracer = opentelemetry_jaeger::new_agent_pipeline()
@@ -113,35 +102,44 @@ async fn main() {
             .with(filter_layer)
             .with(telemetry);
         tracing::subscriber::set_global_default(subscriber).unwrap();
-        start.await;
-        println!("exporting remaining spans");
-        opentelemetry::global::shutdown_tracer_provider();
+    } else if config.tracing_flame {
+        let registry = tracing_subscriber::Registry::default();
+        let (flame_layer, _guard) =
+            tracing_flame::FlameLayer::with_file("./tracing.folded").unwrap();
+        let flame_layer = flame_layer.with_empty_samples(false);
+
+        let filter_layer = EnvFilter::new("trace,h2=off");
+
+        let subscriber = registry.with(filter_layer).with(flame_layer);
+        tracing::subscriber::set_global_default(subscriber).unwrap();
     } else {
         let registry = tracing_subscriber::Registry::default();
-        if config.tracing_flame {
-            let (flame_layer, _guard) =
-                tracing_flame::FlameLayer::with_file("./tracing.folded").unwrap();
-            let flame_layer = flame_layer.with_empty_samples(false);
-
-            let filter_layer = EnvFilter::new("trace,h2=off");
-
-            let subscriber = registry.with(filter_layer).with(flame_layer);
-            tracing::subscriber::set_global_default(subscriber).unwrap();
-            start.await;
-        } else {
-            let fmt_layer = tracing_subscriber::fmt::Layer::new();
-            let filter_layer = match EnvFilter::try_new(&config.log) {
-                Ok(s) => s,
-                Err(e) => {
-                    eprintln!("It looks like your log config is invalid. The following error occurred: {}", e);
-                    EnvFilter::try_new("warn").unwrap()
-                }
-            };
-
-            let subscriber = registry.with(filter_layer).with(fmt_layer);
-            tracing::subscriber::set_global_default(subscriber).unwrap();
-            start.await;
-        }
+        let fmt_layer = tracing_subscriber::fmt::Layer::new();
+        let filter_layer = match EnvFilter::try_new(&config.log) {
+            Ok(s) => s,
+            Err(e) => {
+                eprintln!("It looks like your config is invalid. The following error occured while parsing it: {}", e);
+                EnvFilter::try_new("warn").unwrap()
+            }
+        };
+
+        let subscriber = registry.with(filter_layer).with(fmt_layer);
+        tracing::subscriber::set_global_default(subscriber).unwrap();
+    }
+
+    info!("Loading database");
+    if let Err(error) = KeyValueDatabase::load_or_create(config).await {
+        error!(?error, "The database couldn't be loaded or created");
+
+        std::process::exit(1);
+    };
+    let config = &services().globals.config;
+
+    info!("Starting server");
+    run_server().await.unwrap();
+
+    if config.allow_jaeger {
+        opentelemetry::global::shutdown_tracer_provider();
     }
 }


@@ -6,7 +6,7 @@ use std::{
 pub use data::Data;
 use ruma::{api::client::error::ErrorKind, EventId, RoomId};
-use tracing::log::warn;
+use tracing::{debug, error, warn};

 use crate::{services, Error, Result};
@@ -88,10 +88,10 @@ impl Service {
                         .rooms
                         .auth_chain
                         .cache_auth_chain(vec![sevent_id], Arc::clone(&auth_chain))?;
-                    println!(
-                        "cache missed event {} with auth chain len {}",
-                        event_id,
-                        auth_chain.len()
+                    debug!(
+                        event_id = ?event_id,
+                        chain_length = ?auth_chain.len(),
+                        "Cache missed event"
                     );
                     chunk_cache.extend(auth_chain.iter());
@@ -101,11 +101,11 @@ impl Service {
                         }
                     };
                 }
-                println!(
-                    "chunk missed with len {}, event hits2: {}, misses2: {}",
-                    chunk_cache.len(),
-                    hits2,
-                    misses2
+                debug!(
+                    chunk_cache_length = ?chunk_cache.len(),
+                    hits = ?hits2,
+                    misses = ?misses2,
+                    "Chunk missed",
                 );
                 let chunk_cache = Arc::new(chunk_cache);
                 services()
@@ -115,11 +115,11 @@ impl Service {
             full_auth_chain.extend(chunk_cache.iter());
         }

-        println!(
-            "total: {}, chunk hits: {}, misses: {}",
-            full_auth_chain.len(),
-            hits,
-            misses
+        debug!(
+            chain_length = ?full_auth_chain.len(),
+            hits = ?hits,
+            misses = ?misses,
+            "Auth chain stats",
         );

         Ok(full_auth_chain
@@ -151,10 +151,10 @@ impl Service {
                 }
             }
             Ok(None) => {
-                warn!("Could not find pdu mentioned in auth events: {}", event_id);
+                warn!(?event_id, "Could not find pdu mentioned in auth events");
             }
-            Err(e) => {
-                warn!("Could not load event in auth chain: {} {}", event_id, e);
+            Err(error) => {
+                error!(?event_id, ?error, "Could not load event in auth chain");
             }
         }
     }


@@ -839,8 +839,8 @@ impl Service {
         info!("Preparing for stateres to derive new room state");
         let mut extremity_sstatehashes = HashMap::new();

-        info!("Loading extremities");
-        for id in dbg!(&extremities) {
+        info!(?extremities, "Loading extremities");
+        for id in &extremities {
             match services().rooms.timeline.get_pdu(id)? {
                 Some(leaf_pdu) => {
                     extremity_sstatehashes.insert(
@@ -1273,7 +1273,6 @@ impl Service {
             // This return value is the key used for sorting events,
             // events are then sorted by power level, time,
             // and lexically by event_id.
-            println!("{}", event_id);
             Ok((
                 int!(0),
                 MilliSecondsSinceUnixEpoch(