diff --git a/Cargo.lock b/Cargo.lock index 808ba4ef..01d2ba27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -137,9 +137,9 @@ checksum = "383d29d513d8764dcdc42ea295d979eb99c3c9f00607b3692cf68a431f7dca72" [[package]] name = "bindgen" -version = "0.57.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d" +checksum = "0b2ce639ee22f41a6ea0a3061e9bea9f690cf0c6ffc1ada0a3a599778f99ccba" dependencies = [ "bitflags", "cexpr", @@ -160,6 +160,18 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +[[package]] +name = "bitvec" +version = "0.19.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8942c8d352ae1838c9dda0b0ca2ab657696ef2232a20147cf1b30ae1a9cb4321" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + [[package]] name = "blake2b_simd" version = "0.5.11" @@ -215,9 +227,9 @@ dependencies = [ [[package]] name = "cexpr" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" +checksum = "db507a7679252d2276ed0dd8113c6875ec56d3089f9225b2b42c30cc1f8e5c89" dependencies = [ "nom", ] @@ -269,7 +281,6 @@ dependencies = [ "http", "image", "jsonwebtoken", - "log", "lru-cache", "num_cpus", "opentelemetry", @@ -292,8 +303,10 @@ dependencies = [ "serde_yaml", "sled", "thiserror", + "threadpool", "tokio", "tracing", + "tracing-flame", "tracing-opentelemetry", "tracing-subscriber", "trust-dns-resolver", @@ -326,9 +339,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "cookie" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdf8865bac3d9a3bde5bde9088ca431b11f5d37c7a578b8086af77248b76627" +checksum = "d5f1c7727e460397e56abc4bddc1d49e07a1ad78fc98eb2e1c8f032a58a2f80d" dependencies = [ "percent-encoding", "time 0.2.27", @@ -564,9 +577,9 @@ checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" [[package]] name = "ed25519" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d0860415b12243916284c67a9be413e044ee6668247b99ba26d94b2bc06c8f6" +checksum = "4620d40f6d2601794401d6dd95a5cf69b6c157852539470eeda433a99b3c0efc" dependencies = [ "signature", ] @@ -677,6 +690,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "futures" version = "0.3.15" @@ -952,9 +971,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.10" +version = "0.14.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7728a72c4c7d72665fde02204bcbd93b247721025b222ef78606f14513e0fd03" +checksum = "0b61cf2d1aebcf6e6352c97b81dc2244ca29194be1b276f5d8ad5c6330fffb11" dependencies = [ "bytes", "futures-channel", @@ -1177,9 +1196,9 @@ dependencies = [ [[package]] name = "librocksdb-sys" -version = "6.17.3" +version = "6.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5da125e1c0f22c7cae785982115523a0738728498547f415c9054cb17c7e89f9" +checksum = 
"c309a9d2470844aceb9a4a098cf5286154d20596868b75a6b36357d2bb9ca25d" dependencies = [ "bindgen", "cc", @@ -1346,10 +1365,12 @@ dependencies = [ [[package]] name = "nom" -version = "5.1.2" +version = "6.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af" +checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" dependencies = [ + "bitvec", + "funty", "memchr", "version_check", ] @@ -1445,11 +1466,12 @@ checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" [[package]] name = "opentelemetry" -version = "0.12.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "514d24875c140ed269eecc2d1b56d7b71b573716922a763c317fb1b1b4b58f15" +checksum = "ff27b33e30432e7b9854936693ca103d8591b0501f7ae9f633de48cda3bf2a67" dependencies = [ "async-trait", + "crossbeam-channel", "futures", "js-sys", "lazy_static", @@ -1461,9 +1483,9 @@ dependencies = [ [[package]] name = "opentelemetry-jaeger" -version = "0.11.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5677b3a361784aff6e2b1b30dbdb5f85f4ec57ff2ced41d9a481ad70a9d0b57" +checksum = "09a9fc8192722e7daa0c56e59e2336b797122fb8598383dcb11c8852733b435c" dependencies = [ "async-trait", "lazy_static", @@ -1569,18 +1591,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7509cc106041c40a4518d2af7a61530e1eed0e6285296a3d8c5472806ccc4a4" +checksum = "576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c950132583b500556b1efd71d45b319029f2b71518d979fcc208e16b42426f" +checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389" dependencies = [ "proc-macro2", "quote", @@ -1601,9 +1623,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkcs8" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d156817ae0125e8aa5067710b0db24f0984830614f99875a70aa5e3b74db69" +checksum = "87bb2d5c68b7505a3a89eb2f3583a4d56303863005226c2ef99319930a262be4" dependencies = [ "der", "spki", @@ -1703,6 +1725,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + [[package]] name = "rand" version = "0.7.3" @@ -2015,7 +2043,7 @@ dependencies = [ [[package]] name = "ruma" version = "0.2.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "assign", "js_int", @@ -2036,7 +2064,7 @@ dependencies = [ [[package]] name = "ruma-api" version = "0.17.1" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = 
"git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "bytes", "http", @@ -2052,7 +2080,7 @@ dependencies = [ [[package]] name = "ruma-api-macros" version = "0.17.1" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2063,7 +2091,7 @@ dependencies = [ [[package]] name = "ruma-appservice-api" version = "0.3.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "ruma-api", "ruma-common", @@ -2077,7 +2105,7 @@ dependencies = [ [[package]] name = "ruma-client-api" version = "0.11.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "assign", "bytes", @@ -2097,7 +2125,7 @@ dependencies = [ [[package]] name = "ruma-common" version = "0.5.4" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "indexmap", "js_int", @@ -2112,7 +2140,7 @@ dependencies = [ [[package]] name = "ruma-events" version = "0.23.2" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "indoc", "js_int", @@ -2128,7 +2156,7 @@ dependencies = [ [[package]] name = "ruma-events-macros" version = "0.23.2" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2139,7 +2167,7 @@ dependencies = [ [[package]] name = "ruma-federation-api" version = "0.2.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "js_int", "ruma-api", @@ -2154,7 +2182,7 @@ dependencies = [ [[package]] name = "ruma-identifiers" version = "0.19.4" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "paste", "rand 0.8.4", @@ -2168,7 +2196,7 @@ dependencies = [ [[package]] name = "ruma-identifiers-macros" version = "0.19.4" -source = 
"git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "quote", "ruma-identifiers-validation", @@ -2178,12 +2206,12 @@ dependencies = [ [[package]] name = "ruma-identifiers-validation" version = "0.4.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" [[package]] name = "ruma-identity-service-api" version = "0.2.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "js_int", "ruma-api", @@ -2196,7 +2224,7 @@ dependencies = [ [[package]] name = "ruma-push-gateway-api" version = "0.2.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "js_int", "ruma-api", @@ -2211,7 +2239,7 @@ dependencies = [ [[package]] name = "ruma-serde" version = "0.4.1" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "bytes", "form_urlencoded", @@ -2225,7 +2253,7 @@ dependencies = [ [[package]] name = "ruma-serde-macros" version = "0.4.1" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2236,7 +2264,7 @@ dependencies = [ [[package]] name = "ruma-signatures" version = "0.8.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "base64 0.13.0", "ed25519-dalek", @@ -2253,7 +2281,7 @@ dependencies = [ [[package]] name = "ruma-state-res" version = "0.2.0" -source = "git+https://github.com/ruma/ruma?rev=eb19b0e08a901b87d11b3be0890ec788cc760492#eb19b0e08a901b87d11b3be0890ec788cc760492" +source = "git+https://github.com/timokoesters/ruma?rev=a2d93500e1dbc87e7032a3c74f3b2479a7f84e93#a2d93500e1dbc87e7032a3c74f3b2479a7f84e93" dependencies = [ "itertools 0.10.1", "js_int", @@ -2529,9 +2557,9 @@ dependencies = [ [[package]] name = "shlex" -version = "0.1.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" +checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d" [[package]] name = "signal-hook-registry" @@ -2714,9 +2742,9 @@ checksum = 
"6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.73" +version = "1.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7" +checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c" dependencies = [ "proc-macro2", "quote", @@ -2735,6 +2763,12 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "tempfile" version = "3.2.0" @@ -2859,9 +2893,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" +checksum = "848a1e1181b9f6753b5e96a092749e29b11d19ede67dfbbd6c7dc7e0f49b5338" dependencies = [ "tinyvec_macros", ] @@ -2874,9 +2908,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98c8b05dc14c75ea83d63dd391100353789f5f24b8b3866542a5e85c8be8e985" +checksum = "c2602b8af3767c285202012822834005f596c811042315fa7e9f5b12b2a43207" dependencies = [ "autocfg", "bytes", @@ -2997,6 +3031,17 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "tracing-flame" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd520fe41c667b437952383f3a1ec14f1fa45d653f719a77eedd6e6a02d8fa54" +dependencies = [ + "lazy_static", + "tracing", + "tracing-subscriber", +] + [[package]] name = "tracing-log" version = "0.1.2" @@ -3010,9 +3055,9 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.11.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccdf13c28f1654fe806838f28c5b9cb23ca4c0eae71450daa489f50e523ceb1" +checksum = "c47440f2979c4cd3138922840eec122e3c0ba2148bc290f756bd7fd60fc97fff" dependencies = [ "opentelemetry", "tracing", @@ -3394,6 +3439,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + [[package]] name = "yaml-rust" version = "0.4.5" @@ -3411,9 +3462,9 @@ checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71" [[package]] name = "zeroize" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" +checksum = "377db0846015f7ae377174787dd452e1c5f5a9050bc6f954911d01f116daa0cd" dependencies = [ "zeroize_derive", ] diff --git a/Cargo.toml b/Cargo.toml index 2ce4b037..92134a44 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,12 +18,12 @@ edition = "2018" rocket = { version = "0.5.0-rc.1", features = ["tls"] } # Used to handle requests # Used for matrix spec type definitions and helpers -ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } -#ruma = { git = 
"https://github.com/timokoesters/ruma", rev = "74cf83c4ca937fa5e2709fb71e9d11848e72e487", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } +#ruma = { git = "https://github.com/ruma/ruma", rev = "eb19b0e08a901b87d11b3be0890ec788cc760492", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } +ruma = { git = "https://github.com/timokoesters/ruma", rev = "a2d93500e1dbc87e7032a3c74f3b2479a7f84e93", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } #ruma = { path = "../ruma/crates/ruma", features = ["compat", "rand", "appservice-api-c", "client-api", "federation-api", "push-gateway-api-c", "state-res", "unstable-pre-spec", "unstable-exhaustive-types"] } # Used for long polling and federation sender, should be the same as rocket::tokio -tokio = "1.2.0" +tokio = "1.8.2" # Used for storing data permanently sled = { version = "0.34.6", features = ["compression", "no_metrics"], optional = true } rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = true } @@ -31,30 +31,28 @@ rocksdb = { version = "0.16.0", features = ["multi-threaded-cf"], optional = tru # Used for the http request / response body type for Ruma endpoints used with reqwest bytes = "1.0.1" -# Used for emitting log entries -log = "0.4.14" # Used for rocket<->ruma conversions -http = "0.2.3" +http = "0.2.4" # Used to find data directory for default db path -directories = "3.0.1" +directories = "3.0.2" # Used for ruma wrapper serde_json = { version = "1.0.64", features = ["raw_value"] } # Used for appservice registration files serde_yaml = "0.8.17" # Used for pdu definition -serde = "1.0.123" +serde = "1.0.126" # Used for secure identifiers -rand = "0.8.3" +rand = "0.8.4" # Used to hash passwords rust-argon2 = "0.8.3" # Used to send requests -reqwest = { version = "0.11.3", default-features = false, features = ["rustls-tls-native-roots", "socks"] } +reqwest = { version = "0.11.4", default-features = false, features = ["rustls-tls-native-roots", "socks"] } # Custom TLS verifier -rustls = { version = "0.19", features = ["dangerous_configuration"] } +rustls = { version = "0.19.1", features = ["dangerous_configuration"] } rustls-native-certs = "0.5.0" webpki = "0.21.0" # Used for conduit::Error type -thiserror = "1.0.24" +thiserror = "1.0.26" # Used to generate thumbnails for images image = { version = "0.23.14", default-features = false, features = ["jpeg", "png", "gif"] } # Used to encode server public key @@ -62,23 +60,25 @@ base64 = "0.13.0" # Used when hashing the state ring = "0.16.20" # Used when querying the SRV record of other servers -trust-dns-resolver = "0.20.0" +trust-dns-resolver = "0.20.3" # Used to find matching events for appservices -regex = "1.4.3" +regex = "1.5.4" # jwt jsonwebtokens jsonwebtoken = "7.2.0" # Performance measurements -tracing = "0.1.25" -opentelemetry = "0.12.0" -tracing-subscriber = "0.2.16" -tracing-opentelemetry = "0.11.0" -opentelemetry-jaeger = "0.11.0" +tracing = { version = "0.1.26", features = ["release_max_level_warn"] } +opentelemetry = "0.15.0" +tracing-subscriber = "0.2.19" +tracing-opentelemetry = "0.14.0" +tracing-flame = "0.1.0" +opentelemetry-jaeger = "0.14.0" pretty_env_logger = "0.4.0" lru-cache = "0.1.2" rusqlite = { 
version = "0.25.3", optional = true, features = ["bundled"] } parking_lot = { version = "0.11.1", optional = true } crossbeam = { version = "0.8.1", optional = true } num_cpus = "1.13.0" +threadpool = "1.8.1" [features] default = ["conduit_bin", "backend_sqlite"] diff --git a/src/appservice_server.rs b/src/appservice_server.rs index 42918577..7868e45f 100644 --- a/src/appservice_server.rs +++ b/src/appservice_server.rs @@ -1,6 +1,5 @@ use crate::{utils, Error, Result}; use bytes::BytesMut; -use log::warn; use ruma::api::{IncomingResponse, OutgoingRequest, SendAccessToken}; use std::{ convert::{TryFrom, TryInto}, @@ -8,6 +7,7 @@ use std::{ mem, time::Duration, }; +use tracing::warn; pub async fn send_request( globals: &crate::database::globals::Globals, diff --git a/src/client_server/account.rs b/src/client_server/account.rs index ddb44d6e..c00cc871 100644 --- a/src/client_server/account.rs +++ b/src/client_server/account.rs @@ -6,7 +6,6 @@ use std::{ use super::{DEVICE_ID_LENGTH, SESSION_ID_LENGTH, TOKEN_LENGTH}; use crate::{database::DatabaseGuard, pdu::PduBuilder, utils, ConduitResult, Error, Ruma}; -use log::info; use ruma::{ api::client::{ error::ErrorKind, @@ -28,6 +27,7 @@ use ruma::{ identifiers::RoomName, push, RoomAliasId, RoomId, RoomVersionId, UserId, }; +use tracing::info; use register::RegistrationKind; #[cfg(feature = "conduit_bin")] diff --git a/src/client_server/directory.rs b/src/client_server/directory.rs index d7388865..f1ec4b86 100644 --- a/src/client_server/directory.rs +++ b/src/client_server/directory.rs @@ -1,5 +1,4 @@ use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma}; -use log::info; use ruma::{ api::{ client::{ @@ -22,6 +21,7 @@ use ruma::{ serde::Raw, ServerName, UInt, }; +use tracing::info; #[cfg(feature = "conduit_bin")] use rocket::{get, post, put}; diff --git a/src/client_server/membership.rs b/src/client_server/membership.rs index 52bb18c4..ea7fdab5 100644 --- a/src/client_server/membership.rs +++ b/src/client_server/membership.rs @@ -4,7 +4,6 @@ use crate::{ pdu::{PduBuilder, PduEvent}, server_server, utils, ConduitResult, Database, Error, Result, Ruma, }; -use log::{debug, error, warn}; use member::{MemberEventContent, MembershipState}; use rocket::futures; use ruma::{ @@ -34,6 +33,7 @@ use std::{ sync::{Arc, RwLock}, time::{Duration, Instant}, }; +use tracing::{debug, error, warn}; #[cfg(feature = "conduit_bin")] use rocket::{get, post}; diff --git a/src/client_server/room.rs b/src/client_server/room.rs index 7aa22d08..49a6052a 100644 --- a/src/client_server/room.rs +++ b/src/client_server/room.rs @@ -2,7 +2,6 @@ use crate::{ client_server::invite_helper, database::DatabaseGuard, pdu::PduBuilder, ConduitResult, Error, Ruma, }; -use log::info; use ruma::{ api::client::{ error::ErrorKind, @@ -16,6 +15,7 @@ use ruma::{ RoomAliasId, RoomId, RoomVersionId, }; use std::{cmp::max, collections::BTreeMap, convert::TryFrom, sync::Arc}; +use tracing::info; #[cfg(feature = "conduit_bin")] use rocket::{get, post}; diff --git a/src/client_server/session.rs b/src/client_server/session.rs index 7ad792b2..f8452e0d 100644 --- a/src/client_server/session.rs +++ b/src/client_server/session.rs @@ -1,6 +1,5 @@ use super::{DEVICE_ID_LENGTH, TOKEN_LENGTH}; use crate::{database::DatabaseGuard, utils, ConduitResult, Error, Ruma}; -use log::info; use ruma::{ api::client::{ error::ErrorKind, @@ -9,6 +8,7 @@ use ruma::{ UserId, }; use serde::Deserialize; +use tracing::info; #[derive(Debug, Deserialize)] struct Claims { diff --git 
a/src/client_server/sync.rs b/src/client_server/sync.rs index fdb8f256..541045ec 100644 --- a/src/client_server/sync.rs +++ b/src/client_server/sync.rs @@ -1,5 +1,4 @@ use crate::{database::DatabaseGuard, ConduitResult, Database, Error, Result, Ruma, RumaResponse}; -use log::{error, warn}; use ruma::{ api::client::r0::{sync::sync_events, uiaa::UiaaResponse}, events::{room::member::MembershipState, AnySyncEphemeralRoomEvent, EventType}, @@ -13,6 +12,7 @@ use std::{ time::Duration, }; use tokio::sync::watch::Sender; +use tracing::{error, warn}; #[cfg(feature = "conduit_bin")] use rocket::{get, tokio}; diff --git a/src/database.rs b/src/database.rs index e359a5f1..9f243456 100644 --- a/src/database.rs +++ b/src/database.rs @@ -17,7 +17,6 @@ pub mod users; use crate::{utils, Error, Result}; use abstraction::DatabaseEngine; use directories::ProjectDirs; -use log::error; use lru_cache::LruCache; use rocket::{ futures::{channel::mpsc, stream::FuturesUnordered, StreamExt}, @@ -36,6 +35,7 @@ use std::{ sync::{Arc, Mutex, RwLock}, }; use tokio::sync::{OwnedRwLockReadGuard, RwLock as TokioRwLock, Semaphore}; +use tracing::{debug, error, warn}; use self::proxy::ProxyConfig; @@ -69,6 +69,8 @@ pub struct Config { allow_federation: bool, #[serde(default = "false_fn")] pub allow_jaeger: bool, + #[serde(default = "false_fn")] + pub tracing_flame: bool, #[serde(default)] proxy: ProxyConfig, jwt_secret: Option, @@ -91,12 +93,12 @@ impl Config { .keys() .filter(|key| DEPRECATED_KEYS.iter().any(|s| s == key)) { - log::warn!("Config parameter {} is deprecated", key); + warn!("Config parameter {} is deprecated", key); was_deprecated = true; } if was_deprecated { - log::warn!("Read conduit documentation and check your configuration if any new configuration parameters should be adjusted"); + warn!("Read conduit documentation and check your configuration if any new configuration parameters should be adjusted"); } } } @@ -193,13 +195,13 @@ impl Database { if sled_exists { if sqlite_exists { // most likely an in-place directory, only warn - log::warn!("Both sled and sqlite databases are detected in database directory"); - log::warn!("Currently running from the sqlite database, but consider removing sled database files to free up space") + warn!("Both sled and sqlite databases are detected in database directory"); + warn!("Currently running from the sqlite database, but consider removing sled database files to free up space") } else { - log::error!( + error!( "Sled database detected, conduit now uses sqlite for database operations" ); - log::error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite"); + error!("This database must be converted to sqlite, go to https://github.com/ShadowJonathan/conduit_toolbox#conduit_sled_to_sqlite"); return Err(Error::bad_config( "sled database detected, migrate to sqlite", )); @@ -291,7 +293,7 @@ impl Database { statehash_shortstatehash: builder.open_tree("statehash_shortstatehash")?, eventid_outlierpdu: builder.open_tree("eventid_outlierpdu")?, - prevevent_parent: builder.open_tree("prevevent_parent")?, + referencedevents: builder.open_tree("referencedevents")?, pdu_cache: Mutex::new(LruCache::new(100_000)), auth_chain_cache: Mutex::new(LruCache::new(100_000)), }, @@ -444,10 +446,12 @@ impl Database { #[cfg(feature = "conduit_bin")] pub async fn start_on_shutdown_tasks(db: Arc>, shutdown: Shutdown) { + use tracing::info; + tokio::spawn(async move { shutdown.await; - log::info!(target: "shutdown-sync", "Received 
shutdown notification, notifying sync helpers..."); + info!(target: "shutdown-sync", "Received shutdown notification, notifying sync helpers..."); db.read().await.globals.rotate.fire(); }); @@ -543,22 +547,25 @@ impl Database { futures.next().await; } + #[tracing::instrument(skip(self))] pub async fn flush(&self) -> Result<()> { let start = std::time::Instant::now(); let res = self._db.flush(); - log::debug!("flush: took {:?}", start.elapsed()); + debug!("flush: took {:?}", start.elapsed()); res } #[cfg(feature = "sqlite")] + #[tracing::instrument(skip(self))] pub fn flush_wal(&self) -> Result<()> { self._db.flush_wal() } #[cfg(feature = "sqlite")] + #[tracing::instrument(skip(engine, config))] pub async fn start_spillover_reap_task(engine: Arc, config: &Config) { let fraction = config.sqlite_spillover_reap_fraction.clamp(0.01, 1.0); let interval_secs = config.sqlite_spillover_reap_interval_secs as u64; @@ -585,11 +592,13 @@ impl Database { } #[cfg(feature = "sqlite")] + #[tracing::instrument(skip(lock, config))] pub async fn start_wal_clean_task(lock: &Arc>, config: &Config) { use tokio::time::{interval, timeout}; #[cfg(unix)] use tokio::signal::unix::{signal, SignalKind}; + use tracing::info; use std::{ sync::Weak, @@ -611,41 +620,41 @@ impl Database { #[cfg(unix)] tokio::select! { _ = i.tick(), if do_timer => { - log::info!(target: "wal-trunc", "Timer ticked") + info!(target: "wal-trunc", "Timer ticked") } _ = s.recv() => { - log::info!(target: "wal-trunc", "Received SIGHUP") + info!(target: "wal-trunc", "Received SIGHUP") } }; #[cfg(not(unix))] if do_timer { i.tick().await; - log::info!(target: "wal-trunc", "Timer ticked") + info!(target: "wal-trunc", "Timer ticked") } else { // timer disabled, and there's no concept of signals on windows, bailing... return; } if let Some(arc) = Weak::upgrade(&weak) { - log::info!(target: "wal-trunc", "Rotating sync helpers..."); + info!(target: "wal-trunc", "Rotating sync helpers..."); // This actually creates a very small race condition between firing this and trying to acquire the subsequent write lock. // Though it is not a huge deal if the write lock doesn't "catch", as it'll harmlessly time out. 
arc.read().await.globals.rotate.fire(); - log::info!(target: "wal-trunc", "Locking..."); + info!(target: "wal-trunc", "Locking..."); let guard = { if let Ok(guard) = timeout(lock_timeout, arc.write()).await { guard } else { - log::info!(target: "wal-trunc", "Lock failed in timeout, canceled."); + info!(target: "wal-trunc", "Lock failed in timeout, canceled."); continue; } }; - log::info!(target: "wal-trunc", "Locked, flushing..."); + info!(target: "wal-trunc", "Locked, flushing..."); let start = Instant::now(); if let Err(e) = guard.flush_wal() { - log::error!(target: "wal-trunc", "Errored: {}", e); + error!(target: "wal-trunc", "Errored: {}", e); } else { - log::info!(target: "wal-trunc", "Flushed in {:?}", start.elapsed()); + info!(target: "wal-trunc", "Flushed in {:?}", start.elapsed()); } } else { break; diff --git a/src/database/abstraction/sled.rs b/src/database/abstraction/sled.rs index 12e02751..d99ce264 100644 --- a/src/database/abstraction/sled.rs +++ b/src/database/abstraction/sled.rs @@ -1,7 +1,7 @@ use super::super::Config; use crate::{utils, Result}; -use log::warn; use std::{future::Future, pin::Pin, sync::Arc}; +use tracing::warn; use super::{DatabaseEngine, Tree}; diff --git a/src/database/abstraction/sqlite.rs b/src/database/abstraction/sqlite.rs index 8cc6a8de..a46d3ada 100644 --- a/src/database/abstraction/sqlite.rs +++ b/src/database/abstraction/sqlite.rs @@ -3,9 +3,8 @@ use crate::{database::Config, Result}; use crossbeam::channel::{ bounded, unbounded, Receiver as ChannelReceiver, Sender as ChannelSender, TryRecvError, }; -use log::debug; use parking_lot::{Mutex, MutexGuard, RwLock}; -use rusqlite::{params, Connection, DatabaseName::Main, OptionalExtension}; +use rusqlite::{params, Connection, DatabaseName::Main, OptionalExtension, Params}; use std::{ collections::HashMap, future::Future, @@ -13,10 +12,11 @@ use std::{ path::{Path, PathBuf}, pin::Pin, sync::Arc, - thread, time::{Duration, Instant}, }; +use threadpool::ThreadPool; use tokio::sync::oneshot::Sender; +use tracing::{debug, warn}; struct Pool { writer: Mutex, @@ -86,9 +86,9 @@ impl Deref for RecycledConn { impl Drop for RecycledConn { fn drop(&mut self) { if let Some(conn) = self.0.take() { - log::debug!("Recycled connection"); + debug!("Recycled connection"); if let Err(e) = self.1.send(conn) { - log::warn!("Recycling a connection led to the following error: {:?}", e) + warn!("Recycling a connection led to the following error: {:?}", e) } } } @@ -149,14 +149,14 @@ impl Pool { } } - log::debug!("read_lock: All permanent readers locked, obtaining spillover reader..."); + debug!("read_lock: All permanent readers locked, obtaining spillover reader..."); // We didn't get a connection from the permanent pool, so we'll dumpster-dive for recycled connections. // Either we have a connection or we dont, if we don't, we make a new one. let conn = match self.spills.try_take() { Some(conn) => conn, None => { - log::debug!("read_lock: No recycled connections left, creating new one..."); + debug!("read_lock: No recycled connections left, creating new one..."); Self::prepare_conn(&self.path, None).unwrap() } }; @@ -169,7 +169,7 @@ impl Pool { // If the spillover readers are more than the number of total readers, there might be a problem. if now_count > self.readers.len() { - log::warn!( + warn!( "Database is under high load. 
Consider increasing sqlite_read_pool_size ({} spillover readers exist)", now_count ); @@ -182,6 +182,7 @@ impl Pool { pub struct Engine { pool: Pool, + iter_pool: Mutex, } impl DatabaseEngine for Engine { @@ -195,7 +196,10 @@ impl DatabaseEngine for Engine { pool.write_lock() .execute("CREATE TABLE IF NOT EXISTS _noop (\"key\" INT)", params![])?; - let arc = Arc::new(Engine { pool }); + let arc = Arc::new(Engine { + pool, + iter_pool: Mutex::new(ThreadPool::new(10)), + }); Ok(arc) } @@ -259,7 +263,7 @@ impl Engine { } } - log::debug!("Reaped {} connections", reaped); + debug!("Reaped {} connections", reaped); } } @@ -272,6 +276,7 @@ pub struct SqliteTable { type TupleOfBytes = (Vec, Vec); impl SqliteTable { + #[tracing::instrument(skip(self, guard, key))] fn get_with_guard(&self, guard: &Connection, key: &[u8]) -> Result>> { Ok(guard .prepare(format!("SELECT value FROM {} WHERE key = ?", self.name).as_str())? @@ -279,6 +284,7 @@ impl SqliteTable { .optional()?) } + #[tracing::instrument(skip(self, guard, key, value))] fn insert_with_guard(&self, guard: &Connection, key: &[u8], value: &[u8]) -> Result<()> { guard.execute( format!( @@ -291,41 +297,67 @@ impl SqliteTable { Ok(()) } - fn _iter_from_thread(&self, f: F) -> Box + Send> - where - F: (for<'a> FnOnce(&'a Connection, ChannelSender)) + Send + 'static, - { + #[tracing::instrument(skip(self, sql, param))] + fn iter_from_thread( + &self, + sql: String, + param: Option>, + ) -> Box + Send + Sync> { let (s, r) = bounded::(5); - let engine = self.engine.clone(); + let engine = Arc::clone(&self.engine); - thread::spawn(move || { - let _ = f(&engine.pool.read_lock(), s); - }); + let lock = self.engine.iter_pool.lock(); + if lock.active_count() < lock.max_count() { + lock.execute(move || { + if let Some(param) = param { + iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, [param]); + } else { + iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, []); + } + }); + } else { + std::thread::spawn(move || { + if let Some(param) = param { + iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, [param]); + } else { + iter_from_thread_work(&engine.pool.read_lock(), &s, &sql, []); + } + }); + } Box::new(r.into_iter()) } } -macro_rules! iter_from_thread { - ($self:expr, $sql:expr, $param:expr) => { - $self._iter_from_thread(move |guard, s| { - let _ = guard - .prepare($sql) - .unwrap() - .query_map($param, |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) - .unwrap() - .map(|r| r.unwrap()) - .try_for_each(|bob| s.send(bob)); - }) - }; +fn iter_from_thread_work
<P>
( + guard: &HoldingConn<'_>, + s: &ChannelSender<(Vec, Vec)>, + sql: &str, + params: P, +) where + P: Params, +{ + for bob in guard + .prepare(sql) + .unwrap() + .query_map(params, |row| Ok((row.get_unwrap(0), row.get_unwrap(1)))) + .unwrap() + .map(|r| r.unwrap()) + { + if s.send(bob).is_err() { + return; + } + } } impl Tree for SqliteTable { + #[tracing::instrument(skip(self, key))] fn get(&self, key: &[u8]) -> Result>> { self.get_with_guard(&self.engine.pool.read_lock(), key) } + #[tracing::instrument(skip(self, key, value))] fn insert(&self, key: &[u8], value: &[u8]) -> Result<()> { let guard = self.engine.pool.write_lock(); @@ -365,6 +397,7 @@ impl Tree for SqliteTable { Ok(()) } + #[tracing::instrument(skip(self, key))] fn remove(&self, key: &[u8]) -> Result<()> { let guard = self.engine.pool.write_lock(); @@ -385,15 +418,13 @@ impl Tree for SqliteTable { Ok(()) } + #[tracing::instrument(skip(self))] fn iter<'a>(&'a self) -> Box + Send + 'a> { let name = self.name.clone(); - iter_from_thread!( - self, - format!("SELECT key, value FROM {}", name).as_str(), - params![] - ) + self.iter_from_thread(format!("SELECT key, value FROM {}", name), None) } + #[tracing::instrument(skip(self, from, backwards))] fn iter_from<'a>( &'a self, from: &[u8], @@ -402,28 +433,25 @@ impl Tree for SqliteTable { let name = self.name.clone(); let from = from.to_vec(); // TODO change interface? if backwards { - iter_from_thread!( - self, + self.iter_from_thread( format!( "SELECT key, value FROM {} WHERE key <= ? ORDER BY key DESC", name - ) - .as_str(), - [from] + ), + Some(from), ) } else { - iter_from_thread!( - self, + self.iter_from_thread( format!( "SELECT key, value FROM {} WHERE key >= ? ORDER BY key ASC", name - ) - .as_str(), - [from] + ), + Some(from), ) } } + #[tracing::instrument(skip(self, key))] fn increment(&self, key: &[u8]) -> Result> { let guard = self.engine.pool.write_lock(); @@ -446,18 +474,17 @@ impl Tree for SqliteTable { Ok(new) } + #[tracing::instrument(skip(self, prefix))] fn scan_prefix<'a>( &'a self, prefix: Vec, ) -> Box + Send + 'a> { // let name = self.name.clone(); - // iter_from_thread!( - // self, + // self.iter_from_thread( // format!( // "SELECT key, value FROM {} WHERE key BETWEEN ?1 AND ?1 || X'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF' ORDER BY key ASC", // name // ) - // .as_str(), // [prefix] // ) Box::new( @@ -466,6 +493,7 @@ impl Tree for SqliteTable { ) } + #[tracing::instrument(skip(self, prefix))] fn watch_prefix<'a>(&'a self, prefix: &[u8]) -> Pin + Send + 'a>> { let (tx, rx) = tokio::sync::oneshot::channel(); @@ -481,6 +509,7 @@ impl Tree for SqliteTable { }) } + #[tracing::instrument(skip(self))] fn clear(&self) -> Result<()> { debug!("clear: running"); self.engine diff --git a/src/database/account_data.rs b/src/database/account_data.rs index b1d5b6b5..8a8d2c22 100644 --- a/src/database/account_data.rs +++ b/src/database/account_data.rs @@ -16,6 +16,7 @@ pub struct AccountData { impl AccountData { /// Places one event in the account data of the user and removes the previous entry. + #[tracing::instrument(skip(self, room_id, user_id, event_type, data, globals))] pub fn update( &self, room_id: Option<&RoomId>, @@ -60,6 +61,7 @@ impl AccountData { } /// Searches the account data for a specific kind. + #[tracing::instrument(skip(self, room_id, user_id, kind))] pub fn get( &self, room_id: Option<&RoomId>, @@ -74,7 +76,7 @@ impl AccountData { } /// Returns all changes to the account data that happened after `since`. 
- #[tracing::instrument(skip(self))] + #[tracing::instrument(skip(self, room_id, user_id, since))] pub fn changes_since( &self, room_id: Option<&RoomId>, @@ -122,6 +124,7 @@ impl AccountData { Ok(userdata) } + #[tracing::instrument(skip(self, room_id, user_id, kind))] fn find_event( &self, room_id: Option<&RoomId>, diff --git a/src/database/admin.rs b/src/database/admin.rs index d8b7ae5e..e1b24d08 100644 --- a/src/database/admin.rs +++ b/src/database/admin.rs @@ -4,13 +4,13 @@ use std::{ }; use crate::{pdu::PduBuilder, Database}; -use log::warn; use rocket::futures::{channel::mpsc, stream::StreamExt}; use ruma::{ events::{room::message, EventType}, UserId, }; use tokio::sync::{MutexGuard, RwLock, RwLockReadGuard}; +use tracing::warn; pub enum AdminCommand { RegisterAppservice(serde_yaml::Value), diff --git a/src/database/globals.rs b/src/database/globals.rs index fbd41a38..0edb9ca2 100644 --- a/src/database/globals.rs +++ b/src/database/globals.rs @@ -1,5 +1,4 @@ use crate::{database::Config, utils, ConduitResult, Error, Result}; -use log::{error, info}; use ruma::{ api::{ client::r0::sync::sync_events, @@ -17,6 +16,7 @@ use std::{ time::{Duration, Instant}, }; use tokio::sync::{broadcast, watch::Receiver, Mutex, Semaphore}; +use tracing::{error, info}; use trust_dns_resolver::TokioAsyncResolver; use super::abstraction::Tree; @@ -56,6 +56,7 @@ struct MatrixServerVerifier { } impl ServerCertVerifier for MatrixServerVerifier { + #[tracing::instrument(skip(self, roots, presented_certs, dns_name, ocsp_response))] fn verify_server_cert( &self, roots: &rustls::RootCertStore, @@ -220,11 +221,13 @@ impl Globals { &self.reqwest_client } + #[tracing::instrument(skip(self))] pub fn next_count(&self) -> Result { utils::u64_from_bytes(&self.globals.increment(COUNTER)?) .map_err(|_| Error::bad_database("Count has invalid bytes.")) } + #[tracing::instrument(skip(self))] pub fn current_count(&self) -> Result { self.globals.get(COUNTER)?.map_or(Ok(0_u64), |bytes| { utils::u64_from_bytes(&bytes) diff --git a/src/database/pusher.rs b/src/database/pusher.rs index 348f4dcb..9e81dd16 100644 --- a/src/database/pusher.rs +++ b/src/database/pusher.rs @@ -1,6 +1,5 @@ use crate::{Database, Error, PduEvent, Result}; use bytes::BytesMut; -use log::{error, info, warn}; use ruma::{ api::{ client::r0::push::{get_pushers, set_pusher, PusherKind}, @@ -15,6 +14,7 @@ use ruma::{ push::{Action, PushConditionRoomCtx, PushFormat, Ruleset, Tweak}, uint, UInt, UserId, }; +use tracing::{error, info, warn}; use std::{convert::TryFrom, fmt::Debug, mem, sync::Arc}; @@ -26,6 +26,7 @@ pub struct PushData { } impl PushData { + #[tracing::instrument(skip(self, sender, pusher))] pub fn set_pusher(&self, sender: &UserId, pusher: set_pusher::Pusher) -> Result<()> { let mut key = sender.as_bytes().to_vec(); key.push(0xff); @@ -48,6 +49,7 @@ impl PushData { Ok(()) } + #[tracing::instrument(skip(self, senderkey))] pub fn get_pusher(&self, senderkey: &[u8]) -> Result> { self.senderkey_pusher .get(senderkey)? 
@@ -58,6 +60,7 @@ impl PushData { .transpose() } + #[tracing::instrument(skip(self, sender))] pub fn get_pushers(&self, sender: &UserId) -> Result> { let mut prefix = sender.as_bytes().to_vec(); prefix.push(0xff); @@ -71,6 +74,7 @@ impl PushData { .collect() } + #[tracing::instrument(skip(self, sender))] pub fn get_pusher_senderkeys<'a>( &'a self, sender: &UserId, @@ -82,6 +86,7 @@ impl PushData { } } +#[tracing::instrument(skip(globals, destination, request))] pub async fn send_request( globals: &crate::database::globals::Globals, destination: &str, @@ -155,6 +160,7 @@ where } } +#[tracing::instrument(skip(user, unread, pusher, ruleset, pdu, db))] pub async fn send_push_notice( user: &UserId, unread: UInt, @@ -194,6 +200,7 @@ pub async fn send_push_notice( Ok(()) } +#[tracing::instrument(skip(user, ruleset, pdu, db))] pub fn get_actions<'a>( user: &UserId, ruleset: &'a Ruleset, @@ -225,6 +232,7 @@ pub fn get_actions<'a>( Ok(ruleset.get_actions(&pdu.to_sync_room_event(), &ctx)) } +#[tracing::instrument(skip(unread, pusher, tweaks, event, db))] async fn send_notice( unread: UInt, pusher: &get_pushers::Pusher, diff --git a/src/database/rooms.rs b/src/database/rooms.rs index f0ec683d..756f65e9 100644 --- a/src/database/rooms.rs +++ b/src/database/rooms.rs @@ -5,7 +5,6 @@ use member::MembershipState; use tokio::sync::MutexGuard; use crate::{pdu::PduBuilder, utils, Database, Error, PduEvent, Result}; -use log::{debug, error, warn}; use lru_cache::LruCache; use regex::Regex; use ring::digest; @@ -27,6 +26,7 @@ use std::{ mem, sync::{Arc, Mutex}, }; +use tracing::{debug, error, warn}; use super::{abstraction::Tree, admin::AdminCommand, pusher}; @@ -82,7 +82,7 @@ pub struct Rooms { pub(super) eventid_outlierpdu: Arc, /// RoomId + EventId -> Parent PDU EventId. - pub(super) prevevent_parent: Arc, + pub(super) referencedevents: Arc, pub(super) pdu_cache: Mutex>>, pub(super) auth_chain_cache: Mutex>>, @@ -617,6 +617,7 @@ impl Rooms { } /// Returns the leaf pdus of a room. + #[tracing::instrument(skip(self))] pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result> { let mut prefix = room_id.as_bytes().to_vec(); prefix.push(0xff); @@ -636,6 +637,7 @@ impl Rooms { /// /// The provided `event_ids` become the new leaves, this allows a room to have multiple /// `prev_events`. + #[tracing::instrument(skip(self))] pub fn replace_pdu_leaves(&self, room_id: &RoomId, event_ids: &[EventId]) -> Result<()> { let mut prefix = room_id.as_bytes().to_vec(); prefix.push(0xff); @@ -653,13 +655,15 @@ impl Rooms { Ok(()) } + #[tracing::instrument(skip(self))] pub fn is_event_referenced(&self, room_id: &RoomId, event_id: &EventId) -> Result { let mut key = room_id.as_bytes().to_vec(); key.extend_from_slice(event_id.as_bytes()); - Ok(self.prevevent_parent.get(&key)?.is_some()) + Ok(self.referencedevents.get(&key)?.is_some()) } /// Returns the pdu from the outlier tree. + #[tracing::instrument(skip(self))] pub fn get_pdu_outlier(&self, event_id: &EventId) -> Result> { self.eventid_outlierpdu .get(event_id.as_bytes())? @@ -671,6 +675,7 @@ impl Rooms { /// Append the PDU as an outlier. /// /// Any event given to this will be processed (state-res) on another thread. + #[tracing::instrument(skip(self, pdu))] pub fn add_pdu_outlier(&self, event_id: &EventId, pdu: &CanonicalJsonObject) -> Result<()> { self.eventid_outlierpdu.insert( &event_id.as_bytes(), @@ -684,7 +689,7 @@ impl Rooms { /// /// By this point the incoming event should be fully authenticated, no auth happens /// in `append_pdu`. 
- #[allow(clippy::too_many_arguments)] + #[tracing::instrument(skip(self, pdu, pdu_json, leaves, db))] pub fn append_pdu( &self, pdu: &PduEvent, @@ -721,11 +726,10 @@ impl Rooms { } // We must keep track of all events that have been referenced. - for leaf in leaves { + for prev in &pdu.prev_events { let mut key = pdu.room_id().as_bytes().to_vec(); - key.extend_from_slice(leaf.as_bytes()); - self.prevevent_parent - .insert(&key, pdu.event_id().as_bytes())?; + key.extend_from_slice(prev.as_bytes()); + self.referencedevents.insert(&key, &[])?; } self.replace_pdu_leaves(&pdu.room_id, leaves)?; @@ -757,12 +761,11 @@ impl Rooms { // See if the event matches any known pushers for user in db - .users - .iter() + .rooms + .room_members(&pdu.room_id) .filter_map(|r| r.ok()) .filter(|user_id| user_id.server_name() == db.globals.server_name()) .filter(|user_id| !db.users.is_deactivated(user_id).unwrap_or(false)) - .filter(|user_id| self.is_joined(&user_id, &pdu.room_id).unwrap_or(false)) { // Don't notify the user of their own events if user == pdu.sender { @@ -992,6 +995,7 @@ impl Rooms { Ok(pdu_id) } + #[tracing::instrument(skip(self))] pub fn reset_notification_counts(&self, user_id: &UserId, room_id: &RoomId) -> Result<()> { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -1005,6 +1009,7 @@ impl Rooms { Ok(()) } + #[tracing::instrument(skip(self))] pub fn notification_count(&self, user_id: &UserId, room_id: &RoomId) -> Result { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -1019,6 +1024,7 @@ impl Rooms { .unwrap_or(Ok(0)) } + #[tracing::instrument(skip(self))] pub fn highlight_count(&self, user_id: &UserId, room_id: &RoomId) -> Result { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -1037,6 +1043,7 @@ impl Rooms { /// /// This adds all current state events (not including the incoming event) /// to `stateid_pduid` and adds the incoming event to `eventid_statehash`. + #[tracing::instrument(skip(self, state, globals))] pub fn set_event_state( &self, event_id: &EventId, @@ -1121,6 +1128,7 @@ impl Rooms { /// /// This adds all current state events (not including the incoming event) /// to `stateid_pduid` and adds the incoming event to `eventid_statehash`. + #[tracing::instrument(skip(self, new_pdu, globals))] pub fn append_to_state( &self, new_pdu: &PduEvent, @@ -1227,6 +1235,7 @@ impl Rooms { } } + #[tracing::instrument(skip(self, invite_event))] pub fn calculate_invite_state( &self, invite_event: &PduEvent, @@ -1264,6 +1273,7 @@ impl Rooms { Ok(state) } + #[tracing::instrument(skip(self))] pub fn set_room_state(&self, room_id: &RoomId, shortstatehash: u64) -> Result<()> { self.roomid_shortstatehash .insert(room_id.as_bytes(), &shortstatehash.to_be_bytes())?; @@ -1272,6 +1282,7 @@ impl Rooms { } /// Creates a new persisted data unit and adds it to a room. + #[tracing::instrument(skip(self, db, _mutex_lock))] pub fn build_and_append_pdu( &self, pdu_builder: PduBuilder, @@ -1563,6 +1574,7 @@ impl Rooms { /// Returns an iterator over all events and their tokens in a room that happened before the /// event with id `until` in reverse-chronological order. + #[tracing::instrument(skip(self))] pub fn pdus_until<'a>( &'a self, user_id: &UserId, @@ -1625,6 +1637,7 @@ impl Rooms { } /// Replace a PDU with the redacted form. + #[tracing::instrument(skip(self, reason))] pub fn redact_pdu(&self, event_id: &EventId, reason: &PduEvent) -> Result<()> { if let Some(pdu_id) = self.get_pdu_id(event_id)? 
{ let mut pdu = self @@ -1642,6 +1655,7 @@ impl Rooms { } /// Update current membership data. + #[tracing::instrument(skip(self, last_state, db))] pub fn update_membership( &self, room_id: &RoomId, @@ -2026,6 +2040,7 @@ impl Rooms { } /// Makes a user forget a room. + #[tracing::instrument(skip(self))] pub fn forget(&self, room_id: &RoomId, user_id: &UserId) -> Result<()> { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -2041,6 +2056,7 @@ impl Rooms { Ok(()) } + #[tracing::instrument(skip(self, globals))] pub fn set_alias( &self, alias: &RoomAliasId, @@ -2076,6 +2092,7 @@ impl Rooms { Ok(()) } + #[tracing::instrument(skip(self))] pub fn id_from_alias(&self, alias: &RoomAliasId) -> Result> { self.alias_roomid .get(alias.alias().as_bytes())? @@ -2089,6 +2106,7 @@ impl Rooms { }) } + #[tracing::instrument(skip(self))] pub fn room_aliases<'a>( &'a self, room_id: &RoomId, @@ -2104,6 +2122,7 @@ impl Rooms { }) } + #[tracing::instrument(skip(self))] pub fn set_public(&self, room_id: &RoomId, public: bool) -> Result<()> { if public { self.publicroomids.insert(room_id.as_bytes(), &[])?; @@ -2114,10 +2133,12 @@ impl Rooms { Ok(()) } + #[tracing::instrument(skip(self))] pub fn is_public_room(&self, room_id: &RoomId) -> Result { Ok(self.publicroomids.get(room_id.as_bytes())?.is_some()) } + #[tracing::instrument(skip(self))] pub fn public_rooms(&self) -> impl Iterator> + '_ { self.publicroomids.iter().map(|(bytes, _)| { RoomId::try_from( @@ -2219,6 +2240,7 @@ impl Rooms { } /// Returns an iterator of all servers participating in this room. + #[tracing::instrument(skip(self))] pub fn room_servers<'a>( &'a self, room_id: &RoomId, @@ -2242,6 +2264,7 @@ impl Rooms { } /// Returns an iterator of all rooms a server participates in (as far as we know). + #[tracing::instrument(skip(self))] pub fn server_rooms<'a>( &'a self, server: &ServerName, @@ -2287,6 +2310,7 @@ impl Rooms { } /// Returns an iterator over all User IDs who ever joined a room. 
+ #[tracing::instrument(skip(self))] pub fn room_useroncejoined<'a>( &'a self, room_id: &RoomId, @@ -2494,6 +2518,7 @@ impl Rooms { }) } + #[tracing::instrument(skip(self))] pub fn once_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -2502,6 +2527,7 @@ impl Rooms { Ok(self.roomuseroncejoinedids.get(&userroom_id)?.is_some()) } + #[tracing::instrument(skip(self))] pub fn is_joined(&self, user_id: &UserId, room_id: &RoomId) -> Result { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -2510,6 +2536,7 @@ impl Rooms { Ok(self.userroomid_joined.get(&userroom_id)?.is_some()) } + #[tracing::instrument(skip(self))] pub fn is_invited(&self, user_id: &UserId, room_id: &RoomId) -> Result { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -2518,6 +2545,7 @@ impl Rooms { Ok(self.userroomid_invitestate.get(&userroom_id)?.is_some()) } + #[tracing::instrument(skip(self))] pub fn is_left(&self, user_id: &UserId, room_id: &RoomId) -> Result { let mut userroom_id = user_id.as_bytes().to_vec(); userroom_id.push(0xff); @@ -2526,6 +2554,7 @@ impl Rooms { Ok(self.userroomid_leftstate.get(&userroom_id)?.is_some()) } + #[tracing::instrument(skip(self))] pub fn auth_chain_cache( &self, ) -> std::sync::MutexGuard<'_, LruCache>> { diff --git a/src/database/rooms/edus.rs b/src/database/rooms/edus.rs index 9a5cdeb2..664c1710 100644 --- a/src/database/rooms/edus.rs +++ b/src/database/rooms/edus.rs @@ -116,6 +116,7 @@ impl RoomEdus { } /// Sets a private read marker at `count`. + #[tracing::instrument(skip(self, globals))] pub fn private_read_set( &self, room_id: &RoomId, diff --git a/src/database/sending.rs b/src/database/sending.rs index a07192e8..f28e8837 100644 --- a/src/database/sending.rs +++ b/src/database/sending.rs @@ -10,7 +10,6 @@ use crate::{ appservice_server, database::pusher, server_server, utils, Database, Error, PduEvent, Result, }; use federation::transactions::send_transaction_message; -use log::{error, warn}; use ring::digest; use rocket::futures::{ channel::mpsc, @@ -34,6 +33,7 @@ use tokio::{ select, sync::{RwLock, Semaphore}, }; +use tracing::{error, warn}; use super::abstraction::Tree; @@ -45,6 +45,7 @@ pub enum OutgoingKind { } impl OutgoingKind { + #[tracing::instrument(skip(self))] pub fn get_prefix(&self) -> Vec { let mut prefix = match self { OutgoingKind::Appservice(server) => { @@ -223,6 +224,7 @@ impl Sending { }); } + #[tracing::instrument(skip(outgoing_kind, new_events, current_transaction_status, db))] fn select_events( outgoing_kind: &OutgoingKind, new_events: Vec<(SendingEventType, Vec)>, // Events we want to send: event and full key @@ -295,6 +297,7 @@ impl Sending { Ok(Some(events)) } + #[tracing::instrument(skip(db, server))] pub fn select_edus(db: &Database, server: &ServerName) -> Result<(Vec>, u64)> { // u64: count of last edu let since = db @@ -371,7 +374,7 @@ impl Sending { Ok((events, max_edu_count)) } - #[tracing::instrument(skip(self))] + #[tracing::instrument(skip(self, pdu_id, senderkey))] pub fn send_push_pdu(&self, pdu_id: &[u8], senderkey: Vec) -> Result<()> { let mut key = b"$".to_vec(); key.extend_from_slice(&senderkey); @@ -383,7 +386,7 @@ impl Sending { Ok(()) } - #[tracing::instrument(skip(self))] + #[tracing::instrument(skip(self, server, pdu_id))] pub fn send_pdu(&self, server: &ServerName, pdu_id: &[u8]) -> Result<()> { let mut key = server.as_bytes().to_vec(); key.push(0xff); @@ -394,7 +397,7 @@ impl Sending { 
Ok(()) } - #[tracing::instrument(skip(self))] + #[tracing::instrument(skip(self, server, serialized))] pub fn send_reliable_edu(&self, server: &ServerName, serialized: &[u8]) -> Result<()> { let mut key = server.as_bytes().to_vec(); key.push(0xff); @@ -418,7 +421,7 @@ impl Sending { Ok(()) } - #[tracing::instrument] + #[tracing::instrument(skip(keys))] fn calculate_hash(keys: &[&[u8]]) -> Vec { // We only hash the pdu's event ids, not the whole pdu let bytes = keys.join(&0xff); @@ -426,7 +429,7 @@ impl Sending { hash.as_ref().to_owned() } - #[tracing::instrument(skip(db))] + #[tracing::instrument(skip(db, events, kind))] async fn handle_events( kind: OutgoingKind, events: Vec, @@ -658,6 +661,7 @@ impl Sending { } } + #[tracing::instrument(skip(key))] fn parse_servercurrentevent(key: &[u8]) -> Result<(OutgoingKind, SendingEventType)> { // Appservices start with a plus Ok::<_, Error>(if key.starts_with(b"+") { @@ -723,7 +727,7 @@ impl Sending { }) } - #[tracing::instrument(skip(self, globals))] + #[tracing::instrument(skip(self, globals, destination, request))] pub async fn send_federation_request( &self, globals: &crate::database::globals::Globals, @@ -740,7 +744,7 @@ impl Sending { response } - #[tracing::instrument(skip(self, globals))] + #[tracing::instrument(skip(self, globals, registration, request))] pub async fn send_appservice_request( &self, globals: &crate::database::globals::Globals, diff --git a/src/database/users.rs b/src/database/users.rs index cd46c458..f501ec30 100644 --- a/src/database/users.rs +++ b/src/database/users.rs @@ -8,6 +8,7 @@ use ruma::{ DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, UInt, UserId, }; use std::{collections::BTreeMap, convert::TryFrom, mem, sync::Arc}; +use tracing::warn; use super::abstraction::Tree; @@ -34,11 +35,13 @@ pub struct Users { impl Users { /// Check if a user has an account on this homeserver. + #[tracing::instrument(skip(self, user_id))] pub fn exists(&self, user_id: &UserId) -> Result { Ok(self.userid_password.get(user_id.as_bytes())?.is_some()) } /// Check if account is deactivated + #[tracing::instrument(skip(self, user_id))] pub fn is_deactivated(&self, user_id: &UserId) -> Result { Ok(self .userid_password @@ -51,17 +54,20 @@ impl Users { } /// Create a new user account on this homeserver. + #[tracing::instrument(skip(self, user_id, password))] pub fn create(&self, user_id: &UserId, password: Option<&str>) -> Result<()> { self.set_password(user_id, password)?; Ok(()) } /// Returns the number of users registered on this server. + #[tracing::instrument(skip(self))] pub fn count(&self) -> Result { Ok(self.userid_password.iter().count()) } /// Find out which user an access token belongs to. + #[tracing::instrument(skip(self, token))] pub fn find_from_token(&self, token: &str) -> Result> { self.token_userdeviceid .get(token.as_bytes())? @@ -89,6 +95,7 @@ impl Users { } /// Returns an iterator over all users on this homeserver. + #[tracing::instrument(skip(self))] pub fn iter(&self) -> impl Iterator> + '_ { self.userid_password.iter().map(|(bytes, _)| { UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| { @@ -99,6 +106,7 @@ impl Users { } /// Returns the password hash for the given user. + #[tracing::instrument(skip(self, user_id))] pub fn password_hash(&self, user_id: &UserId) -> Result> { self.userid_password .get(user_id.as_bytes())? 
@@ -110,6 +118,7 @@ impl Users {
     }

     /// Hash and set the user's password to the Argon2 hash
+    #[tracing::instrument(skip(self, user_id, password))]
     pub fn set_password(&self, user_id: &UserId, password: Option<&str>) -> Result<()> {
         if let Some(password) = password {
             if let Ok(hash) = utils::calculate_hash(&password) {
@@ -129,6 +138,7 @@ impl Users {
     }

     /// Returns the displayname of a user on this homeserver.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn displayname(&self, user_id: &UserId) -> Result> {
         self.userid_displayname
             .get(user_id.as_bytes())?
@@ -140,6 +150,7 @@ impl Users {
     }

     /// Sets a new displayname or removes it if displayname is None. You still need to nofify all rooms of this change.
+    #[tracing::instrument(skip(self, user_id, displayname))]
     pub fn set_displayname(&self, user_id: &UserId, displayname: Option) -> Result<()> {
         if let Some(displayname) = displayname {
             self.userid_displayname
@@ -152,6 +163,7 @@ impl Users {
     }

     /// Get the avatar_url of a user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn avatar_url(&self, user_id: &UserId) -> Result> {
         self.userid_avatarurl
             .get(user_id.as_bytes())?
@@ -164,6 +176,7 @@ impl Users {
     }

     /// Sets a new avatar_url or removes it if avatar_url is None.
+    #[tracing::instrument(skip(self, user_id, avatar_url))]
     pub fn set_avatar_url(&self, user_id: &UserId, avatar_url: Option) -> Result<()> {
         if let Some(avatar_url) = avatar_url {
             self.userid_avatarurl
@@ -176,6 +189,7 @@ impl Users {
     }

     /// Get the blurhash of a user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn blurhash(&self, user_id: &UserId) -> Result> {
         self.userid_blurhash
             .get(user_id.as_bytes())?
@@ -189,6 +203,7 @@ impl Users {
     }

     /// Sets a new avatar_url or removes it if avatar_url is None.
+    #[tracing::instrument(skip(self, user_id, blurhash))]
     pub fn set_blurhash(&self, user_id: &UserId, blurhash: Option) -> Result<()> {
         if let Some(blurhash) = blurhash {
             self.userid_blurhash
@@ -201,6 +216,7 @@ impl Users {
     }

     /// Adds a new device to a user.
+    #[tracing::instrument(skip(self, user_id, device_id, token, initial_device_display_name))]
     pub fn create_device(
         &self,
         user_id: &UserId,
@@ -235,6 +251,7 @@ impl Users {
     }

     /// Removes a device from a user.
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn remove_device(&self, user_id: &UserId, device_id: &DeviceId) -> Result<()> {
         let mut userdeviceid = user_id.as_bytes().to_vec();
         userdeviceid.push(0xff);
@@ -265,6 +282,7 @@ impl Users {
     }

     /// Returns an iterator over all device ids of this user.
+    #[tracing::instrument(skip(self, user_id))]
     pub fn all_device_ids<'a>(
         &'a self,
         user_id: &UserId,
@@ -287,6 +305,7 @@ impl Users {
     }

     /// Replaces the access token of one device.
+    #[tracing::instrument(skip(self, user_id, device_id, token))]
     pub fn set_token(&self, user_id: &UserId, device_id: &DeviceId, token: &str) -> Result<()> {
         let mut userdeviceid = user_id.as_bytes().to_vec();
         userdeviceid.push(0xff);
@@ -310,6 +329,14 @@ impl Users {
         Ok(())
     }

+    #[tracing::instrument(skip(
+        self,
+        user_id,
+        device_id,
+        one_time_key_key,
+        one_time_key_value,
+        globals
+    ))]
     pub fn add_one_time_key(
         &self,
         user_id: &UserId,
@@ -346,7 +373,7 @@ impl Users {
         Ok(())
     }

-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id))]
     pub fn last_one_time_keys_update(&self, user_id: &UserId) -> Result {
         self.userid_lastonetimekeyupdate
             .get(&user_id.as_bytes())?
@@ -358,6 +385,7 @@ impl Users {
         .unwrap_or(Ok(0))
     }

+    #[tracing::instrument(skip(self, user_id, device_id, key_algorithm, globals))]
     pub fn take_one_time_key(
         &self,
         user_id: &UserId,
@@ -397,7 +425,7 @@ impl Users {
         .transpose()
     }

-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn count_one_time_keys(
         &self,
         user_id: &UserId,
@@ -430,6 +458,7 @@ impl Users {
         Ok(counts)
     }

+    #[tracing::instrument(skip(self, user_id, device_id, device_keys, rooms, globals))]
     pub fn add_device_keys(
         &self,
         user_id: &UserId,
@@ -452,6 +481,14 @@ impl Users {
         Ok(())
     }

+    #[tracing::instrument(skip(
+        self,
+        master_key,
+        self_signing_key,
+        user_signing_key,
+        rooms,
+        globals
+    ))]
     pub fn add_cross_signing_keys(
         &self,
         user_id: &UserId,
@@ -552,6 +589,7 @@ impl Users {
         Ok(())
     }

+    #[tracing::instrument(skip(self, target_id, key_id, signature, sender_id, rooms, globals))]
     pub fn sign_key(
         &self,
         target_id: &UserId,
@@ -595,7 +633,7 @@ impl Users {
         Ok(())
     }

-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_or_room_id, from, to))]
     pub fn keys_changed<'a>(
         &'a self,
         user_or_room_id: &str,
@@ -608,9 +646,24 @@ impl Users {
         let mut start = prefix.clone();
         start.extend_from_slice(&(from + 1).to_be_bytes());

+        let to = to.unwrap_or(u64::MAX);
+
         self.keychangeid_userid
             .iter_from(&start, false)
-            .take_while(move |(k, _)| k.starts_with(&prefix))
+            .take_while(move |(k, _)| {
+                k.starts_with(&prefix)
+                    && if let Some(current) = k.splitn(2, |&b| b == 0xff).nth(1) {
+                        if let Ok(c) = utils::u64_from_bytes(current) {
+                            c <= to
+                        } else {
+                            warn!("BadDatabase: Could not parse keychangeid_userid bytes");
+                            false
+                        }
+                    } else {
+                        warn!("BadDatabase: Could not parse keychangeid_userid");
+                        false
+                    }
+            })
             .map(|(_, bytes)| {
                 UserId::try_from(utils::string_from_bytes(&bytes).map_err(|_| {
                     Error::bad_database("User ID in devicekeychangeid_userid is invalid unicode.")
@@ -619,6 +672,7 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(self, user_id, rooms, globals))]
     fn mark_device_key_update(
         &self,
         user_id: &UserId,
@@ -650,6 +704,7 @@ impl Users {
         Ok(())
     }

+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn get_device_keys(
         &self,
         user_id: &UserId,
@@ -666,6 +721,7 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(self, user_id, allowed_signatures))]
     pub fn get_master_key bool>(
         &self,
         user_id: &UserId,
@@ -693,6 +749,7 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(self, user_id, allowed_signatures))]
     pub fn get_self_signing_key bool>(
         &self,
         user_id: &UserId,
@@ -720,6 +777,7 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(self, user_id))]
     pub fn get_user_signing_key(&self, user_id: &UserId) -> Result> {
         self.userid_usersigningkeyid
             .get(user_id.as_bytes())?
@@ -732,6 +790,15 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(
+        self,
+        sender,
+        target_user_id,
+        target_device_id,
+        event_type,
+        content,
+        globals
+    ))]
     pub fn add_to_device_event(
         &self,
         sender: &UserId,
@@ -759,7 +826,7 @@ impl Users {
         Ok(())
     }

-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn get_to_device_events(
         &self,
         user_id: &UserId,
@@ -782,7 +849,7 @@ impl Users {
         Ok(events)
     }

-    #[tracing::instrument(skip(self))]
+    #[tracing::instrument(skip(self, user_id, device_id, until))]
     pub fn remove_to_device_events(
         &self,
         user_id: &UserId,
@@ -817,6 +884,7 @@ impl Users {
         Ok(())
     }

+    #[tracing::instrument(skip(self, user_id, device_id, device))]
     pub fn update_device_metadata(
         &self,
         user_id: &UserId,
@@ -842,6 +910,7 @@ impl Users {
     }

     /// Get device metadata.
+    #[tracing::instrument(skip(self, user_id, device_id))]
     pub fn get_device_metadata(
         &self,
         user_id: &UserId,
@@ -860,6 +929,7 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(self, user_id))]
     pub fn get_devicelist_version(&self, user_id: &UserId) -> Result> {
         self.userid_devicelistversion
             .get(user_id.as_bytes())?
@@ -870,6 +940,7 @@ impl Users {
         })
     }

+    #[tracing::instrument(skip(self, user_id))]
     pub fn all_devices_metadata<'a>(
         &'a self,
         user_id: &UserId,
@@ -886,6 +957,7 @@ impl Users {
     }

     /// Deactivate account
+    #[tracing::instrument(skip(self, user_id))]
     pub fn deactivate_account(&self, user_id: &UserId) -> Result<()> {
         // Remove all associated devices
         for device_id in self.all_device_ids(user_id) {
diff --git a/src/error.rs b/src/error.rs
index f62bdee0..eda522a2 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,4 +1,3 @@
-use log::warn;
 use ruma::{
     api::client::{
         error::{Error as RumaError, ErrorKind},
@@ -7,17 +6,18 @@ use ruma::{
     ServerName,
 };
 use thiserror::Error;
+use tracing::warn;

 #[cfg(feature = "conduit_bin")]
 use {
     crate::RumaResponse,
     http::StatusCode,
-    log::error,
     rocket::{
         response::{self, Responder},
         Request,
     },
     ruma::api::client::r0::uiaa::UiaaResponse,
+    tracing::error,
 };

 pub type Result = std::result::Result;
diff --git a/src/main.rs b/src/main.rs
index a5face78..9f6cced4 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -17,6 +17,7 @@ use std::sync::Arc;
 use database::Config;
 pub use database::Database;
 pub use error::{Error, Result};
+use opentelemetry::trace::Tracer;
 pub use pdu::PduEvent;
 pub use rocket::State;
 use ruma::api::client::error::ErrorKind;
@@ -31,8 +32,7 @@ use rocket::{
     routes, Request,
 };
 use tokio::sync::RwLock;
-use tracing::span;
-use tracing_subscriber::{prelude::*, Registry};
+use tracing_subscriber::{prelude::*, EnvFilter};

 fn setup_rocket(config: Figment, data: Arc>) -> rocket::Rocket {
     rocket::custom(config)
@@ -201,38 +201,57 @@ async fn main() {
         .extract::()
         .expect("It looks like your config is invalid. Please take a look at the error");

-    let mut _span: Option = None;
-    let mut _enter: Option> = None;
+    let start = async {
+        config.warn_deprecated();
+
+        let db = Database::load_or_create(&config)
+            .await
+            .expect("config is valid");
+
+        let rocket = setup_rocket(raw_config, Arc::clone(&db))
+            .ignite()
+            .await
+            .unwrap();
+
+        Database::start_on_shutdown_tasks(db, rocket.shutdown()).await;
+
+        rocket.launch().await.unwrap();
+    };

     if config.allow_jaeger {
-        let (tracer, _uninstall) = opentelemetry_jaeger::new_pipeline()
+        let tracer = opentelemetry_jaeger::new_pipeline()
             .with_service_name("conduit")
-            .install()
+            .install_simple()
             .unwrap();
-        let telemetry = tracing_opentelemetry::layer().with_tracer(tracer);
-        Registry::default().with(telemetry).try_init().unwrap();
-        _span = Some(span!(tracing::Level::INFO, "app_start", work_units = 2));
-        _enter = Some(_span.as_ref().unwrap().enter());
+        let span = tracer.start("conduit");
+        start.await;
+        drop(span);
     } else {
         std::env::set_var("RUST_LOG", &config.log);
-        tracing_subscriber::fmt::init();
+
+        let registry = tracing_subscriber::Registry::default();
+        if config.tracing_flame {
+            let (flame_layer, _guard) =
+                tracing_flame::FlameLayer::with_file("./tracing.folded").unwrap();
+            let flame_layer = flame_layer.with_empty_samples(false);
+
+            let filter_layer = EnvFilter::new("trace,h2=off");
+
+            let subscriber = registry.with(filter_layer).with(flame_layer);
+            tracing::subscriber::set_global_default(subscriber).unwrap();
+            start.await;
+        } else {
+            let fmt_layer = tracing_subscriber::fmt::Layer::new();
+            let filter_layer = EnvFilter::try_from_default_env()
+                .or_else(|_| EnvFilter::try_new("info"))
+                .unwrap();
+
+            let subscriber = registry.with(filter_layer).with(fmt_layer);
+            tracing::subscriber::set_global_default(subscriber).unwrap();
+            start.await;
+        }
     }
-
-    config.warn_deprecated();
-
-    let db = Database::load_or_create(&config)
-        .await
-        .expect("config is valid");
-
-    let rocket = setup_rocket(raw_config, Arc::clone(&db))
-        .ignite()
-        .await
-        .unwrap();
-
-    Database::start_on_shutdown_tasks(db, rocket.shutdown()).await;
-
-    rocket.launch().await.unwrap();
 }

 #[catch(404)]
diff --git a/src/pdu.rs b/src/pdu.rs
index f8dddd9e..00eda5b1 100644
--- a/src/pdu.rs
+++ b/src/pdu.rs
@@ -1,5 +1,4 @@
 use crate::Error;
-use log::error;
 use ruma::{
     events::{
         pdu::EventHash, room::member::MemberEventContent, AnyEphemeralRoomEvent,
@@ -13,6 +12,7 @@ use ruma::{
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use std::{cmp::Ordering, collections::BTreeMap, convert::TryFrom};
+use tracing::error;

 #[derive(Clone, Deserialize, Serialize, Debug)]
 pub struct PduEvent {
diff --git a/src/ruma_wrapper.rs b/src/ruma_wrapper.rs
index a4beac64..21214399 100644
--- a/src/ruma_wrapper.rs
+++ b/src/ruma_wrapper.rs
@@ -10,7 +10,6 @@ use std::ops::Deref;
 #[cfg(feature = "conduit_bin")]
 use {
     crate::server_server,
-    log::{debug, warn},
     rocket::{
         data::{self, ByteUnit, Data, FromData},
         http::Status,
@@ -23,6 +22,7 @@ use {
     std::collections::BTreeMap,
     std::convert::TryFrom,
     std::io::Cursor,
+    tracing::{debug, warn},
 };

 /// This struct converts rocket requests into ruma structs by converting them into http requests
@@ -45,6 +45,7 @@ where
 {
     type Error = ();

+    #[tracing::instrument(skip(request, data))]
     async fn from_data(
         request: &'a Request<'_>,
         data: Data<'a>,
@@ -256,7 +257,10 @@ where
         match ruma::signatures::verify_json(&pub_key_map, &request_map) {
             Ok(()) => (None, None, Some(origin), false),
             Err(e) => {
-                warn!("Failed to verify json request from {}: {}", origin, e);
+                warn!(
+                    "Failed to verify json request from {}: {}\n{:?}",
+                    origin, e, request_map
+                );

                 if request.uri().to_string().contains('@') {
                     warn!("Request uri contained '@' character. Make sure your reverse proxy gives Conduit the raw uri (apache: use nocanon)");
diff --git a/src/server_server.rs b/src/server_server.rs
index f725dce4..232c5d46 100644
--- a/src/server_server.rs
+++ b/src/server_server.rs
@@ -5,7 +5,6 @@ use crate::{
 };
 use get_profile_information::v1::ProfileField;
 use http::header::{HeaderValue, AUTHORIZATION, HOST};
-use log::{debug, error, info, trace, warn};
 use regex::Regex;
 use rocket::response::content::Json;
 use ruma::{
@@ -63,7 +62,8 @@ use std::{
     sync::{Arc, RwLock},
     time::{Duration, Instant, SystemTime},
 };
-use tokio::sync::Semaphore;
+use tokio::sync::{MutexGuard, Semaphore};
+use tracing::{debug, error, info, trace, warn};

 #[cfg(feature = "conduit_bin")]
 use rocket::{get, post, put};
@@ -838,6 +838,7 @@ type AsyncRecursiveResult<'a, T, E> = Pin(
     origin: &'a ServerName,
     event_id: &'a EventId,
@@ -1156,6 +1157,18 @@ pub fn handle_incoming_pdu<'a>(
     }
     debug!("Auth check succeeded.");

+    // We start looking at current room state now, so let's lock the room
+
+    let mutex = Arc::clone(
+        db.globals
+            .roomid_mutex
+            .write()
+            .unwrap()
+            .entry(room_id.clone())
+            .or_default(),
+    );
+    let mutex_lock = mutex.lock().await;
+
     // Now we calculate the set of extremities this room has after the incoming event has been
     // applied. We start with the previous extremities (aka leaves)
     let mut extremities = db
@@ -1170,8 +1183,8 @@ pub fn handle_incoming_pdu<'a>(
         }
     }

-    // Only keep those extremities we don't have in our timeline yet
-    extremities.retain(|id| !matches!(db.rooms.get_non_outlier_pdu_json(id), Ok(Some(_))));
+    // Only keep those extremities that were not referenced yet
+    extremities.retain(|id| !matches!(db.rooms.is_event_referenced(&room_id, id), Ok(true)));

     let mut extremity_statehashes = Vec::new();

@@ -1301,9 +1314,11 @@ pub fn handle_incoming_pdu<'a>(
                 return Err("State resolution failed, either an event could not be found or deserialization"
                     .into());
             }
         };
+        state
     };

+    debug!("starting soft fail auth check");
     // 13. Check if the event passes auth based on the "current state" of the room, if not "soft fail" it
     let soft_fail = !state_res::event_auth::auth_check(
         &room_version,
@@ -1322,11 +1337,11 @@ pub fn handle_incoming_pdu<'a>(
     pdu_id = Some(
         append_incoming_pdu(
             &db,
-            &room_id,
             &incoming_pdu,
             val,
             extremities,
             &state_at_incoming_event,
+            &mutex_lock,
         )
         .await
         .map_err(|_| "Failed to add pdu to db.".to_owned())?,
@@ -1350,6 +1365,7 @@ pub fn handle_incoming_pdu<'a>(
     }

     // Event has passed all auth/stateres checks
+    drop(mutex_lock);
     Ok(pdu_id)
     })
 }
@@ -1626,25 +1642,15 @@ pub(crate) async fn fetch_signing_keys(

 /// Append the incoming event setting the state snapshot to the state from the
 /// server that sent the event.
-#[tracing::instrument(skip(db))]
+#[tracing::instrument(skip(db, pdu, pdu_json, new_room_leaves, state, _mutex_lock))]
 async fn append_incoming_pdu(
     db: &Database,
-    room_id: &RoomId,
     pdu: &PduEvent,
     pdu_json: CanonicalJsonObject,
     new_room_leaves: HashSet,
     state: &StateMap>,
+    _mutex_lock: &MutexGuard<'_, ()>, // Take mutex guard to make sure users get the room mutex
 ) -> Result> {
-    let mutex = Arc::clone(
-        db.globals
-            .roomid_mutex
-            .write()
-            .unwrap()
-            .entry(room_id.clone())
-            .or_default(),
-    );
-    let mutex_lock = mutex.lock().await;
-
     // We append to state before appending the pdu, so we don't have a moment in time with the
     // pdu without it's state. This is okay because append_pdu can't fail.
     db.rooms
@@ -1657,8 +1663,6 @@ async fn append_incoming_pdu(
         &db,
     )?;

-    drop(mutex_lock);
-
     for appservice in db.appservice.iter_all()?.filter_map(|r| r.ok()) {
         if let Some(namespaces) = appservice.1.get("namespaces") {
             let users = namespaces
diff --git a/src/utils.rs b/src/utils.rs
index a4dfe036..60a4e0cf 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -9,6 +9,7 @@ use std::{
     time::{SystemTime, UNIX_EPOCH},
 };

+#[tracing::instrument]
 pub fn millis_since_unix_epoch() -> u64 {
     SystemTime::now()
         .duration_since(UNIX_EPOCH)
@@ -48,16 +49,19 @@ pub fn generate_keypair() -> Vec {
 }

 /// Parses the bytes into an u64.
+#[tracing::instrument(skip(bytes))]
 pub fn u64_from_bytes(bytes: &[u8]) -> Result {
     let array: [u8; 8] = bytes.try_into()?;
     Ok(u64::from_be_bytes(array))
 }

 /// Parses the bytes into a string.
+#[tracing::instrument(skip(bytes))]
 pub fn string_from_bytes(bytes: &[u8]) -> Result {
     String::from_utf8(bytes.to_vec())
 }

+#[tracing::instrument(skip(length))]
 pub fn random_string(length: usize) -> String {
     thread_rng()
         .sample_iter(&rand::distributions::Alphanumeric)
@@ -67,6 +71,7 @@ pub fn random_string(length: usize) -> String {
 }

 /// Calculate a new hash for the given password
+#[tracing::instrument(skip(password))]
 pub fn calculate_hash(password: &str) -> Result {
     let hashing_config = Config {
         variant: Variant::Argon2id,
@@ -77,6 +82,7 @@ pub fn calculate_hash(password: &str) -> Result {
     argon2::hash_encoded(password.as_bytes(), salt.as_bytes(), &hashing_config)
 }

+#[tracing::instrument(skip(iterators, check_order))]
 pub fn common_elements(
     mut iterators: impl Iterator>>,
     check_order: impl Fn(&[u8], &[u8]) -> Ordering,
@@ -104,6 +110,7 @@ pub fn common_elements(
 /// Fallible conversion from any value that implements `Serialize` to a `CanonicalJsonObject`.
 ///
 /// `value` must serialize to an `serde_json::Value::Object`.
+#[tracing::instrument(skip(value))]
 pub fn to_canonical_object(
     value: T,
 ) -> Result {
@@ -117,6 +124,7 @@ pub fn to_canonical_object(
     }
 }

+#[tracing::instrument(skip(deserializer))]
 pub fn deserialize_from_str<
     'de,
     D: serde::de::Deserializer<'de>,