Merge remote-tracking branch 'origin/main' into devel

Bryan Stitt 2023-05-11 20:05:47 -07:00
commit 36cc884112
12 changed files with 484 additions and 510 deletions

Cargo.lock generated
View File

@ -159,39 +159,38 @@ dependencies = [
[[package]]
name = "async-io"
version = "1.12.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794"
checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
dependencies = [
"async-lock",
"autocfg",
"cfg-if",
"concurrent-queue",
"futures-lite",
"libc",
"log",
"parking",
"polling",
"rustix",
"slab",
"socket2",
"waker-fn",
"windows-sys 0.42.0",
]
[[package]]
name = "async-lock"
version = "2.6.0"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685"
checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7"
dependencies = [
"event-listener",
"futures-lite",
]
[[package]]
name = "async-stream"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad445822218ce64be7a341abfb0b1ea43b5c23aa83902542a4542e78309d8e5e"
checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51"
dependencies = [
"async-stream-impl",
"futures-core",
@ -200,13 +199,13 @@ dependencies = [
[[package]]
name = "async-stream-impl"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4655ae1a7b0cdf149156f780c5bf3f1352bc53cbd9e0a361a7ef7b22947e965"
checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.15",
]
[[package]]
@ -364,7 +363,7 @@ dependencies = [
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"miniz_oxide 0.6.2",
"object",
"rustc-demangle",
]
@ -420,9 +419,9 @@ checksum = "2dabbe35f96fb9507f7330793dc490461b2962659ac5d427181e451a623751d1"
[[package]]
name = "bigdecimal"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6aaf33151a6429fe9211d1b276eafdf70cdff28b071e76c0b0e1503221ea3744"
checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa"
dependencies = [
"num-bigint",
"num-integer",
@ -501,9 +500,9 @@ dependencies = [
[[package]]
name = "borsh"
version = "0.10.2"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40f9ca3698b2e4cb7c15571db0abc5551dca417a21ae8140460b50309bb2cc62"
checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b"
dependencies = [
"borsh-derive",
"hashbrown 0.13.2",
@ -511,9 +510,9 @@ dependencies = [
[[package]]
name = "borsh-derive"
version = "0.10.2"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598b3eacc6db9c3ee57b22707ad8f6a8d2f6d442bfe24ffeb8cbb70ca59e6a35"
checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7"
dependencies = [
"borsh-derive-internal",
"borsh-schema-derive-internal",
@ -524,9 +523,9 @@ dependencies = [
[[package]]
name = "borsh-derive-internal"
version = "0.10.2"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "186b734fa1c9f6743e90c95d7233c9faab6360d1a96d4ffa19d9cfd1e9350f8a"
checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb"
dependencies = [
"proc-macro2",
"quote",
@ -535,9 +534,9 @@ dependencies = [
[[package]]
name = "borsh-schema-derive-internal"
version = "0.10.2"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99b7ff1008316626f485991b960ade129253d4034014616b94f309a15366cc49"
checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd"
dependencies = [
"proc-macro2",
"quote",
@ -732,9 +731,9 @@ dependencies = [
[[package]]
name = "clap"
version = "3.2.23"
version = "3.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
dependencies = [
"atty",
"bitflags",
@ -758,9 +757,9 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "3.2.18"
version = "3.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65"
checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008"
dependencies = [
"heck 0.4.1",
"proc-macro-error",
@ -855,9 +854,9 @@ dependencies = [
[[package]]
name = "color-eyre"
version = "0.5.11"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f1885697ee8a177096d42f158922251a41973117f6d8a234cee94b9509157b7"
checksum = "5a667583cca8c4f8436db8de46ea8233c42a7d9ae424a82d338f2e4675229204"
dependencies = [
"backtrace",
"color-spantrace",
@ -871,9 +870,9 @@ dependencies = [
[[package]]
name = "color-spantrace"
version = "0.1.6"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6eee477a4a8a72f4addd4de416eb56d54bc307b284d6601bafdee1f4ea462d1"
checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce"
dependencies = [
"once_cell",
"owo-colors",
@ -897,9 +896,9 @@ dependencies = [
[[package]]
name = "concurrent-queue"
version = "2.1.0"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e"
checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c"
dependencies = [
"crossbeam-utils",
]
@ -933,9 +932,9 @@ dependencies = [
[[package]]
name = "console-api"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e57ff02e8ad8e06ab9731d5dc72dc23bef9200778eae1a89d555d8c42e5d4a86"
checksum = "c2895653b4d9f1538a83970077cb01dfc77a4810524e51a110944688e916b18e"
dependencies = [
"prost",
"prost-types",
@ -945,9 +944,9 @@ dependencies = [
[[package]]
name = "console-subscriber"
version = "0.1.8"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22a3a81dfaf6b66bce5d159eddae701e3a002f194d378cbf7be5f053c281d9be"
checksum = "57ab2224a0311582eb03adba4caaf18644f7b1f10a760803a803b9b605187fc7"
dependencies = [
"console-api",
"crossbeam-channel",
@ -1171,9 +1170,9 @@ dependencies = [
[[package]]
name = "csv"
version = "1.2.0"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af91f40b7355f82b0a891f50e70399475945bb0b0da4f1700ce60761c9d3e359"
checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad"
dependencies = [
"csv-core",
"itoa",
@ -1201,9 +1200,9 @@ dependencies = [
[[package]]
name = "cxx"
version = "1.0.91"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62"
checksum = "f61f1b6389c3fe1c316bf8a4dccc90a38208354b330925bce1f74a6c4756eb93"
dependencies = [
"cc",
"cxxbridge-flags",
@ -1213,9 +1212,9 @@ dependencies = [
[[package]]
name = "cxx-build"
version = "1.0.91"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690"
checksum = "12cee708e8962df2aeb38f594aae5d827c022b6460ac71a7a3e2c3c2aae5a07b"
dependencies = [
"cc",
"codespan-reporting",
@ -1223,24 +1222,24 @@ dependencies = [
"proc-macro2",
"quote",
"scratch",
"syn 1.0.109",
"syn 2.0.15",
]
[[package]]
name = "cxxbridge-flags"
version = "1.0.91"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf"
checksum = "7944172ae7e4068c533afbb984114a56c46e9ccddda550499caa222902c7f7bb"
[[package]]
name = "cxxbridge-macro"
version = "1.0.91"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892"
checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.15",
]
[[package]]
@ -1447,9 +1446,9 @@ checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
[[package]]
name = "dotenvy"
version = "0.15.6"
version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03d8c417d7a8cb362e0c37e5d815f5eb7c37f79ff93707329d5a194e42e54ca0"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "dunce"
@ -1599,17 +1598,6 @@ dependencies = [
"termcolor",
]
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno"
version = "0.3.1"
@ -2050,12 +2038,12 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
version = "1.0.25"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
dependencies = [
"crc32fast",
"miniz_oxide",
"miniz_oxide 0.7.1",
]
[[package]]
@ -2068,7 +2056,7 @@ dependencies = [
"futures-sink",
"nanorand",
"pin-project",
"spin 0.9.5",
"spin 0.9.8",
]
[[package]]
@ -2211,9 +2199,9 @@ checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
[[package]]
name = "futures-lite"
version = "1.12.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48"
checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
dependencies = [
"fastrand",
"futures-core",
@ -2507,7 +2495,7 @@ dependencies = [
"atomic-polyfill",
"hash32",
"rustc_version",
"spin 0.9.5",
"spin 0.9.8",
"stable_deref_trait",
]
@ -2698,16 +2686,16 @@ dependencies = [
[[package]]
name = "iana-time-zone"
version = "0.1.53"
version = "0.1.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
"winapi",
"windows",
]
[[package]]
@ -2798,9 +2786,9 @@ dependencies = [
[[package]]
name = "influxdb2"
version = "0.4.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1b8812dff6d6d39dd8a8aec9771fe9639d6546e47a356fea5c12ff7f2f25359"
checksum = "320c502ec0cf39e9b9fc36afc57435944fdfb6f15e8e8b0ecbc9a871d398cf63"
dependencies = [
"base64 0.13.1",
"bytes",
@ -2883,9 +2871,9 @@ dependencies = [
[[package]]
name = "io-uring"
version = "0.5.12"
version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41c85eff7f7c8d3ab8c7ec87313c0c194bbaf4371bb7d40f80293ba01bce8264"
checksum = "dd1e1a01cfb924fd8c5c43b6827965db394f5a3a16c599ce03452266e1cf984c"
dependencies = [
"bitflags",
"libc",
@ -2909,14 +2897,14 @@ dependencies = [
[[package]]
name = "is-terminal"
version = "0.4.3"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef"
checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f"
dependencies = [
"hermit-abi 0.3.1",
"io-lifetimes",
"rustix 0.36.8",
"windows-sys 0.45.0",
"rustix",
"windows-sys 0.48.0",
]
[[package]]
@ -3052,9 +3040,9 @@ checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"
[[package]]
name = "libz-sys"
version = "1.1.8"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf"
checksum = "56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db"
dependencies = [
"cc",
"libc",
@ -3071,12 +3059,6 @@ dependencies = [
"cc",
]
[[package]]
name = "linux-raw-sys"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
[[package]]
name = "linux-raw-sys"
version = "0.3.7"
@ -3185,6 +3167,15 @@ dependencies = [
"adler",
]
[[package]]
name = "miniz_oxide"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
]
[[package]]
name = "mio"
version = "0.7.14"
@ -3585,9 +3576,9 @@ dependencies = [
[[package]]
name = "os_info"
version = "3.6.0"
version = "3.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c424bc68d15e0778838ac013b5b3449544d8133633d8016319e7e05a820b8c0"
checksum = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e"
dependencies = [
"log",
"serde",
@ -3631,9 +3622,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "1.3.0"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2386b4ebe91c2f7f51082d4cefa145d030e33a1842a96b12e4885cc3c01f7a55"
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
[[package]]
name = "pagerduty-rs"
@ -3675,9 +3666,9 @@ dependencies = [
[[package]]
name = "parking"
version = "2.0.0"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e"
[[package]]
name = "parking_lot"
@ -3743,9 +3734,9 @@ dependencies = [
[[package]]
name = "paste"
version = "1.0.11"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79"
[[package]]
name = "path-slash"
@ -3792,9 +3783,9 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
[[package]]
name = "pest"
version = "2.5.5"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660"
checksum = "e68e84bfb01f0507134eac1e9b410a12ba379d064eab48c50ba4ce329a527b70"
dependencies = [
"thiserror",
"ucd-trie",
@ -3802,9 +3793,9 @@ dependencies = [
[[package]]
name = "pest_derive"
version = "2.5.5"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ac3922aac69a40733080f53c1ce7f91dcf57e1a5f6c52f421fadec7fbdc4b69"
checksum = "6b79d4c71c865a25a4322296122e3924d30bc8ee0834c8bfc8b95f7f054afbfb"
dependencies = [
"pest",
"pest_generator",
@ -3812,22 +3803,22 @@ dependencies = [
[[package]]
name = "pest_generator"
version = "2.5.5"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d06646e185566b5961b4058dd107e0a7f56e77c3f484549fb119867773c0f202"
checksum = "6c435bf1076437b851ebc8edc3a18442796b30f1728ffea6262d59bbe28b077e"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.15",
]
[[package]]
name = "pest_meta"
version = "2.5.5"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6f60b2ba541577e2a0c307c8f39d1439108120eb7903adeb6497fa880c59616"
checksum = "745a452f8eb71e39ffd8ee32b3c5f51d03845f99786fa9b68db6ff509c505411"
dependencies = [
"once_cell",
"pest",
@ -3981,22 +3972,24 @@ dependencies = [
[[package]]
name = "pkg-config"
version = "0.3.26"
version = "0.3.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
[[package]]
name = "polling"
version = "2.5.2"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6"
checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
dependencies = [
"autocfg",
"bitflags",
"cfg-if",
"concurrent-queue",
"libc",
"log",
"wepoll-ffi",
"windows-sys 0.42.0",
"pin-project-lite",
"windows-sys 0.48.0",
]
[[package]]
@ -4120,9 +4113,9 @@ dependencies = [
[[package]]
name = "prost"
version = "0.11.8"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e48e50df39172a3e7eb17e14642445da64996989bc212b583015435d39a58537"
checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
dependencies = [
"bytes",
"prost-derive",
@ -4130,9 +4123,9 @@ dependencies = [
[[package]]
name = "prost-derive"
version = "0.11.8"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ea9b0f8cbe5e15a8a042d030bd96668db28ecb567ec37d691971ff5731d2b1b"
checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4"
dependencies = [
"anyhow",
"itertools",
@ -4143,9 +4136,9 @@ dependencies = [
[[package]]
name = "prost-types"
version = "0.11.8"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "379119666929a1afd7a043aa6cf96fa67a6dce9af60c88095a4686dbce4c9c88"
checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13"
dependencies = [
"prost",
]
@ -4500,9 +4493,9 @@ dependencies = [
[[package]]
name = "rkyv"
version = "0.7.40"
version = "0.7.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c30f1d45d9aa61cbc8cd1eb87705470892289bb2d01943e7803b873a57404dc3"
checksum = "21499ed91807f07ae081880aabb2ccc0235e9d88011867d984525e9a4c3cfa3e"
dependencies = [
"bytecheck",
"hashbrown 0.12.3",
@ -4514,9 +4507,9 @@ dependencies = [
[[package]]
name = "rkyv_derive"
version = "0.7.40"
version = "0.7.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff26ed6c7c4dfc2aa9480b86a60e3c7233543a270a680e10758a507c5a4ce476"
checksum = "ac1c672430eb41556291981f45ca900a0239ad007242d1cb4b4167af842db666"
dependencies = [
"proc-macro2",
"quote",
@ -4589,9 +4582,9 @@ dependencies = [
[[package]]
name = "rust_decimal"
version = "1.28.1"
version = "1.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13cf35f7140155d02ba4ec3294373d513a3c7baa8364c162b030e33c61520a8"
checksum = "26bd36b60561ee1fb5ec2817f198b6fd09fa571c897a5e86d1487cfc2b096dfc"
dependencies = [
"arrayvec",
"borsh",
@ -4607,9 +4600,9 @@ dependencies = [
[[package]]
name = "rustc-demangle"
version = "0.1.21"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustc-hash"
@ -4632,20 +4625,6 @@ dependencies = [
"semver",
]
[[package]]
name = "rustix"
version = "0.36.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644"
dependencies = [
"bitflags",
"errno 0.2.8",
"io-lifetimes",
"libc",
"linux-raw-sys 0.1.4",
"windows-sys 0.45.0",
]
[[package]]
name = "rustix"
version = "0.37.19"
@ -4653,10 +4632,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d"
dependencies = [
"bitflags",
"errno 0.3.1",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys 0.3.7",
"linux-raw-sys",
"windows-sys 0.48.0",
]
@ -4767,9 +4746,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "scratch"
version = "1.0.3"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2"
checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
[[package]]
name = "scrypt"
@ -4869,9 +4848,9 @@ dependencies = [
[[package]]
name = "sea-query"
version = "0.28.3"
version = "0.28.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fbe015dbdaa7d8829d71c1e14fb6289e928ac256b93dfda543c85cd89d6f03"
checksum = "bbab99b8cd878ab7786157b7eb8df96333a6807cc6e45e8888c85b51534b401a"
dependencies = [
"bigdecimal",
"chrono",
@ -4884,9 +4863,9 @@ dependencies = [
[[package]]
name = "sea-query-binder"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03548c63aec07afd4fd190923e0160d2f2fc92def27470b54154cf232da6203b"
checksum = "4cea85029985b40dfbf18318d85fe985c04db7c1b4e5e8e0a0a0cdff5f1e30f9"
dependencies = [
"bigdecimal",
"chrono",
@ -5450,9 +5429,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spin"
version = "0.9.5"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dccf47db1b41fa1573ed27ccf5e08e3ca771cb994f776668c5ebda893b248fc"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
dependencies = [
"lock_api",
]
@ -5500,9 +5479,9 @@ dependencies = [
[[package]]
name = "sqlx"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9249290c05928352f71c077cc44a464d880c63f26f7534728cca008e135c0428"
checksum = "f8de3b03a925878ed54a954f621e64bf55a3c1bd29652d0d1a17830405350188"
dependencies = [
"sqlx-core",
"sqlx-macros",
@ -5510,9 +5489,9 @@ dependencies = [
[[package]]
name = "sqlx-core"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcbc16ddba161afc99e14d1713a453747a2b07fc097d2009f4c300ec99286105"
checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029"
dependencies = [
"ahash 0.7.6",
"atoi",
@ -5565,9 +5544,9 @@ dependencies = [
[[package]]
name = "sqlx-macros"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b850fa514dc11f2ee85be9d055c512aa866746adfacd1cb42d867d68e6a5b0d9"
checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9"
dependencies = [
"dotenvy",
"either",
@ -5584,9 +5563,9 @@ dependencies = [
[[package]]
name = "sqlx-rt"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24c5b2d25fa654cc5f841750b8e1cdedbe21189bf9a9382ee90bfa9dd3562396"
checksum = "804d3f245f894e61b1e6263c84b23ca675d96753b5abfd5cc8597d86806e8024"
dependencies = [
"once_cell",
"tokio",
@ -5742,7 +5721,7 @@ dependencies = [
"cfg-if",
"fastrand",
"redox_syscall 0.3.5",
"rustix 0.37.19",
"rustix",
"windows-sys 0.45.0",
]
@ -5915,9 +5894,9 @@ dependencies = [
[[package]]
name = "tokio-console"
version = "0.1.7"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fce5f0a53cd350a00b3a37dcb85758eb3c333beeb334b40584f7747b1e01374e"
checksum = "1738c09da4ebb54a823aa8808d403d2fbe7c461c1f8924b90321ef1bbc10143c"
dependencies = [
"atty",
"clap",
@ -5937,6 +5916,7 @@ dependencies = [
"tokio",
"toml 0.5.11",
"tonic",
"tower",
"tracing",
"tracing-subscriber 0.3.17",
"tui",
@ -6085,14 +6065,13 @@ dependencies = [
[[package]]
name = "tonic"
version = "0.8.3"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb"
checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a"
dependencies = [
"async-stream",
"async-trait",
"axum",
"base64 0.13.1",
"base64 0.21.0",
"bytes",
"futures-core",
"futures-util",
@ -6104,15 +6083,12 @@ dependencies = [
"percent-encoding",
"pin-project",
"prost",
"prost-derive",
"tokio",
"tokio-stream",
"tokio-util",
"tower",
"tower-layer",
"tower-service",
"tracing",
"tracing-futures",
]
[[package]]
@ -6191,9 +6167,9 @@ dependencies = [
[[package]]
name = "tracing-core"
version = "0.1.30"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
@ -6201,12 +6177,12 @@ dependencies = [
[[package]]
name = "tracing-error"
version = "0.1.2"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4d7c0b83d4a500748fa5879461652b361edf5c9d51ede2a2ac03875ca185e24"
checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e"
dependencies = [
"tracing",
"tracing-subscriber 0.2.25",
"tracing-subscriber 0.3.17",
]
[[package]]
@ -6645,6 +6621,7 @@ dependencies = [
"entities",
"env_logger",
"ethers",
"ewma",
"fdlimit",
"flume",
"fstrings",
@ -6716,15 +6693,6 @@ dependencies = [
"webpki",
]
[[package]]
name = "wepoll-ffi"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb"
dependencies = [
"cc",
]
[[package]]
name = "winapi"
version = "0.3.9"
@ -6972,9 +6940,9 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
[[package]]
name = "zip"
version = "0.6.4"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef"
checksum = "7e92305c174683d78035cbf1b70e18db6329cc0f1b9cae0a52ca90bf5bfe7125"
dependencies = [
"aes 0.7.5",
"byteorder",

View File

@ -1,4 +1,4 @@
FROM rust:1.68.2-bullseye AS builder
FROM rust:1.69.0-bullseye AS builder
WORKDIR /app
ENV CARGO_TERM_COLOR always
@ -22,6 +22,7 @@ RUN apt-get update && \
cmake \
liblz4-dev \
libpthread-stubs0-dev \
libsasl2-dev \
libssl-dev \
libzstd-dev \
make \
@ -34,20 +35,20 @@ COPY . .
# test the application with cargo-nextest
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/app/target \
cargo nextest run
cargo nextest run --features "rdkafka-src tokio-uring" --no-default-features
# build the application
# using a "release" profile (which install does) is **very** important
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/app/target \
cargo install \
--features tokio-uring \
--features "rdkafka-src tokio-uring" \
--locked \
--features rdkafka-src \
--no-default-features \
--path ./web3_proxy \
--profile faster_release \
--root /usr/local/bin
--root /usr/local/bin \
;
#
# We do not need the Rust toolchain to run the binary!

View File

@ -1,2 +1,2 @@
[toolchain]
channel = "1.68.2"
channel = "1.69.0"

View File

@ -38,6 +38,7 @@ derive_more = "0.99.17"
dotenv = "0.15.0"
env_logger = "0.10.0"
ethers = { version = "2.0.4", default-features = false, features = ["rustls", "ws"] }
ewma = "0.1.1"
fdlimit = "0.2.1"
flume = "0.10.14"
fstrings = "0.2"
@ -48,9 +49,9 @@ handlebars = "4.3.7"
hashbrown = { version = "0.13.2", features = ["serde"] }
hdrhistogram = "7.5.2"
http = "0.2.9"
hostname = "0.3.1"
influxdb2 = { version = "0.4", features = ["rustls"] }
influxdb2-structmap = "0.2.0"
hostname = "0.3.1"
ipnet = "2.7.2"
itertools = "0.10.5"
log = "0.4.17"

View File

@ -670,12 +670,13 @@ impl Web3ProxyApp {
// TODO: capacity from configs
// all these are the same size, so no need for a weigher
// TODO: ttl on this? or is max_capacity fine?
// TODO: this used to have a time_to_idle
let pending_transactions = Cache::builder()
.max_capacity(10_000)
// TODO: different chains might handle this differently
// TODO: what should we set? 5 minutes is arbitrary. the nodes themselves hold onto transactions for much longer
.time_to_idle(Duration::from_secs(300))
// TODO: this used to be time_to_update, but
.time_to_live(Duration::from_secs(300))
.build_with_hasher(hashbrown::hash_map::DefaultHashBuilder::default());
// responses can be very different in sizes, so this is a cache with a max capacity and a weigher
@ -695,7 +696,7 @@ impl Web3ProxyApp {
}
})
// TODO: what should we set? 10 minutes is arbitrary. the nodes themselves hold onto transactions for much longer
.time_to_idle(Duration::from_secs(600))
.time_to_live(Duration::from_secs(600))
.build_with_hasher(hashbrown::hash_map::DefaultHashBuilder::default());
// all the users are the same size, so no need for a weigher
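
For context on the change from time_to_idle to time_to_live above: with a builder API like the one shown (it matches the moka crate's), time_to_live evicts an entry a fixed duration after it was inserted no matter how often it is read, while time_to_idle only evicts entries that have gone untouched for that duration. A minimal standalone sketch, assuming moka's sync cache; the project's own cache types may differ:

use std::time::Duration;
use moka::sync::Cache;

fn main() {
    // evicted ~300s after insert, even if it keeps being read
    let ttl_cache: Cache<String, String> = Cache::builder()
        .max_capacity(10_000)
        .time_to_live(Duration::from_secs(300))
        .build();

    // evicted only after ~300s with no reads or writes
    let tti_cache: Cache<String, String> = Cache::builder()
        .max_capacity(10_000)
        .time_to_idle(Duration::from_secs(300))
        .build();

    ttl_cache.insert("tx".into(), "pending".into());
    tti_cache.insert("tx".into(), "pending".into());
    assert!(ttl_cache.get("tx").is_some());
    assert!(tti_cache.get("tx").is_some());
}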
@ -1208,14 +1209,60 @@ impl Web3ProxyApp {
}
}
/// try to send transactions to the best available rpcs with private mempools
/// if no private rpcs are configured, then some public rpcs are used instead
async fn try_send_protected(
self: &Arc<Self>,
authorization: &Arc<Authorization>,
request: &JsonRpcRequest,
request_metadata: Arc<RequestMetadata>,
num_public_rpcs: Option<usize>,
) -> Web3ProxyResult<JsonRpcForwardedResponse> {
if let Some(protected_rpcs) = self.private_rpcs.as_ref() {
if !protected_rpcs.is_empty() {
let protected_response = protected_rpcs
.try_send_all_synced_connections(
authorization,
request,
Some(request_metadata),
None,
None,
Level::Trace,
None,
true,
)
.await?;
return Ok(protected_response);
}
}
// no private rpcs to send to. send to a few public rpcs
// try_send_all_upstream_servers puts the request id into the response. no need to do that ourselves here.
self.balanced_rpcs
.try_send_all_synced_connections(
authorization,
request,
Some(request_metadata),
None,
None,
Level::Trace,
num_public_rpcs,
true,
)
.await
}
// TODO: more robust stats and kafka logic! if we use the try operator, they aren't saved!
// TODO: move this to another module
async fn proxy_cached_request(
self: &Arc<Self>,
authorization: &Arc<Authorization>,
mut request: JsonRpcRequest,
head_block_num: Option<U64>,
) -> Web3ProxyResult<(JsonRpcForwardedResponse, Vec<Arc<Web3Rpc>>)> {
// trace!("Received request: {:?}", request);
// TODO: move this code to another module so that its easy to turn this trace logging on in dev
trace!("Received request: {:?}", request);
let request_metadata = Arc::new(RequestMetadata::new(request.num_bytes()));
@ -1287,7 +1334,7 @@ impl Web3ProxyApp {
// TODO: if eth_chainId or net_version, serve those without querying the backend
// TODO: don't clone?
let partial_response: serde_json::Value = match request_method.as_ref() {
let response: JsonRpcForwardedResponse = match request_method.as_ref() {
// lots of commands are blocked
method @ ("db_getHex"
| "db_getString"
@ -1357,14 +1404,11 @@ impl Web3ProxyApp {
| "shh_version") => {
// i don't think we will ever support these methods
// TODO: what error code?
return Ok((
JsonRpcForwardedResponse::from_string(
format!("method unsupported: {}", method),
None,
Some(request_id),
),
vec![],
));
JsonRpcForwardedResponse::from_string(
format!("method unsupported: {}", method),
None,
Some(request_id),
)
}
// TODO: implement these commands
method @ ("eth_getFilterChanges"
@ -1376,36 +1420,29 @@ impl Web3ProxyApp {
| "eth_uninstallFilter") => {
// TODO: unsupported command stat. use the count to prioritize new features
// TODO: what error code?
return Ok((
// TODO: what code?
JsonRpcForwardedResponse::from_string(
format!("not yet implemented: {}", method),
None,
Some(request_id),
),
vec![],
));
JsonRpcForwardedResponse::from_string(
format!("not yet implemented: {}", method),
None,
Some(request_id),
)
}
method @ ("debug_bundler_sendBundleNow"
| "debug_bundler_clearState"
| "debug_bundler_dumpMempool") => {
return Ok((
JsonRpcForwardedResponse::from_string(
// TODO: we should probably have some escaping on this. but maybe serde will protect us enough
format!("method unsupported: {}", method),
None,
Some(request_id),
),
vec![],
));
JsonRpcForwardedResponse::from_string(
// TODO: we should probably have some escaping on this. but maybe serde will protect us enough
format!("method unsupported: {}", method),
None,
Some(request_id),
)
}
method @ ("eth_sendUserOperation"
_method @ ("eth_sendUserOperation"
| "eth_estimateUserOperationGas"
| "eth_getUserOperationByHash"
| "eth_getUserOperationReceipt"
| "eth_supportedEntryPoints") => match self.bundler_4337_rpcs.as_ref() {
Some(bundler_4337_rpcs) => {
let response = bundler_4337_rpcs
bundler_4337_rpcs
.try_proxy_connection(
authorization,
request,
@ -1413,56 +1450,39 @@ impl Web3ProxyApp {
None,
None,
)
.await?;
// TODO: DRY
let rpcs = request_metadata.backend_requests.lock().clone();
if let Some(stat_sender) = self.stat_sender.as_ref() {
let response_stat = RpcQueryStats::new(
Some(method.to_string()),
authorization.clone(),
request_metadata,
response.num_bytes(),
);
stat_sender
.send_async(response_stat.into())
.await
.map_err(Web3ProxyError::SendAppStatError)?;
}
return Ok((response, rpcs));
.await?
}
None => {
// TODO: stats!
// TODO: not synced error?
return Err(anyhow::anyhow!("no bundler_4337_rpcs available").into());
}
},
// some commands can use local data or caches
"eth_accounts" => {
// no stats on this. its cheap
serde_json::Value::Array(vec![])
JsonRpcForwardedResponse::from_value(serde_json::Value::Array(vec![]), request_id)
}
"eth_blockNumber" => {
match head_block_num.or(self.balanced_rpcs.head_block_num()) {
Some(head_block_num) => {
json!(head_block_num)
JsonRpcForwardedResponse::from_value(json!(head_block_num), request_id)
}
None => {
// TODO: what does geth do if this happens?
return Err(Web3ProxyError::UnknownBlockNumber);
// TODO: standard not synced error
return Err(Web3ProxyError::NoServersSynced);
}
}
}
"eth_chainId" => json!(U64::from(self.config.chain_id)),
"eth_chainId" => JsonRpcForwardedResponse::from_value(
json!(U64::from(self.config.chain_id)),
request_id,
),
// TODO: eth_callBundle (https://docs.flashbots.net/flashbots-auction/searchers/advanced/rpc-endpoint#eth_callbundle)
// TODO: eth_cancelPrivateTransaction (https://docs.flashbots.net/flashbots-auction/searchers/advanced/rpc-endpoint#eth_cancelprivatetransaction, but maybe just reject)
// TODO: eth_sendPrivateTransaction (https://docs.flashbots.net/flashbots-auction/searchers/advanced/rpc-endpoint#eth_sendprivatetransaction)
"eth_coinbase" => {
// no need for serving coinbase
// no stats on this. its cheap
json!(Address::zero())
JsonRpcForwardedResponse::from_value(json!(Address::zero()), request_id)
}
"eth_estimateGas" => {
let mut response = self
@ -1501,22 +1521,52 @@ impl Web3ProxyApp {
gas_estimate += gas_increase;
json!(gas_estimate)
JsonRpcForwardedResponse::from_value(json!(gas_estimate), request_id)
}
"eth_getTransactionReceipt" | "eth_getTransactionByHash" => {
// try to get the transaction without specifying a min_block_height
let mut response = self
.balanced_rpcs
.try_proxy_connection(
authorization,
request.clone(),
Some(&request_metadata),
None,
None,
)
.await?;
// if we got "null", it is probably because the tx is old. retry on nodes with old block data
if let Some(ref result) = response.result {
if result.get() == "null" {
request_metadata
.archive_request
.store(true, atomic::Ordering::Release);
response = self
.balanced_rpcs
.try_proxy_connection(
authorization,
request,
Some(&request_metadata),
Some(&U64::one()),
None,
)
.await?;
}
}
response
}
// TODO: eth_gasPrice that does awesome magic to predict the future
"eth_hashrate" => {
// no stats on this. its cheap
json!(U64::zero())
}
"eth_hashrate" => JsonRpcForwardedResponse::from_value(json!(U64::zero()), request_id),
"eth_mining" => {
// no stats on this. its cheap
serde_json::Value::Bool(false)
JsonRpcForwardedResponse::from_value(serde_json::Value::Bool(false), request_id)
}
// TODO: eth_sendBundle (flashbots command)
// TODO: eth_sendBundle (flashbots/eden command)
// broadcast transactions to all private rpcs at once
"eth_sendRawTransaction" => {
// TODO: how should we handle private_mode here?
let default_num = match authorization.checks.proxy_mode {
let num_public_rpcs = match authorization.checks.proxy_mode {
// TODO: how many balanced rpcs should we send to? configurable? percentage of total?
ProxyMode::Best | ProxyMode::Debug => Some(4),
ProxyMode::Fastest(0) => None,
@ -1527,47 +1577,25 @@ impl Web3ProxyApp {
ProxyMode::Versus => None,
};
let (private_rpcs, num) = if let Some(private_rpcs) = self.private_rpcs.as_ref() {
if !private_rpcs.is_empty() && authorization.checks.private_txs {
// if we are sending the transaction privately, no matter the proxy_mode, we send to ALL private rpcs
(private_rpcs, None)
} else {
// TODO: send to balanced_rpcs AND private_rpcs
(&self.balanced_rpcs, default_num)
}
} else {
(&self.balanced_rpcs, default_num)
};
let head_block_num = head_block_num
.or(self.balanced_rpcs.head_block_num())
.ok_or_else(|| Web3ProxyError::NoServersSynced)?;
// TODO: error/wait if no head block!
// try_send_all_upstream_servers puts the request id into the response. no need to do that ourselves here.
// TODO: what lag should we allow?
let mut response = private_rpcs
.try_send_all_synced_connections(
let mut response = self
.try_send_protected(
authorization,
&request,
Some(request_metadata.clone()),
Some(&head_block_num.saturating_sub(2.into())),
None,
Level::Trace,
num,
true,
request_metadata.clone(),
num_public_rpcs,
)
.await?;
// sometimes we get an error that the transaction is already known by our nodes,
// that's not really an error. Just return the hash like a successful response would.
// that's not really an error. Return the hash like a successful response would.
// TODO: move this to a helper function
if let Some(response_error) = response.error.as_ref() {
if response_error.code == -32000
&& (response_error.message == "ALREADY_EXISTS: already known"
|| response_error.message
== "INTERNAL_ERROR: existing tx with same hash")
{
// TODO: expect instead of web3_context?
let params = request
.params
.web3_context("there must be params if we got this far")?;
@ -1598,9 +1626,7 @@ impl Web3ProxyApp {
}
}
let rpcs = request_metadata.backend_requests.lock().clone();
// emit stats
// emit transaction count stats
if let Some(salt) = self.config.public_recent_ips_salt.as_ref() {
if let Some(tx_hash) = response.result.clone() {
let now = Utc::now().timestamp();
@ -1638,47 +1664,35 @@ impl Web3ProxyApp {
}
}
return Ok((response, rpcs));
response
}
"eth_syncing" => {
// no stats on this. its cheap
// TODO: return a real response if all backends are syncing or if no servers in sync
serde_json::Value::Bool(false)
}
"eth_subscribe" => {
return Ok((
JsonRpcForwardedResponse::from_str(
"notifications not supported. eth_subscribe is only available over a websocket",
Some(-32601),
Some(request_id),
),
vec![],
));
}
"eth_unsubscribe" => {
return Ok((
JsonRpcForwardedResponse::from_str(
"notifications not supported. eth_unsubscribe is only available over a websocket",
Some(-32601),
Some(request_id),
),
vec![],
));
JsonRpcForwardedResponse::from_value(serde_json::Value::Bool(false), request_id)
}
"eth_subscribe" => JsonRpcForwardedResponse::from_str(
"notifications not supported. eth_subscribe is only available over a websocket",
Some(-32601),
Some(request_id),
),
"eth_unsubscribe" => JsonRpcForwardedResponse::from_str(
"notifications not supported. eth_unsubscribe is only available over a websocket",
Some(-32601),
Some(request_id),
),
"net_listening" => {
// no stats on this. its cheap
// TODO: only if there are some backends on balanced_rpcs?
serde_json::Value::Bool(true)
}
"net_peerCount" => {
// no stats on this. its cheap
// TODO: do something with proxy_mode here?
json!(U64::from(self.balanced_rpcs.num_synced_rpcs()))
}
"web3_clientVersion" => {
// no stats on this. its cheap
serde_json::Value::String(APP_USER_AGENT.to_string())
JsonRpcForwardedResponse::from_value(serde_json::Value::Bool(true), request_id)
}
"net_peerCount" => JsonRpcForwardedResponse::from_value(
json!(U64::from(self.balanced_rpcs.num_synced_rpcs())),
request_id,
),
"web3_clientVersion" => JsonRpcForwardedResponse::from_value(
serde_json::Value::String(APP_USER_AGENT.to_string()),
request_id,
),
"web3_sha3" => {
// returns Keccak-256 (not the standardized SHA3-256) of the given data.
match &request.params {
@ -1713,32 +1727,23 @@ impl Web3ProxyApp {
let hash = H256::from(keccak256(param));
json!(hash)
JsonRpcForwardedResponse::from_value(json!(hash), request_id)
}
_ => {
// TODO: this needs the correct error code in the response
// TODO: emit stat?
return Ok((
JsonRpcForwardedResponse::from_str(
"invalid request",
Some(StatusCode::BAD_REQUEST.as_u16().into()),
Some(request_id),
),
vec![],
));
JsonRpcForwardedResponse::from_str(
"invalid request",
Some(StatusCode::BAD_REQUEST.as_u16().into()),
Some(request_id),
)
}
}
}
"test" => {
return Ok((
JsonRpcForwardedResponse::from_str(
"The method test does not exist/is not available.",
Some(-32601),
Some(request_id),
),
vec![],
));
}
"test" => JsonRpcForwardedResponse::from_str(
"The method test does not exist/is not available.",
Some(-32601),
Some(request_id),
),
// anything else gets sent to backend rpcs and cached
method => {
if method.starts_with("admin_") {
@ -1893,32 +1898,14 @@ impl Web3ProxyApp {
// replace the id with our request's id.
response.id = request_id;
// TODO: DRY!
let rpcs = request_metadata.backend_requests.lock().clone();
if let Some(stat_sender) = self.stat_sender.as_ref() {
let response_stat = RpcQueryStats::new(
Some(method.to_string()),
authorization.clone(),
request_metadata,
response.num_bytes(),
);
stat_sender
.send_async(response_stat.into())
.await
.map_err(Web3ProxyError::SendAppStatError)?;
}
return Ok((response, rpcs));
response
}
};
let response = JsonRpcForwardedResponse::from_value(partial_response, request_id);
// TODO: DRY
// save the rpcs so they can be included in a response header
let rpcs = request_metadata.backend_requests.lock().clone();
// send stats used for accounting and graphs
if let Some(stat_sender) = self.stat_sender.as_ref() {
let response_stat = RpcQueryStats::new(
Some(request_method),
@ -1933,6 +1920,7 @@ impl Web3ProxyApp {
.map_err(Web3ProxyError::SendAppStatError)?;
}
// send debug info as a kafka log
if let Some((kafka_topic, kafka_key, kafka_headers)) = kafka_stuff {
let kafka_producer = self
.kafka_producer

View File

@ -131,10 +131,12 @@ struct JsonRpcChainIdResult {
}
async fn get_chain_id(rpc: &str, client: &reqwest::Client) -> anyhow::Result<u64> {
// empty params aren't required by the spec, but some rpc providers require them
let get_chain_id_request = json!({
"id": "1",
"jsonrpc": "2.0",
"method": "eth_chainId",
"params": [],
});
// TODO: loop until chain id is found?
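
A minimal sketch of the round trip this helper performs, assuming reqwest (with its json feature), serde, serde_json, and anyhow, which the surrounding code already uses; the struct and function names here are illustrative, not the file's own:

use serde::Deserialize;
use serde_json::json;

#[derive(Deserialize)]
struct ChainIdResponse {
    result: String, // hex-encoded chain id, e.g. "0x1" for mainnet
}

async fn fetch_chain_id(rpc: &str, client: &reqwest::Client) -> anyhow::Result<u64> {
    let body = json!({
        "id": "1",
        "jsonrpc": "2.0",
        "method": "eth_chainId",
        // empty params included because some providers reject requests without them
        "params": [],
    });

    let response: ChainIdResponse = client.post(rpc).json(&body).send().await?.json().await?;

    let hex = response.result.trim_start_matches("0x");
    Ok(u64::from_str_radix(hex, 16)?)
}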

View File

@ -177,12 +177,13 @@ pub async fn block_needed(
"eth_getLogs" => {
// TODO: think about this more
// TODO: jsonrpc has a specific code for this
// TODO: this shouldn't be a 500. this should be a 400. 500 will make haproxy retry a bunch
let obj = params
.get_mut(0)
.ok_or_else(|| anyhow::anyhow!("invalid format. no params"))?
.ok_or_else(|| Web3ProxyError::BadRequest("invalid format. no params".to_string()))?
.as_object_mut()
.ok_or_else(|| Web3ProxyError::BadRequest("invalid format".to_string()))?;
.ok_or_else(|| {
Web3ProxyError::BadRequest("invalid format. params not object".to_string())
})?;
if obj.contains_key("blockHash") {
return Ok(BlockNeeded::CacheSuccessForever);

View File

@ -43,7 +43,7 @@ impl Serialize for Web3ProxyBlock {
state.serialize_field("age", &self.age())?;
let block = json!({
"block_hash": self.block.hash,
"hash": self.block.hash,
"parent_hash": self.block.parent_hash,
"number": self.block.number,
"timestamp": self.block.timestamp,

View File

@ -157,19 +157,21 @@ impl Web3Rpcs {
// these blocks don't have full transactions, but they do have rather variable amounts of transaction hashes
// TODO: how can we do the weigher better? need to know actual allocated size
// TODO: time_to_idle instead?
// TODO: limits from config
let blocks_by_hash: BlocksByHashCache = Cache::builder()
.max_capacity(1024 * 1024 * 1024)
.weigher(|_k, v: &Web3ProxyBlock| {
1 + v.block.transactions.len().try_into().unwrap_or(u32::MAX)
})
.time_to_idle(Duration::from_secs(600))
.time_to_live(Duration::from_secs(30 * 60))
.build_with_hasher(hashbrown::hash_map::DefaultHashBuilder::default());
// all block numbers are the same size, so no need for weigher
// TODO: limits from config
// TODO: time_to_idle instead?
let blocks_by_number = Cache::builder()
.time_to_idle(Duration::from_secs(600))
.time_to_live(Duration::from_secs(30 * 60))
.max_capacity(10_000)
.build_with_hasher(hashbrown::hash_map::DefaultHashBuilder::default());
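
For context on the weigher above: once a weigher is installed, max_capacity becomes a total weight budget rather than an entry count, so the block cache is sized by (roughly) how many transaction hashes the cached blocks carry instead of by the number of blocks. A standalone sketch of that behavior with the same builder API, assuming the moka crate:

use moka::sync::Cache;

fn main() {
    // capacity is measured in weighted units (here: bytes of the value),
    // not in number of entries
    let cache: Cache<u64, Vec<u8>> = Cache::builder()
        .max_capacity(1_000_000)
        .weigher(|_block_number, block_bytes: &Vec<u8>| {
            block_bytes.len().try_into().unwrap_or(u32::MAX)
        })
        .build();

    cache.insert(1, vec![0u8; 400_000]);
    cache.insert(2, vec![0u8; 400_000]);
    cache.insert(3, vec![0u8; 400_000]);
    // total weight now exceeds the budget, so the cache will (asynchronously)
    // evict entries until it fits again
}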
@ -509,12 +511,16 @@ impl Web3Rpcs {
min_block_needed: Option<&U64>,
max_block_needed: Option<&U64>,
) -> Web3ProxyResult<OpenRequestResult> {
let usable_rpcs_by_tier_and_head_number: BTreeMap<(u64, Option<U64>), Vec<Arc<Web3Rpc>>> = {
let usable_rpcs_by_tier_and_head_number: BTreeMap<
(u64, Reverse<Option<U64>>),
Vec<Arc<Web3Rpc>>,
> = {
let mut m = BTreeMap::new();
if self.watch_consensus_head_sender.is_none() {
// pick any server
let mut m = BTreeMap::new();
let key = (0, None);
let key = (0, Reverse(None));
for x in self.by_name.read().values() {
if skip.contains(x) {
@ -525,8 +531,6 @@ impl Web3Rpcs {
m.entry(key).or_insert_with(Vec::new).push(x.clone());
}
m
} else {
let synced_connections = self.watch_consensus_rpcs_sender.borrow().clone();
@ -567,9 +571,6 @@ impl Web3Rpcs {
trace!("needed_blocks_comparison: {:?}", needed_blocks_comparison);
// collect "usable_rpcs_by_head_num_and_weight"
// TODO: MAKE SURE None SORTS LAST?
let mut m = BTreeMap::new();
match needed_blocks_comparison {
cmp::Ordering::Less => {
// need an old block. check all the rpcs. ignore rpcs that are still syncing
@ -632,7 +633,7 @@ impl Web3Rpcs {
}
}
let key = (x.tier, Some(*x_head_num));
let key = (x.tier, Reverse(Some(*x_head_num)));
m.entry(key).or_insert_with(Vec::new).push(x);
}
@ -645,7 +646,7 @@ impl Web3Rpcs {
// the key doesn't matter if we are checking synced connections
// they are all at the same block and it is already sized to what we need
let key = (0, None);
let key = (0, Reverse(None));
for x in synced_connections.best_rpcs.iter() {
if skip.contains(x) {
@ -662,9 +663,9 @@ impl Web3Rpcs {
return Ok(OpenRequestResult::NotReady);
}
}
m
}
m
};
trace!(
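
The key type change above wraps the head block number in std::cmp::Reverse, which flips the ordering within each tier: when iterating the BTreeMap, the highest head number comes first and None sorts last, which is what the "MAKE SURE None SORTS LAST?" TODO was concerned with. A small standalone demonstration, using plain u64 instead of the project's U64 type:

use std::cmp::Reverse;
use std::collections::BTreeMap;

fn main() {
    let mut rpcs: BTreeMap<(u64, Reverse<Option<u64>>), &str> = BTreeMap::new();

    rpcs.insert((0, Reverse(Some(100))), "tier 0, head 100");
    rpcs.insert((0, Reverse(Some(98))), "tier 0, head 98");
    rpcs.insert((0, Reverse(None)), "tier 0, no head number");
    rpcs.insert((1, Reverse(Some(100))), "tier 1, head 100");

    for ((tier, Reverse(head)), label) in &rpcs {
        println!("tier {tier}, head {head:?}: {label}");
    }
    // prints tier 0 head 100, then tier 0 head 98, then tier 0 None, then tier 1
}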
@ -1099,7 +1100,7 @@ impl Web3Rpcs {
/// be sure there is a timeout on this or it might loop forever
#[allow(clippy::too_many_arguments)]
pub async fn try_send_all_synced_connections(
&self,
self: &Arc<Self>,
authorization: &Arc<Authorization>,
request: &JsonRpcRequest,
request_metadata: Option<Arc<RequestMetadata>>,

View File

@ -526,12 +526,12 @@ impl Web3Rpc {
.context(format!("waiting for request handle on {}", self))?
.request(
"eth_chainId",
&json!(Option::None::<()>),
&json!(Vec::<()>::new()),
Level::Trace.into(),
unlocked_provider.clone(),
)
.await;
// trace!("found_chain_id: {:?}", found_chain_id);
trace!("found_chain_id: {:#?}", found_chain_id);
match found_chain_id {
Ok(found_chain_id) => {

View File

@ -218,14 +218,11 @@ impl OpenRequestHandle {
.active_requests
.fetch_sub(1, std::sync::atomic::Ordering::Relaxed);
// // TODO: i think ethers already has trace logging (and does it much more fancy)
// trace!(
// "response from {} for {} {:?}: {:?}",
// self.rpc,
// method,
// params,
// response,
// );
// TODO: i think ethers already has trace logging (and does it much more fancy)
trace!(
"response from {} for {} {:?}: {:?}",
self.rpc, method, params, response,
);
if let Err(err) = &response {
// only save reverts for some types of calls

View File

@ -14,7 +14,7 @@ use futures::stream;
use hashbrown::HashMap;
use influxdb2::api::write::TimestampPrecision;
use influxdb2::models::DataPoint;
use log::{error, info};
use log::{error, info, trace};
use migration::sea_orm::{self, DatabaseConnection, EntityTrait};
use migration::{Expr, OnConflict};
use std::num::NonZeroU64;
@ -114,9 +114,10 @@ impl RpcQueryStats {
}
}
/// all queries are aggregated
/// all rpc keys are aggregated in the global stats
/// TODO: should we store "anon" or "registered" as a key just to be able to split graphs?
fn global_timeseries_key(&self) -> RpcQueryKey {
// we include the method because that can be helpful for predicting load
let method = self.method.clone();
// we don't store origin in the timeseries db. its only used for optional accounting
let origin = None;
@ -133,37 +134,37 @@ impl RpcQueryStats {
}
}
fn opt_in_timeseries_key(&self) -> RpcQueryKey {
/// rpc keys can opt into more detailed tracking
fn opt_in_timeseries_key(&self) -> Option<RpcQueryKey> {
// we don't store origin in the timeseries db. its only optionaly used for accounting
let origin = None;
let (method, rpc_secret_key_id) = match self.authorization.checks.tracking_level {
// depending on tracking level, we either skip opt-in stats, track without method, or track with method
let method = match self.authorization.checks.tracking_level {
TrackingLevel::None => {
// this RPC key requested no tracking. this is the default.
// we still want graphs though, so we just use None as the rpc_secret_key_id
(self.method.clone(), None)
return None;
}
TrackingLevel::Aggregated => {
// this RPC key requested tracking aggregated across all methods
(None, self.authorization.checks.rpc_secret_key_id)
None
}
TrackingLevel::Detailed => {
// detailed tracking keeps track of the method
(
self.method.clone(),
self.authorization.checks.rpc_secret_key_id,
)
self.method.clone()
}
};
RpcQueryKey {
let key = RpcQueryKey {
response_timestamp: self.response_timestamp,
archive_needed: self.archive_request,
error_response: self.error_response,
method,
rpc_secret_key_id,
rpc_secret_key_id: self.authorization.checks.rpc_secret_key_id,
origin,
}
};
Some(key)
}
}
@ -201,6 +202,10 @@ pub struct StatBuffer {
tsdb_save_interval_seconds: u32,
db_save_interval_seconds: u32,
billing_period_seconds: i64,
global_timeseries_buffer: HashMap<RpcQueryKey, BufferedRpcQueryStats>,
opt_in_timeseries_buffer: HashMap<RpcQueryKey, BufferedRpcQueryStats>,
accounting_db_buffer: HashMap<RpcQueryKey, BufferedRpcQueryStats>,
timestamp_precision: TimestampPrecision,
}
impl BufferedRpcQueryStats {
@ -310,18 +315,12 @@ impl BufferedRpcQueryStats {
Ok(())
}
// TODO: change this to return a DataPoint?
async fn save_timeseries(
async fn build_timeseries_point(
self,
bucket: &str,
measurement: &str,
chain_id: u64,
influxdb2_clent: &influxdb2::Client,
key: RpcQueryKey,
) -> anyhow::Result<()> {
// TODO: error if key.origin is set?
// TODO: what name?
) -> anyhow::Result<DataPoint> {
let mut builder = DataPoint::builder(measurement);
builder = builder.tag("chain_id", chain_id.to_string());
@ -347,16 +346,10 @@ impl BufferedRpcQueryStats {
.field("sum_response_bytes", self.sum_response_bytes as i64);
builder = builder.timestamp(key.response_timestamp);
let timestamp_precision = TimestampPrecision::Seconds;
let points = [builder.build()?];
let point = builder.build()?;
// TODO: bucket should be an enum so that we don't risk typos
influxdb2_clent
.write_with_precision(bucket, stream::iter(points), timestamp_precision)
.await?;
Ok(())
Ok(point)
}
}
@ -423,6 +416,7 @@ impl StatBuffer {
let (stat_sender, stat_receiver) = flume::unbounded();
let timestamp_precision = TimestampPrecision::Seconds;
let mut new = Self {
chain_id,
db_conn,
@ -430,6 +424,10 @@ impl StatBuffer {
db_save_interval_seconds,
tsdb_save_interval_seconds,
billing_period_seconds,
global_timeseries_buffer: Default::default(),
opt_in_timeseries_buffer: Default::default(),
accounting_db_buffer: Default::default(),
timestamp_precision,
};
// any errors inside this task will cause the application to exit
@ -452,11 +450,6 @@ impl StatBuffer {
let mut db_save_interval =
interval(Duration::from_secs(self.db_save_interval_seconds as u64));
// TODO: this is used for rpc_accounting_v2 and influxdb. give it a name to match that? "stat" of some kind?
let mut global_timeseries_buffer = HashMap::<RpcQueryKey, BufferedRpcQueryStats>::new();
let mut opt_in_timeseries_buffer = HashMap::<RpcQueryKey, BufferedRpcQueryStats>::new();
let mut accounting_db_buffer = HashMap::<RpcQueryKey, BufferedRpcQueryStats>::new();
// TODO: Somewhere here we should probably be updating the balance of the user
// And also update the credits used etc. for the referred user
@ -472,15 +465,15 @@ impl StatBuffer {
let global_timeseries_key = stat.global_timeseries_key();
global_timeseries_buffer.entry(global_timeseries_key).or_default().add(stat.clone());
self.global_timeseries_buffer.entry(global_timeseries_key).or_default().add(stat.clone());
let opt_in_timeseries_key = stat.opt_in_timeseries_key();
opt_in_timeseries_buffer.entry(opt_in_timeseries_key).or_default().add(stat.clone());
if let Some(opt_in_timeseries_key) = stat.opt_in_timeseries_key() {
self.opt_in_timeseries_buffer.entry(opt_in_timeseries_key).or_default().add(stat.clone());
}
}
if self.db_conn.is_some() {
accounting_db_buffer.entry(stat.accounting_key(self.billing_period_seconds)).or_default().add(stat);
self.accounting_db_buffer.entry(stat.accounting_key(self.billing_period_seconds)).or_default().add(stat);
}
}
Err(err) => {
@ -491,34 +484,16 @@ impl StatBuffer {
}
_ = db_save_interval.tick() => {
// info!("DB save internal tick");
let db_conn = self.db_conn.as_ref().expect("db connection should always exist if there are buffered stats");
// TODO: batch saves
for (key, stat) in accounting_db_buffer.drain() {
// TODO: i don't like passing key (which came from the stat) to the function on the stat. but it works for now
if let Err(err) = stat.save_db(self.chain_id, db_conn, key).await {
error!("unable to save accounting entry! err={:?}", err);
};
let count = self.save_relational_stats().await;
if count > 0 {
trace!("Saved {} stats to the relational db", count);
}
}
_ = tsdb_save_interval.tick() => {
// info!("TSDB save internal tick");
// TODO: batch saves
// TODO: better bucket names
let influxdb_client = self.influxdb_client.as_ref().expect("influxdb client should always exist if there are buffered stats");
for (key, stat) in global_timeseries_buffer.drain() {
// TODO: i don't like passing key (which came from the stat) to the function on the stat. but it works for now
if let Err(err) = stat.save_timeseries(bucket.clone().as_ref(), "global_proxy", self.chain_id, influxdb_client, key).await {
error!("unable to save global stat! err={:?}", err);
};
}
for (key, stat) in opt_in_timeseries_buffer.drain() {
// TODO: i don't like passing key (which came from the stat) to the function on the stat. but it works for now
if let Err(err) = stat.save_timeseries(bucket.clone().as_ref(), "opt_in_proxy", self.chain_id, influxdb_client, key).await {
error!("unable to save opt-in stat! err={:?}", err);
};
let count = self.save_tsdb_stats(&bucket).await;
if count > 0 {
trace!("Saved {} stats to the tsdb", count);
}
}
x = shutdown_receiver.recv() => {
@ -534,62 +509,102 @@ impl StatBuffer {
}
}
// TODO: dry
if let Some(db_conn) = self.db_conn.as_ref() {
info!(
"saving {} buffered accounting entries",
accounting_db_buffer.len(),
);
let saved_relational = self.save_relational_stats().await;
for (key, stat) in accounting_db_buffer.drain() {
if let Err(err) = stat.save_db(self.chain_id, db_conn, key).await {
error!(
"Unable to save accounting entry while shutting down! err={:?}",
err
);
};
}
}
info!("saved {} pending relational stats", saved_relational);
// TODO: dry
if let Some(influxdb_client) = self.influxdb_client.as_ref() {
info!(
"saving {} buffered global stats",
global_timeseries_buffer.len(),
);
let saved_tsdb = self.save_tsdb_stats(&bucket).await;
for (key, stat) in global_timeseries_buffer.drain() {
if let Err(err) = stat
.save_timeseries(&bucket, "global_proxy", self.chain_id, influxdb_client, key)
.await
{
error!(
"Unable to save global stat while shutting down! err={:?}",
err
);
};
}
info!(
"saving {} buffered opt-in stats",
opt_in_timeseries_buffer.len(),
);
for (key, stat) in opt_in_timeseries_buffer.drain() {
if let Err(err) = stat
.save_timeseries(&bucket, "opt_in_proxy", self.chain_id, influxdb_client, key)
.await
{
error!(
"unable to save opt-in stat while shutting down! err={:?}",
err
);
};
}
}
info!("saved {} pending tsdb stats", saved_tsdb);
info!("accounting and stat save loop complete");
Ok(())
}
async fn save_relational_stats(&mut self) -> usize {
let mut count = 0;
if let Some(db_conn) = self.db_conn.as_ref() {
count = self.accounting_db_buffer.len();
for (key, stat) in self.accounting_db_buffer.drain() {
// TODO: batch saves
// TODO: i don't like passing key (which came from the stat) to the function on the stat. but it works for now
if let Err(err) = stat.save_db(self.chain_id, db_conn, key).await {
error!("unable to save accounting entry! err={:?}", err);
};
}
}
count
}
// TODO: bucket should be an enum so that we don't risk typos
async fn save_tsdb_stats(&mut self, bucket: &str) -> usize {
let mut count = 0;
if let Some(influxdb_client) = self.influxdb_client.as_ref() {
// TODO: use stream::iter properly to avoid allocating this Vec
let mut points = vec![];
for (key, stat) in self.global_timeseries_buffer.drain() {
// TODO: i don't like passing key (which came from the stat) to the function on the stat. but it works for now
match stat
.build_timeseries_point("global_proxy", self.chain_id, key)
.await
{
Ok(point) => {
points.push(point);
}
Err(err) => {
error!("unable to build global stat! err={:?}", err);
}
};
}
for (key, stat) in self.opt_in_timeseries_buffer.drain() {
// TODO: i don't like passing key (which came from the stat) to the function on the stat. but it works for now
match stat
.build_timeseries_point("opt_in_proxy", self.chain_id, key)
.await
{
Ok(point) => {
points.push(point);
}
Err(err) => {
// TODO: if this errors, we throw away some of the pending stats! we should probably buffer them somewhere to be tried again
error!("unable to build opt-in stat! err={:?}", err);
}
};
}
count = points.len();
if count > 0 {
// TODO: put max_batch_size in config?
// TODO: i think the real limit is the byte size of the http request. so, a simple line count won't work very well
let max_batch_size = 100;
let mut num_left = count;
while num_left > 0 {
let batch_size = num_left.min(max_batch_size);
let p = points.split_off(batch_size);
num_left -= batch_size;
if let Err(err) = influxdb_client
.write_with_precision(bucket, stream::iter(p), self.timestamp_precision)
.await
{
// TODO: if this errors, we throw away some of the pending stats! we should probably buffer them somewhere to be tried again
error!("unable to save {} tsdb stats! err={:?}", batch_size, err);
}
}
}
}
count
}
}