
Merge remote-tracking branch 'upstream/main' into interstellar-next

perillamint committed 8 months ago
commit d170fc415d
23 changed files with 581 additions and 468 deletions
  1. .cargo/config (+1 -1)
  2. Cargo.lock (+168 -239)
  3. Cargo.toml (+18 -16)
  4. README.md (+6 -6)
  5. flake.lock (+6 -6)
  6. relay.nix (+2 -1)
  7. src/admin/client.rs (+34 -16)
  8. src/config.rs (+39 -7)
  9. src/data/actor.rs (+4 -2)
  10. src/data/state.rs (+25 -24)
  11. src/error.rs (+15 -3)
  12. src/extractors.rs (+6 -0)
  13. src/jobs.rs (+2 -11)
  14. src/jobs/contact.rs (+5 -1)
  15. src/jobs/deliver.rs (+7 -1)
  16. src/jobs/instance.rs (+15 -2)
  17. src/jobs/nodeinfo.rs (+11 -2)
  18. src/main.rs (+63 -25)
  19. src/middleware/verifier.rs (+6 -2)
  20. src/requests.rs (+109 -99)
  21. src/routes/index.rs (+1 -1)
  22. src/routes/media.rs (+9 -3)
  23. src/spawner.rs (+29 -0)

+ 1 - 1
.cargo/config

@@ -1,2 +1,2 @@
 [build]
-# rustflags = ["--cfg", "tokio_unstable"]
+rustflags = ["--cfg", "tokio_unstable"]

+ 168 - 239
Cargo.lock

File diff suppressed because it is too large

+ 18 - 16
Cargo.toml

@@ -1,7 +1,7 @@
 [package]
 name = "ap-relay"
 description = "A simple activitypub relay"
-version = "0.3.99"
+version = "0.3.102"
 authors = ["asonix <asonix@asonix.dog>"]
 license = "AGPL-3.0"
 readme = "README.md"
@@ -23,16 +23,11 @@ default = []
 [dependencies]
 anyhow = "1.0"
 actix-rt = "2.7.0"
-actix-web = { version = "4.0.1", default-features = false, features = [
-  "rustls",
-  "compress-brotli",
-  "compress-gzip",
-] }
-actix-webfinger = "0.4.0"
+actix-web = { version = "4.4.0", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls-0_21"] }
+actix-webfinger = { version = "0.5.0", default-features = false }
 activitystreams = "0.7.0-alpha.25"
 activitystreams-ext = "0.1.0-alpha.3"
 ammonia = "3.1.0"
-awc = { version = "3.0.0", default-features = false, features = ["rustls"] }
 bcrypt = "0.15"
 base64 = "0.21"
 clap = { version = "4.0.0", features = ["derive"] }
@@ -40,7 +35,7 @@ config = "0.13.0"
 console-subscriber = { version = "0.1", optional = true }
 dashmap = "5.1.0"
 dotenv = "0.15.0"
-flume = "0.10.14"
+flume = "0.11.0"
 futures-util = "0.3.17"
 lru = "0.11.0"
 metrics = "0.21.0"
@@ -50,15 +45,18 @@ metrics-exporter-prometheus = { version = "0.12.0", default-features = false, fe
 metrics-util = "0.15.0"
 mime = "0.3.16"
 minify-html = "0.11.0"
-opentelemetry = { version = "0.19", features = ["rt-tokio"] }
-opentelemetry-otlp = "0.12"
+opentelemetry = { version = "0.20", features = ["rt-tokio"] }
+opentelemetry-otlp = "0.13"
 pin-project-lite = "0.2.9"
 quanta = "0.11.0"
 rand = "0.8"
+reqwest = { version = "0.11", default-features = false, features = ["rustls-tls", "stream"]}
+reqwest-middleware = "0.2"
+reqwest-tracing = "0.4.5"
 ring = "0.16.20"
 rsa = { version = "0.9" }
 rsa-magic-public-key = "0.8.0"
-rustls = "0.20.7"
+rustls = "0.21.0"
 rustls-pemfile = "1.0.1"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -71,11 +69,10 @@ teloxide = { version = "0.12.0", default-features = false, features = [
 thiserror = "1.0"
 time = { version = "0.3.17", features = ["serde"] }
 tracing = "0.1"
-tracing-awc = "0.1.7"
 tracing-error = "0.2"
 tracing-futures = "0.2"
 tracing-log = "0.1"
-tracing-opentelemetry = "0.19"
+tracing-opentelemetry = "0.21"
 tracing-subscriber = { version = "0.3", features = [
   "ansi",
   "env-filter",
@@ -92,10 +89,15 @@ features = ["background-jobs-actix", "error-logging"]
 [dependencies.http-signature-normalization-actix]
 version = "0.10.1"
 default-features = false
-features = ["client", "server", "ring"]
+features = ["server", "ring"]
+
+[dependencies.http-signature-normalization-reqwest]
+version = "0.10.0"
+default-features = false
+features = ["middleware", "ring"]
 
 [dependencies.tracing-actix-web]
-version = "0.7.5"
+version = "0.7.6"
 
 [build-dependencies]
 anyhow = "1.0"

+ 6 - 6
README.md

@@ -108,7 +108,6 @@ LOCAL_BLURB="<p>Welcome to my cool relay where I have cool relay things happenin
 PROMETHEUS_ADDR=0.0.0.0
 PROMETHEUS_PORT=9000
 CLIENT_TIMEOUT=10
-CLIENT_POOL_SIZE=20
 DELIVER_CONCURRENCY=8
 SIGNATURE_THREADS=2
 ```
@@ -163,11 +162,6 @@ Optional - Port to bind to for serving the prometheus scrape endpoint
 ##### `CLIENT_TIMEOUT`
 Optional - How long the relay will hold open a connection (in seconds) to a remote server during
 fetches and deliveries. This defaults to 10
-##### `CLIENT_POOL_SIZE`
-Optional - How many connections the relay should maintain per thread. This value will be multiplied
-by the number of cores available to the relay. This defaults to 20, so a 4-core machine will have a
-maximum of 160 simultaneous outbound connections. If you run into problems related to "Too many open
-files", you can either decrease this number or increase the ulimit for your system.
 ##### `DELIVER_CONCURRENCY`
 Optional - How many deliver requests the relay should allow to be in-flight per thread. the default
 is 8
@@ -175,6 +169,12 @@ is 8
 Optional - Override number of threads used for signing and verifying requests. Default is
 `std::thread::available_parallelism()` (It tries to detect how many cores you have). If it cannot
 detect the correct number of cores, it falls back to 1.
+##### 'PROXY_URL'
+Optional - URL of an HTTP proxy to forward outbound requests through
+##### 'PROXY_USERNAME'
+Optional - username to provide to the HTTP proxy set with `PROXY_URL` through HTTP Basic Auth
+##### 'PROXY_PASSWORD'
+Optional - password to provide to the HTTP proxy set with `PROXY_URL` through HTTP Basic Auth
 
 ### Subscribing
 Mastodon admins can subscribe to this relay by adding the `/inbox` route to their relay settings.
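
The three new `PROXY_*` settings documented above combine with the README's environment listing; a minimal illustrative fragment (host, port, and credentials are placeholders, not values from this commit):

```
PROXY_URL=http://proxy.example:3128
PROXY_USERNAME=relay
PROXY_PASSWORD=changeme
```

If only one of `PROXY_USERNAME`/`PROXY_PASSWORD` is set, the parsing added in `src/config.rs` below logs a warning and configures the proxy without authentication.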

+ 6 - 6
flake.lock

@@ -5,11 +5,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1689068808,
-        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
+        "lastModified": 1692799911,
+        "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
+        "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44",
         "type": "github"
       },
       "original": {
@@ -20,11 +20,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1689679375,
-        "narHash": "sha256-LHUC52WvyVDi9PwyL1QCpaxYWBqp4ir4iL6zgOkmcb8=",
+        "lastModified": 1693003285,
+        "narHash": "sha256-5nm4yrEHKupjn62MibENtfqlP6pWcRTuSKrMiH9bLkc=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "684c17c429c42515bafb3ad775d2a710947f3d67",
+        "rev": "5690c4271f2998c304a45c91a0aeb8fb69feaea7",
         "type": "github"
       },
       "original": {

+ 2 - 1
relay.nix

@@ -6,12 +6,13 @@
 
 rustPlatform.buildRustPackage {
   pname = "relay";
-  version = "0.3.99";
+  version = "0.3.102";
   src = ./.;
   cargoLock.lockFile = ./Cargo.lock;
 
   PROTOC = "${protobuf}/bin/protoc";
   PROTOC_INCLUDE = "${protobuf}/include";
+  RUSTFLAGS = "--cfg tokio_unstable";
 
   nativeBuildInputs = [ ];
 

+ 34 - 16
src/admin/client.rs

@@ -3,12 +3,14 @@ use crate::{
     collector::Snapshot,
     config::{AdminUrlKind, Config},
     error::{Error, ErrorKind},
+    extractors::XApiToken,
 };
-use awc::Client;
+use actix_web::http::header::Header;
+use reqwest_middleware::ClientWithMiddleware;
 use serde::de::DeserializeOwned;
 
 pub(crate) async fn allow(
-    client: &Client,
+    client: &ClientWithMiddleware,
     config: &Config,
     domains: Vec<String>,
 ) -> Result<(), Error> {
@@ -16,7 +18,7 @@ pub(crate) async fn allow(
 }
 
 pub(crate) async fn disallow(
-    client: &Client,
+    client: &ClientWithMiddleware,
     config: &Config,
     domains: Vec<String>,
 ) -> Result<(), Error> {
@@ -24,7 +26,7 @@ pub(crate) async fn disallow(
 }
 
 pub(crate) async fn block(
-    client: &Client,
+    client: &ClientWithMiddleware,
     config: &Config,
     domains: Vec<String>,
 ) -> Result<(), Error> {
@@ -32,35 +34,50 @@ pub(crate) async fn block(
 }
 
 pub(crate) async fn unblock(
-    client: &Client,
+    client: &ClientWithMiddleware,
     config: &Config,
     domains: Vec<String>,
 ) -> Result<(), Error> {
     post_domains(client, config, domains, AdminUrlKind::Unblock).await
 }
 
-pub(crate) async fn allowed(client: &Client, config: &Config) -> Result<AllowedDomains, Error> {
+pub(crate) async fn allowed(
+    client: &ClientWithMiddleware,
+    config: &Config,
+) -> Result<AllowedDomains, Error> {
     get_results(client, config, AdminUrlKind::Allowed).await
 }
 
-pub(crate) async fn blocked(client: &Client, config: &Config) -> Result<BlockedDomains, Error> {
+pub(crate) async fn blocked(
+    client: &ClientWithMiddleware,
+    config: &Config,
+) -> Result<BlockedDomains, Error> {
     get_results(client, config, AdminUrlKind::Blocked).await
 }
 
-pub(crate) async fn connected(client: &Client, config: &Config) -> Result<ConnectedActors, Error> {
+pub(crate) async fn connected(
+    client: &ClientWithMiddleware,
+    config: &Config,
+) -> Result<ConnectedActors, Error> {
     get_results(client, config, AdminUrlKind::Connected).await
 }
 
-pub(crate) async fn stats(client: &Client, config: &Config) -> Result<Snapshot, Error> {
+pub(crate) async fn stats(
+    client: &ClientWithMiddleware,
+    config: &Config,
+) -> Result<Snapshot, Error> {
     get_results(client, config, AdminUrlKind::Stats).await
 }
 
-pub(crate) async fn last_seen(client: &Client, config: &Config) -> Result<LastSeen, Error> {
+pub(crate) async fn last_seen(
+    client: &ClientWithMiddleware,
+    config: &Config,
+) -> Result<LastSeen, Error> {
     get_results(client, config, AdminUrlKind::LastSeen).await
 }
 
 async fn get_results<T: DeserializeOwned>(
-    client: &Client,
+    client: &ClientWithMiddleware,
     config: &Config,
     url_kind: AdminUrlKind,
 ) -> Result<T, Error> {
@@ -68,9 +85,9 @@ async fn get_results<T: DeserializeOwned>(
 
     let iri = config.generate_admin_url(url_kind);
 
-    let mut res = client
+    let res = client
         .get(iri.as_str())
-        .insert_header(x_api_token)
+        .header(XApiToken::name(), x_api_token.to_string())
         .send()
         .await
         .map_err(|e| ErrorKind::SendRequest(iri.to_string(), e.to_string()))?;
@@ -88,7 +105,7 @@ async fn get_results<T: DeserializeOwned>(
 }
 
 async fn post_domains(
-    client: &Client,
+    client: &ClientWithMiddleware,
     config: &Config,
     domains: Vec<String>,
     url_kind: AdminUrlKind,
@@ -99,8 +116,9 @@ async fn post_domains(
 
     let res = client
         .post(iri.as_str())
-        .insert_header(x_api_token)
-        .send_json(&Domains { domains })
+        .header(XApiToken::name(), x_api_token.to_string())
+        .json(&Domains { domains })
+        .send()
         .await
         .map_err(|e| ErrorKind::SendRequest(iri.to_string(), e.to_string()))?;
 

+ 39 - 7
src/config.rs

@@ -46,7 +46,9 @@ pub(crate) struct ParsedConfig {
     prometheus_port: Option<u16>,
     deliver_concurrency: u64,
     client_timeout: u64,
-    client_pool_size: usize,
+    proxy_url: Option<IriString>,
+    proxy_username: Option<String>,
+    proxy_password: Option<String>,
     signature_threads: Option<usize>,
 }
 
@@ -73,7 +75,7 @@ pub struct Config {
     prometheus_config: Option<PrometheusConfig>,
     deliver_concurrency: u64,
     client_timeout: u64,
-    client_pool_size: usize,
+    proxy_config: Option<ProxyConfig>,
     signature_threads: Option<usize>,
 }
 
@@ -89,6 +91,12 @@ struct PrometheusConfig {
     port: u16,
 }
 
+#[derive(Clone, Debug)]
+struct ProxyConfig {
+    url: IriString,
+    auth: Option<(String, String)>,
+}
+
 #[derive(Debug)]
 pub enum UrlKind {
     Activity,
@@ -144,7 +152,7 @@ impl std::fmt::Debug for Config {
             .field("prometheus_config", &self.prometheus_config)
             .field("deliver_concurrency", &self.deliver_concurrency)
             .field("client_timeout", &self.client_timeout)
-            .field("client_pool_size", &self.client_pool_size)
+            .field("proxy_config", &self.proxy_config)
             .field("signature_threads", &self.signature_threads)
             .finish()
     }
@@ -177,7 +185,9 @@ impl Config {
             .set_default("prometheus_port", None as Option<u16>)?
             .set_default("deliver_concurrency", 8u64)?
             .set_default("client_timeout", 10u64)?
-            .set_default("client_pool_size", 20u64)?
+            .set_default("proxy_url", None as Option<&str>)?
+            .set_default("proxy_username", None as Option<&str>)?
+            .set_default("proxy_password", None as Option<&str>)?
             .set_default("signature_threads", None as Option<u64>)?
             .add_source(Environment::default())
             .build()?;
@@ -220,6 +230,26 @@ impl Config {
             (None, None) => None,
         };
 
+        let proxy_config = match (config.proxy_username, config.proxy_password) {
+            (Some(username), Some(password)) => config.proxy_url.map(|url| ProxyConfig {
+                url,
+                auth: Some((username, password)),
+            }),
+            (Some(_), None) => {
+                tracing::warn!(
+                    "PROXY_USERNAME is set but PROXY_PASSWORD is not set, not setting Proxy Auth"
+                );
+                config.proxy_url.map(|url| ProxyConfig { url, auth: None })
+            }
+            (None, Some(_)) => {
+                tracing::warn!(
+                    "PROXY_PASSWORD is set but PROXY_USERNAME is not set, not setting Proxy Auth"
+                );
+                config.proxy_url.map(|url| ProxyConfig { url, auth: None })
+            }
+            (None, None) => config.proxy_url.map(|url| ProxyConfig { url, auth: None }),
+        };
+
         let source_url = match Self::git_hash() {
             Some(hash) => format!(
                 "{}{}{hash}",
@@ -252,7 +282,7 @@ impl Config {
             prometheus_config,
             deliver_concurrency: config.deliver_concurrency,
             client_timeout: config.client_timeout,
-            client_pool_size: config.client_pool_size,
+            proxy_config,
             signature_threads: config.signature_threads,
         })
     }
@@ -468,8 +498,10 @@ impl Config {
         )
     }
 
-    pub(crate) fn client_pool_size(&self) -> usize {
-        self.client_pool_size
+    pub(crate) fn proxy_config(&self) -> Option<(&IriString, Option<(&str, &str)>)> {
+        self.proxy_config.as_ref().map(|ProxyConfig { url, auth }| {
+            (url, auth.as_ref().map(|(u, p)| (u.as_str(), p.as_str())))
+        })
     }
 
     pub(crate) fn source_code(&self) -> &IriString {

+ 4 - 2
src/data/actor.rs

@@ -2,7 +2,7 @@ use crate::{
     apub::AcceptedActors,
     db::{Actor, Db},
     error::{Error, ErrorKind},
-    requests::Requests,
+    requests::{BreakerStrategy, Requests},
 };
 use activitystreams::{iri_string::types::IriString, prelude::*};
 use std::time::{Duration, SystemTime};
@@ -71,7 +71,9 @@ impl ActorCache {
         id: &IriString,
         requests: &Requests,
     ) -> Result<Actor, Error> {
-        let accepted_actor = requests.fetch::<AcceptedActors>(id).await?;
+        let accepted_actor = requests
+            .fetch::<AcceptedActors>(id, BreakerStrategy::Require2XX)
+            .await?;
 
         let input_authority = id.authority_components().ok_or(ErrorKind::MissingDomain)?;
         let accepted_actor_id = accepted_actor

+ 25 - 24
src/data/state.rs

@@ -1,5 +1,4 @@
 use crate::{
-    config::{Config, UrlKind},
     data::NodeCache,
     db::Db,
     error::Error,
@@ -10,6 +9,7 @@ use activitystreams::iri_string::types::IriString;
 use actix_web::web;
 use lru::LruCache;
 use rand::thread_rng;
+use reqwest_middleware::ClientWithMiddleware;
 use rsa::{RsaPrivateKey, RsaPublicKey};
 use std::sync::{Arc, RwLock};
 use std::collections::HashMap;
@@ -19,10 +19,10 @@ use super::node::NodeConfig;
 
 #[derive(Clone)]
 pub struct State {
+    pub(crate) requests: Requests,
     pub(crate) public_key: RsaPublicKey,
-    private_key: RsaPrivateKey,
     object_cache: Arc<RwLock<LruCache<IriString, IriString>>>,
-    node_cache: NodeCache,
+    pub(crate) node_cache: NodeCache,
     pub(crate) node_config: Arc<RwLock<HashMap<String, NodeConfig>>>,
     breakers: Breakers,
     pub(crate) last_online: Arc<LastOnline>,
@@ -40,23 +40,6 @@ impl std::fmt::Debug for State {
 }
 
 impl State {
-    pub(crate) fn node_cache(&self) -> NodeCache {
-        self.node_cache.clone()
-    }
-
-    pub(crate) fn requests(&self, config: &Config, spawner: Spawner) -> Requests {
-        Requests::new(
-            config.generate_url(UrlKind::MainKey).to_string(),
-            self.private_key.clone(),
-            config.user_agent(),
-            self.breakers.clone(),
-            self.last_online.clone(),
-            config.client_pool_size(),
-            config.client_timeout(),
-            spawner,
-        )
-    }
-
     #[tracing::instrument(
         level = "debug",
         name = "Get inboxes for other domains",
@@ -117,7 +100,13 @@ impl State {
     }
 
     #[tracing::instrument(level = "debug", name = "Building state", skip_all)]
-    pub(crate) async fn build(db: Db, node_config: HashMap<String, NodeConfig>) -> Result<Self, Error> {
+    pub(crate) async fn build(
+        db: Db,
+        key_id: String,
+        spawner: Spawner,
+        client: ClientWithMiddleware,
+        node_config: HashMap<String, NodeConfig>,
+    ) -> Result<Self, Error> {
         let private_key = if let Ok(Some(key)) = db.private_key().await {
             tracing::debug!("Using existing key");
             key
@@ -136,17 +125,29 @@ impl State {
 
         let public_key = private_key.to_public_key();
 
+        let breakers = Breakers::default();
+        let last_online = Arc::new(LastOnline::empty());
+
+        let requests = Requests::new(
+            key_id,
+            private_key,
+            breakers.clone(),
+            last_online.clone(),
+            spawner,
+            client,
+        );
+
         let state = State {
+            requests,
             public_key,
-            private_key,
             object_cache: Arc::new(RwLock::new(LruCache::new(
                 (1024 * 8).try_into().expect("nonzero"),
             ))),
             node_cache: NodeCache::new(db.clone()),
             node_config: Arc::new(RwLock::new(node_config)),
-            breakers: Breakers::default(),
+            breakers,
             db,
-            last_online: Arc::new(LastOnline::empty()),
+            last_online,
         };
 
         Ok(state)

+ 15 - 3
src/error.rs

@@ -5,7 +5,7 @@ use actix_web::{
     http::StatusCode,
     HttpResponse,
 };
-use http_signature_normalization_actix::PrepareSignError;
+use http_signature_normalization_reqwest::SignError;
 use std::{convert::Infallible, fmt::Debug, io};
 use tracing_error::SpanTrace;
 
@@ -84,6 +84,12 @@ pub(crate) enum ErrorKind {
     #[error("Couldn't sign request")]
     SignRequest,
 
+    #[error("Couldn't make request")]
+    Reqwest(#[from] reqwest::Error),
+
+    #[error("Couldn't build client")]
+    ReqwestMiddleware(#[from] reqwest_middleware::Error),
+
     #[error("Couldn't parse IRI, {0}")]
     ParseIri(#[from] activitystreams::iri_string::validate::Error),
 
@@ -102,8 +108,8 @@ pub(crate) enum ErrorKind {
     #[error("Couldn't do the json thing, {0}")]
     Json(#[from] serde_json::Error),
 
-    #[error("Couldn't build signing string, {0}")]
-    PrepareSign(#[from] PrepareSignError),
+    #[error("Couldn't sign request, {0}")]
+    Sign(#[from] SignError),
 
     #[error("Couldn't sign digest")]
     Signature(#[from] rsa::signature::Error),
@@ -255,3 +261,9 @@ impl From<http_signature_normalization_actix::Canceled> for ErrorKind {
         Self::Canceled
     }
 }
+
+impl From<http_signature_normalization_reqwest::Canceled> for ErrorKind {
+    fn from(_: http_signature_normalization_reqwest::Canceled) -> Self {
+        Self::Canceled
+    }
+}

+ 6 - 0
src/extractors.rs

@@ -243,3 +243,9 @@ impl FromStr for XApiToken {
         Ok(XApiToken(s.to_string()))
     }
 }
+
+impl std::fmt::Display for XApiToken {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.0.fmt(f)
+    }
+}

+ 2 - 11
src/jobs.rs

@@ -14,11 +14,9 @@ pub(crate) use self::{
 
 use crate::{
     config::Config,
-    data::{ActorCache, MediaCache, NodeCache, State},
+    data::{ActorCache, MediaCache, State},
     error::{Error, ErrorKind},
     jobs::{process_listeners::Listeners, record_last_online::RecordLastOnline},
-    requests::Requests,
-    spawner::Spawner,
 };
 use background_jobs::{
     memory_storage::{ActixTimer, Storage},
@@ -45,7 +43,6 @@ pub(crate) fn create_workers(
     actors: ActorCache,
     media: MediaCache,
     config: Config,
-    spawner: Spawner,
 ) -> JobServer {
     let deliver_concurrency = config.deliver_concurrency();
 
@@ -56,7 +53,6 @@ pub(crate) fn create_workers(
             JobServer::new(queue_handle),
             media.clone(),
             config.clone(),
-            spawner.clone(),
         )
     })
     .register::<Deliver>()
@@ -84,12 +80,10 @@ pub(crate) fn create_workers(
 
 #[derive(Clone, Debug)]
 pub(crate) struct JobState {
-    requests: Requests,
     state: State,
     actors: ActorCache,
     config: Config,
     media: MediaCache,
-    node_cache: NodeCache,
     job_server: JobServer,
 }
 
@@ -113,15 +107,12 @@ impl JobState {
         job_server: JobServer,
         media: MediaCache,
         config: Config,
-        spawner: Spawner,
     ) -> Self {
         JobState {
-            requests: state.requests(&config, spawner),
-            node_cache: state.node_cache(),
+            state,
             actors,
             config,
             media,
-            state,
             job_server,
         }
     }

+ 5 - 1
src/jobs/contact.rs

@@ -2,6 +2,7 @@ use crate::{
     apub::AcceptedActors,
     error::{Error, ErrorKind},
     jobs::JobState,
+    requests::BreakerStrategy,
 };
 use activitystreams::{iri_string::types::IriString, object::Image, prelude::*};
 use background_jobs::ActixJob;
@@ -32,6 +33,7 @@ impl QueryContact {
 
     async fn perform(self, state: JobState) -> Result<(), Error> {
         let contact_outdated = state
+            .state
             .node_cache
             .is_contact_outdated(self.actor_id.clone())
             .await;
@@ -41,8 +43,9 @@ impl QueryContact {
         }
 
         let contact = match state
+            .state
             .requests
-            .fetch::<AcceptedActors>(&self.contact_id)
+            .fetch::<AcceptedActors>(&self.contact_id, BreakerStrategy::Allow404AndBelow)
             .await
         {
             Ok(contact) => contact,
@@ -57,6 +60,7 @@ impl QueryContact {
             to_contact(contact).ok_or(ErrorKind::Extract("contact"))?;
 
         state
+            .state
             .node_cache
             .set_contact(self.actor_id, username, display_name, url, avatar)
             .await?;

+ 7 - 1
src/jobs/deliver.rs

@@ -1,6 +1,7 @@
 use crate::{
     error::Error,
     jobs::{debug_object, JobState},
+    requests::BreakerStrategy,
 };
 use activitystreams::iri_string::types::IriString;
 use background_jobs::{ActixJob, Backoff};
@@ -35,7 +36,12 @@ impl Deliver {
 
     #[tracing::instrument(name = "Deliver", skip(state))]
     async fn permform(self, state: JobState) -> Result<(), Error> {
-        if let Err(e) = state.requests.deliver(&self.to, &self.data).await {
+        if let Err(e) = state
+            .state
+            .requests
+            .deliver(&self.to, &self.data, BreakerStrategy::Allow401AndBelow)
+            .await
+        {
             if e.is_breaker() {
                 tracing::debug!("Not trying due to failed breaker");
                 return Ok(());

+ 15 - 2
src/jobs/instance.rs

@@ -2,6 +2,7 @@ use crate::{
     config::UrlKind,
     error::{Error, ErrorKind},
     jobs::{Boolish, JobState},
+    requests::BreakerStrategy,
 };
 use activitystreams::{iri, iri_string::types::IriString};
 use background_jobs::ActixJob;
@@ -40,15 +41,23 @@ impl QueryInstance {
             InstanceApiType::Mastodon => {
                 let mastodon_instance_uri = iri!(format!("{scheme}://{authority}/api/v1/instance"));
                 state
+                    .state
                     .requests
-                    .fetch_json::<Instance>(&mastodon_instance_uri)
+                    .fetch_json::<Instance>(
+                        &mastodon_instance_uri,
+                        BreakerStrategy::Allow404AndBelow,
+                    )
                     .await
             }
             InstanceApiType::Misskey => {
                 let msky_meta_uri = iri!(format!("{scheme}://{authority}/api/meta"));
                 state
+                    .state
                     .requests
-                    .fetch_json_msky::<MisskeyMeta>(&msky_meta_uri)
+                    .fetch_json_msky::<MisskeyMeta>(
+                        &msky_meta_uri,
+                        BreakerStrategy::Allow404AndBelow,
+                    )
                     .await
                     .map(|res| res.into())
             }
@@ -58,10 +67,12 @@ impl QueryInstance {
     #[tracing::instrument(name = "Query instance", skip(state))]
     async fn perform(self, state: JobState) -> Result<(), Error> {
         let contact_outdated = state
+            .state
             .node_cache
             .is_contact_outdated(self.actor_id.clone())
             .await;
         let instance_outdated = state
+            .state
             .node_cache
             .is_instance_outdated(self.actor_id.clone())
             .await;
@@ -123,6 +134,7 @@ impl QueryInstance {
             let avatar = state.config.generate_url(UrlKind::Media(uuid));
 
             state
+                .state
                 .node_cache
                 .set_contact(
                     self.actor_id.clone(),
@@ -137,6 +149,7 @@ impl QueryInstance {
         let description = ammonia::clean(&description);
 
         state
+            .state
             .node_cache
             .set_instance(
                 self.actor_id,

+ 11 - 2
src/jobs/nodeinfo.rs

@@ -1,6 +1,7 @@
 use crate::{
     error::{Error, ErrorKind},
     jobs::{Boolish, JobState, QueryContact},
+    requests::BreakerStrategy,
 };
 use activitystreams::{iri, iri_string::types::IriString, primitives::OneOrMany};
 use background_jobs::ActixJob;
@@ -27,6 +28,7 @@ impl QueryNodeinfo {
     #[tracing::instrument(name = "Query node info", skip(state))]
     async fn perform(self, state: JobState) -> Result<(), Error> {
         if !state
+            .state
             .node_cache
             .is_nodeinfo_outdated(self.actor_id.clone())
             .await
@@ -42,8 +44,9 @@ impl QueryNodeinfo {
         let well_known_uri = iri!(format!("{scheme}://{authority}/.well-known/nodeinfo"));
 
         let well_known = match state
+            .state
             .requests
-            .fetch_json::<WellKnown>(&well_known_uri)
+            .fetch_json::<WellKnown>(&well_known_uri, BreakerStrategy::Allow404AndBelow)
             .await
         {
             Ok(well_known) => well_known,
@@ -60,7 +63,12 @@ impl QueryNodeinfo {
             return Ok(());
         };
 
-        let nodeinfo = match state.requests.fetch_json::<Nodeinfo>(&href).await {
+        let nodeinfo = match state
+            .state
+            .requests
+            .fetch_json::<Nodeinfo>(&href, BreakerStrategy::Require2XX)
+            .await
+        {
             Ok(nodeinfo) => nodeinfo,
             Err(e) if e.is_breaker() => {
                 tracing::debug!("Not retrying due to failed breaker");
@@ -70,6 +78,7 @@ impl QueryNodeinfo {
         };
 
         state
+            .state
             .node_cache
             .set_info(
                 self.actor_id.clone(),

+ 63 - 25
src/main.rs

@@ -1,17 +1,21 @@
 // need this for ructe
 #![allow(clippy::needless_borrow)]
 
+use std::time::Duration;
+
 use activitystreams::iri_string::types::IriString;
 use actix_rt::task::JoinHandle;
 use actix_web::{middleware::Compress, web, App, HttpServer};
 use collector::MemoryCollector;
 #[cfg(feature = "console")]
 use console_subscriber::ConsoleLayer;
+use error::Error;
 use http_signature_normalization_actix::middleware::VerifySignature;
 use metrics_exporter_prometheus::PrometheusBuilder;
 use metrics_util::layers::FanoutBuilder;
 use opentelemetry::{sdk::Resource, KeyValue};
 use opentelemetry_otlp::WithExportConfig;
+use reqwest_middleware::ClientWithMiddleware;
 use rustls::ServerConfig;
 use tracing_actix_web::TracingLogger;
 use tracing_error::ErrorLayer;
@@ -34,6 +38,8 @@ mod routes;
 mod spawner;
 mod telegram;
 
+use crate::config::UrlKind;
+
 use self::{
     args::Args,
     config::Config,
@@ -100,6 +106,38 @@ fn init_subscriber(
     Ok(())
 }
 
+fn build_client(
+    user_agent: &str,
+    timeout_seconds: u64,
+    proxy: Option<(&IriString, Option<(&str, &str)>)>,
+) -> Result<ClientWithMiddleware, Error> {
+    let builder = reqwest::Client::builder().user_agent(user_agent.to_string());
+
+    let builder = if let Some((url, auth)) = proxy {
+        let proxy = reqwest::Proxy::all(url.as_str())?;
+
+        let proxy = if let Some((username, password)) = auth {
+            proxy.basic_auth(username, password)
+        } else {
+            proxy
+        };
+
+        builder.proxy(proxy)
+    } else {
+        builder
+    };
+
+    let client = builder
+        .timeout(Duration::from_secs(timeout_seconds))
+        .build()?;
+
+    let client_with_middleware = reqwest_middleware::ClientBuilder::new(client)
+        .with(reqwest_tracing::TracingMiddleware::default())
+        .build();
+
+    Ok(client_with_middleware)
+}
+
 #[actix_rt::main]
 async fn main() -> Result<(), anyhow::Error> {
     dotenv::dotenv().ok();
@@ -150,11 +188,11 @@ fn client_main(config: Config, args: Args) -> JoinHandle<Result<(), anyhow::Erro
 }
 
 async fn do_client_main(config: Config, args: Args) -> Result<(), anyhow::Error> {
-    let client = requests::build_client(
+    let client = build_client(
         &config.user_agent(),
-        config.client_pool_size(),
         config.client_timeout(),
-    );
+        config.proxy_config(),
+    )?;
 
     if !args.blocks().is_empty() || !args.allowed().is_empty() {
         if args.undo() {
@@ -251,18 +289,15 @@ async fn do_server_main(
     collector: MemoryCollector,
     config: Config,
 ) -> Result<(), anyhow::Error> {
+    let client = build_client(
+        &config.user_agent(),
+        config.client_timeout(),
+        config.proxy_config(),
+    )?;
+
     tracing::warn!("Creating state");
     let node_config = std::collections::HashMap::new();
 
-    let state = State::build(db.clone(), node_config).await?;
-
-    if let Some((token, admin_handle)) = config.telegram_info() {
-        tracing::warn!("Creating telegram handler");
-        telegram::start(admin_handle.to_owned(), db.clone(), token);
-    }
-
-    let keys = config.open_keys()?;
-
     let (signature_threads, verify_threads) = match config.signature_threads() {
         0 | 1 => (1, 1),
         n if n <= VERIFY_RATIO => (n, 1),
@@ -274,26 +309,29 @@ async fn do_server_main(
         }
     };
 
-    let spawner = Spawner::build("sign-cpu", signature_threads)?;
     let verify_spawner = Spawner::build("verify-cpu", verify_threads)?;
+    let sign_spawner = Spawner::build("sign-cpu", signature_threads)?;
+
+    let key_id = config.generate_url(UrlKind::MainKey).to_string();
+    let state = State::build(db.clone(), key_id, sign_spawner, client, node_config).await?;
+
+    if let Some((token, admin_handle)) = config.telegram_info() {
+        tracing::warn!("Creating telegram handler");
+        telegram::start(admin_handle.to_owned(), db.clone(), token);
+    }
+
+    let keys = config.open_keys()?;
 
     let bind_address = config.bind_address();
     let server = HttpServer::new(move || {
-        let requests = state.requests(&config, spawner.clone());
-
-        let job_server = create_workers(
-            state.clone(),
-            actors.clone(),
-            media.clone(),
-            config.clone(),
-            spawner.clone(),
-        );
+        let job_server =
+            create_workers(state.clone(), actors.clone(), media.clone(), config.clone());
 
         let app = App::new()
             .app_data(web::Data::new(db.clone()))
             .app_data(web::Data::new(state.clone()))
             .app_data(web::Data::new(
-                requests.clone().spawner(verify_spawner.clone()),
+                state.requests.clone().spawner(verify_spawner.clone()),
             ))
             .app_data(web::Data::new(actors.clone()))
             .app_data(web::Data::new(config.clone()))
@@ -319,7 +357,7 @@ async fn do_server_main(
                     .wrap(config.digest_middleware().spawner(verify_spawner.clone()))
                     .wrap(VerifySignature::new(
                         MyVerify(
-                            requests.spawner(verify_spawner.clone()),
+                            state.requests.clone().spawner(verify_spawner.clone()),
                             actors.clone(),
                             state.clone(),
                             verify_spawner.clone(),
@@ -366,7 +404,7 @@ async fn do_server_main(
             .with_no_client_auth()
             .with_single_cert(certs, key)?;
         server
-            .bind_rustls(bind_address, server_config)?
+            .bind_rustls_021(bind_address, server_config)?
             .run()
             .await?;
     } else {

+ 6 - 2
src/middleware/verifier.rs

@@ -2,7 +2,7 @@ use crate::{
     apub::AcceptedActors,
     data::{ActorCache, State},
     error::{Error, ErrorKind},
-    requests::Requests,
+    requests::{BreakerStrategy, Requests},
     spawner::Spawner,
 };
 use activitystreams::{base::BaseExt, iri, iri_string::types::IriString};
@@ -70,7 +70,11 @@ impl MyVerify {
 
             actor_id
         } else {
-            match self.0.fetch::<PublicKeyResponse>(&public_key_id).await {
+            match self
+                .0
+                .fetch::<PublicKeyResponse>(&public_key_id, BreakerStrategy::Require2XX)
+                .await
+            {
                 Ok(res) => res.actor_id().ok_or(ErrorKind::MissingId),
                 Err(e) => {
                     if e.is_gone() {

+ 109 - 99
src/requests.rs

@@ -5,10 +5,10 @@ use crate::{
 };
 use activitystreams::iri_string::types::IriString;
 use actix_web::http::header::Date;
-use awc::{error::SendRequestError, Client, ClientResponse, Connector};
 use base64::{engine::general_purpose::STANDARD, Engine};
 use dashmap::DashMap;
-use http_signature_normalization_actix::{digest::ring::Sha256, prelude::*};
+use http_signature_normalization_reqwest::{digest::ring::Sha256, prelude::*};
+use reqwest_middleware::ClientWithMiddleware;
 use ring::{
     rand::SystemRandom,
     signature::{RsaKeyPair, RSA_PKCS1_SHA256},
@@ -18,13 +18,22 @@ use std::{
     sync::Arc,
     time::{Duration, SystemTime},
 };
-use tracing_awc::Tracing;
 
 const ONE_SECOND: u64 = 1;
 const ONE_MINUTE: u64 = 60 * ONE_SECOND;
 const ONE_HOUR: u64 = 60 * ONE_MINUTE;
 const ONE_DAY: u64 = 24 * ONE_HOUR;
 
+#[derive(Debug)]
+pub(crate) enum BreakerStrategy {
+    // Requires a successful response
+    Require2XX,
+    // Allows HTTP 2xx-401
+    Allow401AndBelow,
+    // Allows HTTP 2xx-404
+    Allow404AndBelow,
+}
+
 #[derive(Clone)]
 pub(crate) struct Breakers {
     inner: Arc<DashMap<String, Breaker>>,
@@ -139,10 +148,8 @@ impl Default for Breaker {
 
 #[derive(Clone)]
 pub(crate) struct Requests {
-    pool_size: usize,
-    client: Client,
+    client: ClientWithMiddleware,
     key_id: String,
-    user_agent: String,
     private_key: Arc<RsaKeyPair>,
     rng: SystemRandom,
     config: Config<Spawner>,
@@ -153,66 +160,39 @@ pub(crate) struct Requests {
 impl std::fmt::Debug for Requests {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("Requests")
-            .field("pool_size", &self.pool_size)
             .field("key_id", &self.key_id)
-            .field("user_agent", &self.user_agent)
             .field("config", &self.config)
             .field("breakers", &self.breakers)
             .finish()
     }
 }
 
-thread_local! {
-    static CLIENT: std::cell::OnceCell<Client> = std::cell::OnceCell::new();
-}
-
-pub(crate) fn build_client(user_agent: &str, pool_size: usize, timeout_seconds: u64) -> Client {
-    CLIENT.with(|client| {
-        client
-            .get_or_init(|| {
-                let connector = Connector::new().limit(pool_size);
-
-                Client::builder()
-                    .connector(connector)
-                    .wrap(Tracing)
-                    .add_default_header(("User-Agent", user_agent.to_string()))
-                    .timeout(Duration::from_secs(timeout_seconds))
-                    .finish()
-            })
-            .clone()
-    })
-}
-
 impl Requests {
     #[allow(clippy::too_many_arguments)]
     pub(crate) fn new(
         key_id: String,
         private_key: RsaPrivateKey,
-        user_agent: String,
         breakers: Breakers,
         last_online: Arc<LastOnline>,
-        pool_size: usize,
-        timeout_seconds: u64,
         spawner: Spawner,
+        client: ClientWithMiddleware,
     ) -> Self {
         let private_key_der = private_key.to_pkcs1_der().expect("Can encode der");
         let private_key = ring::signature::RsaKeyPair::from_der(private_key_der.as_bytes())
             .expect("Key is valid");
         Requests {
-            pool_size,
-            client: build_client(&user_agent, pool_size, timeout_seconds),
+            client,
             key_id,
-            user_agent,
             private_key: Arc::new(private_key),
             rng: SystemRandom::new(),
-            config: Config::new().mastodon_compat().spawner(spawner),
+            config: Config::new_with_spawner(spawner).mastodon_compat(),
             breakers,
             last_online,
         }
     }
 
     pub(crate) fn spawner(mut self, spawner: Spawner) -> Self {
-        self.config = self.config.spawner(spawner);
+        self.config = self.config.set_spawner(spawner);
         self
     }
 
@@ -223,97 +203,126 @@ impl Requests {
     async fn check_response(
         &self,
         parsed_url: &IriString,
-        res: Result<ClientResponse, SendRequestError>,
-    ) -> Result<ClientResponse, Error> {
+        strategy: BreakerStrategy,
+        res: Result<reqwest::Response, reqwest_middleware::Error>,
+    ) -> Result<reqwest::Response, Error> {
         if res.is_err() {
             self.breakers.fail(&parsed_url);
         }
 
-        let mut res =
-            res.map_err(|e| ErrorKind::SendRequest(parsed_url.to_string(), e.to_string()))?;
+        let res = res?;
+
+        let status = res.status();
 
-        if res.status().is_server_error() {
+        let success = match strategy {
+            BreakerStrategy::Require2XX => status.is_success(),
+            BreakerStrategy::Allow401AndBelow => (200..=401).contains(&status.as_u16()),
+            BreakerStrategy::Allow404AndBelow => (200..=404).contains(&status.as_u16()),
+        };
+
+        if !success {
             self.breakers.fail(&parsed_url);
 
-            if let Ok(bytes) = res.body().await {
-                if let Ok(s) = String::from_utf8(bytes.as_ref().to_vec()) {
-                    if !s.is_empty() {
-                        tracing::debug!("Response from {parsed_url}, {s}");
-                    }
+            if let Ok(s) = res.text().await {
+                if !s.is_empty() {
+                    tracing::debug!("Response from {parsed_url}, {s}");
                 }
             }
 
-            return Err(ErrorKind::Status(parsed_url.to_string(), res.status()).into());
+            return Err(ErrorKind::Status(parsed_url.to_string(), status).into());
         }
 
-        self.last_online.mark_seen(&parsed_url);
-        self.breakers.succeed(&parsed_url);
+        // only actually succeed a breaker on 2xx response
+        if status.is_success() {
+            self.last_online.mark_seen(&parsed_url);
+            self.breakers.succeed(&parsed_url);
+        }
 
         Ok(res)
     }
 
     #[tracing::instrument(name = "Fetch Json", skip(self), fields(signing_string))]
-    pub(crate) async fn fetch_json<T>(&self, url: &IriString) -> Result<T, Error>
+    pub(crate) async fn fetch_json<T>(
+        &self,
+        url: &IriString,
+        strategy: BreakerStrategy,
+    ) -> Result<T, Error>
     where
         T: serde::de::DeserializeOwned,
     {
-        self.do_fetch(url, "application/json").await
+        self.do_fetch(url, "application/json", strategy).await
     }
 
     #[tracing::instrument(name = "Fetch Json", skip(self), fields(signing_string))]
-    pub(crate) async fn fetch_json_msky<T>(&self, url: &IriString) -> Result<T, Error>
+    pub(crate) async fn fetch_json_msky<T>(
+        &self,
+        url: &IriString,
+        strategy: BreakerStrategy,
+    ) -> Result<T, Error>
     where
         T: serde::de::DeserializeOwned,
     {
-        let mut res = self
+        let body = self
             .do_deliver(
                 url,
                 &serde_json::json!({}),
                 "application/json",
                 "application/json",
+                strategy,
             )
+            .await?
+            .bytes()
             .await?;
 
-        let body = res
-            .body()
-            .await
-            .map_err(|e| ErrorKind::ReceiveResponse(url.to_string(), e.to_string()))?;
-
-        Ok(serde_json::from_slice(body.as_ref())?)
+        Ok(serde_json::from_slice(&body)?)
     }
 
     #[tracing::instrument(name = "Fetch Activity+Json", skip(self), fields(signing_string))]
-    pub(crate) async fn fetch<T>(&self, url: &IriString) -> Result<T, Error>
+    pub(crate) async fn fetch<T>(
+        &self,
+        url: &IriString,
+        strategy: BreakerStrategy,
+    ) -> Result<T, Error>
     where
         T: serde::de::DeserializeOwned,
     {
-        self.do_fetch(url, "application/activity+json").await
+        self.do_fetch(url, "application/activity+json", strategy)
+            .await
     }
 
-    async fn do_fetch<T>(&self, url: &IriString, accept: &str) -> Result<T, Error>
+    async fn do_fetch<T>(
+        &self,
+        url: &IriString,
+        accept: &str,
+        strategy: BreakerStrategy,
+    ) -> Result<T, Error>
     where
         T: serde::de::DeserializeOwned,
     {
-        let mut res = self.do_fetch_response(url, accept).await?;
-
-        let body = res
-            .body()
-            .await
-            .map_err(|e| ErrorKind::ReceiveResponse(url.to_string(), e.to_string()))?;
+        let body = self
+            .do_fetch_response(url, accept, strategy)
+            .await?
+            .bytes()
+            .await?;
 
-        Ok(serde_json::from_slice(body.as_ref())?)
+        Ok(serde_json::from_slice(&body)?)
     }
 
     #[tracing::instrument(name = "Fetch response", skip(self), fields(signing_string))]
-    pub(crate) async fn fetch_response(&self, url: &IriString) -> Result<ClientResponse, Error> {
-        self.do_fetch_response(url, "*/*").await
+    pub(crate) async fn fetch_response(
+        &self,
+        url: &IriString,
+        strategy: BreakerStrategy,
+    ) -> Result<reqwest::Response, Error> {
+        self.do_fetch_response(url, "*/*", strategy).await
     }
 
     pub(crate) async fn do_fetch_response(
         &self,
         url: &IriString,
         accept: &str,
-    ) -> Result<ClientResponse, Error> {
+        strategy: BreakerStrategy,
+    ) -> Result<reqwest::Response, Error> {
         if !self.breakers.should_try(url) {
             return Err(ErrorKind::Breaker.into());
         }
@@ -321,25 +330,20 @@ impl Requests {
         let signer = self.signer();
         let span = tracing::Span::current();
 
-        let res = self
+        let request = self
             .client
             .get(url.as_str())
-            .insert_header(("Accept", accept))
-            .insert_header(Date(SystemTime::now().into()))
-            .no_decompress()
-            .signature(
-                self.config.clone(),
-                self.key_id.clone(),
-                move |signing_string| {
-                    span.record("signing_string", signing_string);
-                    span.in_scope(|| signer.sign(signing_string))
-                },
-            )
-            .await?
-            .send()
-            .await;
+            .header("Accept", accept)
+            .header("Date", Date(SystemTime::now().into()).to_string())
+            .signature(&self.config, self.key_id.clone(), move |signing_string| {
+                span.record("signing_string", signing_string);
+                span.in_scope(|| signer.sign(signing_string))
+            })
+            .await?;
+
+        let res = self.client.execute(request).await;
 
-        let res = self.check_response(url, res).await?;
+        let res = self.check_response(url, strategy, res).await?;
 
         Ok(res)
     }
@@ -349,7 +353,12 @@ impl Requests {
         skip_all,
         fields(inbox = inbox.to_string().as_str(), signing_string)
     )]
-    pub(crate) async fn deliver<T>(&self, inbox: &IriString, item: &T) -> Result<(), Error>
+    pub(crate) async fn deliver<T>(
+        &self,
+        inbox: &IriString,
+        item: &T,
+        strategy: BreakerStrategy,
+    ) -> Result<(), Error>
     where
         T: serde::ser::Serialize + std::fmt::Debug,
     {
@@ -358,6 +367,7 @@ impl Requests {
             item,
             "application/activity+json",
             "application/activity+json",
+            strategy,
         )
         .await?;
         Ok(())
@@ -369,7 +379,8 @@ impl Requests {
         item: &T,
         content_type: &str,
         accept: &str,
-    ) -> Result<ClientResponse, Error>
+        strategy: BreakerStrategy,
+    ) -> Result<reqwest::Response, Error>
     where
         T: serde::ser::Serialize + std::fmt::Debug,
     {
@@ -381,12 +392,12 @@ impl Requests {
         let span = tracing::Span::current();
         let item_string = serde_json::to_string(item)?;
 
-        let (req, body) = self
+        let request = self
             .client
             .post(inbox.as_str())
-            .insert_header(("Accept", accept))
-            .insert_header(("Content-Type", content_type))
-            .insert_header(Date(SystemTime::now().into()))
+            .header("Accept", accept)
+            .header("Content-Type", content_type)
+            .header("Date", Date(SystemTime::now().into()).to_string())
             .signature_with_digest(
                 self.config.clone(),
                 self.key_id.clone(),
@@ -397,12 +408,11 @@ impl Requests {
                     span.in_scope(|| signer.sign(signing_string))
                 },
             )
-            .await?
-            .split();
+            .await?;
 
-        let res = req.send_body(body).await;
+        let res = self.client.execute(request).await;
 
-        let res = self.check_response(inbox, res).await?;
+        let res = self.check_response(inbox, strategy, res).await?;
 
         Ok(res)
     }
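
Taken together, the `src/requests.rs` changes swap awc for reqwest + reqwest-middleware and make the circuit-breaker threshold explicit per call site via `BreakerStrategy`. A condensed, self-contained sketch of the status check introduced in `check_response`; the `is_success` helper here is hypothetical and for illustration only:

```rust
// Sketch of the per-strategy status check added in `check_response` above.
#[derive(Debug)]
enum BreakerStrategy {
    Require2XX,       // requires a successful (2xx) response
    Allow401AndBelow, // allows HTTP 2xx..=401
    Allow404AndBelow, // allows HTTP 2xx..=404
}

// Hypothetical helper mirroring the match on `strategy`.
fn is_success(strategy: &BreakerStrategy, status: u16) -> bool {
    match strategy {
        BreakerStrategy::Require2XX => (200..300).contains(&status),
        BreakerStrategy::Allow401AndBelow => (200..=401).contains(&status),
        BreakerStrategy::Allow404AndBelow => (200..=404).contains(&status),
    }
}

fn main() {
    // A 404 during nodeinfo/instance discovery does not trip the breaker...
    assert!(is_success(&BreakerStrategy::Allow404AndBelow, 404));
    // ...but a 404 on an actor fetch (Require2XX) does.
    assert!(!is_success(&BreakerStrategy::Require2XX, 404));
}
```

Note that even under the permissive strategies, only genuine 2xx responses mark the breaker as succeeded and update the last-online timestamp.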

+ 1 - 1
src/routes/index.rs

@@ -36,7 +36,7 @@ pub(crate) async fn route(
     state: web::Data<State>,
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Error> {
-    let all_nodes = state.node_cache().nodes().await?;
+    let all_nodes = state.node_cache.nodes().await?;
 
     let mut nodes = Vec::new();
     let mut local = Vec::new();

+ 9 - 3
src/routes/media.rs

@@ -1,4 +1,8 @@
-use crate::{data::MediaCache, error::Error, requests::Requests};
+use crate::{
+    data::MediaCache,
+    error::Error,
+    requests::{BreakerStrategy, Requests},
+};
 use actix_web::{body::BodyStream, web, HttpResponse};
 use uuid::Uuid;
 
@@ -11,7 +15,9 @@ pub(crate) async fn route(
     let uuid = uuid.into_inner();
 
     if let Some(url) = media.get_url(uuid).await? {
-        let res = requests.fetch_response(&url).await?;
+        let res = requests
+            .fetch_response(&url, BreakerStrategy::Allow404AndBelow)
+            .await?;
 
         let mut response = HttpResponse::build(res.status());
 
@@ -19,7 +25,7 @@ pub(crate) async fn route(
             response.insert_header((name.clone(), value.clone()));
         }
 
-        return Ok(response.body(BodyStream::new(res)));
+        return Ok(response.body(BodyStream::new(res.bytes_stream())));
     }
 
     Ok(HttpResponse::NotFound().finish())

+ 29 - 0
src/spawner.rs

@@ -162,3 +162,32 @@ impl Spawn for Spawner {
         })
     }
 }
+
+impl http_signature_normalization_reqwest::Spawn for Spawner {
+    type Future<T> = std::pin::Pin<Box<dyn std::future::Future<Output = Result<T, http_signature_normalization_reqwest::Canceled>> + Send>> where T: Send;
+
+    fn spawn_blocking<Func, Out>(&self, func: Func) -> Self::Future<Out>
+    where
+        Func: FnOnce() -> Out + Send + 'static,
+        Out: Send + 'static,
+    {
+        let sender = self.sender.as_ref().expect("Sender exists").clone();
+
+        Box::pin(async move {
+            let (tx, rx) = flume::bounded(1);
+
+            let _ = sender
+                .send_async(Box::new(move || {
+                    if tx.try_send((func)()).is_err() {
+                        tracing::warn!("Requestor hung up");
+                        metrics::increment_counter!("relay.spawner.disconnected");
+                    }
+                }))
+                .await;
+
+            timer(rx.recv_async())
+                .await
+                .map_err(|_| http_signature_normalization_reqwest::Canceled)
+        })
+    }
+}
