diff --git a/Cargo.lock b/Cargo.lock index 3f1cb74..bab0d42 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,17 +26,6 @@ version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "atomic-waker" version = "1.1.2" @@ -51,14 +40,14 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "axum" -version = "0.7.9" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +checksum = "5b098575ebe77cb6d14fc7f32749631a6e44edbef6b796f89b020e99ba20d425" dependencies = [ - "async-trait", "axum-core", "axum-macros", "bytes", + "form_urlencoded", "futures-util", "http", "http-body", @@ -71,8 +60,7 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rustversion", - "serde", + "serde_core", "serde_json", "serde_path_to_error", "serde_urlencoded", @@ -86,19 +74,17 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.4.5" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +checksum = "59446ce19cd142f8833f856eb31f3eb097812d1479ab224f54d72428ca21ea22" dependencies = [ - "async-trait", "bytes", - "futures-util", + "futures-core", "http", "http-body", "http-body-util", "mime", "pin-project-lite", - "rustversion", "sync_wrapper", "tower-layer", "tower-service", @@ -107,9 +93,9 @@ dependencies = [ [[package]] name = "axum-macros" -version = "0.4.2" +version = "0.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce" +checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" dependencies = [ "proc-macro2", "quote", @@ -684,9 +670,9 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" [[package]] name = "memchr" @@ -794,9 +780,9 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.31.0" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" dependencies = [ "memchr", ] @@ -815,7 +801,7 @@ dependencies = [ "rustc-hash", "rustls", "socket2", - "thiserror 2.0.17", + "thiserror", "tokio", "tracing", "web-time", @@ -836,7 +822,7 @@ dependencies = [ "rustls", "rustls-pki-types", "slab", - "thiserror 2.0.17", + "thiserror", "tinyvec", "tracing", "web-time", @@ -1068,7 +1054,7 @@ dependencies = [ [[package]] name = "seadexerr" -version = "0.3.2" +version = "0.4.0" dependencies = [ "anyhow", "axum", @@ -1077,7 +1063,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror 1.0.69", + "thiserror", "time", "tokio", "tracing", @@ -1153,9 +1139,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" +checksum = "10574371d41b0d9b2cff89418eda27da52bcaff2cc8741db26382a77c29131f1" dependencies = [ "base64", "chrono", @@ -1172,9 +1158,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version 
= "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" +checksum = "08a72d8216842fdd57820dc78d840bef99248e35fb2554ff923319e60f2d686b" dependencies = [ "darling", "proc-macro2", @@ -1268,33 +1254,13 @@ dependencies = [ "syn", ] -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - [[package]] name = "thiserror" version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.17", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn", + "thiserror-impl", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 6366e7c..ea50efd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,19 +1,24 @@ [package] name = "seadexerr" -version = "0.3.2" +version = "0.4.0" edition = "2024" [dependencies] -anyhow = "1.0" -axum = { version = "0.7", features = ["macros"] } -quick-xml = "0.31" -reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -serde_with = "3.11" -thiserror = "1.0" -time = { version = "0.3", features = ["formatting", "parsing"] } -tokio = { version = "1.38", features = ["fs", "macros", "rt-multi-thread", "sync", "time"] } -tracing = "0.1" -tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] } -url = "2.5" +anyhow = "1.0.100" +axum = { version = "0.8.7", features = ["macros"] } +quick-xml = "0.38.4" 
+reqwest = { version = "0.12.24", default-features = false, features = ["json", "rustls-tls"] } +serde = { version = "1.0.228", features = ["derive"] } +serde_json = "1.0.145" +serde_with = "3.16.0" +thiserror = "2.0.17" +time = { version = "0.3.44", features = ["formatting", "parsing"] } +tokio = { version = "1.48.0", features = ["fs", "macros", "rt-multi-thread", "sync", "time"] } +tracing = "0.1.41" +tracing-subscriber = { version = "0.3.20", features = ["fmt", "env-filter"] } +url = "2.5.7" + +[profile.release] +lto = true +codegen-units = 1 +strip = true diff --git a/README.md b/README.md index ef27ade..b06bf46 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,8 @@ services: environment: - SONARR_BASE_URL=http://localhost:8989/ - SONARR_API_KEY= + - RADARR_BASE_URL=http://localhost:7878/ + - RADARR_API_KEY= ```
@@ -26,6 +28,9 @@ Most can be left as default | `SONARR_API_KEY` | **(required)** | Sonarr API key used to resolve series titles for feed names. | | `SONARR_BASE_URL` | `http://localhost:8989/` | Base URL for your Sonarr instance. | | `SONARR_TIMEOUT_SECS` | `SEADEXER_RELEASES_TIMEOUT_SECS` (10) | Timeout (seconds) for Sonarr API requests. | +| `RADARR_API_KEY` | **(required unless `SEADEXER_RADARR_ENABLED=false`)** | Radarr API key used to resolve movie titles. | +| `RADARR_BASE_URL` | `http://localhost:7878/` | Base URL for your Radarr instance. | +| `RADARR_TIMEOUT_SECS` | `SEADEXER_RELEASES_TIMEOUT_SECS` (10) | Timeout (seconds) for Radarr API requests. | | `SEADEXER_ANILIST_BASE_URL` | `https://graphql.anilist.co` | GraphQL endpoint used to resolve AniList titles and formats. | | `SEADEXER_ANILIST_TIMEOUT_SECS` | `SEADEXER_RELEASES_TIMEOUT_SECS` (10) | Timeout (seconds) for AniList GraphQL requests. | | `SEADEXER_HOST` | `0.0.0.0` | Interface the HTTP server listens on. | @@ -53,7 +58,7 @@ In Prowlarr: 4. Set **Url** to `http://seadexerr:6767` 5. Click **Test** and **Save** -In Sonarr: +In Sonarr or Radarr: 1. Go to **Settings → Custom Formats** 2. 
Create a new **Custom Format** named `Seadex` @@ -65,9 +70,9 @@ In Sonarr: ## Future Plans -- [ ] Movie Support - [ ] Specials Support - [ ] Episode Support +- [x] Movie Support (TMDB + Radarr) - [x] RSS Refresh - [x] Local PlexAniBridge Mappings diff --git a/src/anilist.rs b/src/anilist.rs index 65fd78c..87971bd 100644 --- a/src/anilist.rs +++ b/src/anilist.rs @@ -12,12 +12,8 @@ query MediaById($idIn: [Int], $perPage: Int) { Page(perPage: $perPage) { media(id_in: $idIn) { id + type format - title { - english - romaji - native - } } } } @@ -93,16 +89,8 @@ impl AniListClient { None => continue, }; - let title = media - .title - .english - .or(media.title.romaji) - .or(media.title.native) - .unwrap_or_else(|| format!("AniList {}", media.id)); - result.entry(media.id).or_insert(AniListMedia { id: media.id, - title, format, }); } @@ -151,7 +139,6 @@ impl MediaFormat { #[derive(Debug, Clone)] pub struct AniListMedia { pub id: i64, - pub title: String, pub format: MediaFormat, } @@ -190,15 +177,9 @@ struct GraphqlPage { #[derive(Debug, Deserialize)] struct GraphqlMedia { id: i64, + #[serde(rename = "type")] + media_type: Option, format: Option, - title: AniListTitle, -} - -#[derive(Debug, Deserialize)] -struct AniListTitle { - english: Option, - romaji: Option, - native: Option, } #[derive(Debug, Deserialize)] @@ -208,9 +189,9 @@ struct GraphqlError { #[derive(Debug, Error)] pub enum AniListError { - #[error("http error when querying AniList GraphQL API")] + #[error("http error when querying AniList GraphQL API: {0}")] Http(#[from] reqwest::Error), - #[error("failed to deserialise AniList response payload")] + #[error("failed to deserialise AniList response payload: {0}")] Deserialisation(#[from] serde_json::Error), #[error("AniList response missing data node")] MissingData, diff --git a/src/config.rs b/src/config.rs index 61c7955..1ac7ed7 100644 --- a/src/config.rs +++ b/src/config.rs @@ -18,9 +18,22 @@ pub struct AppConfig { pub default_limit: usize, pub 
anilist_base_url: Url, pub anilist_timeout: Duration, - pub sonarr_url: Url, - pub sonarr_api_key: String, - pub sonarr_timeout: Duration, + pub sonarr: Option, + pub radarr: Option, +} + +#[derive(Clone, Debug)] +pub struct SonarrConfig { + pub url: Url, + pub api_key: String, + pub timeout: Duration, +} + +#[derive(Clone, Debug)] +pub struct RadarrConfig { + pub url: Url, + pub api_key: String, + pub timeout: Duration, } impl AppConfig { @@ -93,18 +106,63 @@ impl AppConfig { .unwrap_or(timeout_secs); let anilist_timeout = Duration::from_secs(anilist_timeout_secs.max(1)); - let raw_sonarr_url = - env::var("SONARR_BASE_URL").unwrap_or_else(|_| "http://localhost:8989".to_string()); - let sonarr_url = parse_root_url(&raw_sonarr_url, "SONARR_BASE_URL")?; - - let sonarr_api_key = - env::var("SONARR_API_KEY").context("Missing SONARR_API_KEY variable")?; - - let sonarr_timeout_secs = env::var("SONARR_TIMEOUT_SECS") - .ok() - .and_then(|value| value.parse::().ok()) - .unwrap_or(timeout_secs); - let sonarr_timeout = Duration::from_secs(sonarr_timeout_secs.max(1)); + let sonarr_enabled = env::var("SEADEXER_SONARR_ENABLED") + .map(|v| v != "false") + .unwrap_or(true); + + let sonarr = if sonarr_enabled { + let raw_sonarr_url = env::var("SONARR_BASE_URL") + .unwrap_or_else(|_| "http://localhost:8989".to_string()); + let sonarr_url = parse_root_url(&raw_sonarr_url, "SONARR_BASE_URL")?; + + let sonarr_api_key = + env::var("SONARR_API_KEY").context("Missing SONARR_API_KEY variable")?; + + let sonarr_timeout_secs = env::var("SONARR_TIMEOUT_SECS") + .ok() + .and_then(|value| value.parse::().ok()) + .unwrap_or(timeout_secs); + let sonarr_timeout = Duration::from_secs(sonarr_timeout_secs.max(1)); + + Some(SonarrConfig { + url: sonarr_url, + api_key: sonarr_api_key, + timeout: sonarr_timeout, + }) + } else { + None + }; + + let radarr_enabled = env::var("SEADEXER_RADARR_ENABLED") + .map(|v| v != "false") + .unwrap_or(true); + + let radarr = if radarr_enabled { + let raw_radarr_url = 
env::var("RADARR_BASE_URL") + .unwrap_or_else(|_| "http://localhost:7878".to_string()); + let radarr_url = parse_root_url(&raw_radarr_url, "RADARR_BASE_URL")?; + + let radarr_api_key = + env::var("RADARR_API_KEY").context("Missing RADARR_API_KEY variable")?; + + let radarr_timeout_secs = env::var("RADARR_TIMEOUT_SECS") + .ok() + .and_then(|value| value.parse::().ok()) + .unwrap_or(timeout_secs); + let radarr_timeout = Duration::from_secs(radarr_timeout_secs.max(1)); + + Some(RadarrConfig { + url: radarr_url, + api_key: radarr_api_key, + timeout: radarr_timeout, + }) + } else { + None + }; + + if sonarr.is_none() && radarr.is_none() { + anyhow::bail!("At least one of Sonarr or Radarr must be enabled"); + } Ok(Self { listen_addr, @@ -120,9 +178,8 @@ impl AppConfig { default_limit, anilist_base_url, anilist_timeout, - sonarr_url, - sonarr_api_key, - sonarr_timeout, + sonarr, + radarr, }) } } diff --git a/src/http.rs b/src/http.rs index 6231bd5..681f65f 100644 --- a/src/http.rs +++ b/src/http.rs @@ -1,4 +1,7 @@ -use std::{borrow::Cow, collections::{HashMap, HashSet}}; +use std::{ + borrow::Cow, + collections::{HashMap, HashSet}, +}; use axum::{ Json, Router, @@ -14,11 +17,12 @@ use tracing::{debug, info}; use url::Url; use crate::anilist::{AniListError, MediaFormat}; +use crate::radarr::RadarrError; use crate::releases::{ReleasesError, Torrent}; use crate::torznab::{self, ChannelMetadata, TorznabItem}; use crate::{ AppState, SharedAppState, - mapping::{MappingError, TvdbMapping}, + mapping::{MappingError, TvdbMapping, parse_season_key}, sonarr::SonarrError, }; @@ -46,6 +50,8 @@ struct TorznabQuery { season: Option, #[serde(rename = "tvdbid")] tvdb_id: Option, + #[serde(rename = "tmdbid")] + tmdb_id: Option, #[serde(rename = "q")] query: Option, } @@ -56,6 +62,7 @@ impl TorznabQuery { "caps" => TorznabOperation::Caps, "search" => TorznabOperation::Search, "tvsearch" | "tv-search" => TorznabOperation::TvSearch, + "movie" | "movie-search" | "moviesearch" => 
TorznabOperation::MovieSearch, other => TorznabOperation::Unsupported(other), } } @@ -66,6 +73,12 @@ impl TorznabQuery { .and_then(|value| value.trim().parse::().ok()) } + fn tmdb_identifier(&self) -> Option { + self.tmdb_id + .as_deref() + .and_then(|value| value.trim().parse::().ok()) + } + fn season_number(&self) -> Option { self.season .as_deref() @@ -77,6 +90,7 @@ enum TorznabOperation<'a> { Caps, Search, TvSearch, + MovieSearch, Unsupported(&'a str), } @@ -87,6 +101,10 @@ fn format_allowed(format: &MediaFormat) -> bool { ) } +fn movie_format_allowed(format: &MediaFormat) -> bool { + matches!(format, MediaFormat::Movie) +} + async fn torznab_handler( State(state): State, Query(query): Query, @@ -96,12 +114,14 @@ async fn torznab_handler( TorznabOperation::Caps => "caps", TorznabOperation::Search => "search", TorznabOperation::TvSearch => "tvsearch", + TorznabOperation::MovieSearch => "movie-search", TorznabOperation::Unsupported(name) => name, }; info!( operation = operation_name, tvdb = query.tvdb_id.as_deref(), + tmdb = query.tmdb_id.as_deref(), season = query.season.as_deref(), limit = query.limit, "torznab request received" @@ -111,6 +131,7 @@ async fn torznab_handler( TorznabOperation::Caps => respond_caps(&state), TorznabOperation::Search => respond_generic_search(&state, &query).await, TorznabOperation::TvSearch => respond_tv_search(&state, &query).await, + TorznabOperation::MovieSearch => respond_movie_search(&state, &query).await, TorznabOperation::Unsupported(name) => { Err(HttpError::UnsupportedOperation(name.to_string())) } @@ -184,23 +205,14 @@ async fn respond_generic_search( ); let fetch_limit = state.config.default_limit; - let torrents = state + let mut torrents = state .releases .recent_public_torrents(fetch_limit) .await .map_err(HttpError::Releases)?; - let season_packs: Vec = torrents - .into_iter() - .filter(|torrent| torrent.files.len() > 1) - .collect(); - - let total = season_packs.len(); - - let window: Vec = 
season_packs.into_iter().skip(offset).take(limit).collect(); - - if window.is_empty() { - let xml = torznab::render_feed(&metadata, &[], offset, total)?; + if torrents.is_empty() { + let xml = torznab::render_feed(&metadata, &[], offset, 0)?; return Ok(( [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], xml, @@ -208,7 +220,7 @@ async fn respond_generic_search( .into_response()); } - let missing_ids: Vec = window + let missing_ids: Vec = torrents .iter() .filter(|torrent| torrent.anilist_id.is_none()) .map(|torrent| torrent.id.clone()) @@ -224,7 +236,7 @@ async fn respond_generic_search( .map_err(HttpError::Releases)? }; - let window: Vec = window + torrents = torrents .into_iter() .map(|mut torrent| { if torrent.anilist_id.is_none() @@ -236,7 +248,7 @@ async fn respond_generic_search( }) .collect(); - let anilist_ids: Vec = window + let anilist_ids: Vec = torrents .iter() .filter_map(|torrent| torrent.anilist_id) .collect(); @@ -247,8 +259,45 @@ async fn respond_generic_search( .await .map_err(HttpError::AniList)?; - let mut title_cache: HashMap<(i64, u32), String> = HashMap::new(); + let mut eligible: Vec = Vec::new(); + + for torrent in torrents.into_iter() { + let Some(anilist_id) = torrent.anilist_id else { + continue; + }; + + let Some(media) = media_lookup.get(&anilist_id) else { + continue; + }; + + let include = match &media.format { + MediaFormat::Movie => true, + format if format_allowed(format) => torrent.files.len() > 1, + _ => false, + }; + + if include { + eligible.push(torrent); + } + } + + let total = eligible.len(); + + let window: Vec = eligible.into_iter().skip(offset).take(limit).collect(); + + if window.is_empty() { + let xml = torznab::render_feed(&metadata, &[], offset, total)?; + return Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()); + } + + let mut tv_title_cache: HashMap<(i64, u32), String> = HashMap::new(); + let mut movie_title_cache: HashMap = HashMap::new(); let mut 
active_tvdb_ids: HashSet = HashSet::new(); + let mut active_tmdb_ids: HashSet = HashSet::new(); let mut items = Vec::with_capacity(window.len()); for torrent in window.into_iter() { @@ -265,32 +314,64 @@ async fn respond_generic_search( continue; }; - if !format_allowed(&media.format) { - debug!( - anilist_id, - format = ?media.format, - "skipping torrent due to unsupported AniList format" - ); - continue; + match &media.format { + format if format_allowed(format) => { + if state.sonarr.is_some() { + let title = resolve_tv_generic_title( + state, + &torrent, + &mut tv_title_cache, + &mut active_tvdb_ids, + ) + .await?; + items.push(build_torznab_item(torrent, title, tv_category_ids())); + } + } + MediaFormat::Movie => { + if state.radarr.is_some() { + match resolve_movie_generic_title( + state, + anilist_id, + &mut movie_title_cache, + &mut active_tmdb_ids, + ) + .await? + { + Some(title) => { + items.push(build_torznab_item(torrent, title, movie_category_ids())); + } + None => { + let fallback = default_torrent_title(&torrent.id); + items.push(build_torznab_item(torrent, fallback, movie_category_ids())); + } + } + } + } + other => { + debug!( + anilist_id, + format = ?other, + "skipping torrent due to unsupported AniList format" + ); + } } - - let title = resolve_generic_search_title( - state, - &torrent, - &mut title_cache, - &mut active_tvdb_ids, - ) - .await?; - items.push(build_torznab_item(torrent, title)); } let xml = torznab::render_feed(&metadata, &items, offset, total)?; - state - .sonarr - .retain_titles(&active_tvdb_ids) - .await - .map_err(HttpError::Sonarr)?; + if let Some(sonarr) = &state.sonarr { + sonarr + .retain_titles(&active_tvdb_ids) + .await + .map_err(HttpError::Sonarr)?; + } + + if let Some(radarr) = &state.radarr { + radarr + .retain_titles(&active_tmdb_ids) + .await + .map_err(HttpError::Radarr)?; + } Ok(( [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], @@ -309,6 +390,16 @@ async fn respond_tv_search(state: &AppState, 
query: &TorznabQuery) -> Result id, None => { @@ -435,8 +526,150 @@ async fn respond_tv_search(state: &AppState, query: &TorznabQuery) -> Result 1) .skip(offset) .take(limit) - .map(|torrent| build_torznab_item(torrent, feed_title.clone())) + .map(|torrent| build_torznab_item(torrent, feed_title.clone(), tv_category_ids())) + .collect(); + let xml = torznab::render_feed(&metadata, &items, offset, total)?; + + Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()) +} + +async fn respond_movie_search( + state: &AppState, + query: &TorznabQuery, +) -> Result { + let metadata = build_channel_metadata(state)?; + let limit = query + .limit + .unwrap_or(state.config.default_limit) + .max(1) + .min(state.config.default_limit); + + let offset = query.offset.unwrap_or(0); + + if state.radarr.is_none() { + debug!("movie-search requested but radarr is disabled; returning empty feed"); + let xml = torznab::render_feed(&metadata, &[], offset, 0)?; + return Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()); + } + + let tmdb_id = match query.tmdb_identifier() { + Some(id) => id, + None => { + debug!( + limit, + offset, "movie-search missing tmdbid; returning empty feed without error" + ); + let xml = torznab::render_feed(&metadata, &[], offset, 0)?; + return Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()); + } + }; + + let anilist_id = match state + .mappings + .resolve_anilist_id_for_tmdb(tmdb_id) + .await + .map_err(HttpError::Mapping)? 
+ { + Some(id) => id, + None => { + info!( + tmdb_id, + "no anilist mapping found for movie-search; returning empty result set" + ); + let xml = torznab::render_feed(&metadata, &[], offset, 0)?; + return Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()); + } + }; + + debug!( + tmdb_id, + anilist_id, limit, "movie-search querying releases.moe" + ); + + let fetch_limit = offset.saturating_add(limit).min(state.config.default_limit); + let collected: Vec = match state + .releases + .search_torrents(anilist_id, fetch_limit) + .await + { + Ok(torrents) => torrents, + Err(err) => { + tracing::error!( + tmdb_id, + anilist_id, + error = %err, + "releases.moe lookup failed for movie-search" + ); + return Err(HttpError::Releases(err)); + } + }; + + let media_lookup = state + .anilist + .fetch_media(&[anilist_id]) + .await + .map_err(HttpError::AniList)?; + + let Some(media) = media_lookup.get(&anilist_id) else { + info!( + tmdb_id, + anilist_id, "AniList media missing for movie-search; returning empty result set" + ); + let xml = torznab::render_feed(&metadata, &[], offset, 0)?; + return Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()); + }; + + if !movie_format_allowed(&media.format) { + info!( + tmdb_id, + anilist_id, + format = ?media.format, + "AniList format unsupported for movie-search" + ); + let xml = torznab::render_feed(&metadata, &[], offset, 0)?; + return Ok(( + [(header::CONTENT_TYPE, "application/rss+xml; charset=utf-8")], + xml, + ) + .into_response()); + } + + let total = collected.len(); + let feed_title = state + .radarr + .as_ref() + .unwrap() // We can be sure Radarr is enabled here + .resolve_name(tmdb_id) + .await + .map(|movie| format_movie_feed_title(&movie.title, movie.year)) + .map_err(HttpError::Radarr)?; + let items: Vec = collected + .into_iter() + .skip(offset) + .take(limit) + .map(|torrent| build_torznab_item(torrent, feed_title.clone(), 
movie_category_ids())) .collect(); + let xml = torznab::render_feed(&metadata, &items, offset, total)?; Ok(( @@ -452,8 +685,11 @@ async fn resolve_feed_title( season: u32, ) -> Result { debug!(tvdb_id, season, "resolving title from sonarr"); - let series_title = state + let sonarr = state .sonarr + .as_ref() + .ok_or_else(|| HttpError::UnsupportedOperation("Sonarr is disabled".to_string()))?; + let series_title = sonarr .resolve_name(tvdb_id) .await .map_err(HttpError::Sonarr)?; @@ -461,6 +697,14 @@ async fn resolve_feed_title( Ok(format!("{series_title} S{season:02} Bluray 1080p remux")) } +fn format_movie_feed_title(title: &str, year: u32) -> String { + if year == 0 { + format!("{title} Bluray 1080p remux") + } else { + format!("{title} ({year}) Bluray 1080p remux") + } +} + fn build_channel_metadata(state: &AppState) -> Result { let base = match state.config.public_base_url.clone() { Some(url) => url, @@ -476,7 +720,7 @@ fn build_channel_metadata(state: &AppState) -> Result, @@ -511,6 +755,43 @@ async fn resolve_generic_search_title( Ok(default_torrent_title(&torrent.id)) } +async fn resolve_movie_generic_title( + state: &AppState, + anilist_id: i64, + cache: &mut HashMap, + active_tmdb_ids: &mut HashSet, +) -> Result, HttpError> { + let Some(tmdb_id) = state + .mappings + .resolve_tmdb_id(anilist_id) + .await + .map_err(HttpError::Mapping)? + else { + return Ok(None); + }; + + if let Some(existing) = cache.get(&tmdb_id) { + active_tmdb_ids.insert(tmdb_id); + return Ok(Some(existing.clone())); + } + + let radarr = state + .radarr + .as_ref() + .ok_or_else(|| HttpError::UnsupportedOperation("Radarr is disabled".to_string()))?; + + let movie = match radarr.resolve_name(tmdb_id).await { + Ok(movie) => movie, + Err(RadarrError::NotFound { .. 
}) => return Ok(None), + Err(err) => return Err(HttpError::Radarr(err)), + }; + + let formatted = format_movie_feed_title(&movie.title, movie.year); + cache.insert(tmdb_id, formatted.clone()); + active_tmdb_ids.insert(tmdb_id); + Ok(Some(formatted)) +} + fn select_tvdb_and_season(mappings: &[TvdbMapping]) -> Option<(i64, u32)> { let mut best: Option<(i64, u32)> = None; @@ -537,27 +818,27 @@ fn select_tvdb_and_season(mappings: &[TvdbMapping]) -> Option<(i64, u32)> { best } -fn parse_season_key(key: &str) -> Option { - if !key.starts_with('s') { - return None; - } +fn default_torrent_title(id: &str) -> String { + format!("Torrent {id}") +} - let digits: String = key[1..] - .chars() - .take_while(|ch| ch.is_ascii_digit()) - .collect(); - if digits.is_empty() { - return None; +fn tv_category_ids() -> Vec { + let mut ids = vec![torznab::ANIME_CATEGORY.id]; + if let Some(sub) = torznab::ANIME_CATEGORY.subcategories.first() { + ids.push(sub.id); } - - digits.parse().ok() + ids } -fn default_torrent_title(id: &str) -> String { - format!("Torrent {id}") +fn movie_category_ids() -> Vec { + vec![torznab::MOVIE_CATEGORY.id] } -fn build_torznab_item(torrent: crate::releases::Torrent, title: String) -> TorznabItem { +fn build_torznab_item( + torrent: crate::releases::Torrent, + title: String, + categories: Vec, +) -> TorznabItem { let crate::releases::Torrent { id, download_url, @@ -587,6 +868,7 @@ fn build_torznab_item(torrent: crate::releases::Torrent, title: String) -> Torzn info_hash, seeders, leechers: 0, + categories, } } @@ -596,6 +878,7 @@ fn category_filter_matches(cat_param: &Option) -> bool { Some(value) => { let mut matches_supported = false; let mut any_values = false; + let categories = torznab::default_categories(); for part in value.split(',') { let trimmed = part.trim(); if trimmed.is_empty() { @@ -605,14 +888,12 @@ fn category_filter_matches(cat_param: &Option) -> bool { if trimmed == "0" { return true; } - if let Ok(id) = trimmed.parse::() - && (id == 
torznab::ANIME_CATEGORY.id - || torznab::ANIME_CATEGORY - .subcategories - .iter() - .any(|sub| sub.id == id)) - { - matches_supported = true; + if let Ok(id) = trimmed.parse::() { + if categories.iter().any(|category| { + category.id == id || category.subcategories.iter().any(|sub| sub.id == id) + }) { + matches_supported = true; + } } } @@ -637,6 +918,8 @@ pub enum HttpError { AniList(#[from] AniListError), #[error(transparent)] Sonarr(#[from] SonarrError), + #[error(transparent)] + Radarr(#[from] RadarrError), } impl IntoResponse for HttpError { @@ -674,6 +957,11 @@ impl IntoResponse for HttpError { Cow::from("Failed to construct Sonarr request"), ), HttpError::Sonarr(_) => (StatusCode::BAD_GATEWAY, Cow::from("Failed to query Sonarr")), + HttpError::Radarr(RadarrError::Url(_)) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Cow::from("Failed to construct Radarr request"), + ), + HttpError::Radarr(_) => (StatusCode::BAD_GATEWAY, Cow::from("Failed to query Radarr")), }; tracing::error!("torznab handler error: {self}"); diff --git a/src/main.rs b/src/main.rs index 1f1909b..055800b 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,6 +2,7 @@ mod anilist; mod config; mod http; mod mapping; +mod radarr; mod releases; mod sonarr; mod torznab; @@ -15,6 +16,7 @@ use tracing_subscriber::{EnvFilter, fmt, layer::SubscriberExt, util::SubscriberI use crate::anilist::AniListClient; use crate::config::AppConfig; use crate::mapping::PlexAniBridgeMappings; +use crate::radarr::RadarrClient; use crate::releases::ReleasesClient; use crate::sonarr::SonarrClient; @@ -22,7 +24,8 @@ use crate::sonarr::SonarrClient; pub struct AppState { pub config: AppConfig, pub anilist: AniListClient, - pub sonarr: SonarrClient, + pub sonarr: Option, + pub radarr: Option, pub releases: ReleasesClient, pub mappings: PlexAniBridgeMappings, } @@ -45,14 +48,35 @@ async fn main() -> anyhow::Result<()> { let anilist = AniListClient::new(config.anilist_base_url.clone(), config.anilist_timeout) .context("failed to 
construct AniList client")?; - let sonarr_cache_path = config.data_path.join("sonarr_titles.json"); - let sonarr = SonarrClient::new( - config.sonarr_url.clone(), - config.sonarr_api_key.clone(), - config.sonarr_timeout, - sonarr_cache_path, - ) - .context("failed to construct Sonarr client")?; + let sonarr = if let Some(sonarr_config) = &config.sonarr { + let sonarr_cache_path = config.data_path.join("sonarr_titles.json"); + Some( + SonarrClient::new( + sonarr_config.url.clone(), + sonarr_config.api_key.clone(), + sonarr_config.timeout, + sonarr_cache_path, + ) + .context("failed to construct Sonarr client")?, + ) + } else { + None + }; + + let radarr = if let Some(radarr_config) = &config.radarr { + let radarr_cache_path = config.data_path.join("radarr_titles.json"); + Some( + RadarrClient::new( + radarr_config.url.clone(), + radarr_config.api_key.clone(), + radarr_config.timeout, + radarr_cache_path, + ) + .context("failed to construct Radarr client")?, + ) + } else { + None + }; let mappings = PlexAniBridgeMappings::bootstrap( config.data_path.clone(), @@ -67,6 +91,7 @@ async fn main() -> anyhow::Result<()> { config, anilist, sonarr, + radarr, releases, mappings, }); diff --git a/src/mapping.rs b/src/mapping.rs index 636a76b..c97fed5 100644 --- a/src/mapping.rs +++ b/src/mapping.rs @@ -12,6 +12,7 @@ use reqwest::{ use serde::Deserialize; use thiserror::Error; use tokio::fs; +use tokio::task; use tokio::sync::RwLock; use tracing::{debug, trace, warn}; use url::Url; @@ -48,6 +49,8 @@ struct ReverseMappingEntry { struct MappingIndex { tvdb_to_entries: HashMap>, anilist_to_entries: HashMap>, + tmdb_to_anilist: HashMap, + anilist_to_tmdb: HashMap, } #[derive(Debug, Clone)] @@ -61,9 +64,27 @@ struct RawMappingRecord { #[serde(default)] tvdb_id: Option, #[serde(default)] + tmdb_movie_id: Option, + #[serde(default)] tvdb_mappings: HashMap, } +#[derive(Debug, Deserialize)] +#[serde(untagged)] +enum TmdbMovieId { + Single(i64), + Multiple(Vec), +} + +impl TmdbMovieId { + 
fn into_first(self) -> Option { + match self { + TmdbMovieId::Single(id) => Some(id), + TmdbMovieId::Multiple(ids) => ids.into_iter().next(), + } + } +} + impl PlexAniBridgeMappings { pub async fn bootstrap( data_path: PathBuf, @@ -206,8 +227,16 @@ impl PlexAniBridgeMappings { })? .to_vec(); - let raw: HashMap = serde_json::from_slice(&bytes)?; - let index = Self::build_index(raw); + // Offload heavy JSON deserialisation and index build to a blocking thread so the + // async runtime worker threads aren't stalled by CPU work. + let index = { + let bytes = bytes.clone(); + task::spawn_blocking(move || { + let raw: HashMap = serde_json::from_slice(&bytes)?; + Ok::(Self::build_index(raw)) + }) + .await?? + }; let series = index.tvdb_to_entries.len(); let entries = index .tvdb_to_entries @@ -362,8 +391,11 @@ impl PlexAniBridgeMappings { path: self.path.clone(), })?; - let raw: HashMap = serde_json::from_slice(&contents)?; - let index = Self::build_index(raw); + let index = task::spawn_blocking(move || { + let raw: HashMap = serde_json::from_slice(&contents)?; + Ok::(Self::build_index(raw)) + }) + .await??; let series = index.tvdb_to_entries.len(); let entries = index .tvdb_to_entries @@ -400,12 +432,10 @@ impl PlexAniBridgeMappings { fn build_index(raw: HashMap) -> MappingIndex { let mut tvdb_index: HashMap> = HashMap::new(); let mut anilist_index: HashMap> = HashMap::new(); + let mut tmdb_index: HashMap = HashMap::new(); + let mut anilist_tmdb: HashMap = HashMap::new(); for (anilist_id_str, record) in raw { - let Some(tvdb_id) = record.tvdb_id else { - continue; - }; - let Ok(anilist_id) = anilist_id_str.parse::() else { debug!( anilist_id = %anilist_id_str, @@ -414,25 +444,39 @@ impl PlexAniBridgeMappings { continue; }; - if record.tvdb_mappings.is_empty() { - trace!(anilist_id, tvdb_id, "skipping mapping with no season data"); - continue; + let RawMappingRecord { + tvdb_id, + tmdb_movie_id, + tvdb_mappings, + } = record; + + if let Some(tvdb_id) = tvdb_id { + if 
tvdb_mappings.is_empty() { + trace!(anilist_id, tvdb_id, "skipping mapping with no season data"); + } else { + let seasons = tvdb_mappings.into_keys().collect::>(); + tvdb_index.entry(tvdb_id).or_default().push(MappingEntry { + anilist_id, + seasons: seasons.clone(), + }); + anilist_index + .entry(anilist_id) + .or_default() + .push(ReverseMappingEntry { tvdb_id, seasons }); + } } - let seasons = record.tvdb_mappings.into_keys().collect::>(); - tvdb_index.entry(tvdb_id).or_default().push(MappingEntry { - anilist_id, - seasons: seasons.clone(), - }); - anilist_index - .entry(anilist_id) - .or_default() - .push(ReverseMappingEntry { tvdb_id, seasons }); + if let Some(tmdb_id) = tmdb_movie_id.and_then(|value| value.into_first()) { + tmdb_index.insert(tmdb_id, anilist_id); + anilist_tmdb.insert(anilist_id, tmdb_id); + } } MappingIndex { tvdb_to_entries: tvdb_index, anilist_to_entries: anilist_index, + tmdb_to_anilist: tmdb_index, + anilist_to_tmdb: anilist_tmdb, } } @@ -475,6 +519,68 @@ impl PlexAniBridgeMappings { Ok(None) } + pub async fn resolve_anilist_id_for_tvdb( + &self, + tvdb_id: i64, + ) -> Result, MappingError> { + let mappings = self.load_mappings().await?; + let Some(entries) = mappings.tvdb_to_entries.get(&tvdb_id) else { + debug!(tvdb_id, "no entries found for tvdb id"); + return Ok(None); + }; + + let mut best: Option<(i64, u32)> = None; + for entry in entries { + let mut seasons: Vec = entry + .seasons + .iter() + .filter_map(|key| parse_season_key(key)) + .collect(); + + let season = if seasons.is_empty() { + u32::MAX + } else { + seasons.sort_unstable(); + seasons[0] + }; + + match best { + Some((_, best_season)) if season >= best_season => {} + _ => best = Some((entry.anilist_id, season)), + } + } + + if let Some((anilist_id, season)) = best { + debug!( + tvdb_id, + anilist_id, season, "selected mapping for tv search" + ); + return Ok(Some(anilist_id)); + } + + debug!(tvdb_id, "failed to select mapping for movie search"); + Ok(None) + } + + pub 
async fn resolve_anilist_id_for_tmdb( + &self, + tmdb_id: i64, + ) -> Result, MappingError> { + let mappings = self.load_mappings().await?; + if let Some(anilist_id) = mappings.tmdb_to_anilist.get(&tmdb_id) { + debug!(tmdb_id, anilist_id, "resolved tmdb mapping"); + Ok(Some(*anilist_id)) + } else { + debug!(tmdb_id, "no tmdb mapping found"); + Ok(None) + } + } + + pub async fn resolve_tmdb_id(&self, anilist_id: i64) -> Result, MappingError> { + let mappings = self.load_mappings().await?; + Ok(mappings.anilist_to_tmdb.get(&anilist_id).copied()) + } + pub async fn resolve_tvdb_mappings( &self, anilist_id: i64, @@ -499,6 +605,22 @@ impl PlexAniBridgeMappings { } } +pub(crate) fn parse_season_key(key: &str) -> Option { + if !key.starts_with('s') { + return None; + } + + let digits: String = key[1..] + .chars() + .take_while(|ch| ch.is_ascii_digit()) + .collect(); + if digits.is_empty() { + return None; + } + + digits.parse().ok() +} + #[derive(Debug, Error)] pub enum MappingError { #[error("failed to download plexanibridge mappings from {url}")] @@ -533,4 +655,6 @@ pub enum MappingError { }, #[error("failed to deserialise plexanibridge mapping file")] Deserialisation(#[from] serde_json::Error), + #[error("background task failed")] + TaskJoin(#[from] tokio::task::JoinError), } diff --git a/src/radarr.rs b/src/radarr.rs new file mode 100644 index 0000000..08fdd87 --- /dev/null +++ b/src/radarr.rs @@ -0,0 +1,249 @@ +use std::{ + collections::{HashMap, HashSet}, + io::ErrorKind, + path::{Path, PathBuf}, + sync::Arc, + time::Duration, +}; + +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use tokio::sync::RwLock; +use tokio::task; +use tracing::debug; +use url::Url; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RadarrMovie { + pub title: String, + pub year: u32, +} + +#[derive(Debug, Clone)] +pub struct RadarrClient { + http: Client, + base_url: Url, + api_key: String, + cache: Arc>>, + cache_path: PathBuf, +} + +impl 
RadarrClient { + pub fn new( + base_url: Url, + api_key: String, + timeout: Duration, + cache_path: PathBuf, + ) -> anyhow::Result { + let http = Client::builder() + .timeout(timeout) + .user_agent(format!("seadexerr/{}", env!("CARGO_PKG_VERSION"))) + .build()?; + + let cache = load_cache(&cache_path)?; + + Ok(Self { + http, + base_url, + api_key, + cache: Arc::new(RwLock::new(cache)), + cache_path, + }) + } + + pub async fn resolve_name(&self, tmdb_id: i64) -> Result { + if let Some(existing) = self.cached_movie(tmdb_id).await { + debug!(tmdb_id, "using cached Radarr title"); + return Ok(existing); + } + + let mut url = self + .base_url + .join("api/v3/movie/lookup/tmdb") + .map_err(RadarrError::Url)?; + + { + let mut pairs = url.query_pairs_mut(); + pairs.append_pair("tmdbId", &tmdb_id.to_string()); + } + + debug!(tmdb_id, url = %url, "requesting Radarr movie lookup"); + + let response = self + .http + .get(url) + .header("X-Api-Key", &self.api_key) + .send() + .await? + .error_for_status()?; + + let payload: MovieLookupEntry = response.json().await?; + + let Some(title) = payload.title else { + return Err(RadarrError::NotFound { tmdb_id }); + }; + + let Some(year) = payload.year else { + debug!(tmdb_id, "skipping Radarr movie lookup due to missing year"); + return Err(RadarrError::NotFound { tmdb_id }); + }; + + let movie = RadarrMovie { + title, + year, + }; + + self.store_movie(tmdb_id, &movie).await?; + + Ok(movie) + } + + pub async fn retain_titles(&self, keep: &HashSet) -> Result<(), RadarrError> { + if keep.is_empty() { + let mut guard = self.cache.write().await; + if guard.is_empty() { + return Ok(()); + } + guard.clear(); + drop(guard); + return self.persist_cache().await; + } + + let mut guard = self.cache.write().await; + let original_len = guard.len(); + guard.retain(|tmdb_id, _| keep.contains(tmdb_id)); + + if guard.len() == original_len { + return Ok(()); + } + + drop(guard); + self.persist_cache().await + } + + async fn cached_movie(&self, tmdb_id: 
i64) -> Option { + let guard = self.cache.read().await; + guard.get(&tmdb_id).cloned() + } + + async fn store_movie(&self, tmdb_id: i64, movie: &RadarrMovie) -> Result<(), RadarrError> { + { + let mut guard = self.cache.write().await; + guard.insert(tmdb_id, movie.clone()); + } + self.persist_cache().await + } + + async fn persist_cache(&self) -> Result<(), RadarrError> { + // Clone snapshot while holding the lock then offload CPU + IO to blocking thread. + let snapshot = { + let guard = self.cache.read().await; + guard.clone() + }; + + let path = self.cache_path.clone(); + + let result = task::spawn_blocking(move || -> Result<(), Box> { + let json = serde_json::to_vec_pretty(&snapshot)?; + + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + + std::fs::write(&path, json)?; + + Ok(()) + }) + .await + .map_err(|source| RadarrError::CacheWrite { + source: std::io::Error::new(std::io::ErrorKind::Other, format!("join error: {source}")), + path: self.cache_path.clone(), + })?; + + if let Err(_err) = result { + return Err(RadarrError::CacheWrite { + source: std::io::Error::new(std::io::ErrorKind::Other, "failed to persist cache"), + path: self.cache_path.clone(), + }); + } + + Ok(()) + } +} + +#[derive(Debug, Deserialize)] +struct MovieLookupEntry { + #[serde(default)] + title: Option, + #[serde(default)] + year: Option, +} + +fn load_cache(path: &Path) -> Result, RadarrError> { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent).map_err(|source| RadarrError::CacheDir { + source, + path: parent.to_path_buf(), + })?; + } + + let bytes = match std::fs::read(path) { + Ok(bytes) => bytes, + Err(err) if err.kind() == ErrorKind::NotFound => return Ok(HashMap::new()), + Err(source) => { + return Err(RadarrError::CacheRead { + source, + path: path.to_path_buf(), + }); + } + }; + + if bytes.is_empty() { + return Ok(HashMap::new()); + } + + let data: HashMap = + serde_json::from_slice(&bytes).map_err(|source| 
RadarrError::CacheParse { + source, + path: path.to_path_buf(), + })?; + + Ok(data) +} + +#[derive(Debug, Error)] +pub enum RadarrError { + #[error("failed to build Radarr request url")] + Url(#[from] url::ParseError), + #[error("http error when querying Radarr api")] + Http(#[from] reqwest::Error), + #[error("no Radarr movie title found for tmdb {tmdb_id}")] + NotFound { tmdb_id: i64 }, + #[error("failed to read cached Radarr titles at {path}")] + CacheRead { + #[source] + source: std::io::Error, + path: PathBuf, + }, + #[error("failed to write cached Radarr titles at {path}")] + CacheWrite { + #[source] + source: std::io::Error, + path: PathBuf, + }, + #[error("failed to parse cached Radarr titles at {path}")] + CacheParse { + #[source] + source: serde_json::Error, + path: PathBuf, + }, + #[error("failed to serialise cached Radarr titles")] + CacheSerialise(#[from] serde_json::Error), + #[error("failed to create cache directory at {path}")] + CacheDir { + #[source] + source: std::io::Error, + path: PathBuf, + }, +} diff --git a/src/releases.rs b/src/releases.rs index 269ad4c..22329d0 100644 --- a/src/releases.rs +++ b/src/releases.rs @@ -33,21 +33,14 @@ impl ReleasesClient { anilist_id: i64, limit: usize, ) -> Result, ReleasesError> { - let mut url = self - .base_url - .join("collections/entries/records") - .map_err(ReleasesError::Url)?; - - { - let mut pairs = url.query_pairs_mut(); - pairs.append_pair("expand", "trs"); - pairs.append_pair("filter", &format!("(alID={anilist_id})&&incomplete=false")); - pairs.append_pair("page", "1"); - pairs.append_pair("perPage", &limit.min(self.default_limit).to_string()); - } - - let response = self.http.get(url).send().await?.error_for_status()?; - let payload: EntriesResponse = response.json().await?; + let payload = self + .fetch_entries_with(limit, |params| { + params.push(( + "filter".to_string(), + format!("(alID={anilist_id})&&incomplete=false"), + )); + }) + .await?; debug!( anilist_id, @@ -56,19 +49,8 @@ impl 
ReleasesClient { "releases.moe entries response received" ); - let torrents: Vec = payload - .items + let torrents: Vec = Self::entries_to_torrents(payload.items) .into_iter() - .flat_map(|entry| { - let al_id = entry.al_id; - entry.expand.into_iter().flat_map(move |expand| { - expand.trs.into_iter().map(move |record| (al_id, record)) - }) - }) - .filter(|(_, record)| record.tracker == "Nyaa") - .filter(|(_, record)| !record.tags.contains(&"Incomplete".to_string())) - .filter(|(_, record)| rewritten_download_url(record).is_some()) - .map(|(al_id, record)| Torrent::from_record(record, al_id)) .take(limit) .collect(); @@ -85,6 +67,43 @@ impl ReleasesClient { &self, limit: usize, ) -> Result, ReleasesError> { + let payload = self + .fetch_entries_with(limit, |params| { + params.push(("sort".to_string(), "-updated".to_string())); + params.push(("filter".to_string(), "(incomplete=false)".to_string())); + }) + .await?; + + let torrents = Self::entries_to_torrents(payload.items); + + debug!( + feed = "recent-public", + limit, + returned = torrents.len(), + "releases.moe entries response received" + ); + + Ok(torrents) + } + + async fn fetch_entries_with( + &self, + limit: usize, + configure: F, + ) -> Result + where + F: FnOnce(&mut Vec<(String, String)>), + { + let mut params = vec![ + ("expand".to_string(), "trs".to_string()), + ("page".to_string(), "1".to_string()), + ( + "perPage".to_string(), + limit.min(self.default_limit).to_string(), + ), + ]; + configure(&mut params); + let mut url = self .base_url .join("collections/entries/records") @@ -92,18 +111,19 @@ impl ReleasesClient { { let mut pairs = url.query_pairs_mut(); - pairs.append_pair("expand", "trs"); - pairs.append_pair("sort", "-updated"); - pairs.append_pair("filter", "(incomplete=false)"); - pairs.append_pair("page", "1"); - pairs.append_pair("perPage", &limit.min(self.default_limit).to_string()); + for (key, value) in params { + pairs.append_pair(&key, &value); + } } let response = 
self.http.get(url).send().await?.error_for_status()?; let payload: EntriesResponse = response.json().await?; - let torrents: Vec = payload - .items + Ok(payload) + } + + fn entries_to_torrents(entries: Vec) -> Vec { + entries .into_iter() .flat_map(|entry| { let al_id = entry.al_id; @@ -115,16 +135,7 @@ impl ReleasesClient { .filter(|(_, record)| !record.tags.contains(&"Incomplete".to_string())) .filter(|(_, record)| rewritten_download_url(record).is_some()) .map(|(al_id, record)| Torrent::from_record(record, al_id)) - .collect(); - - debug!( - feed = "recent-public", - limit, - returned = torrents.len(), - "releases.moe entries response received" - ); - - Ok(torrents) + .collect() } pub async fn resolve_anilist_ids_for_torrents( diff --git a/src/sonarr.rs b/src/sonarr.rs index d419d4d..0d2cc31 100644 --- a/src/sonarr.rs +++ b/src/sonarr.rs @@ -9,7 +9,8 @@ use std::{ use reqwest::Client; use serde::Deserialize; use thiserror::Error; -use tokio::{fs as async_fs, sync::RwLock}; +use tokio::sync::RwLock; +use tokio::task; use tracing::debug; use url::Url; @@ -129,28 +130,40 @@ impl SonarrClient { } async fn persist_cache(&self) -> Result<(), SonarrError> { + // Clone snapshot under the read lock, then offload serialization + write + // to a blocking thread to avoid blocking tokio worker threads. 
let snapshot = { let guard = self.cache.read().await; guard.clone() }; - let json = serde_json::to_vec_pretty(&snapshot).map_err(SonarrError::CacheSerialise)?; + let path = self.cache_path.clone(); - if let Some(parent) = self.cache_path.parent() { - async_fs::create_dir_all(parent) - .await - .map_err(|source| SonarrError::CacheDir { - source, - path: parent.to_path_buf(), - })?; - } + let result = task::spawn_blocking(move || -> Result<(), Box> { + let json = serde_json::to_vec_pretty(&snapshot)?; - async_fs::write(&self.cache_path, json) - .await - .map_err(|source| SonarrError::CacheWrite { - source, + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + + std::fs::write(&path, json)?; + + Ok(()) + }) + .await + .map_err(|source| SonarrError::CacheWrite { + source: std::io::Error::new(std::io::ErrorKind::Other, format!("join error: {source}")), + path: self.cache_path.clone(), + })?; + + if let Err(_err) = result { + // For simplicity, map any persistence error to CacheWrite. We avoid trying to + // downcast boxed errors back to concrete types here. 
+ return Err(SonarrError::CacheWrite { + source: std::io::Error::new(std::io::ErrorKind::Other, "failed to persist cache"), path: self.cache_path.clone(), - })?; + }); + } Ok(()) } diff --git a/src/torznab.rs b/src/torznab.rs index a4da5cc..c5d4600 100644 --- a/src/torznab.rs +++ b/src/torznab.rs @@ -21,6 +21,7 @@ pub struct TorznabItem { pub info_hash: Option, pub seeders: u32, pub leechers: u32, + pub categories: Vec, } #[derive(Debug, Clone)] @@ -48,14 +49,22 @@ pub const ANIME_CATEGORY: TorznabCategory = TorznabCategory { }], }; +pub const MOVIE_CATEGORY: TorznabCategory = TorznabCategory { + id: 2000, + name: "Movies", + subcategories: &[], +}; + pub fn default_categories() -> Vec { - vec![ANIME_CATEGORY] + vec![ANIME_CATEGORY, MOVIE_CATEGORY] } #[derive(Debug, Error)] pub enum TorznabBuildError { #[error("failed to build XML document")] Xml(#[from] quick_xml::Error), + #[error("failed to write XML document")] + Io(#[from] std::io::Error), #[error("failed to format XML document as UTF-8")] Utf8(#[from] std::string::FromUtf8Error), #[error("failed to format timestamp in RFC2822 format")] @@ -96,6 +105,11 @@ pub fn render_caps(metadata: &ChannelMetadata) -> Result Result 0 { + writer.write_event(Event::Start(category_el))?; + + for sub in category.subcategories { + let sub_id = sub.id.to_string(); + let mut sub_el = BytesStart::new("subcat"); + sub_el.push_attribute(("id", sub_id.as_str())); + sub_el.push_attribute(("name", sub.name)); + writer.write_event(Event::Empty(sub_el))?; + } + + writer.write_event(Event::End(BytesEnd::new("category")))?; + } else { + writer.write_event(Event::Empty(category_el))?; + } } writer.write_event(Event::End(BytesEnd::new("categories")))?; @@ -179,9 +198,15 @@ pub fn render_feed( enclosure.push_attribute(("length", item.size_bytes.to_string().as_str())); writer.write_event(Event::Empty(enclosure))?; - write_attr(&mut writer, "category", &ANIME_CATEGORY.id.to_string())?; - if let Some(sub) = ANIME_CATEGORY.subcategories.first() { 
- write_attr(&mut writer, "category", &sub.id.to_string())?; + if item.categories.is_empty() { + write_attr(&mut writer, "category", &ANIME_CATEGORY.id.to_string())?; + if let Some(sub) = ANIME_CATEGORY.subcategories.first() { + write_attr(&mut writer, "category", &sub.id.to_string())?; + } + } else { + for category_id in &item.categories { + write_attr(&mut writer, "category", &category_id.to_string())?; + } } write_attr(&mut writer, "seeders", &item.seeders.to_string())?; write_attr(&mut writer, "leechers", &item.leechers.to_string())?;