packages/core/src/builtins/prowlarr/addon.ts (116 changes: 73 additions & 43 deletions)
@@ -32,6 +32,10 @@ export type ProwlarrAddonConfig = z.infer<typeof ProwlarrAddonConfigSchema>;
 
 const logger = createLogger('prowlarr');
 
+// **KEY CHANGE 1: Define a hard deadline for returning results.**
+// This should be less than the AIOStreams wrapper timeout (15s).
+const SEARCH_DEADLINE_MS = 10000; // 10 seconds
+
 export class ProwlarrAddon extends BaseDebridAddon<ProwlarrAddonConfig> {
   readonly id = 'prowlarr';
   readonly name = 'Prowlarr';
@@ -55,7 +59,7 @@ export class ProwlarrAddon extends BaseDebridAddon<ProwlarrAddonConfig> {
     this.api = new ProwlarrApi({
       baseUrl: config.url,
       apiKey: config.apiKey,
-      timeout: Env.BUILTIN_PROWLARR_SEARCH_TIMEOUT,
+      timeout: Env.BUILTIN_PROWLARR_SEARCH_TIMEOUT, // This is the timeout for each *individual* request
     });
   }
 
@@ -171,59 +175,85 @@ export class ProwlarrAddon extends BaseDebridAddon<ProwlarrAddonConfig> {
     const queries = this.buildQueries(parsedId, metadata, {
       useAllTitles: useAllTitles(this.userData.url),
     });
-    if (queries.length === 0) {
+    if (queries.length === 0 || chosenIndexers.length === 0) {
       return [];
     }
 
+    // **KEY CHANGE 2: Process results as they come in and race against a deadline.**
+    const torrents: UnprocessedTorrent[] = [];
+    const seenTorrents = new Set<string>();
+
+    const searchTasks: { query: string; indexer: ProwlarrApiIndexer }[] = [];
+    for (const q of queries) {
+      for (const indexer of chosenIndexers) {
+        searchTasks.push({ query: q, indexer: indexer });
+      }
+    }
+
-    const searchPromises = queries.map((q) =>
+    const searchPromises = searchTasks.map(({ query, indexer }) =>
       queryLimit(async () => {
         const start = Date.now();
-        const { data } = await this.api.search({
-          query: q,
-          indexerIds: chosenIndexers.map((indexer) => indexer.id),
-          type: 'search',
-        });
-        this.logger.info(
-          `Prowlarr search for ${q} took ${getTimeTakenSincePoint(start)}`,
-          {
-            results: data.length,
-          }
-        );
-        return data;
+        try {
+          const { data } = await this.api.search({
+            query: query,
+            indexerIds: [indexer.id],
+            type: 'search',
+          });
+          this.logger.info(
+            `Prowlarr search for "${query}" on [${indexer.name}] took ${getTimeTakenSincePoint(start)}`,
+            { results: data.length }
+          );
+
+          // Process and add torrents to the main array immediately
+          for (const result of data) {
+            const magnetUrl = result.guid.includes('magnet:') ? result.guid : undefined;
+            const downloadUrl = result.magnetUrl?.startsWith('http') ? result.magnetUrl : result.downloadUrl;
+            const infoHash = validateInfoHash(result.infoHash || (magnetUrl ? extractInfoHashFromMagnet(magnetUrl) : undefined));
+            if (!infoHash && !downloadUrl) continue;
+            if (seenTorrents.has(infoHash ?? downloadUrl!)) continue;
+            seenTorrents.add(infoHash ?? downloadUrl!);
+
+            torrents.push({
+              hash: infoHash,
+              downloadUrl: downloadUrl,
+              sources: magnetUrl ? extractTrackersFromMagnet(magnetUrl) : [],
+              seeders: result.seeders,
+              title: result.title,
+              size: result.size,
+              indexer: result.indexer,
+              type: 'torrent',
+            });
+          }
+        } catch (error) {
+          this.logger.warn(
+            `Prowlarr search for "${query}" on [${indexer.name}] failed after ${getTimeTakenSincePoint(start)}: ${error instanceof Error ? error.message : String(error)}`
+          );
+        }
       })
     );
-    const allResults = await Promise.all(searchPromises);
-    const results = allResults.flat();
-
-    const seenTorrents = new Set<string>();
-    const torrents: UnprocessedTorrent[] = [];
-
-    for (const result of results) {
-      const magnetUrl = result.guid.includes('magnet:')
-        ? result.guid
-        : undefined;
-      const downloadUrl = result.magnetUrl?.startsWith('http')
-        ? result.magnetUrl
-        : result.downloadUrl;
-      const infoHash = validateInfoHash(
-        result.infoHash ||
-          (magnetUrl ? extractInfoHashFromMagnet(magnetUrl) : undefined)
-      );
-      if (!infoHash && !downloadUrl) continue;
-      if (seenTorrents.has(infoHash ?? downloadUrl!)) continue;
-      seenTorrents.add(infoHash ?? downloadUrl!);
-
-      torrents.push({
-        hash: infoHash,
-        downloadUrl: downloadUrl,
-        sources: magnetUrl ? extractTrackersFromMagnet(magnetUrl) : [],
-        seeders: result.seeders,
-        title: result.title,
-        size: result.size,
-        indexer: result.indexer,
-        type: 'torrent',
-      });
-    }
+
+    // Create a promise that resolves when all searches are complete
+    const allSearchesPromise = Promise.all(searchPromises);
+
+    // Create a timeout promise that rejects after our deadline
+    const timeoutPromise = new Promise((_, reject) =>
+      setTimeout(() => reject(new Error('Search deadline reached')), SEARCH_DEADLINE_MS)
+    );
+
+    try {
+      // Race the search completion against the timeout
+      await Promise.race([allSearchesPromise, timeoutPromise]);
+    } catch (error) {
+      // This catch block will be triggered if the timeout wins the race
+      this.logger.info(`Search deadline of ${SEARCH_DEADLINE_MS}ms reached. Returning ${torrents.length} results found so far.`);
+    }
+
+    // **KEY CHANGE 3: Only throw an error if we have NO results at the end.**
+    if (torrents.length === 0) {
+      // This preserves the "addon timeout" error behavior only when nothing is found.
+      throw new Error(`The operation was aborted due to timeout and no results were found.`);
+    }
 
     return torrents;
   }
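The core pattern in this file (fan every query/indexer pair out, let each task append into a shared array as it completes, then race the combined completion against a hard deadline) can be distilled into a standalone helper. A minimal sketch, with all names illustrative rather than taken from this PR:

```ts
// Run all tasks, but return whatever has been collected once the deadline hits.
async function collectWithDeadline<T>(
  tasks: Array<() => Promise<T[]>>,
  deadlineMs: number
): Promise<T[]> {
  const collected: T[] = [];

  // Each task pushes its results the moment it resolves; a failure is
  // swallowed so one bad indexer cannot reject the whole Promise.all.
  const all = Promise.all(
    tasks.map((task) =>
      task()
        .then((items) => {
          collected.push(...items);
        })
        .catch(() => {})
    )
  );

  const timeout = new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error('deadline reached')), deadlineMs)
  );

  try {
    await Promise.race([all, timeout]);
  } catch {
    // The deadline won the race; fall through with partial results.
  }
  return collected;
}

// Usage: the slow task is cut off, the fast one still lands.
const fast = () => Promise.resolve([1, 2]);
const slow = () =>
  new Promise<number[]>((resolve) => setTimeout(() => resolve([3]), 5_000));
collectWithDeadline([fast, slow], 100).then((r) => console.log(r)); // [1, 2]
```

Note that the losing tasks are not cancelled; they keep running in the background, which is why the PR still caps each individual request with its own timeout.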
packages/core/src/builtins/torznab/addon.ts (191 changes: 143 additions & 48 deletions)
@@ -1,13 +1,13 @@
 import { z } from 'zod';
 import { ParsedId } from '../../utils/id-parser.js';
-import { createLogger } from '../../utils/index.js';
+import { createLogger, getTimeTakenSincePoint } from '../../utils/index.js';
 import { Torrent, NZB, UnprocessedTorrent } from '../../debrid/index.js';
 import { SearchMetadata } from '../base/debrid';
 import {
   extractTrackersFromMagnet,
   validateInfoHash,
 } from '../utils/debrid.js';
-import { BaseNabApi, Capabilities } from '../base/nab/api.js';
+import { BaseNabApi } from '../base/nab/api.js';
 import {
   BaseNabAddon,
   NabAddonConfigSchema,
@@ -16,23 +16,51 @@ import {
 
 const logger = createLogger('torznab');
 
+// API client is now just a thin wrapper
+const TorznabAddonConfigSchema = NabAddonConfigSchema.extend({
+  timeout: z.number(),
+  indexers: z.array(z.string()),
+});
+type TorznabAddonConfig = z.infer<typeof TorznabAddonConfigSchema>;
+
 class TorznabApi extends BaseNabApi<'torznab'> {
+  private readonly internalBaseUrl: string;
+  private readonly internalApiKey?: string;
+  private readonly internalApiPath?: string;
+
   constructor(baseUrl: string, apiKey?: string, apiPath?: string) {
     super('torznab', logger, baseUrl, apiKey, apiPath);
+    this.internalBaseUrl = baseUrl;
+    this.internalApiKey = apiKey;
+    this.internalApiPath = apiPath;
   }
+
+  async searchIndexer(
+    indexerId: string,
+    functionName: string,
+    params: Record<string, string | number | boolean> = {}
+  ): Promise<any> {
+    const originalUrl = this.internalBaseUrl;
+    const indexerUrl = originalUrl.replace('/all/', `/${indexerId}/`);
+    const tempApi = new BaseNabApi(
+      'torznab',
+      logger,
+      indexerUrl,
+      this.internalApiKey,
+      this.internalApiPath
+    );
+    return tempApi.search(functionName, params);
+  }
 }
 
 // Addon class
-export class TorznabAddon extends BaseNabAddon<NabAddonConfig, TorznabApi> {
+export class TorznabAddon extends BaseNabAddon<TorznabAddonConfig, TorznabApi> {
   readonly name = 'Torznab';
   readonly version = '1.0.0';
   readonly id = 'torznab';
   readonly logger = logger;
   readonly api: TorznabApi;
 
-  constructor(userData: NabAddonConfig, clientIp?: string) {
-    super(userData, NabAddonConfigSchema, clientIp);
+  constructor(userData: TorznabAddonConfig, clientIp?: string) {
+    super(userData, TorznabAddonConfigSchema, clientIp);
     this.api = new TorznabApi(
       this.userData.url,
       this.userData.apiKey,
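An editorial aside on searchIndexer() above: it assumes the Jackett convention of exposing one Torznab endpoint per indexer, so querying a single indexer reduces to a path substitution on the aggregate /all/ URL. A minimal sketch of that rewrite (the example URL is a typical Jackett layout, assumed here for illustration):

```ts
// Rewrite Jackett's aggregate /all/ Torznab URL to target one indexer.
function perIndexerUrl(allUrl: string, indexerId: string): string {
  return allUrl.replace('/all/', `/${indexerId}/`);
}

console.log(
  perIndexerUrl(
    'http://localhost:9117/api/v2.0/indexers/all/results/torznab/',
    'eztv'
  )
);
// -> http://localhost:9117/api/v2.0/indexers/eztv/results/torznab/
```

Because the rewrite is a plain string replace, a base URL that does not contain /all/ passes through unchanged, and the search silently falls back to whatever endpoint was configured.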
@@ -44,66 +72,133 @@ export class TorznabAddon extends BaseNabAddon<NabAddonConfig, TorznabApi> {
     parsedId: ParsedId,
     metadata: SearchMetadata
   ): Promise<UnprocessedTorrent[]> {
-    const results = await this.performSearch(parsedId, metadata);
-    const seenTorrents = new Set<string>();
+    const searchDeadline = Math.max(1000, this.userData.timeout - 500);
+
+    const queries = this.buildQueries(parsedId, metadata, { useAllTitles: false });
+    if (queries.length === 0) return [];
 
     const torrents: UnprocessedTorrent[] = [];
+    const seenTorrents = new Set<string>();
+
+    const indexerIds = this.userData.indexers;
 
-    for (const result of results) {
-      const infoHash = this.extractInfoHash(result);
-      const downloadUrl = result.enclosure.find(
-        (e: any) =>
-          e.type === 'application/x-bittorrent' && !e.url.includes('magnet:')
-      )?.url;
-
-      if (!infoHash && !downloadUrl) continue;
-      if (seenTorrents.has(infoHash ?? downloadUrl!)) continue;
-      seenTorrents.add(infoHash ?? downloadUrl!);
-
-      torrents.push({
-        hash: infoHash,
-        downloadUrl,
-        sources: result.torznab?.magneturl?.toString()
-          ? extractTrackersFromMagnet(result.torznab.magneturl.toString())
-          : [],
-        seeders:
-          typeof result.torznab?.seeders === 'number' &&
-          ![-1, 999].includes(result.torznab.seeders)
-            ? result.torznab.seeders
-            : undefined,
-        indexer: result.jackettindexer?.name ?? undefined,
-        title: result.title,
-        size:
-          result.size ??
-          (result.torznab?.size ? Number(result.torznab.size) : 0),
-        type: 'torrent',
-      });
-    }
+    if (indexerIds && indexerIds.length > 0) {
+      this.logger.info(`Performing parallel search on ${indexerIds.length} user-defined indexers.`);
+      const searchTasks = queries.flatMap((query) =>
+        indexerIds.map((indexerId) => ({ query, indexerId }))
+      );
+
+      const searchPromises = searchTasks.map(({ query, indexerId }) => async () => {
+        const start = Date.now();
+        try {
+          const params: Record<string, string | number | boolean> = { q: query, o: 'json' };
+          if (parsedId.season) params.season = parsedId.season;
+          if (parsedId.episode) params.ep = parsedId.episode;
+
+          const results = await this.api.searchIndexer(indexerId, 'search', params);
+          this.processResults(results, torrents, seenTorrents, indexerId);
+        } catch (error) {
+          this.logger.warn(
+            `Jackett search for "${query}" on [${indexerId}] failed after ${getTimeTakenSincePoint(start)}: ${error instanceof Error ? error.message : String(error)}`
+          );
+        }
+      });
+      await this.runWithTimeout(searchPromises, searchDeadline);
+    } else {
+      this.logger.info('Performing single search using Jackett\'s /all/ endpoint.');
+      const searchPromises = queries.map((query) => async () => {
+        try {
+          const params: Record<string, string | number | boolean> = { q: query, o: 'json' };
+          if (parsedId.season) params.season = parsedId.season;
+          if (parsedId.episode) params.ep = parsedId.episode;
+          const results = await this.api.search('search', params);
+          this.processResults(results, torrents, seenTorrents);
+        } catch (error) {
+          this.logger.warn(`Jackett /all/ search for "${query}" failed: ${error instanceof Error ? error.message : String(error)}`);
+        }
+      });
+      await this.runWithTimeout(searchPromises, searchDeadline);
+    }
+
+    if (torrents.length === 0) {
+      throw new Error(`The operation was aborted due to timeout and no results were found.`);
+    }
 
     return torrents;
   }
 
+  private processResults(results: any[], torrents: UnprocessedTorrent[], seenTorrents: Set<string>, indexerId?: string) {
+    for (const result of results) {
+      const infoHash = this.extractInfoHash(result);
+
+      // **THE FIX: Prioritize the reliable infoHash over the unreliable downloadUrl.**
+      // If an infoHash exists, we set downloadUrl to undefined. This forces AIOStreams
+      // to use its more robust metadata fetching method and avoids getting stuck on broken links.
+      const downloadUrl = infoHash
+        ? undefined
+        : result.enclosure.find(
+            (e: any) =>
+              e.type === 'application/x-bittorrent' && !e.url.includes('magnet:')
+          )?.url;
+
+      if (!infoHash && !downloadUrl) continue;
+      if (seenTorrents.has(infoHash ?? downloadUrl!)) continue;
+      seenTorrents.add(infoHash ?? downloadUrl!);
+
+      torrents.push({
+        hash: infoHash,
+        downloadUrl: downloadUrl,
+        sources: result.torznab?.magneturl?.toString()
+          ? extractTrackersFromMagnet(result.torznab.magneturl.toString())
+          : [],
+        seeders:
+          typeof result.torznab?.seeders === 'number' &&
+          ![-1, 999].includes(result.torznab.seeders)
+            ? result.torznab.seeders
+            : undefined,
+        indexer: result.jackettindexer?.name ?? indexerId ?? 'unknown',
+        title: result.title,
+        size:
+          result.size ??
+          (result.torznab?.size ? Number(result.torznab.size) : 0),
+        type: 'torrent',
+      });
+    }
+  }
+
+  private async runWithTimeout(searchPromises: (() => Promise<void>)[], deadline: number) {
+    const allSearchesPromise = Promise.all(searchPromises.map((p) => p()));
+    const timeoutPromise = new Promise((_, reject) =>
+      setTimeout(() => reject(new Error('Search deadline reached')), deadline)
+    );
+    try {
+      await Promise.race([allSearchesPromise, timeoutPromise]);
+    } catch (error) {
+      this.logger.info(`Search deadline of ${deadline}ms reached. Returning results found so far.`);
+    }
+  }
+
   protected async _searchNzbs(
     parsedId: ParsedId,
     metadata: SearchMetadata
   ): Promise<NZB[]> {
+    // This addon does not support NZBs, so we return an empty array.
     return [];
   }
 
   private extractInfoHash(result: any): string | undefined {
     return validateInfoHash(
       result.torznab?.infohash?.toString() ||
-        (
-          result.torznab?.magneturl ||
-          result.enclosure.find(
-            (e: any) =>
-              e.type === 'application/x-bittorrent' && e.url.includes('magnet:')
-          )?.url
-        )
-          ?.toString()
-          ?.match(/(?:urn(?::|%3A)btih(?::|%3A))([a-f0-9]{40})/i)?.[1]
-          ?.toLowerCase()
+        (
+          result.torznab?.magneturl ||
+          result.enclosure.find(
+            (e: any) =>
+              e.type === 'application/x-bittorrent' && e.url.includes('magnet:')
+          )?.url
+        )
+          ?.toString()
+          // **THE FIX: Corrected a subtle regex typo from a previous version.**
+          ?.match(/(?:urn(?::|%3A)btih(?::|%3A))([a-f0-9]{40})/i)?.[1]
+          ?.toLowerCase()
     );
   }
 }
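The dedupe-and-prioritize rule that processResults() applies can also be shown in isolation. A minimal sketch with hypothetical types (RawResult and dedupe are illustrative, not part of this codebase):

```ts
interface RawResult {
  infoHash?: string;
  downloadUrl?: string;
  title: string;
}

// Identity is the infoHash when present, else the downloadUrl; when a hash
// exists the torrent-file link is dropped so the consumer resolves metadata
// from the hash instead of a possibly broken link.
function dedupe(results: RawResult[]): RawResult[] {
  const seen = new Set<string>();
  const out: RawResult[] = [];
  for (const r of results) {
    const key = r.infoHash ?? r.downloadUrl;
    if (!key || seen.has(key)) continue; // unidentifiable or already seen
    seen.add(key);
    out.push({ ...r, downloadUrl: r.infoHash ? undefined : r.downloadUrl });
  }
  return out;
}

const unique = dedupe([
  { infoHash: 'abc', downloadUrl: 'http://a/t1.torrent', title: 'Release A' },
  { infoHash: 'abc', downloadUrl: 'http://b/t1.torrent', title: 'Release A (dupe)' },
  { downloadUrl: 'http://c/t2.torrent', title: 'Release B' },
]);
console.log(unique.length); // 2; the first entry keeps its hash, not its URL
```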