Skip to content

Commit b4ece5c

Browse files
committed
Parallelize Prowlarr search across indexers using aiohttp
1 parent 989d30f commit b4ece5c

1 file changed

Lines changed: 48 additions & 16 deletions

File tree

app/prowlarr.py

Lines changed: 48 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,9 @@
1+
import asyncio
12
import re
23
from typing import Any, Dict, List
4+
from urllib.parse import urlencode
35

6+
import aiohttp
47
import log
58
import requests
69
import settings
@@ -44,25 +47,54 @@ def _extract_torrent_link(result: Dict[str, Any]) -> str | None:
4447
return None
4548

4649

50+
@memoize(3600)
def get_indexer_ids() -> List[int]:
    """Return the ids of all enabled Prowlarr indexers.

    Cached for an hour via ``memoize``; the indexer list rarely changes.

    Returns:
        Ids of every indexer whose ``enable`` flag is truthy.  An
        unexpected (non-list) payload yields an empty list.

    Raises:
        requests.RequestException: if Prowlarr is unreachable or answers
            with an HTTP error status.
    """
    resp = requests.get(
        f"{API_PATH}/indexer",
        headers={"X-Api-Key": API_KEY},
        timeout=10,
    )
    resp.raise_for_status()
    data: Any = resp.json()
    # Defensive guard, matching how search results are validated elsewhere
    # in this module: a non-list payload (error object, HTML, ...) must not
    # raise TypeError here — just report no usable indexers.
    if not isinstance(data, list):
        return []
    return [i["id"] for i in data if isinstance(i, dict) and i.get("enable")]
60+
61+
62+
async def fetch_json(session: aiohttp.ClientSession, url: str) -> List[Dict[str, Any]]:
    """GET *url* through *session* and return its JSON body as a list of dicts.

    Every failure mode — timeout, connection/HTTP error, non-JSON or
    malformed body, unexpected payload shape — degrades to an empty list,
    so a single broken indexer cannot sink the whole parallel search.
    """
    try:
        async with session.get(url) as response:
            payload: Any = await response.json()
    # asyncio.TimeoutError is what aiohttp raises when ClientTimeout fires;
    # it is only an alias of the builtin TimeoutError on Python 3.11+, so
    # catch it explicitly for older interpreters.  ValueError covers
    # json.JSONDecodeError from a body that is not valid JSON (a wrong
    # Content-Type raises ContentTypeError, already a ClientError subclass).
    except (asyncio.TimeoutError, aiohttp.ClientError, ValueError):
        return []
    if not isinstance(payload, list):
        return []
    return [item for item in payload if isinstance(item, dict)]
71+
72+
73+
async def fetch_all(urls: List[str]) -> List[List[Dict[str, Any]]]:
    """Fetch all *urls* concurrently over one authenticated session.

    Results come back in the same order as *urls*; per-URL failures are
    handled inside fetch_json, so no exception propagates from here for
    an individual indexer.
    """
    client_timeout = aiohttp.ClientTimeout(total=5)
    auth_headers = {"X-Api-Key": API_KEY}
    async with aiohttp.ClientSession(timeout=client_timeout, headers=auth_headers) as session:
        # gather() wraps bare coroutines in tasks itself, so there is no
        # need for an explicit ensure_future() per request.
        return await asyncio.gather(*(fetch_json(session, u) for u in urls))
80+
81+
4782
@memoize(300)
4883
def search(searchterm: str) -> List[Dict[str, int | str | Any | None]]:
4984
magnet_links: List[Dict[str, int | str | Any | None]] = []
5085
try:
51-
params: Dict[str, str] = {
52-
"query": searchterm,
53-
"type": "search",
54-
"limit": "100",
55-
}
56-
headers: Dict[str, str] = {"X-Api-Key": API_KEY}
57-
resp = requests.get(
58-
f"{API_PATH}/search", params=params, headers=headers, timeout=15
59-
)
60-
resp.raise_for_status()
61-
data: Any = resp.json()
62-
if not isinstance(data, list):
63-
log.debug(f"Prowlarr returned unexpected payload (status={resp.status_code}): {data!r:.200}")
64-
return magnet_links
65-
results: List[Dict[str, Any]] = [r for r in data if isinstance(r, dict)] # type: ignore[reportUnknownVariableType]
86+
urls = [
87+
f"{API_PATH}/search?{urlencode({'query': searchterm, 'type': 'search', 'limit': 100, 'indexerIds': iid})}"
88+
for iid in get_indexer_ids()
89+
]
90+
91+
loop = asyncio.new_event_loop()
92+
asyncio.set_event_loop(loop)
93+
responses = loop.run_until_complete(fetch_all(urls))
94+
95+
results: List[Dict[str, Any]] = []
96+
for data in responses:
97+
results.extend(data)
6698

6799
# Prowlarr can also return usenet results; only torrents are usable here
68100
results = [
@@ -124,7 +156,7 @@ def sort_by_only_season(x: Dict[str, Any]) -> int:
124156
"published": parse(published) if published else None,
125157
}
126158
)
127-
except (requests.RequestException, KeyError, ValueError) as e:
159+
except (requests.RequestException, aiohttp.ClientError, KeyError, ValueError) as e:
128160
log.write_log()
129161
log.debug(f"Prowlarr search failed: {e}")
130162
return magnet_links

0 commit comments

Comments
 (0)