|
| 1 | +import asyncio |
1 | 2 | import re |
2 | 3 | from typing import Any, Dict, List |
| 4 | +from urllib.parse import urlencode |
3 | 5 |
|
| 6 | +import aiohttp |
4 | 7 | import log |
5 | 8 | import requests |
6 | 9 | import settings |
@@ -44,25 +47,54 @@ def _extract_torrent_link(result: Dict[str, Any]) -> str | None: |
44 | 47 | return None |
45 | 48 |
|
46 | 49 |
|
@memoize(3600)
def get_indexer_ids() -> List[int]:
    """Return the ids of all enabled Prowlarr indexers.

    Queries the Prowlarr ``/indexer`` endpoint and keeps only entries
    that are dicts with a truthy ``enable`` flag.  Cached for an hour
    via ``memoize`` since the indexer list rarely changes.

    Raises:
        requests.RequestException: on network failure or non-2xx status.
    """
    resp = requests.get(
        f"{API_PATH}/indexer",
        headers={"X-Api-Key": API_KEY},
        timeout=10,
    )
    resp.raise_for_status()
    data: Any = resp.json()
    # Prowlarr can return an error object instead of a list; treat any
    # non-list payload as "no indexers" instead of iterating dict keys
    # (same guard the other JSON-handling helpers in this module use).
    if not isinstance(data, list):
        return []
    return [i["id"] for i in data if isinstance(i, dict) and i.get("enable")]
| 61 | + |
async def fetch_json(session: aiohttp.ClientSession, url: str) -> List[Dict[str, Any]]:
    """GET *url* through *session* and return its JSON payload as a list of dicts.

    Any failure — timeout, connection/HTTP error, malformed or non-list
    JSON — yields an empty list, so one broken indexer does not abort
    the whole concurrent fan-out in fetch_all().
    """
    try:
        async with session.get(url) as response:
            data: Any = await response.json()
    # asyncio.TimeoutError is only an alias of the builtin TimeoutError on
    # Python >= 3.11; list both so timeouts are caught on older versions.
    # ValueError covers json.JSONDecodeError from a malformed body
    # (ContentTypeError, a ClientError, only covers a wrong Content-Type).
    except (TimeoutError, asyncio.TimeoutError, aiohttp.ClientError, ValueError):
        return []
    if not isinstance(data, list):
        return []
    return [r for r in data if isinstance(r, dict)]  # type: ignore[reportUnknownVariableType]
| 72 | + |
async def fetch_all(urls: List[str]) -> List[List[Dict[str, Any]]]:
    """Fetch every URL concurrently and return their JSON payloads.

    A single ClientSession (shared API key header, 5 s total timeout per
    request) serves all requests; asyncio.gather preserves the order of
    *urls* in the returned results.
    """
    timeout = aiohttp.ClientTimeout(total=5)
    headers = {"X-Api-Key": API_KEY}
    async with aiohttp.ClientSession(timeout=timeout, headers=headers) as session:
        # gather() schedules the coroutines itself; no ensure_future needed.
        coros = (fetch_json(session, url) for url in urls)
        return await asyncio.gather(*coros)
| 81 | + |
47 | 82 | @memoize(300) |
48 | 83 | def search(searchterm: str) -> List[Dict[str, int | str | Any | None]]: |
49 | 84 | magnet_links: List[Dict[str, int | str | Any | None]] = [] |
50 | 85 | try: |
51 | | - params: Dict[str, str] = { |
52 | | - "query": searchterm, |
53 | | - "type": "search", |
54 | | - "limit": "100", |
55 | | - } |
56 | | - headers: Dict[str, str] = {"X-Api-Key": API_KEY} |
57 | | - resp = requests.get( |
58 | | - f"{API_PATH}/search", params=params, headers=headers, timeout=15 |
59 | | - ) |
60 | | - resp.raise_for_status() |
61 | | - data: Any = resp.json() |
62 | | - if not isinstance(data, list): |
63 | | - log.debug(f"Prowlarr returned unexpected payload (status={resp.status_code}): {data!r:.200}") |
64 | | - return magnet_links |
65 | | - results: List[Dict[str, Any]] = [r for r in data if isinstance(r, dict)] # type: ignore[reportUnknownVariableType] |
| 86 | + urls = [ |
| 87 | + f"{API_PATH}/search?{urlencode({'query': searchterm, 'type': 'search', 'limit': 100, 'indexerIds': iid})}" |
| 88 | + for iid in get_indexer_ids() |
| 89 | + ] |
| 90 | + |
| 91 | + loop = asyncio.new_event_loop() |
| 92 | + asyncio.set_event_loop(loop) |
| 93 | + responses = loop.run_until_complete(fetch_all(urls)) |
| 94 | + |
| 95 | + results: List[Dict[str, Any]] = [] |
| 96 | + for data in responses: |
| 97 | + results.extend(data) |
66 | 98 |
|
67 | 99 | # Prowlarr can also return usenet results; only torrents are usable here |
68 | 100 | results = [ |
@@ -124,7 +156,7 @@ def sort_by_only_season(x: Dict[str, Any]) -> int: |
124 | 156 | "published": parse(published) if published else None, |
125 | 157 | } |
126 | 158 | ) |
127 | | - except (requests.RequestException, KeyError, ValueError) as e: |
| 159 | + except (requests.RequestException, aiohttp.ClientError, KeyError, ValueError) as e: |
128 | 160 | log.write_log() |
129 | 161 | log.debug(f"Prowlarr search failed: {e}") |
130 | 162 | return magnet_links |
0 commit comments