diff --git a/lib/clients/base.py b/lib/clients/base.py
index 3f6c097..d65c717 100644
--- a/lib/clients/base.py
+++ b/lib/clients/base.py
@@ -1,6 +1,7 @@
from abc import ABC, abstractmethod
from requests import Session
-
+from typing import List
+from lib.domain.source import Source
+
+
class BaseClient(ABC):
def __init__(self, host, notification):
@@ -9,7 +10,7 @@ def __init__(self, host, notification):
self.session = Session()
@abstractmethod
- def search(self, tmdb_id, query, mode, media_type, season, episode):
+ def search(self, tmdb_id, query, mode, media_type, season, episode) -> List[Source]:
pass
@abstractmethod
diff --git a/lib/clients/debrid/torbox.py b/lib/clients/debrid/torbox.py
index a36e9ec..ab54715 100644
--- a/lib/clients/debrid/torbox.py
+++ b/lib/clients/debrid/torbox.py
@@ -77,9 +77,9 @@ def get_available_torrent(self, info_hash):
def get_torrent_instant_availability(self, torrent_hashes):
return self._make_request(
- "GET",
+ "POST",
"/torrents/checkcached",
- params={"hash": torrent_hashes, "format": "object"},
+ json={"hashes": torrent_hashes, "format": "object"},
)
def create_download_link(self, torrent_id, filename, user_ip):
diff --git a/lib/clients/debrid/torrserve.py b/lib/clients/debrid/torrserve.py
new file mode 100644
index 0000000..f666fe8
--- /dev/null
+++ b/lib/clients/debrid/torrserve.py
@@ -0,0 +1,187 @@
+import requests
+from requests.auth import HTTPBasicAuth
+from requests.exceptions import RequestException
+from urllib.parse import urlparse
+from lib.api.jacktook.kodi import kodilog
+
+class TorrServeException(Exception):
+ pass
+
+class TorrServeClient:
+ def __init__(self, base_url="http://localhost:8090", username=None, password=None):
+ # Validate and format base URL
+ parsed = urlparse(base_url)
+ if not parsed.scheme:
+ base_url = f"http://{base_url}"
+ elif parsed.scheme not in ("http", "https"):
+ raise TorrServeException("Invalid URL scheme. Use http:// or https://")
+
+ self.base_url = base_url.rstrip("/")
+ self.session = requests.Session()
+
+ if username and password:
+ self.session.auth = HTTPBasicAuth(username, password)
+
+ self.session.headers.update({
+ "Accept": "application/json",
+ "Content-Type": "application/json"
+ })
+
+ def _request(self, method, endpoint, data=None):
+ """Improved URL handling with better error messages"""
+ try:
+ # Construct safe URL
+ endpoint = endpoint.lstrip("/")
+ url = f"{self.base_url}/{endpoint}"
+
+ # Validate URL format
+ parsed = urlparse(url)
+ if not parsed.scheme or not parsed.netloc:
+ raise TorrServeException(f"Invalid URL format: {url}")
+
+ response = self.session.request(method, url, json=data)
+ response.raise_for_status()
+ return response.json()
+ except RequestException as e:
+ raise TorrServeException(f"Request to {url} failed: {str(e)}")
+ except ValueError:
+ raise TorrServeException(f"Invalid JSON response from {url}")
+
+ def get_torrent_instant_availability(self, info_hashes):
+ """
+ Check availability of torrents by their info hashes
+
+ Args:
+ info_hashes (list): List of torrent info hashes (strings)
+
+ Returns:
+ dict: Dictionary with info_hash as key and availability percentage as value
+ """
+ kodilog(f"Checking availability for {info_hashes} torrents")
+ try:
+ # Get list of all torrents
+ response = self._request("POST", "/torrents", {
+ "action": "list"
+ })
+
+ available = {}
+ hash_map = {}
+
+            # Normalize requested hashes once for case-insensitive matching
+            requested = {ih.lower() for ih in info_hashes}
+
+            # Parse response (assuming array of TorrentStatus)
+            for torrent in response:
+                if "hash" not in torrent:
+                    continue
+                t_hash = torrent["hash"].lower()
+                if t_hash not in requested:
+                    continue
+                status = torrent.get("stat", 0)
+
+                # Calculate completion percentage
+                size = torrent.get("torrent_size", 0)
+                loaded = torrent.get("preloaded_bytes", 0)
+                percentage = (loaded / size * 100) if size > 0 else 0
+
+                # Only consider working torrents (stat == 3)
+                hash_map[t_hash] = round(percentage, 2) if status == 3 else 0
+
+            # Keep only requested hashes with a non-zero completion
+            for ih in info_hashes:
+                completion = hash_map.get(ih.lower(), 0)
+                if completion:
+                    available[ih.lower()] = completion
+
+ return available
+
+ except TorrServeException as e:
+ raise TorrServeException(f"Availability check failed: {str(e)}")
+
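+    # Illustrative usage (assumed flow, exercising the methods defined in this class):
+    #   client = TorrServeClient("http://localhost:8090")
+    #   added = client.add_magnet("magnet:?xt=urn:btih:<info hash>")
+    #   if added["status"] in ("added", "duplicate"):
+    #       cached = client.get_torrent_instant_availability([added["info_hash"]])
+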
+ def add_magnet(self, magnet_uri, save_to_db=True, title=None, category=None):
+ """
+ Add a torrent by magnet URI
+
+ Args:
+ magnet_uri (str): Magnet URI to add
+ save_to_db (bool): Save torrent to database
+ title (str): Custom title for the torrent
+ category (str): Torrent category (movie/tv/music/other)
+
+ Returns:
+ dict: Dictionary containing:
+ - status: "added", "duplicate", or "error"
+ - info_hash: Torrent info hash (lowercase)
+ - percentage: Current download completion percentage
+ """
+ try:
+ # Add torrent request
+ payload = {
+ "action": "add",
+ "link": magnet_uri,
+ "save_to_db": save_to_db
+ }
+ kodilog(f"Payload: {payload}")
+ if title:
+ payload["title"] = title
+ if category:
+ payload["category"] = category
+
+ response = self._request("POST", "/torrents", payload)
+
+ # Check response status
+ status_code = response.get("stat", 0)
+ info_hash = response.get("hash", "").lower()
+
+ if not info_hash:
+ raise TorrServeException("Missing info hash in response")
+
+ # Determine status
+ if status_code == 5: # TorrentInDB
+ status = "duplicate"
+ elif status_code == 3: # TorrentWorking
+ status = "added"
+ else:
+ status = "unknown"
+
+ # Calculate percentage
+ size = response.get("torrent_size", 0)
+ loaded = response.get("preloaded_bytes", 0)
+ percentage = (loaded / size * 100) if size > 0 else 0
+
+ return {
+ "status": status,
+ "info_hash": info_hash,
+ "percentage": round(percentage, 2)
+ }
+
+ except TorrServeException as e:
+ return {
+ "status": "error",
+ "info_hash": "",
+ "percentage": 0,
+ "error": str(e)
+ }
+
+ def get_torrent_status(self, info_hash):
+ """
+ Get detailed status of a specific torrent
+ """
+ try:
+ response = self._request("POST", "/torrents", {
+ "action": "get",
+ "hash": info_hash
+ })
+ return response
+ except TorrServeException as e:
+ raise TorrServeException(f"Status check failed: {str(e)}")
+
+ def remove_torrent(self, info_hash, remove_data=False):
+ """
+ Remove a torrent from the server
+ """
+ try:
+ action = "rem" if not remove_data else "drop"
+ self._request("POST", "/torrents", {
+ "action": action,
+ "hash": info_hash
+ })
+ return True
+ except TorrServeException as e:
+ raise TorrServeException(f"Removal failed: {str(e)}")
\ No newline at end of file
diff --git a/lib/clients/debrid/transmission.py b/lib/clients/debrid/transmission.py
new file mode 100644
index 0000000..05c1d77
--- /dev/null
+++ b/lib/clients/debrid/transmission.py
@@ -0,0 +1,120 @@
+import requests
+from lib.utils.kodi_utils import notification
+from requests.exceptions import RequestException
+from lib.domain.interface.cache_provider_interface import CacheProviderInterface
+from lib.domain.cached_source import CachedSource
+from typing import Dict, List
+from lib.api.jacktook.kodi import kodilog
+from lib.domain.source import Source
+from lib.utils.kodi_formats import is_video
+
+class TransmissionException(Exception):
+ pass
+
+class TransmissionClient(CacheProviderInterface):
+ def __init__(self, base_url: str = "http://192.168.1.130:9091", downloads_url: str = "", username: str = "", password: str = ""):
+ self.base_url = base_url.rstrip("/")
+ self.session = requests.Session()
+ self.session_id = None
+ self.downloads_url = downloads_url.rstrip("/")
+ self.session.headers.update({"Content-Type": "application/json", "Accept": "application/json"})
+ if username and password:
+ self.session.auth = (username, password)
+
+ def _rpc_request(self, method, arguments=None):
+ url = f"{self.base_url}/transmission/rpc"
+ payload = {"method": method, "arguments": arguments or {}}
+
+ for _ in range(2): # Allow one retry after 409
+ try:
+ response = self.session.post(url, json=payload)
+ if response.status_code == 409:
+ self.session_id = response.headers.get("X-Transmission-Session-Id")
+ if self.session_id:
+ self.session.headers["X-Transmission-Session-Id"] = self.session_id
+ continue # Retry with new session ID
+ raise TransmissionException("Missing session ID in 409 response")
+
+ response.raise_for_status()
+ data = response.json()
+ if data.get("result") != "success":
+ raise TransmissionException(f"RPC error: {data.get('result')}")
+
+ return data.get("arguments", {})
+
+ except (RequestException, ValueError) as e:
+ raise TransmissionException(f"Request failed: {str(e)}")
+
+ raise TransmissionException("Failed after session ID retry")
+
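+    # Example RPC exchange (shape follows the Transmission RPC protocol; values illustrative):
+    #   request : {"method": "torrent-get", "arguments": {"fields": ["hashString", "percentDone"]}}
+    #   response: {"result": "success", "arguments": {"torrents": [{"hashString": "ab12...", "percentDone": 0.42}]}}
+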
+ def get_torrent_instant_availability(self, info_hashes):
+ try:
+ torrents = self._rpc_request("torrent-get", {"fields": ["hashString", "percentDone"]}).get("torrents", [])
+ hash_map = {t["hashString"].lower(): t["percentDone"] for t in torrents}
+ return {ih: round(hash_map[ih.lower()] * 100, 2) for ih in info_hashes if ih.lower() in hash_map}
+ except TransmissionException as e:
+ notification(f"Transmission error: {str(e)}")
+ raise
+
+ def add_magnet(self, magnet_uri: str):
+ try:
+ response = self._rpc_request("torrent-add", {"filename": magnet_uri, "paused": False})
+ if "torrent-added" in response:
+ torrent = response["torrent-added"]
+ return {"status": "added", "info_hash": torrent["hashString"].lower()}
+ elif "torrent-duplicate" in response:
+ torrent = response["torrent-duplicate"]
+ return {
+ "status": "duplicate",
+ "info_hash": torrent["hashString"].lower(),
+ "percentage": round(torrent["percentDone"] * 100, 2),
+ }
+ raise TransmissionException("Unexpected response structure")
+ except TransmissionException as e:
+ notification(f"Failed to add magnet: {str(e)}")
+ raise
+
+ def get_cached_hashes(self, sources: List[Source]) -> Dict[str, CachedSource]:
+ info_hashes = {source["info_hash"]: source.get("filename", "") for source in sources if source.get("info_hash")}
+ cached_sources = {}
+
+ try:
+ torrents = self._rpc_request("torrent-get", {"fields": ["hashString", "percentDone", "files"]}).get("torrents", [])
+ kodilog(f"TransmissionClient: {len(torrents)} torrents found")
+
+ for t in torrents:
+ t_hash = t["hashString"]
+ if t_hash not in info_hashes:
+ continue
+
+ filename = info_hashes[t_hash]
+
+ t_files = [f"{self.downloads_url}/{file['name']}" for file in t.get("files", [])]
+
+ first_video = ""
+ playable_url = ""
+ for file in t_files:
+ if filename and file.endswith(filename):
+ playable_url = file
+ break
+ if not first_video and is_video(file):
+ first_video = file
+
+ if not playable_url:
+ playable_url = first_video
+
+ cached_sources[t["hashString"]] = CachedSource(
+ hash=t["hashString"].lower(),
+ cache_provider=self,
+ cache_provider_name="Transmission",
+ ratio=t["percentDone"],
+ instant_availability=t["percentDone"] == 1,
+ urls=t_files,
+ playable_url=playable_url,
+ )
+
+ return cached_sources
+
+ except TransmissionException as e:
+ notification(f"Transmission error: {str(e)}")
+ raise
\ No newline at end of file
diff --git a/lib/clients/elfhosted.py b/lib/clients/elfhosted.py
index c209e2b..2d7457f 100644
--- a/lib/clients/elfhosted.py
+++ b/lib/clients/elfhosted.py
@@ -33,7 +33,7 @@ def parse_response(self, res):
"type": "Torrent",
"indexer": "Elfhosted",
"guid": item["infoHash"],
- "infoHash": item["infoHash"],
+ "info_hash": item["infoHash"],
"size": parsed_item["size"],
"publishDate": "",
"seeders": 0,
diff --git a/lib/clients/jackett.py b/lib/clients/jackett.py
index fdbcdc0..2dc8139 100644
--- a/lib/clients/jackett.py
+++ b/lib/clients/jackett.py
@@ -58,6 +58,6 @@ def extract_result(results, item):
"magnetUrl": attributes.get("magneturl", ""),
"seeders": int(attributes.get("seeders", 0)),
"peers": int(attributes.get("peers", 0)),
- "infoHash": attributes.get("infohash", ""),
+ "info_hash": attributes.get("infohash", ""),
}
)
diff --git a/lib/clients/jacktook_burst.py b/lib/clients/jacktook_burst.py
index 3c9e6f1..807cffd 100644
--- a/lib/clients/jacktook_burst.py
+++ b/lib/clients/jacktook_burst.py
@@ -31,7 +31,7 @@ def parse_response(self, res):
"indexer": "Burst",
"provider": r.indexer,
"guid": r.guid,
- "infoHash": None,
+ "info_hash": None,
"size": convert_size_to_bytes(r.size),
"seeders": int(r.seeders),
"peers": int(r.peers),
diff --git a/lib/clients/medifusion.py b/lib/clients/medifusion.py
index 4e65d1c..e13df61 100644
--- a/lib/clients/medifusion.py
+++ b/lib/clients/medifusion.py
@@ -75,7 +75,7 @@ def parse_response(self, res):
"type": "Torrent",
"indexer": "MediaFusion",
"guid": info_hash,
- "infoHash": info_hash,
+ "info_hash": info_hash,
"size": parsed_item["size"],
"seeders": parsed_item["seeders"],
"languages": parsed_item["languages"],
@@ -92,7 +92,7 @@ def extract_info_hash(self, item):
path = urlparse(item["url"]).path.split("/")
info_hash = path[path.index("stream") + 1]
else:
- info_hash = item["infoHash"]
+ info_hash = item["info_hash"]
return info_hash
def parse_stream_title(self, item):
diff --git a/lib/clients/peerflix.py b/lib/clients/peerflix.py
index 26b3a1d..0d9d66b 100644
--- a/lib/clients/peerflix.py
+++ b/lib/clients/peerflix.py
@@ -31,8 +31,8 @@ def parse_response(self, res):
"title": item["title"].splitlines()[0],
"type": "Torrent",
"indexer": "Peerflix",
- "guid": item["infoHash"],
- "infoHash": item["infoHash"],
+ "guid": item["info_hash"],
+ "info_hash": item["infoHash"],
"size":item["sizebytes"] or 0,
"seeders": item.get("seed", 0) or 0,
"languages": [item["language"]],
diff --git a/lib/clients/search.py b/lib/clients/search.py
index 1a6aa77..cb1f806 100644
--- a/lib/clients/search.py
+++ b/lib/clients/search.py
@@ -8,7 +8,7 @@
def search_client(
- query, ids, mode, media_type, dialog, rescrape=False, season=1, episode=1
+ item, dialog, rescrape=False
):
def perform_search(indexer_key, dialog, *args, **kwargs):
if indexer_key != Indexer.BURST:
@@ -19,20 +19,22 @@ def perform_search(indexer_key, dialog, *args, **kwargs):
return client.search(*args, **kwargs)
if not rescrape:
- if mode == "tv" or media_type == "tv" or mode == "anime":
- cached_results = get_cached(query, params=(episode, "index"))
+ if item["mode"] == "tv" or item["media_type"] == "tv" or item["mode"] == "anime":
+ cached_results = get_cached(item["query"], params=(episode, "index"))
else:
- cached_results = get_cached(query, params=("index"))
+ cached_results = get_cached(item["query"], params=("index"))
if cached_results:
dialog.create("")
return cached_results
- if ids:
- tmdb_id, _, imdb_id = ids.values()
- else:
- tmdb_id = imdb_id = None
-
+ tmdb_id = item["tmdb_id"]
+ imdb_id = item["imdb_id"]
+ mode = item["mode"]
+ media_type = item["media_type"]
+ query = item["query"]
+ season = item["season"]
+ episode = item["episode"]
dialog.create("")
total_results = []
diff --git a/lib/clients/stremio_addon.py b/lib/clients/stremio_addon.py
index 42a2bb3..296b5b8 100644
--- a/lib/clients/stremio_addon.py
+++ b/lib/clients/stremio_addon.py
@@ -3,11 +3,8 @@
from lib.utils.utils import USER_AGENT_HEADER, IndexerType, info_hash_to_magnet
from lib.stremio.addons_manager import Addon
from lib.stremio.stream import Stream
-
from lib.api.jacktook.kodi import kodilog
-from lib.utils.kodi_utils import convert_size_to_bytes
-from lib.utils.language_detection import find_languages_in_string
-import re
+
class StremioAddonCatalogsClient(BaseClient):
@@ -76,7 +73,6 @@ def parse_response(self, res):
results = []
for item in res["streams"]:
stream = Stream(item)
- parsed = self.parse_torrent_description(stream.description)
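+            # Description is now passed through raw; size/seeders/languages previously
+            # scraped here are expected to be derived downstream (lib/services/enrich).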
results.append(
{
"title": stream.get_parsed_title(),
@@ -85,50 +81,19 @@ def parse_response(self, res):
if stream.url
else IndexerType.TORRENT
),
+ "description": stream.description,
"url": stream.url,
"indexer": self.addon.manifest.name.split(" ")[0],
"guid": stream.infoHash,
"magnet": info_hash_to_magnet(stream.infoHash),
- "infoHash": stream.infoHash,
- "size": stream.get_parsed_size()
- or item.get("sizebytes")
- or parsed["size"],
- "seeders": item.get("seed", 0) or parsed["seeders"],
- "languages": parsed["languages"], # [item.get("language", "")],
- "fullLanguages": parsed["languages"], # [item.get("language", "")],
- "provider": parsed["provider"],
+ "info_hash": stream.infoHash,
+ "size": stream.get_parsed_size() or item.get("sizebytes"),
+ "seeders": item.get("seed", 0),
+ "languages": [item.get("language")] if item.get("language") else [],
+ "fullLanguages": [item.get("language")] if item.get("language") else [],
+ "provider": "",
"publishDate": "",
"peers": 0,
}
)
- return results
-
- def parse_torrent_description(self, desc: str) -> dict:
- # Extract size
- size_pattern = r"💾 ([\d.]+ (?:GB|MB))"
- size_match = re.search(size_pattern, desc)
- size = size_match.group(1) if size_match else None
- if size:
- size = convert_size_to_bytes(size)
-
- # Extract seeders
- seeders_pattern = r"👤 (\d+)"
- seeders_match = re.search(seeders_pattern, desc)
- seeders = int(seeders_match.group(1)) if seeders_match else None
-
- # Extract provider
- provider_pattern = r"([🌐🔗⚙️])\s*([^🌐🔗⚙️]+)"
- provider_match = re.findall(provider_pattern, desc)
-
- words = [match[1].strip() for match in provider_match]
- if words:
- words = words[-1].splitlines()[0]
-
- provider = words
-
- return {
- "size": size or 0,
- "seeders": seeders or 0,
- "provider": provider or "",
- "languages": find_languages_in_string(desc),
- }
+ return results
\ No newline at end of file
diff --git a/lib/clients/torrentio.py b/lib/clients/torrentio.py
index 53b01a2..58619d1 100644
--- a/lib/clients/torrentio.py
+++ b/lib/clients/torrentio.py
@@ -34,8 +34,8 @@ def parse_response(self, res):
"title": parsed_item["title"],
"type": "Torrent",
"indexer": "Torrentio",
- "guid": item["infoHash"],
- "infoHash": item["infoHash"],
+ "guid": item["info_hash"],
+ "info_hash": item["infoHash"],
"size": parsed_item["size"],
"seeders": parsed_item["seeders"],
"languages": parsed_item["languages"],
diff --git a/lib/clients/zilean.py b/lib/clients/zilean.py
index e7dbb92..d649633 100644
--- a/lib/clients/zilean.py
+++ b/lib/clients/zilean.py
@@ -70,7 +70,7 @@ def api_scrape(self, query, mode, media_type, season, episode):
for result in response:
torrents.append(
{
- "infoHash": result.info_hash,
+ "info_hash": result.info_hash,
"filename": result.raw_title,
"filesize": result.size,
"languages": result.languages,
@@ -87,9 +87,9 @@ def parse_response(self, data):
"title": item["filename"],
"type": "Torrent",
"indexer": "Zilean",
- "guid": item["infoHash"],
- "magnet": info_hash_to_magnet(item["infoHash"]),
- "infoHash": item["infoHash"],
+ "guid": item["info_hash"],
+ "magnet": info_hash_to_magnet(item["info_hash"]),
+ "info_hash": item["info_hash"],
"size": item["filesize"],
"seeders": 0,
"languages": item["languages"],
diff --git a/lib/domain/cached_source.py b/lib/domain/cached_source.py
new file mode 100644
index 0000000..cf88558
--- /dev/null
+++ b/lib/domain/cached_source.py
@@ -0,0 +1,11 @@
+from typing import TypedDict, Any, List
+
+
+class CachedSource(TypedDict):
+ hash: str
+ cache_provider: Any
+ cache_provider_name: str
+ ratio: float
+ instant_availability: bool
+ urls: List[str]
+ playable_url: str
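+
+
+# Illustrative value (fields as populated by, e.g., TransmissionClient.get_cached_hashes):
+#   {"hash": "ab12...", "cache_provider": client, "cache_provider_name": "Transmission",
+#    "ratio": 1.0, "instant_availability": True, "urls": [...], "playable_url": "http://host/file.mkv"}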
diff --git a/lib/domain/interface/cache_provider_interface.py b/lib/domain/interface/cache_provider_interface.py
new file mode 100644
index 0000000..77f2079
--- /dev/null
+++ b/lib/domain/interface/cache_provider_interface.py
@@ -0,0 +1,10 @@
+from abc import ABC, abstractmethod
+from typing import List, Dict
+from lib.domain.cached_source import CachedSource
+from lib.domain.source import Source
+
+
+class CacheProviderInterface(ABC):
+ @abstractmethod
+    def get_cached_hashes(self, sources: List[Source]) -> Dict[str, CachedSource]:
+ pass
diff --git a/lib/domain/interface/enricher_interface.py b/lib/domain/interface/enricher_interface.py
new file mode 100644
index 0000000..a3b6975
--- /dev/null
+++ b/lib/domain/interface/enricher_interface.py
@@ -0,0 +1,25 @@
+import abc
+from typing import List
+from lib.domain.source import Source
+
+
+class EnricherInterface(abc.ABC):
+ @abc.abstractmethod
+ def enrich(self, item: Source) -> None:
+ """Enrich an item with additional metadata"""
+ pass
+
+ @abc.abstractmethod
+ def initialize(self, items: List[Source]) -> None:
+ """Initialize the enricher with a list of items"""
+ pass
+
+ @abc.abstractmethod
+ def needs(self) -> List[str]:
+ """Returns the fields that the enricher needs to function"""
+ pass
+
+ @abc.abstractmethod
+ def provides(self) -> List[str]:
+ """Returns the fields that the enricher will provide"""
+ pass
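+
+
+# Contract as used by EnricherBuilder: `initialize` is called once with the full result
+# list (useful for batched lookups), then `enrich` mutates a copy of each item.
+# `needs`/`provides` only feed the builder's report; they are not enforced at runtime.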
diff --git a/lib/domain/interface/filter_interface.py b/lib/domain/interface/filter_interface.py
new file mode 100644
index 0000000..1291ffb
--- /dev/null
+++ b/lib/domain/interface/filter_interface.py
@@ -0,0 +1,12 @@
+from abc import ABC, abstractmethod
+from typing import Dict, Any
+from lib.domain.source import Source
+
+
+class FilterInterface(ABC):
+ @abstractmethod
+ def matches(self, item: Source) -> bool:
+ pass
+
+ def reset(self):
+ pass
diff --git a/lib/domain/quality_tier.py b/lib/domain/quality_tier.py
new file mode 100644
index 0000000..96070c9
--- /dev/null
+++ b/lib/domain/quality_tier.py
@@ -0,0 +1,28 @@
+import re
+
+
+class QualityTier:
+
+ def __init__(self, pattern: str, label: str, label_formatted: str, priority: int):
+ self.regex = re.compile(pattern, re.IGNORECASE) if pattern else None
+ self.label = label
+ self.label_formatted = label_formatted
+ self.priority = priority
+
+ @staticmethod
+ def default_quality_tiers():
+ return [
+ QualityTier(
+ r"(?i)\b(2160p?|4k)\b", "4k", "[B][COLOR yellow]4k[/COLOR][/B]", 4
+ ),
+ QualityTier(
+ r"(?i)\b(1080p?)\b", "1080p", "[B][COLOR cyan]1080p[/COLOR][/B]", 3
+ ),
+ QualityTier(
+ r"(?i)\b720p?\b", "720p", "[B][COLOR orange]720p[/COLOR][/B]", 2
+ ),
+ QualityTier(
+ r"(?i)\b480p?\b", "480p", "[B][COLOR orange]480p[/COLOR][/B]", 1
+ ),
+ QualityTier(None, "Other qualities", "[B][COLOR red]N/A[/COLOR][/B]", 0),
+ ]
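+
+
+# Example: default_quality_tiers()[1].regex.search("Some.Movie.1080p.WEB") matches, so such
+# a title lands in the 1080p tier (priority 3); the final tier (pattern None) is presumably
+# the catch-all for titles that match no resolution pattern.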
diff --git a/lib/domain/source.py b/lib/domain/source.py
new file mode 100644
index 0000000..0eb07ba
--- /dev/null
+++ b/lib/domain/source.py
@@ -0,0 +1,42 @@
+from typing import TypedDict, List, Any
+from .cached_source import CachedSource
+
+
+class Source(TypedDict, total=False):
+ title: str
+ description: str
+ type: Any # IndexerType
+ url: str
+ indexer: str
+ guid: str
+ magnet: str
+ info_hash: str
+ size: int
+ languages: List[str]
+ full_languages: List[str]
+ provider: str
+ publishDate: str
+ seeders: int
+ peers: int
+
+ quality: str
+ quality_sort: int
+
+ status: str
+
+ is_pack: bool
+ is_cached: bool
+ cache_sources: List[CachedSource]
+
+ file: str
+ folder: str
+
+ correlative_id: int
+
+ quality_formatted: str
+ a1: str
+ a2: str
+ a3: str
+ b1: str
+ b2: str
+ b3: str
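+
+    # Note: quality_formatted and a1-b3 appear to be preformatted display fields; they are
+    # provided by FormatEnricher (lib/services/enrich/format_enricher.py).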
diff --git a/lib/gui/base_window.py b/lib/gui/base_window.py
index d819f69..0ee936f 100644
--- a/lib/gui/base_window.py
+++ b/lib/gui/base_window.py
@@ -57,8 +57,8 @@ def set_default_focus(
self.setFocus(control)
return
- if control_list and control_list.size() > 0:
- if control_list_reset:
+ if control_list:
+ if control_list.size() > 0 and control_list_reset:
control_list.selectItem(0)
self.setFocus(control_list)
elif control_id:
@@ -67,7 +67,7 @@ def set_default_focus(
else:
raise ValueError("Neither valid control list nor control ID provided.")
except (RuntimeError, ValueError) as e:
- kodilog(f"Could not set focus: {e}", "debug")
+ kodilog(f"Could not set focus: {e}")
if control_id:
self.setFocusId(control_id)
diff --git a/lib/gui/custom_dialogs.py b/lib/gui/custom_dialogs.py
index 9d71b08..f6a8a22 100644
--- a/lib/gui/custom_dialogs.py
+++ b/lib/gui/custom_dialogs.py
@@ -6,7 +6,6 @@
from lib.gui.resolver_window import ResolverWindow
from lib.gui.resume_window import ResumeDialog
from lib.utils.kodi_utils import ADDON_PATH, PLAYLIST
-from lib.gui.source_select import SourceSelect
class CustomWindow(WindowXML):
@@ -82,20 +81,6 @@ def onClick(self, controlId):
"plot": "Silo is the story of the last ten thousand people on earth, their mile-deep home protecting them from the toxic and deadly world outside. However, no one knows when or why the silo was built and any who try to find out face fatal consequences.",
}
-
-def source_select(item_info, xml_file, sources):
- window = SourceSelect(
- xml_file,
- ADDON_PATH,
- item_information=item_info,
- sources=sources,
- uncached=sources,
- )
- data = window.doModal()
- del window
- return data
-
-
def run_next_dialog(params):
kodilog("run_next_dialog")
if PLAYLIST.size() > 0 and PLAYLIST.getposition() != (PLAYLIST.size() - 1):
@@ -133,20 +118,6 @@ def run_next_mock():
del window
-def source_select_mock():
- sources = [mock_source for _ in range(10)]
-
- window = SourceSelect(
- "source_select.xml",
- ADDON_PATH,
- item_information=_mock_information,
- sources=sources,
- uncached=sources,
- )
- window.doModal()
- del window
-
-
def resume_dialog_mock():
try:
window = ResumeDialog(
diff --git a/lib/gui/resolver_window.py b/lib/gui/resolver_window.py
index 99a5b06..3a4a4e4 100644
--- a/lib/gui/resolver_window.py
+++ b/lib/gui/resolver_window.py
@@ -100,7 +100,7 @@ def resolve_single_source(self, url, magnet, is_torrent):
"indexer": self.source["indexer"],
"url": url,
"magnet": magnet,
- "info_hash": self.source.get("infoHash", ""),
+ "info_hash": self.source.get("info_hash", ""),
"is_torrent": is_torrent,
"is_pack": self.pack_select,
"mode": self.item_information["mode"],
@@ -114,7 +114,7 @@ def resolve_single_source(self, url, magnet, is_torrent):
def resolve_pack(self):
self.pack_data = get_pack_info(
type=self.source.get("type"),
- info_hash=self.source.get("infoHash"),
+ info_hash=self.source.get("info_hash"),
)
self.window = SourcePackSelect(
diff --git a/lib/gui/source_section_manager.py b/lib/gui/source_section_manager.py
new file mode 100644
index 0000000..b037a67
--- /dev/null
+++ b/lib/gui/source_section_manager.py
@@ -0,0 +1,65 @@
+import xbmcgui
+from typing import List
+from lib.domain.source import Source
+
+
+class SourceItem(xbmcgui.ListItem):
+ """A custom ListItem representing a media source with formatted properties."""
+
+ @staticmethod
+ def from_source(source: Source) -> "SourceItem":
+ item = SourceItem(label=source["title"])
+
+ for key, value in source.items():
+ item.setProperty(key, str(value))
+
+ return item
+
+
+class SourceSection:
+ """Represents a group of media sources with common characteristics."""
+
+ def __init__(self, title: str, description: str, sources: List[SourceItem]):
+ self.title = title
+ self.description = description
+ self.sources = sources
+ self.selection_position = 0
+
+ @property
+ def current_source(self) -> SourceItem:
+ """Get currently selected source in this section."""
+ return self.sources[self.selection_position]
+
+ def update_selection_position(self, new_position: int) -> None:
+ """Update the selected position in this section's source list."""
+ self.selection_position = max(0, min(new_position, len(self.sources) - 1))
+
+
+class SourceSectionManager:
+ """Manages navigation between multiple SourceSections."""
+
+ def __init__(self, sections: List[SourceSection], initial_index: int = 0):
+ self._sections = sections
+ self._current_index = initial_index
+
+ @property
+ def current_section(self) -> SourceSection:
+ """Get the currently active section."""
+ return self._sections[self._current_index]
+
+ @property
+ def section_titles(self) -> List[str]:
+ """Get list of all section titles."""
+ return [section.title for section in self._sections]
+
+ def move_to_next_section(self) -> None:
+ """Advance to the next section in the list."""
+ self._current_index = min(self._current_index + 1, len(self._sections) - 1)
+
+ def move_to_previous_section(self) -> None:
+ """Return to the previous section in the list."""
+ self._current_index = max(self._current_index - 1, 0)
+
+ def jump_to_section(self, section_index: int) -> None:
+ """Jump directly to a specific section by index."""
+ self._current_index = max(0, min(section_index, len(self._sections) - 1))
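+
+
+# Illustrative navigation (values are examples):
+#   manager = SourceSectionManager([SourceSection("1080p (3)", "1080p sources", items)])
+#   manager.move_to_next_section()
+#   manager.current_section.current_source  # -> currently highlighted SourceItem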
diff --git a/lib/gui/source_select.py b/lib/gui/source_select.py
deleted file mode 100644
index 7353113..0000000
--- a/lib/gui/source_select.py
+++ /dev/null
@@ -1,126 +0,0 @@
-import xbmcgui
-from lib.gui.base_window import BaseWindow
-from lib.gui.resolver_window import ResolverWindow
-from lib.gui.resume_window import ResumeDialog
-from lib.utils.kodi_utils import ADDON_PATH
-from lib.utils.debrid_utils import get_debrid_status
-from lib.utils.kodi_utils import bytes_to_human_readable
-from lib.utils.utils import (
- extract_publish_date,
- get_colored_languages,
- get_random_color,
-)
-
-
-class SourceSelect(BaseWindow):
- def __init__(
- self, xml_file, location, item_information=None, sources=None, uncached=None
- ):
- super().__init__(xml_file, location, item_information=item_information)
- self.uncached_sources = uncached or []
- self.position = -1
- self.sources = sources
- self.item_information = item_information
- self.playback_info = None
- self.resume = None
- self.CACHE_KEY = (
- self.item_information["tv_data"] or str(self.item_information["ids"])
- )
- self.setProperty("instant_close", "false")
- self.setProperty("resolving", "false")
-
- def onInit(self):
- self.display_list = self.getControlList(1000)
- self.populate_sources_list()
- self.set_default_focus(self.display_list, 1000, control_list_reset=True)
- super().onInit()
-
- def doModal(self):
- super().doModal()
- return self.playback_info
-
- def populate_sources_list(self):
- self.display_list.reset()
-
- for source in self.sources:
- menu_item = xbmcgui.ListItem(label=f"{source['title']}")
-
- for info in source:
- value = source[info]
- if info == "peers":
- value = value if value else ""
- if info == "publishDate":
- value = extract_publish_date(value)
- if info == "size":
- value = bytes_to_human_readable(int(value)) if value else ""
- if info in ["indexer", "provider", "type"]:
- color = get_random_color(value)
- value = f"[B][COLOR {color}]{value}[/COLOR][/B]"
- if info == "fullLanguages":
- value = get_colored_languages(value)
- if len(value) <= 0:
- value = ""
- if info == "isCached":
- info = "status"
- value = get_debrid_status(source)
-
- menu_item.setProperty(info, str(value))
-
- self.display_list.addItem(menu_item)
-
- def handle_action(self, action_id, control_id=None):
- self.position = self.display_list.getSelectedPosition()
-
- if action_id == 117:
- selected_source = self.sources[self.position]
- type = selected_source["type"]
- if type == "Torrent":
- response = xbmcgui.Dialog().contextmenu(["Download to Debrid"])
- if response == 0:
- self._download_into()
- elif type == "Direct":
- pass
- else:
- response = xbmcgui.Dialog().contextmenu(["Browse into"])
- if response == 0:
- self._resolve_item(pack_select=True)
-
- if action_id == 7:
- if control_id == 1000:
- control_list = self.getControl(control_id)
- self.set_cached_focus(control_id, control_list.getSelectedPosition())
- self._resolve_item(pack_select=False)
-
- def _download_into(self):
- pass
-
- def _resolve_item(self, pack_select=False):
- self.setProperty("resolving", "true")
-
- selected_source = self.sources[self.position]
-
- resolver_window = ResolverWindow(
- "resolver.xml",
- ADDON_PATH,
- source=selected_source,
- previous_window=self,
- item_information=self.item_information,
- )
- resolver_window.doModal(pack_select)
- self.playback_info = resolver_window.playback_info
-
- del resolver_window
- self.setProperty("instant_close", "true")
- self.close()
-
- def show_resume_dialog(self, playback_percent):
- try:
- resume_window = ResumeDialog(
- "resume_dialog.xml",
- ADDON_PATH,
- resume_percent=playback_percent,
- )
- resume_window.doModal()
- return resume_window.resume
- finally:
- del resume_window
diff --git a/lib/gui/source_select_new.py b/lib/gui/source_select_new.py
new file mode 100644
index 0000000..6b18adc
--- /dev/null
+++ b/lib/gui/source_select_new.py
@@ -0,0 +1,217 @@
+import xbmcgui
+from typing import List, Dict, Optional
+from lib.gui.base_window import BaseWindow
+from lib.gui.source_section_manager import (
+ SourceSectionManager,
+ SourceSection,
+ SourceItem,
+)
+from lib.api.jacktook.kodi import kodilog
+from lib.domain.quality_tier import QualityTier
+from lib.services.filters import FilterBuilder
+from lib.clients.debrid.transmission import TransmissionClient
+from lib.clients.debrid.torrserve import TorrServeClient
+from lib.utils.kodi_utils import get_setting
+from lib.domain.source import Source
+
+
+class SourceSelectWindow(BaseWindow):
+ """Media source selection window with categorized sections."""
+
+ CACHE_KEY_FIELD = "tv_data"
+ SOURCE_ITEM_ID = 1000
+ NAVIGATION_LABEL_ID = 1001
+ DESCRIPTION_LABEL_ID = 1002
+ SETTINGS_GROUP_ID = 2000
+ SETTINGS_FIRST_ID = 2002
+
+ def __init__(
+ self,
+ xml_layout: str,
+ window_location: str,
+ item_information: Optional[Dict] = None,
+ get_sources=None,
+ ):
+ super().__init__(xml_layout, window_location, item_information=item_information)
+ self._item_metadata = item_information or {}
+ self._get_sources = get_sources
+ self._source: Optional[Source] = None
+ self._sources: Optional[List[Source]] = []
+ self._navigation_label: Optional[xbmcgui.ControlLabel] = None
+ self.setProperty("instant_close", "true")
+
+ def _create_sections(self) -> SourceSectionManager:
+ """Create organized source sections."""
+ sections = [
+ self._create_language_section("Spanish", "es"),
+ *self._create_quality_sections(),
+ ]
+ return SourceSectionManager([s for s in sections if s])
+
+ def _create_language_section(self, lang: str, code: str) -> Optional[SourceSection]:
+ """Create section for specific language sources."""
+ sources = [s for s in self._sources if code in s.get("languages", [])]
+ if not sources:
+ return None
+
+ return SourceSection(
+ title=f"Priority Language ({len(sources)})",
+ description=f"Sources with {lang} audio",
+ sources=[SourceItem.from_source(s) for s in sources],
+ )
+
+ def _create_quality_sections(self) -> List[SourceSection]:
+ """Generate quality tier sections."""
+ return [
+ SourceSection(
+ title=f"{tier.label} ({len(sources)})",
+ description=f"{tier.label_formatted} resolution sources",
+ sources=[SourceItem.from_source(s) for s in sources],
+ )
+ for tier in QualityTier.default_quality_tiers()
+ if (
+ sources := FilterBuilder()
+ .filter_by_quality(tier.priority)
+ .build(self._sources)
+ )
+ ]
+
+ def onInit(self) -> None:
+ """Initialize window and populate data."""
+ self.setProperty("instant_close", "true")
+
+ self._source_list = self.getControlList(self.SOURCE_ITEM_ID)
+ self._navigation_label = self.getControl(self.NAVIGATION_LABEL_ID)
+ self._description_label = self.getControl(self.DESCRIPTION_LABEL_ID)
+ self._settings_first = self.getControl(self.SETTINGS_FIRST_ID)
+
+ super().onInit()
+
+        sources = self._get_sources() or []
+        if not sources:
+            self.close()
+            return
+        for i, source in enumerate(sources):
+            source["correlative_id"] = i
+
+ self._sources = sources
+ self.section_manager = self._create_sections()
+
+ self._refresh_ui()
+
+ self.set_default_focus(self._source_list)
+
+
+ def _refresh_ui(self) -> None:
+ """Update all UI components."""
+ kodilog(f"Current Section: {self.section_manager.current_section.title}")
+ self._navigation_label.setLabel(self._build_navigation_path())
+ self._description_label.setLabel(
+ self.section_manager.current_section.description
+ )
+ self._source_list.reset()
+ self._source_list.addItems(self.section_manager.current_section.sources)
+ self.setProperty("instant_close", "false")
+
+ def _build_navigation_path(self) -> str:
+ """Create truncated navigation breadcrumb."""
+ titles = self.section_manager.section_titles
+ current_idx = self.section_manager._current_index
+
+ preceding = titles[:current_idx]
+ if len(preceding) > 2:
+ preceding = ["...", *preceding[-2:]]
+
+ return " | ".join(
+ [
+ *preceding,
+ f"[B][COLOR white]{titles[current_idx]}[/COLOR][/B]",
+ *titles[current_idx + 1 :],
+ ]
+ )
+
+ def handle_action(self, action_id: int, control_id: Optional[int] = None) -> None:
+ """Route user actions to appropriate handlers."""
+ kodilog(f"Action ID: {action_id}, Control ID: {control_id}")
+ if control_id is None:
+ return
+ elif control_id == self.SOURCE_ITEM_ID:
+ self._handle_list_action(action_id)
+ elif control_id >= self.SETTINGS_GROUP_ID:
+ self._handle_settings_action(action_id)
+
+ def _handle_list_action(self, action_id: int) -> None:
+ """Process source list interactions."""
+ actions = {
+ xbmcgui.ACTION_SELECT_ITEM: self._resolve_source,
+ xbmcgui.ACTION_MOVE_LEFT: self._handle_navigation_left,
+ xbmcgui.ACTION_MOVE_RIGHT: self.section_manager.move_to_next_section,
+ xbmcgui.ACTION_CONTEXT_MENU: self._show_context_menu,
+ }
+ if handler := actions.get(action_id):
+ handler()
+ self._refresh_ui()
+
+ def _handle_settings_action(self, action_id: int) -> None:
+ """Process settings interactions."""
+ if action_id == xbmcgui.ACTION_MOVE_RIGHT:
+ self.setProperty("settings_open", "false")
+ self.setFocus(self._source_list)
+
+ def _handle_navigation_left(self) -> None:
+ """Handle left navigation between sections/settings."""
+ if self.section_manager._current_index > 0:
+ self.section_manager.move_to_previous_section()
+ else:
+ self.setProperty("settings_open", "true")
+ self.setFocus(self._settings_first)
+
+ def _resolve_source(self) -> None:
+ """Initiate playback resolution for selected source."""
+ source = self._get_selected_source()
+ self._source = source
+ self.close()
+
+ # resolver = ResolverWindow(
+ # "resolver.xml", ADDON_PATH,
+ # source=source,
+ # item_information=self._item_metadata,
+ # previous_window=self
+ # )
+ # resolver.doModal()
+ # self._playback_info = resolver.playback_info
+ # self.close()
+
+ def _show_context_menu(self) -> None:
+ """Display context options for selected source."""
+ source = self._get_selected_source()
+
+ options = {
+ "Torrent": [
+ "Download to Debrid",
+ "Download to Transmission",
+ "Download to TorrServer",
+ ],
+ "Direct": [],
+ }.get(source["type"], ["Browse into"])
+
+ choice = xbmcgui.Dialog().contextmenu(options)
+ if choice == 1:
+ TransmissionClient(
+ get_setting("transmission_host"),
+ get_setting("transmission_folder"),
+ get_setting("transmission_user"),
+ get_setting("transmission_pass"),
+ ).add_magnet(source["magnet"])
+ elif choice == 2:
+ kodilog("TorrServeClient().add_magnet")
+ TorrServeClient().add_magnet(source["magnet"])
+
+ def _get_selected_source(self) -> Source:
+ """Retrieve currently selected source data."""
+
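+        # ListItem properties are stored as strings; "correlative_id" (set in onInit)
+        # maps the highlighted row back to the full Source dict in self._sources.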
+ return self._sources[
+ int(self._source_list.getSelectedItem().getProperty("correlative_id"))
+ ]
+
+ def doModal(self) -> Optional[Source]:
+ """Show window and return playback info on close."""
+ super().doModal()
+ return self._source
diff --git a/lib/gui/source_window.py b/lib/gui/source_window.py
index 87dc117..993048c 100644
--- a/lib/gui/source_window.py
+++ b/lib/gui/source_window.py
@@ -2,7 +2,6 @@
import xbmcgui
from lib.gui.base_window import BaseWindow
from lib.utils.debrid_utils import get_debrid_status
-from lib.utils.kodi_utils import bytes_to_human_readable
from lib.utils.utils import extract_publish_date, get_colored_languages, get_random_color
diff --git a/lib/navigation.py b/lib/navigation.py
index cdc1365..e8ea9e5 100644
--- a/lib/navigation.py
+++ b/lib/navigation.py
@@ -15,20 +15,17 @@
CustomDialog,
resume_dialog_mock,
run_next_mock,
- source_select,
- source_select_mock,
+ #source_select_mock,
)
from lib.player import JacktookPLayer
from lib.stremio.catalogs import list_stremio_catalogs
from lib.utils.seasons import show_episode_info, show_season_info
-from lib.utils.tmdb_utils import get_tmdb_media_details
from lib.utils.torrentio_utils import open_providers_selection
from lib.api.trakt.trakt_api import (
trakt_authenticate,
trakt_revoke_authentication,
)
-from lib.clients.search import search_client
from lib.files_history import last_files
from lib.play import get_playback_info
from lib.titles_history import last_titles
@@ -41,7 +38,6 @@
from lib.utils.rd_utils import get_rd_info
from lib.utils.items_menus import tv_items, movie_items, anime_items, animation_items
-from lib.utils.debrid_utils import check_debrid_cached
from lib.tmdb import (
handle_tmdb_anime_query,
@@ -54,23 +50,16 @@
from lib.db.cached import cache
from lib.utils.utils import (
- TMDB_POSTER_URL,
- DialogListener,
- clean_auto_play_undesired,
clear,
clear_all_cache,
- get_fanart_details,
get_password,
get_random_color,
get_service_host,
get_username,
make_listing,
- post_process,
- pre_process,
get_port,
list_item,
set_content_type,
- set_watched_title,
ssl_enabled,
check_debrid_enabled,
Debrids,
@@ -96,7 +85,7 @@
translation,
)
-from lib.utils.settings import get_cache_expiration, is_auto_play
+from lib.utils.settings import get_cache_expiration
from lib.utils.settings import addon_settings
from lib.updater import updates_check_addon
@@ -530,135 +519,6 @@ def search_direct(params):
endOfDirectory(ADDON_HANDLE, updateListing=update_listing)
-def search(params):
- query = params["query"]
- mode = params["mode"]
- media_type = params.get("media_type", "")
- ids = literal_eval(params.get("ids", "{}"))
- tv_data = params.get("tv_data", "")
- direct = params.get("direct", False)
- rescrape = params.get("rescrape", False)
-
- set_content_type(mode, media_type)
- set_watched_title(query, ids, mode, media_type)
-
- episode, season, ep_name = (0, 0, "")
- if tv_data:
- try:
- ep_name, episode, season = tv_data.split("(^)")
- except ValueError:
- pass
-
- with DialogListener() as listener:
- results = search_client(
- query, ids, mode, media_type, listener.dialog, rescrape, season, episode
- )
- if not results:
- notification("No results found")
- return
-
- pre_results = pre_process(
- results,
- mode,
- ep_name,
- episode,
- season,
- )
- if not pre_results:
- notification("No results found")
- return
-
- if get_setting("torrent_enable"):
- post_results = post_process(pre_results)
- else:
- with DialogListener() as listener:
- cached_results = handle_debrid_client(
- query,
- pre_results,
- mode,
- media_type,
- listener.dialog,
- rescrape,
- episode,
- )
- if not cached_results:
- notification("No cached results found")
- return
-
- if is_auto_play():
- auto_play(cached_results, ids, tv_data, mode)
- return
-
- post_results = post_process(cached_results, season)
-
- data = handle_results(post_results, mode, ids, tv_data, direct)
-
- if not data:
- cancel_playback()
- return
-
- player = JacktookPLayer(db=bookmark_db)
- player.run(data=data)
- del player
-
-
-def handle_results(results, mode, ids, tv_data, direct=False):
- if ids:
- tmdb_id, tvdb_id, _ = ids.values()
- else:
- tmdb_id = None
-
- if direct or not tmdb_id:
- item_info = {"tv_data": tv_data, "ids": ids, "mode": mode}
- else:
- details = get_tmdb_media_details(tmdb_id, mode)
- poster = f"{TMDB_POSTER_URL}{details.poster_path or ''}"
- overview = details.overview or ""
-
- fanart_data = get_fanart_details(tvdb_id=tvdb_id, tmdb_id=tmdb_id, mode=mode)
-
- item_info = {
- "poster": poster,
- "fanart": fanart_data["fanart"] or poster,
- "clearlogo": fanart_data["clearlogo"],
- "plot": overview,
- "tv_data": tv_data,
- "ids": ids,
- "mode": mode,
- }
-
- if mode == "direct":
- xml_file_string = "source_select_direct.xml"
- else:
- xml_file_string = "source_select.xml"
-
- return source_select(
- item_info,
- xml_file=xml_file_string,
- sources=results,
- )
-
-
-def handle_debrid_client(
- query,
- proc_results,
- mode,
- media_type,
- p_dialog,
- rescrape,
- episode,
-):
- return check_debrid_cached(
- query,
- proc_results,
- mode,
- media_type,
- p_dialog,
- rescrape,
- episode,
- )
-
-
def play_torrent(params):
data = literal_eval(params["data"])
player = JacktookPLayer(db=bookmark_db)
@@ -666,25 +526,6 @@ def play_torrent(params):
del player
-def auto_play(results, ids, tv_data, mode):
- result = clean_auto_play_undesired(results)
- playback_info = get_playback_info(
- data={
- "title": result.get("title"),
- "mode": mode,
- "indexer": result.get("indexer"),
- "type": result.get("type"),
- "ids": ids,
- "info_hash": result.get("infoHash"),
- "tv_data": tv_data,
- "is_torrent": False,
- },
- )
- player = JacktookPLayer(db=bookmark_db)
- player.run(data=playback_info)
- del player
-
-
def cloud_details(params):
type = params.get("type")
@@ -1005,8 +846,8 @@ def test_run_next(params):
run_next_mock()
-def test_source_select(params):
- source_select_mock()
+#def test_source_select(params):
+ #source_select_mock()
def test_resume_dialog(params):
diff --git a/lib/player.py b/lib/player.py
index 4e5e2e1..f742682 100644
--- a/lib/player.py
+++ b/lib/player.py
@@ -61,6 +61,7 @@ def run(self, data=None):
self.play_video(list_item)
except Exception as e:
kodilog(f"Error in run: {e}")
+
self.run_error()
def play_playlist(self):
@@ -264,9 +265,11 @@ def clear_playback_properties(self):
def add_external_trakt_scrolling(self):
ids = self.data.get("ids")
mode = self.data.get("mode")
-
+ tmdb_id = self.data.get("tmdb_id")
+ tvdb_id = self.data.get("tvdb_id")
+ imdb_id = self.data.get("imdb_id")
+
if ids:
- tmdb_id, tvdb_id, imdb_id = ids.values()
trakt_ids = {
"tmdb": tmdb_id,
"imdb": imdb_id,
diff --git a/lib/router.py b/lib/router.py
index ccd353a..641e0e8 100644
--- a/lib/router.py
+++ b/lib/router.py
@@ -30,13 +30,12 @@
telegram_menu,
test_resume_dialog,
test_run_next,
- test_source_select,
+ #test_source_select,
torrentio_selection,
play_torrent,
rd_auth,
rd_remove_auth,
root_menu,
- search,
search_direct,
search_item,
settings,
@@ -50,6 +49,7 @@
tv_seasons_details,
tv_shows_items,
)
+from lib.search import search
from lib.stremio.catalogs import list_stremio_catalog, list_stremio_episodes, list_stremio_seasons, list_stremio_tv, list_stremio_tv_streams
from lib.telegram import (
get_telegram_files,
@@ -119,7 +119,7 @@ def addon_router():
"telegram_menu": telegram_menu,
"display_picture": display_picture,
"display_text": display_text,
- "test_source_select": test_source_select,
+ #"test_source_select": test_source_select,
"test_run_next": test_run_next,
"test_resume_dialog": test_resume_dialog,
"animation_menu": animation_menu,
diff --git a/lib/search.py b/lib/search.py
new file mode 100644
index 0000000..2c28766
--- /dev/null
+++ b/lib/search.py
@@ -0,0 +1,252 @@
+from lib.utils.tmdb_utils import get_tmdb_media_details
+from lib.utils.utils import (
+ TMDB_POSTER_URL,
+ get_fanart_details,
+ clean_auto_play_undesired,
+ set_content_type,
+ set_watched_title,
+ IndexerType,
+)
+from lib.utils.language_detection import language_codes, langsSet
+from lib.clients.debrid.transmission import TransmissionClient
+from lib.db.bookmark_db import bookmark_db
+from lib.clients.search import search_client
+from lib.utils.kodi_utils import (
+    cancel_playback,
+    get_setting,
+    convert_size_to_bytes,
+    notification,
+    ADDON_PATH,
+)
+from lib.player import JacktookPLayer
+from lib.play import get_playback_info
+from lib.services.filters import FilterBuilder
+from lib.services.enrich import (
+ EnricherBuilder,
+ StatsEnricher,
+ QualityEnricher,
+ LanguageEnricher,
+ IsPackEnricher,
+ CacheEnricher,
+ FormatEnricher,
+ MagnetEnricher,
+ FileEnricher,
+)
+from lib.gui.source_select_new import SourceSelectWindow
+
+from lib.api.jacktook.kodi import kodilog
+from ast import literal_eval
+
+
+def search(params):
+ """
+ Handles media search and playback.
+ """
+ kodilog("Search params: %s" % params)
+ query = params["query"]
+ mode = params["mode"]
+ media_type = params.get("media_type", "")
+ ids = params.get("ids", "")
+ tv_data = params.get("tv_data", "")
+ direct = params.get("direct", False)
+ rescrape = params.get("rescrape", False)
+
+ # Parse TV data if available
+ episode, season, ep_name = parse_tv_data(tv_data)
+
+    # Extract TMDb, TVDb and IMDb IDs
+    ids = literal_eval(ids) if ids else {}
+    tmdb_id, tvdb_id, imdb_id = ids.get("tmdb_id"), ids.get("tvdb_id"), ids.get("imdb_id")
+
+ # Fetch media details from TMDb
+ details = get_tmdb_media_details(tmdb_id, mode)
+ poster = f"{TMDB_POSTER_URL}{details.poster_path or ''}"
+ overview = details.overview or ""
+
+ # Fetch fanart details
+ fanart_data = get_fanart_details(tvdb_id=tvdb_id, tmdb_id=tmdb_id, mode=mode)
+
+ # Prepare item information
+ item_info = {
+ "episode": episode,
+ "season": season,
+ "ep_name": ep_name,
+ "tvdb_id": tvdb_id,
+ "tmdb_id": tmdb_id,
+ "imdb_id": imdb_id,
+ "tv_data": tv_data,
+ "ids": {
+ "tmdb_id": tmdb_id,
+ "tvdb_id": tvdb_id,
+ "imdb_id": imdb_id,
+ },
+ "mode": mode,
+ "poster": poster,
+ "fanart": fanart_data["fanart"] or poster,
+ "clearlogo": fanart_data["clearlogo"],
+ "plot": overview,
+ "query": query,
+ "media_type": media_type,
+ }
+
+ # Set content type and watched title
+ set_content_type(mode, media_type)
+ set_watched_title(query, ids, mode, media_type)
+
+ # Search for sources
+    source = select_source(item_info, rescrape, direct)
+ if not source:
+ return
+
+ # Handle selected source
+ playback_info = handle_results(source, item_info)
+ if not playback_info:
+ cancel_playback()
+ return
+
+ # Start playback
+ player = JacktookPLayer(db=bookmark_db)
+ player.run(data=playback_info)
+ del player
+
+
+def parse_tv_data(tv_data):
+ """
+ Parses TV data into episode, season, and episode name.
+ """
+ episode, season, ep_name = 0, 0, ""
+ if tv_data:
+ try:
+ ep_name, episode, season = tv_data.split("(^)")
+ except ValueError:
+ pass
+ return int(episode), int(season), ep_name
+
+
+def select_source(info_item, rescrape, direct):
+ """
+ Searches for and selects a source.
+ """
+
+ def get_sources():
+ results = search_client(
+ info_item, FakeDialog(), rescrape,
+ )
+ if not results:
+ notification("No results found")
+ return None
+ return process(results, info_item["mode"], info_item["ep_name"], info_item["episode"], info_item["season"])
+
+ source_select_window = SourceSelectWindow(
+ "source_select_new.xml",
+ ADDON_PATH,
+ item_information=info_item,
+ get_sources=get_sources,
+ )
+ source = source_select_window.doModal()
+ del source_select_window
+ return source
+
+
+def process(results, mode, ep_name, episode, season):
+ """
+ Processes and filters search results.
+ """
+ sort_by = get_setting("indexers_sort_by")
+ limit = int(get_setting("indexers_total_results"))
+
+ enricher = (
+ EnricherBuilder()
+ .add(FileEnricher())
+ .add(StatsEnricher(size_converter=convert_size_to_bytes))
+ .add(IsPackEnricher(season) if season else None)
+ .add(MagnetEnricher())
+ .add(QualityEnricher())
+ .add(LanguageEnricher(language_codes, langsSet))
+ .add(
+ CacheEnricher(
+ [
+ (
+ TransmissionClient(
+ get_setting("transmission_host"),
+ get_setting("transmission_folder"),
+ get_setting("transmission_user"),
+ get_setting("transmission_pass"),
+ )
+ if get_setting("transmission_enabled")
+ else None
+ ),
+ ]
+ )
+ )
+ .add(FormatEnricher())
+ )
+ results = enricher.build(results)
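+    # Note: enricher order matters (e.g. CacheEnricher needs "info_hash") and is not
+    # enforced; enricher.generate_report() can be logged to audit unmet needs.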
+
+ filters = FilterBuilder().dedupe_by_infoHash().limit(limit)
+ if get_setting("stremio_enabled") and get_setting("torrent_enable"):
+ filters.filter_by_source()
+ if mode == "tv" and get_setting("filter_by_episode"):
+ filters.filter_by_episode(ep_name, episode, season)
+ if sort_by == "Seeds":
+ filters.sort_by("seeders", ascending=False)
+ elif sort_by == "Size":
+ filters.sort_by("size", ascending=False)
+ elif sort_by == "Date":
+ filters.sort_by("publishDate", ascending=False)
+ elif sort_by == "Quality":
+ filters.sort_by("quality_sort", ascending=False)
+ filters.sort_by("seeders", ascending=False)
+ elif sort_by == "Cached":
+ filters.sort_by("isCached", ascending=False)
+ return filters.build(results)
+
+
+def handle_results(source, info_item):
+ """
+ Handles the selected source and prepares playback information.
+ """
+ if not source:
+ return None
+
+ cache_sources = source.get("cache_sources", [])
+ for cache_source in cache_sources:
+ if cache_source.get("instant_availability"):
+ playable_url = cache_source.get("playable_url")
+ return {
+ **info_item,
+ "title": source["title"],
+ "type": source["type"],
+ "indexer": source["indexer"],
+ "url": playable_url,
+ "info_hash": source.get("info_hash", ""),
+ "is_torrent": False,
+ "is_pack": False,
+ }
+
+ return get_playback_info(
+ {
+ **info_item,
+ "title": source["title"],
+ "type": IndexerType.TORRENT,
+ "indexer": source["indexer"],
+ "info_hash": source.get("info_hash", ""),
+ "magnet": source.get("magnet", ""),
+ "is_pack": False,
+ "is_torrent": True,
+ "url": source["magnet"],
+ }
+ )
+
+
+class FakeDialog:
+ """
+ A placeholder dialog class for mocking progress updates.
+ """
+
+ def create(self, message: str):
+ pass
+
+ def update(self, percent: int, title: str, message: str):
+ pass
diff --git a/lib/services/enrich/__init__.py b/lib/services/enrich/__init__.py
new file mode 100644
index 0000000..f95bdbf
--- /dev/null
+++ b/lib/services/enrich/__init__.py
@@ -0,0 +1,23 @@
+from .enricher_builder import EnricherBuilder
+from .language_enricher import LanguageEnricher
+from .stats_enricher import StatsEnricher
+from .is_pack_enricher import IsPackEnricher
+from .quality_enricher import QualityEnricher
+from .cache_enricher import CacheEnricher
+from lib.domain.quality_tier import QualityTier
+from .format_enricher import FormatEnricher
+from .magnet_enricher import MagnetEnricher
+from .file_enricher import FileEnricher
+
+__all__ = [
+ "EnricherBuilder",
+ "LanguageEnricher",
+ "StatsEnricher",
+ "IsPackEnricher",
+ "QualityEnricher",
+ "CacheEnricher",
+ "QualityTier",
+ "FormatEnricher",
+ "MagnetEnricher",
+ "FileEnricher",
+]
diff --git a/lib/services/enrich/cache_enricher.py b/lib/services/enrich/cache_enricher.py
new file mode 100644
index 0000000..9c181d9
--- /dev/null
+++ b/lib/services/enrich/cache_enricher.py
@@ -0,0 +1,42 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+from typing import Dict, List
+from lib.clients.debrid.debrid_client import ProviderException
+from lib.domain.interface.cache_provider_interface import CacheProviderInterface
+from lib.domain.cached_source import CachedSource
+from lib.api.jacktook.kodi import kodilog
+from lib.domain.source import Source
+
+
+class CacheEnricher(EnricherInterface):
+ def __init__(self, cache_providers: List[CacheProviderInterface]):
+ self.cache_providers = cache_providers
+
+ def initialize(self, items: List[Source]) -> None:
+ self.provider_results: List[Dict[str, CachedSource]] = []
+
+ for cache_provider in self.cache_providers:
+ cached_hashes = cache_provider.get_cached_hashes(items)
+ self.provider_results.append(cached_hashes)
+
+ return
+
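+    # `initialize` does one batched lookup per configured provider so that `enrich`
+    # is just a dictionary lookup per result item.
+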
+ def needs(self):
+ return ["info_hash"]
+
+ def provides(self):
+ return ["is_cached", "cache_sources"]
+
+    def enrich(self, item: Source) -> None:
+        info_hash = (item.get("info_hash") or "").lower()
+        if not info_hash:
+            return
+
+ for provider_result in self.provider_results:
+ cached_source = provider_result.get(info_hash)
+ if not cached_source:
+ continue
+
+ item["is_cached"] = True
+ item["cache_sources"] = list(
+ item.get("cache_sources", []) + [cached_source]
+ )
diff --git a/lib/services/enrich/enricher_builder.py b/lib/services/enrich/enricher_builder.py
new file mode 100644
index 0000000..88ff184
--- /dev/null
+++ b/lib/services/enrich/enricher_builder.py
@@ -0,0 +1,116 @@
+from typing import List, Optional
+from lib.domain.interface.enricher_interface import EnricherInterface
+from collections import defaultdict
+from lib.domain.source import Source
+
+
+class EnricherBuilder:
+ def __init__(self):
+ self._enrichers: List[EnricherInterface] = []
+
+ def add(self, enricher: Optional[EnricherInterface]) -> "EnricherBuilder":
+ if enricher is None:
+ return self
+
+ self._enrichers.append(enricher)
+ return self
+
+ def build(self, items: List[Source]) -> List[Source]:
+ processed = []
+ for enricher in self._enrichers:
+ enricher.initialize(items)
+
+ for item in [item.copy() for item in items]:
+ for enricher in self._enrichers:
+ enricher.enrich(item)
+ processed.append(item)
+ return processed
+
+ def generate_report(self) -> str:
+ report = []
+ enrichers_info = []
+ provided_fields = defaultdict(list)
+ all_needs = set()
+
+ for enricher in self._enrichers:
+ name = type(enricher).__name__
+ needs = enricher.needs()
+ provides = enricher.provides()
+ enrichers_info.append((name, needs, provides))
+ all_needs.update(needs)
+ for field in provides:
+ provided_fields[field].append(name)
+
+ # Enrichers Details Section
+ report.append("Enrichers Report:")
+ report.append("=================")
+ report.append("\nEnrichers Details:")
+ for name, needs, provides in enrichers_info:
+ report.append(f"- {name}:")
+ report.append(f" Needs: {', '.join(needs) if needs else 'None'}")
+ report.append(f" Provides: {', '.join(provides) if provides else 'None'}")
+ report.append("")
+
+ # Insights Section
+ report.append("\nInsights:")
+ report.append("=========")
+
+ # Insight 1: Check unmet dependencies
+        cumulative_provided = set()
+        insights_unmet = []
+        for name, needs, provides in enrichers_info:
+            unmet = [field for field in needs if field not in cumulative_provided]
+            if unmet:
+                insights_unmet.append((name, unmet))
+            # Update cumulative_provided with the current enricher's provides
+            cumulative_provided.update(provides)
+
+ if insights_unmet:
+ report.append(
+ "\n1. Enrichers with unmet dependencies (needs not provided by prior enrichers):"
+ )
+ for name, unmet in insights_unmet:
+ report.append(
+ f" - {name} requires fields not provided earlier: {', '.join(unmet)}"
+ )
+ else:
+ report.append(
+ "\n1. All enrichers' dependencies are met by prior enrichers."
+ )
+
+ # Insight 2: Check overlapping provided fields
+ multi_provided = [
+ field for field, providers in provided_fields.items() if len(providers) > 1
+ ]
+ if multi_provided:
+ report.append(
+ "\n2. Fields provided by multiple enrichers (possible conflicts):"
+ )
+ for field in multi_provided:
+ providers = ", ".join(provided_fields[field])
+ report.append(f" - Field '{field}' is provided by: {providers}")
+ else:
+ report.append("\n2. No fields are provided by multiple enrichers.")
+
+ # Insight 3: Check unused provides
+ unused_provides = defaultdict(list)
+ for i, (name, _, provides) in enumerate(enrichers_info):
+ for field in provides:
+ used = False
+ for j in range(i + 1, len(enrichers_info)):
+ if field in enrichers_info[j][1]:
+ used = True
+ break
+ if not used:
+ unused_provides[name].append(field)
+
+ if unused_provides:
+ report.append(
+ "\n3. Fields provided but not needed by subsequent enrichers:"
+ )
+ for name, fields in unused_provides.items():
+ report.append(f" - {name} provides: {', '.join(fields)}")
+ else:
+ report.append("\n3. All provided fields are used by subsequent enrichers.")
+
+ return "\n".join(report)
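+
+
+# Illustrative wiring sketch (the enricher order, the cache providers passed to
+# CacheEnricher, and the `sources` list are assumptions for the example;
+# convert_size_to_bytes lives in lib.utils.kodi_utils):
+#
+#     builder = (
+#         EnricherBuilder()
+#         .add(StatsEnricher(convert_size_to_bytes))
+#         .add(FileEnricher())
+#         .add(QualityEnricher())
+#         .add(MagnetEnricher())
+#         .add(CacheEnricher(cache_providers))
+#         .add(FormatEnricher())
+#     )
+#     enriched = builder.build(sources)
+#     kodilog(builder.generate_report())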
diff --git a/lib/services/enrich/file_enricher.py b/lib/services/enrich/file_enricher.py
new file mode 100644
index 0000000..9b10930
--- /dev/null
+++ b/lib/services/enrich/file_enricher.py
@@ -0,0 +1,29 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+from typing import List
+from lib.domain.source import Source
+from lib.utils.kodi_formats import is_video
+
+
+class FileEnricher(EnricherInterface):
+ def __init__(self):
+ pass
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+ return ["description"]
+
+ def provides(self):
+ return ["title", "file", "folder"]
+
+    def enrich(self, item: Source) -> None:
+        lines = item.get("description", "").splitlines()
+        if not lines:
+            return
+        if len(lines) > 1 and is_video(lines[1]):
+            item["title"] = lines[1]
+            item["file"] = lines[1]
+            item["folder"] = lines[0]
+        else:
+            item["title"] = lines[0]
+            item["file"] = lines[0]
+            item["folder"] = ""
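+
+
+# Example of the two-line description layout this enricher expects (an
+# assumption inferred from the parsing above, not a guaranteed indexer format):
+#
+#     Some.Show.S01.1080p.WEB-DL          <- folder
+#     Some.Show.S01E02.1080p.WEB-DL.mkv   <- file / title (must pass is_video)
+#
+# A single-line description is used as the title itself, with no folder.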
diff --git a/lib/services/enrich/format_enricher.py b/lib/services/enrich/format_enricher.py
new file mode 100644
index 0000000..bc9c865
--- /dev/null
+++ b/lib/services/enrich/format_enricher.py
@@ -0,0 +1,105 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+from typing import List
+from lib.domain.source import Source
+import math
+
+from lib.utils.utils import get_random_color
+
+
+class FormatEnricher(EnricherInterface):
+ def __init__(self):
+ pass
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+        return ["is_cached", "cache_sources"]
+
+ def provides(self):
+ return ["status", "a1", "a2", "a3", "b1", "b2", "b3"]
+
+ def enrich(self, item: Source) -> None:
+ # Extract cache-related information if available
+ if item.get("is_cached") and item.get("cache_sources"):
+ cache_sources = item.get("cache_sources", [])
+
+ # Separate instant availability and non-instant availability sources
+ cached_sources = [source for source in cache_sources if source.get("instant_availability")]
+ caching_sources = [source for source in cache_sources if not source.get("instant_availability")]
+
+ # Build status message for cached and caching sources
+ status_parts = []
+ if cached_sources:
+ cached_providers = ", ".join(
+ source.get("cache_provider_name", "Unknown") for source in cached_sources
+ )
+ status_parts.append(f"Cached in {cached_providers}")
+
+ if caching_sources:
+ caching_providers = ", ".join(
+ f"{source.get('cache_provider_name', 'Unknown')} ({round(source.get('ratio', 0) * 100)}%)"
+ for source in caching_sources
+ )
+ status_parts.append(f"Caching in {caching_providers}")
+
+ # Combine status parts into a single string
+ item["status"] = ", ".join(status_parts)
+
+ # Update additional fields
+ item.update({
+ "a1": str(item.get("seeders", "")), # Seeders count (default to empty string if missing)
+ "a2": (
+ self._bytes_to_human_readable(item.get("size"))
+ if item.get("size") is not None
+ else ""
+ ), # Human-readable size (default to empty string if missing)
+            "a3": "Torrent", # Source type label shown in the list
+ "b1": item.get("title", ""), # Title (default to empty string if missing)
+ "b2": self._colored_list(
+ [item.get("quality_formatted", "")] + item.get("languages", []) + [item.get("indexer", "")] + [item.get("provider", "")]
+ ), # Colored list of languages, indexer, and provider
+ "b3": item.get("status", ""), # Status (default to empty string if missing)
+ })
+
+ def _format_colored_text(self, text: str) -> str:
+ """Formats text with random color using Kodi markup."""
+ color = get_random_color(text)
+ return f"[COLOR {color}]{text}[/COLOR]"
+
+    def _colored_list(self, values):
+        if not values:
+            return ""
+        colored = []
+        for value in values:
+            if not value:
+                continue
+            color = get_random_color(value)
+            colored.append(f"[[COLOR {color}]{value}[/COLOR]]")
+        return " ".join(colored)
+
+ def _format_significant(self, size, digits=3):
+ if size == 0:
+ return "0" # Handle zero case
+
+ order = math.floor(math.log10(abs(size))) # Get the order of magnitude
+ factor = 10 ** (digits - 1 - order) # Compute scaling factor
+ rounded = round(size * factor) / factor # Round to significant digits
+
+ return str(rounded)
+
+    def _bytes_to_human_readable(self, size, unit="B"):
+        # Decimal (1000-based) units to match the division factor below.
+        units = ["B", "KB", "MB", "GB", "TB", "PB"]
+        index = units.index(unit)
+
+        while size >= 1000 and index < len(units) - 1:
+            size /= 1000
+            index += 1
+
+        return f"{self._format_significant(size, 3)} {units[index]}"
diff --git a/lib/services/enrich/is_pack_enricher.py b/lib/services/enrich/is_pack_enricher.py
new file mode 100644
index 0000000..7c30601
--- /dev/null
+++ b/lib/services/enrich/is_pack_enricher.py
@@ -0,0 +1,49 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+import re
+from re import Pattern
+from typing import List
+from lib.domain.source import Source
+
+class IsPackEnricher(EnricherInterface):
+ def __init__(self, season_number: int):
+ self.season_number = season_number
+ self.season_fill = f"{season_number:02d}"
+ self.pattern = self._build_pattern()
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+ return ["title"]
+
+ def provides(self):
+        return ["is_pack"]
+
+ def _build_pattern(self) -> Pattern:
+ base_patterns = [
+ # Season number variations
+ rf"\.S({self.season_number}|{self.season_fill})\.",
+ rf"\sS({self.season_number}|{self.season_fill})\s",
+ rf"\.({self.season_number}|{self.season_fill})\.season",
+ # Complete season indicators
+ r"total\.season",
+ r"(^|\s)season(\s|$)",
+ r"the\.complete",
+ r"(^|\s)complete(\s|$)",
+ # Episode range detection
+ rf"S{self.season_fill}E\d{{2}}-\d{{2}}",
+ # Season directory patterns
+ rf"\.season\.({self.season_number}|{self.season_fill})\.",
+ rf"\.season({self.season_number}|{self.season_fill})\.",
+ # Season range patterns
+ rf"s01 (to|thru) ({self.season_number}|s{self.season_fill})",
+ rf"s1 (to|thru) ({self.season_number}|s{self.season_fill})",
+ ]
+
+ return re.compile(
+ "|".join(f"({p})" for p in base_patterns), flags=re.IGNORECASE
+ )
+
+ def enrich(self, item: Source) -> None:
+ title = item.get("title", "")
+ item["is_pack"] = bool(self.pattern.search(title))
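+
+
+# Titles the combined pattern is expected to flag as season packs, shown here
+# for season_number=2 (illustrative examples only, not an exhaustive list):
+#
+#     "Show.Name.S02.1080p.WEB-DL"     -> ".S02." variant
+#     "Show Name Season 2 Complete"    -> "season" / "complete" indicators
+#     "Show.Name.S02E01-10.720p"       -> episode-range pattern
+#     "Show Name s01 to s02 BluRay"    -> season-range pattern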
diff --git a/lib/services/enrich/language_enricher.py b/lib/services/enrich/language_enricher.py
new file mode 100644
index 0000000..3eff9a2
--- /dev/null
+++ b/lib/services/enrich/language_enricher.py
@@ -0,0 +1,35 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+import re
+from typing import Dict, Set, List
+from lib.domain.source import Source
+
+class LanguageEnricher(EnricherInterface):
+ def __init__(self, language_map: Dict[str, str], keywords: Set[str]):
+ self.flag_regex = re.compile(r"[\U0001F1E6-\U0001F1FF]{2}")
+ self.keyword_regex = re.compile(
+ r"\b(?:" + "|".join(re.escape(k) for k in keywords) + r")\b", re.IGNORECASE
+ )
+ self.language_map = language_map
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+ return ["description", "languages"]
+
+ def provides(self):
+ return ["languages"]
+
+ def enrich(self, item: Source) -> None:
+ desc = item.get("description", "")
+
+ # Flag-based detection
+ flags = self.flag_regex.findall(desc)
+        flag_langs = {self.language_map.get(f, "") for f in flags} - {""}
+
+ # Keyword-based detection
+ keywords = self.keyword_regex.findall(desc.lower())
+ keyword_langs = {self.language_map.get(k, "") for k in keywords} - {""}
+
+        combined = set(item.get("languages", [])) | flag_langs | keyword_langs
+ item["languages"] = list(combined)
\ No newline at end of file
diff --git a/lib/services/enrich/magnet_enricher.py b/lib/services/enrich/magnet_enricher.py
new file mode 100644
index 0000000..d88f238
--- /dev/null
+++ b/lib/services/enrich/magnet_enricher.py
@@ -0,0 +1,26 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+from typing import List
+from lib.domain.source import Source
+
+class MagnetEnricher(EnricherInterface):
+ def __init__(self):
+ pass
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+ return ["info_hash"]
+
+ def provides(self):
+ return ["magnet"]
+
+ def enrich(self, item: Source) -> None:
+ if "magnet" in item:
+ return
+
+ if "info_hash" not in item:
+ return
+
+        info_hash = item.get("info_hash")
+        item["magnet"] = f"magnet:?xt=urn:btih:{info_hash}"
\ No newline at end of file
diff --git a/lib/services/enrich/quality_enricher.py b/lib/services/enrich/quality_enricher.py
new file mode 100644
index 0000000..2ee91a7
--- /dev/null
+++ b/lib/services/enrich/quality_enricher.py
@@ -0,0 +1,26 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+from lib.domain.quality_tier import QualityTier
+from typing import List
+from lib.domain.source import Source
+
+class QualityEnricher(EnricherInterface):
+ def __init__(self):
+ self.tiers = QualityTier.default_quality_tiers()
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+ return ["title"]
+
+ def provides(self):
+ return ["quality", "quality_sort", "quality_formatted"]
+
+ def enrich(self, item: Source) -> None:
+ title = item.get("title", "")
+ for tier in sorted(self.tiers, key=lambda t: -t.priority):
+ if tier.regex is None or tier.regex.search(title):
+ item["quality"] = tier.label
+ item["quality_formatted"] = tier.label_formatted
+ item["quality_sort"] = tier.priority
+ return
diff --git a/lib/services/enrich/stats_enricher.py b/lib/services/enrich/stats_enricher.py
new file mode 100644
index 0000000..04ddb02
--- /dev/null
+++ b/lib/services/enrich/stats_enricher.py
@@ -0,0 +1,38 @@
+from lib.domain.interface.enricher_interface import EnricherInterface
+from typing import Callable, List
+import re
+from lib.domain.source import Source
+
+class StatsEnricher(EnricherInterface):
+ def __init__(self, size_converter: Callable):
+ self.size_pattern = re.compile(r"💾 ([\d.]+ (?:GB|MB))")
+ self.seeders_pattern = re.compile(r"👤 (\d+)")
+ self.provider_pattern = re.compile(r"([🌐🔗⚙️])\s*([^🌐🔗⚙️]+)")
+ self.convert_size = size_converter
+
+ def initialize(self, items: List[Source]) -> None:
+ return
+
+ def needs(self):
+ return ["description"]
+
+ def provides(self):
+ return ["size", "seeders", "provider"]
+
+ def enrich(self, item: Source) -> None:
+ desc = item.get("description", "")
+ if not desc:
+ return
+
+ # Size extraction
+ if size_match := self.size_pattern.search(desc):
+ item["size"] = self.convert_size(size_match.group(1))
+
+ # Seeders extraction
+ if seeders_match := self.seeders_pattern.search(desc):
+ item["seeders"] = int(seeders_match.group(1))
+
+ # Provider detection
+        if provider_matches := self.provider_pattern.findall(desc):
+            cleaned = [m[1].strip() for m in provider_matches if m[1].strip()]
+            item["provider"] = cleaned[-1].splitlines()[0] if cleaned else "N/A"
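+
+
+# Sketch of the emoji-tagged description these regexes target (a common
+# Stremio/Torrentio-style layout; the exact format varies by addon, so treat
+# this as an assumption):
+#
+#     Some.Movie.2023.1080p.WEB-DL
+#     👤 152 💾 2.1 GB 🌐 ThePirateBay
+#
+# yields seeders=152, size=size_converter("2.1 GB"), provider="ThePirateBay".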
diff --git a/lib/services/filters/__init__.py b/lib/services/filters/__init__.py
new file mode 100644
index 0000000..66700a2
--- /dev/null
+++ b/lib/services/filters/__init__.py
@@ -0,0 +1,184 @@
+import re
+from typing import List, Any, Union
+from lib.domain.source import Source
+from lib.domain.interface.filter_interface import FilterInterface
+
+
+
+class FieldFilter(FilterInterface):
+ def __init__(self, field: str, value: Any):
+ self.field = field
+ self.value = value
+
+ def matches(self, item: Source) -> bool:
+ return item.get(self.field) == self.value
+
+ def reset(self):
+ pass
+
+
+class DedupeFilter(FilterInterface):
+ def __init__(self):
+ self.seen = set()
+
+ def matches(self, item: Source) -> bool:
+ info_hash = item.get("info_hash")
+ if info_hash in self.seen:
+ return False
+ if info_hash is not None:
+ self.seen.add(info_hash)
+ return True
+
+ def reset(self):
+ self.seen.clear()
+
+
+class SourceFilter(FilterInterface):
+    def matches(self, item: Source) -> bool:
+        return bool(item.get("info_hash") or item.get("guid"))
+
+    def reset(self):
+        # Stateless; nothing to clear between searches.
+        pass
+
+
+class LanguageFilter(FilterInterface):
+ def __init__(self, languages: List[str]):
+ self.languages = languages
+
+ def matches(self, item: Source) -> bool:
+ if not self.languages:
+ return True
+        item_langs = item.get("languages", [])
+        return any(lang in item_langs for lang in self.languages)
+
+    def reset(self):
+        # Stateless; nothing to clear between searches.
+        pass
+
+
+class EpisodeFilter(FilterInterface):
+ def __init__(self, episode_name: str, episode_num: int, season_num: int):
+ self.episode_name = episode_name
+ self.episode_num = episode_num
+ self.season_num = season_num
+ self.compiled_pattern = self._compile_pattern()
+
+ def _compile_pattern(self):
+ episode_fill = f"{self.episode_num:02d}"
+ season_fill = f"{self.season_num:02d}"
+
+ patterns = [
+ rf"S{season_fill}E{episode_fill}",
+ rf"{season_fill}x{episode_fill}",
+ rf"\s{season_fill}\s",
+ rf"\.S{season_fill}",
+ rf"\.S{season_fill}E{episode_fill}",
+ rf"\sS{season_fill}E{episode_fill}\s",
+ r"Cap\.",
+ ]
+
+ if self.episode_name:
+ patterns.append(re.escape(self.episode_name))
+
+ return re.compile("|".join(patterns), flags=re.IGNORECASE)
+
+ def matches(self, item: Source) -> bool:
+        title = item.get("title", "")
+        return bool(self.compiled_pattern.search(title))
+
+    def reset(self):
+        # Stateless; nothing to clear between searches.
+        pass
+
+
+class FilterBuilder(FilterInterface):
+ def __init__(self, operator: str = "AND"):
+ self._filters: List[FilterInterface] = []
+ self._operator = operator.upper()
+ self._sort_criteria: List[tuple] = []
+ self._limit: int = 0
+
+ def matches(self, item: Source) -> bool:
+ if not self._filters:
+ return True
+
+ results = [f.matches(item) for f in self._filters]
+ if self._operator == "AND":
+ return all(results)
+ elif self._operator == "OR":
+ return any(results)
+ else:
+ raise ValueError(f"Invalid operator: {self._operator}. Use 'AND' or 'OR'.")
+
+ def reset(self):
+ for f in self._filters:
+ f.reset()
+
+ def sort_by(self, field: str, ascending: bool = True) -> "FilterBuilder":
+ self._sort_criteria.append((field, ascending))
+ return self
+
+ def limit(self, n: int) -> "FilterBuilder":
+ self._limit = n
+ return self
+
+ def filter_by_field(self, field: str, value: Any) -> "FilterBuilder":
+ self._filters.append(FieldFilter(field, value))
+ return self
+
+ def filter_by_quality(self, priority: int) -> "FilterBuilder":
+ self._filters.append(FieldFilter("quality_sort", priority))
+ return self
+
+ def filter_by_language(self, language_code: str) -> "FilterBuilder":
+ existing = next((f for f in self._filters if isinstance(f, LanguageFilter)), None)
+ if existing:
+ existing.languages.append(language_code)
+ else:
+ self._filters.append(LanguageFilter([language_code]))
+ return self
+
+ def dedupe_by_infoHash(self) -> "FilterBuilder":
+ self.add_filter(DedupeFilter())
+ return self
+
+ def filter_by_episode(
+ self,
+ episode_name: str,
+ episode_num: Union[int, str],
+ season_num: Union[int, str],
+ ) -> "FilterBuilder":
+ episode_num = int(episode_num)
+ season_num = int(season_num)
+ self._filters = [f for f in self._filters if not isinstance(f, EpisodeFilter)]
+ self._filters.append(EpisodeFilter(episode_name, episode_num, season_num))
+ return self
+
+ def filter_by_source(self) -> "FilterBuilder":
+ if not any(isinstance(f, SourceFilter) for f in self._filters):
+ self._filters.append(SourceFilter())
+ return self
+
+ def add_filter(self, filter: FilterInterface) -> "FilterBuilder":
+ self._filters.append(filter)
+ return self
+
+ def build(self, items: List[Source]) -> List[Source]:
+ self.reset()
+ filtered_items = [item for item in items if self.matches(item)]
+ sorted_items = self._apply_sorting(filtered_items)
+ limited_items = self._apply_limit(sorted_items)
+ return limited_items
+
+    def _apply_sorting(self, items: List[Source]) -> List[Source]:
+        if not self._sort_criteria:
+            return items
+
+        def norm(value):
+            # Lowercase strings for case-insensitive ordering; None compares
+            # greater than any present value.
+            if isinstance(value, str):
+                return (False, value.lower())
+            return (value is None, value)
+
+        # Stable sorts applied from the last criterion to the first give the
+        # first criterion the highest precedence, and per-criterion reverse
+        # handles descending order for strings as well as numbers.
+        sorted_items = list(items)
+        for field, ascending in reversed(self._sort_criteria):
+            try:
+                sorted_items.sort(key=lambda it: norm(it.get(field)), reverse=not ascending)
+            except TypeError:
+                # Mixed, non-comparable values for this field; skip it.
+                continue
+        return sorted_items
+
+ def _apply_limit(self, items: List[Source]) -> List[Source]:
+ return items[: self._limit] if self._limit else items
\ No newline at end of file
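+
+
+# Illustrative chaining sketch (the fields, episode values, and limit are
+# assumptions; real values come from user settings and the request context):
+#
+#     builder = (
+#         FilterBuilder(operator="AND")
+#         .filter_by_source()
+#         .dedupe_by_infoHash()
+#         .filter_by_episode("Pilot", episode_num=1, season_num=1)
+#         .sort_by("seeders", ascending=False)
+#         .limit(50)
+#     )
+#     results = builder.build(sources)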
diff --git a/lib/utils/debrid_utils.py b/lib/utils/debrid_utils.py
index 6a51ecd..5d94275 100644
--- a/lib/utils/debrid_utils.py
+++ b/lib/utils/debrid_utils.py
@@ -131,14 +131,15 @@ def get_rd_status_pack(res):
def get_pack_info(type, info_hash):
if type == Debrids.PM:
- info = get_pm_pack_info(info_hash)
+ return get_pm_pack_info(info_hash)
elif type == Debrids.TB:
- info = get_torbox_pack_info(info_hash)
+ return get_torbox_pack_info(info_hash)
elif type == Debrids.RD:
- info = get_rd_pack_info(info_hash)
+ return get_rd_pack_info(info_hash)
elif type == Debrids.ED:
- info = get_ed_pack_info(info_hash)
- return info
+ return get_ed_pack_info(info_hash)
+ else:
+ return None
def filter_results(results, direct_results):
@@ -148,7 +149,7 @@ def filter_results(results, direct_results):
info_hash = extract_info_hash(res)
if info_hash:
- res["infoHash"] = info_hash
+ res["info_hash"] = info_hash
filtered_results.append(res)
elif (
res["indexer"] == Indexer.TELEGRAM
@@ -161,8 +162,8 @@ def filter_results(results, direct_results):
def extract_info_hash(res):
"""Extracts and returns the info hash from a result if available."""
- if res.get("infoHash"):
- return res["infoHash"].lower()
+ if res.get("info_hash"):
+ return res["info_hash"].lower()
if (guid := res.get("guid", "")) and (
guid.startswith("magnet:?") or len(guid) == 40
diff --git a/lib/utils/ed_utils.py b/lib/utils/ed_utils.py
index c86257c..4e583f6 100644
--- a/lib/utils/ed_utils.py
+++ b/lib/utils/ed_utils.py
@@ -20,9 +20,9 @@
def check_ed_cached(
results, cached_results, uncached_results, total, dialog, lock
):
- filtered_results = [res for res in results if "infoHash" in res]
+ filtered_results = [res for res in results if "info_hash" in res]
if filtered_results:
- magnets = [info_hash_to_magnet(res["infoHash"]) for res in filtered_results]
+ magnets = [info_hash_to_magnet(res["info_hash"]) for res in filtered_results]
torrents_info = client.get_torrent_instant_availability(magnets)
cached_response = torrents_info.get("cached", [])
diff --git a/lib/utils/kodi_utils.py b/lib/utils/kodi_utils.py
index 04ddac0..dd21608 100644
--- a/lib/utils/kodi_utils.py
+++ b/lib/utils/kodi_utils.py
@@ -310,16 +310,6 @@ def update_kodi_addons_db(addon_name=ADDON_NAME):
pass
-def bytes_to_human_readable(size, unit="B"):
- units = {"B": 0, "KB": 1, "MB": 2, "GB": 3, "TB": 4, "PB": 5}
-
- while size >= 1024 and unit != "PB":
- size /= 1024
- unit = list(units.keys())[list(units.values()).index(units[unit] + 1)]
-
- return f"{size:.3g} {unit}"
-
-
def convert_size_to_bytes(size_str: str) -> int:
"""Convert size string to bytes."""
match = re.match(r"(\d+(?:\.\d+)?)\s*(GB|MB)", size_str, re.IGNORECASE)
diff --git a/lib/utils/language_detection.py b/lib/utils/language_detection.py
index e1e9e96..442b0ea 100644
--- a/lib/utils/language_detection.py
+++ b/lib/utils/language_detection.py
@@ -264,6 +264,7 @@
"cas",
"castellano",
"castilian",
+ "[cap."
}
language_codes = {
"bosnian": "bs",
@@ -530,6 +531,7 @@
"cas": "es",
"castellano": "es",
"castilian": "es",
+ "[cap.": "es"
}
diff --git a/lib/utils/pm_utils.py b/lib/utils/pm_utils.py
index 86ed359..3abd662 100644
--- a/lib/utils/pm_utils.py
+++ b/lib/utils/pm_utils.py
@@ -18,7 +18,7 @@
def check_pm_cached(results, cached_results, uncached_results, total, dialog, lock):
- hashes = [res.get("infoHash") for res in results]
+ hashes = [res.get("info_hash") for res in results]
torrents_info = pm_client.get_torrent_instant_availability(hashes)
cached_response = torrents_info.get("response")
diff --git a/lib/utils/rd_utils.py b/lib/utils/rd_utils.py
index b9409f4..5630e58 100644
--- a/lib/utils/rd_utils.py
+++ b/lib/utils/rd_utils.py
@@ -28,7 +28,7 @@ def check_rd_cached(results, cached_results, uncached_results, total, dialog, lo
debrid_dialog_update("RD", total, dialog, lock)
res["type"] = Debrids.RD
- if res.get("infoHash") in torr_available_hashes:
+ if res.get("info_hash") in torr_available_hashes:
res["isCached"] = True
cached_results.append(res)
else:
diff --git a/lib/utils/torbox_utils.py b/lib/utils/torbox_utils.py
index 24ae9c7..ba7a6a2 100644
--- a/lib/utils/torbox_utils.py
+++ b/lib/utils/torbox_utils.py
@@ -21,7 +21,7 @@
def check_torbox_cached(
results, cached_results, uncached_results, total, dialog, lock
):
- hashes = [res.get("infoHash") for res in results]
+ hashes = [res.get("info_hash") for res in results]
response = client.get_torrent_instant_availability(hashes)
cached_response = response.get("data", [])
@@ -29,7 +29,7 @@ def check_torbox_cached(
debrid_dialog_update("TB", total, dialog, lock)
res["type"] = Debrids.TB
- if res.get("infoHash") in cached_response:
+ if res.get("info_hash") in cached_response:
with lock:
res["isCached"] = True
cached_results.append(res)
diff --git a/lib/utils/utils.py b/lib/utils/utils.py
index 716f51a..fa0f54a 100644
--- a/lib/utils/utils.py
+++ b/lib/utils/utils.py
@@ -14,8 +14,6 @@
from lib.api.tvdbapi.tvdbapi import TVDBAPI
from lib.db.cached import cache
from lib.db.main_db import main_db
-
-
from lib.torf._magnet import Magnet
from lib.utils.kodi_utils import (
ADDON_HANDLE,
@@ -196,9 +194,16 @@ class Cartoons(Enum):
]
+class FakeDialog:
+    """No-op stand-in for DialogProgressBG (create/update/close)."""
+    def create(self, message: str):
+        pass
+    def update(self, percent: int, title: str, message: str):
+        pass
+    def close(self):
+        pass
class DialogListener:
def __init__(self):
- self._dialog = DialogProgressBG()
+        # self._dialog = DialogProgressBG()
+        self._dialog = FakeDialog()
@property
def dialog(self):
@@ -316,18 +321,16 @@ def set_video_info(
def make_listing(metadata):
title = metadata.get("title")
- ids = metadata.get("ids")
tv_data = metadata.get("tv_data", {})
mode = metadata.get("mode", "")
-
+ ep_name = metadata.get("ep_name", "")
+ episode = metadata.get("episode", "")
+ season = metadata.get("season", "")
list_item = ListItem(label=title)
list_item.setLabel(title)
list_item.setContentLookup(False)
- if tv_data:
- ep_name, episode, season = tv_data.split("(^)")
- else:
- ep_name = episode = season = ""
+    ids = {"tmdb_id": metadata.get("tmdb_id"), "tvdb_id": metadata.get("tvdb_id"), "imdb_id": metadata.get("imdb_id")}
set_media_infotag(
list_item,
@@ -357,6 +360,7 @@ def set_media_infotag(
url="",
original_name="",
):
+    # TODO: CHECK THIS FUNCTION
info_tag = list_item.getVideoInfoTag()
info_tag.setPath(url)
info_tag.setTitle(name)
@@ -557,18 +561,6 @@ def get_random_color(provider_name):
return "FF" + "".join(colors).upper()
-def get_colored_languages(languages):
- if not languages:
- return ""
- colored_languages = []
- for lang in languages:
- lang_color = get_random_color(lang)
- colored_lang = f"[B][COLOR {lang_color}][{lang}][/COLOR][/B]"
- colored_languages.append(colored_lang)
- colored_languages = " ".join(colored_languages)
- return colored_languages
-
-
def execute_thread_pool(results, func, *args, **kwargs):
with ThreadPoolExecutor(max_workers=10) as executor:
[executor.submit(func, res, *args, **kwargs) for res in results]
@@ -610,25 +602,10 @@ def clear(type="", update=False):
container_refresh()
-def limit_results(results):
- limit = int(get_setting("indexers_total_results"))
- return results[:limit]
-
-
def get_description_length():
return int(get_setting("indexers_desc_length"))
-def remove_duplicate(results):
- seen_values = []
- result_dict = []
- for res in results:
- if res not in seen_values:
- result_dict.append(res)
- seen_values.append(res)
- return result_dict
-
-
def unzip(zip_location, destination_location, destination_check):
try:
zipfile = ZipFile(zip_location)
@@ -642,162 +619,6 @@ def unzip(zip_location, destination_location, destination_check):
return status
-def check_season_pack(results, season_num):
- season_fill = f"{int(season_num):02}"
-
- patterns = [
- # Season as ".S{season_num}." or ".S{season_fill}."
- r"\.S%s\." % season_num,
- r"\.S%s\." % season_fill,
- # Season as " S{season_num} " or " S{season_fill} "
- r"\sS%s\s" % season_num,
- r"\sS%s\s" % season_fill,
- # Season as ".{season_num}.season" (like .1.season, .01.season)
- r"\.%s\.season" % season_num,
- # "total.season" or "season" or "the.complete"
- r"total\.season",
- r"season",
- r"the\.complete",
- r"complete",
- # Pattern to detect episode ranges like S02E01-02
- r"S(\d{2})E(\d{2})-(\d{2})",
- # Season as ".season.{season_num}." or ".season.{season_fill}."
- r"\.season\.%s\." % season_num,
- r"\.season%s\." % season_num,
- r"\.season\.%s\." % season_fill,
- # Handle cases "s1 to {season_num}", "s1 thru {season_num}", etc.
- r"s1 to %s" % season_num,
- r"s1 to s%s" % season_num,
- r"s01 to %s" % season_fill,
- r"s01 to s%s" % season_fill,
- r"s1 thru %s" % season_num,
- r"s1 thru s%s" % season_num,
- r"s01 thru %s" % season_fill,
- r"s01 thru s%s" % season_fill,
- ]
-
- combined_pattern = "|".join(patterns)
-
- for res in results:
- match = re.search(combined_pattern, res["title"])
- if match:
- res["isPack"] = True
- else:
- res["isPack"] = False
-
-
-def pre_process(results, mode, episode_name, episode, season):
- results = remove_duplicate(results)
-
- if get_setting("stremio_enabled") and get_setting("torrent_enable"):
- results = filter_torrent_sources(results)
-
- if mode == "tv" and get_setting("filter_by_episode"):
- results = filter_by_episode(results, episode_name, episode, season)
-
- results = filter_by_quality(results)
-
- return results
-
-
-def post_process(results, season=0):
- if int(season) > 0:
- check_season_pack(results, season)
-
- results = sort_results(results)
-
- results = limit_results(results)
-
- return results
-
-
-def filter_torrent_sources(results):
- filtered_results = []
- for res in results:
- if res["infoHash"] or res["guid"]:
- filtered_results.append(res)
- return filtered_results
-
-
-def sort_results(res):
- sort_by = get_setting("indexers_sort_by")
-
- field_to_sort = {
- "Seeds": "seeders",
- "Size": "size",
- "Date": "publishDate",
- "Quality": "quality",
- "Cached": "isCached",
- }
-
- if sort_by in field_to_sort:
- res = sorted(res, key=lambda r: r.get(field_to_sort[sort_by], 0), reverse=True)
-
- priority_language = get_setting("priority_language").lower()
- if priority_language and priority_language != "None":
- res = sorted(
- res, key=lambda r: priority_language in r.get("languages", []), reverse=True
- )
-
- return res
-
-
-def filter_by_episode(results, episode_name, episode_num, season_num):
- episode_fill = f"{int(episode_num):02}"
- season_fill = f"{int(season_num):02}"
-
- patterns = [
- r"S%sE%s" % (season_fill, episode_fill), # SXXEXX format
- r"%sx%s" % (season_fill, episode_fill), # XXxXX format
- r"\s%s\s" % season_fill, # season surrounded by spaces
- r"\.S%s" % season_fill, # .SXX format
- r"\.S%sE%s" % (season_fill, episode_fill), # .SXXEXX format
- r"\sS%sE%s\s"
- % (season_fill, episode_fill), # season and episode surrounded by spaces
- r"Cap\.", # match "Cap."
- ]
-
- if episode_name:
- patterns.append(episode_name)
-
- combined_pattern = "|".join(patterns)
-
- filtered_episodes = []
- for res in results:
- match = re.search(combined_pattern, res["title"])
- if match:
- filtered_episodes.append(res)
-
- return filtered_episodes
-
-
-def filter_by_quality(results):
- quality_720p = []
- quality_1080p = []
- quality_4k = []
- no_quarlity = []
-
- for res in results:
- title = res["title"]
- if "480p" in title:
- res["quality"] = "[B][COLOR orange]480p[/COLOR][/B]"
- quality_720p.append(res)
- elif "720p" in title:
- res["quality"] = "[B][COLOR orange]720p[/COLOR][/B]"
- quality_720p.append(res)
- elif "1080p" in title:
- res["quality"] = "[B][COLOR blue]1080p[/COLOR][/B]"
- quality_1080p.append(res)
- elif "2160" in title:
- res["quality"] = "[B][COLOR yellow]4k[/COLOR][/B]"
- quality_4k.append(res)
- else:
- res["quality"] = "[B][COLOR yellow]N/A[/COLOR][/B]"
- no_quarlity.append(res)
-
- combined_list = quality_4k + quality_1080p + quality_720p + no_quarlity
- return combined_list
-
def clean_auto_play_undesired(results):
undesired = ("SD", "CAM", "TELE", "SYNC", "480p")
@@ -810,16 +631,6 @@ def clean_auto_play_undesired(results):
return results[0]
-def is_torrent_url(uri):
- res = requests.head(uri, timeout=20, headers=USER_AGENT_HEADER)
- if (
- res.status_code == 200
- and res.headers.get("Content-Type") == "application/octet-stream"
- ):
- return True
- else:
- return False
-
def supported_video_extensions():
media_types = getSupportedMedia("video")
diff --git a/resources/language/English/strings.po b/resources/language/English/strings.po
index a017f2d..5d412d9 100644
--- a/resources/language/English/strings.po
+++ b/resources/language/English/strings.po
@@ -1256,9 +1256,39 @@ msgid "Prioritize results in the selected language"
msgstr ""
msgctxt "#30874"
-msgid "Update Addons"
+msgid "Transmission password (if required)."
msgstr ""
msgctxt "#30875"
msgid "Toggle Catalogs Add-ons"
msgstr "Alternar complementos de catalogos"
+
+msgctxt "#30881"
+msgid "Transmission username (if required)."
+msgstr ""
+
+msgctxt "#30876"
+msgid "Password"
+msgstr ""
+
+msgctxt "#30877"
+msgid "Transmission Configuration"
+msgstr ""
+
+msgctxt "#30878"
+msgid "Enable Transmission torrent client integration."
+msgstr ""
+
+msgctxt "#30879"
+msgid "Path to the folder where Transmission downloads are saved."
+msgstr ""
+
+msgctxt "#30880"
+msgid "Transmission server URL (e.g., http://localhost:9091)."
+msgstr ""
+
+msgctxt "#30881"
+msgid "Download Folder"
+msgstr ""
+
+msgid "Update Addons"
+msgstr ""
diff --git a/resources/settings.xml b/resources/settings.xml
index 007a401..957efd7 100644
--- a/resources/settings.xml
+++ b/resources/settings.xml
@@ -668,12 +668,70 @@
[settings.xml hunk garbled in extraction: the XML element markup was stripped. The added block defines the Transmission integration settings — an enable toggle defaulting to false, the server URL, username, password, and a "Transmission Download Folder" path setting — corresponding to the new Transmission strings above.]
diff --git a/resources/skins/Default/1080i/source_select_new.xml b/resources/skins/Default/1080i/source_select_new.xml
new file mode 100644
index 0000000..37b4f63
--- /dev/null
+++ b/resources/skins/Default/1080i/source_select_new.xml
@@ -0,0 +1,369 @@
[source_select_new.xml garbled in extraction: the XML markup was stripped. The new 1920x1080 window defines a fanart backdrop with a dark overlay ($INFO[Window().Property(info.fanart)]) and the jtk_clearlogo.png logo, two header labels, a vertical source list with focused and unfocused item layouts (a colored circle marker plus several font12 label rows per source), a scrollbar, an info panel showing the poster and plot from Window.Property(info.poster)/(info.plot), and a left-hand filter panel including a dedupe toggle (SetProperty(dedupe,True/False,1000)), several left-aligned filter labels, and an "Enter search string" input that sets the languageFilter window property from Control.GetLabel(3).]