Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: multiple logging improvements and various other fixes #1015

Merged
merged 4 commits
Feb 28, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 9 additions & 5 deletions src/program/media/item.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""MediaItem class"""
from datetime import datetime
from pathlib import Path
from typing import List, Optional, Self
from typing import Dict, List, Optional, Self

import sqlalchemy
from loguru import logger
Expand Down Expand Up @@ -499,9 +499,9 @@ def _determine_state(self):
for season in self.seasons
):
return States.PartiallyCompleted
if all(season.state == States.Symlinked for season in self.seasons):
if any(season.state == States.Symlinked for season in self.seasons):
return States.Symlinked
if all(season.state == States.Downloaded for season in self.seasons):
if any(season.state == States.Downloaded for season in self.seasons):
return States.Downloaded
if self.is_scraped():
return States.Scraped
Expand Down Expand Up @@ -610,9 +610,9 @@ def _determine_state(self):
return States.Ongoing
if any(episode.state == States.Completed for episode in self.episodes):
return States.PartiallyCompleted
if all(episode.state == States.Symlinked for episode in self.episodes):
if any(episode.state == States.Symlinked for episode in self.episodes):
return States.Symlinked
if all(episode.file and episode.folder for episode in self.episodes):
if any(episode.file and episode.folder for episode in self.episodes):
return States.Downloaded
if self.is_scraped():
return States.Scraped
Expand Down Expand Up @@ -677,6 +677,10 @@ def log_string(self):
return self.parent.log_string + " S" + str(self.number).zfill(2)

def get_top_title(self) -> str:
    """Return the title of this season's parent show.

    Refreshes the ``parent`` relationship from the database first (when
    this object is attached to a session) so a stale/expired parent does
    not yield an outdated title.
    """
    db_session = object_session(self)
    if db_session is not None:
        # Re-load the parent relationship to avoid returning stale data.
        db_session.refresh(self, ["parent"])
    return self.parent.title


Expand Down
33 changes: 29 additions & 4 deletions src/program/services/downloaders/realdebrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from loguru import logger
from pydantic import BaseModel
from requests import Session
from requests import HTTPError, Session

from program.services.downloaders.models import (
VALID_VIDEO_EXTENSIONS,
Expand Down Expand Up @@ -144,7 +144,17 @@ def get_instant_availability(self, infohash: str, item_type: str) -> Optional[To
except InvalidDebridFileException as e:
logger.debug(f"{infohash}: {e}")
except Exception as e:
logger.error(f"Failed to get instant availability for {infohash}: {e}")
if len(e.args) > 0:
if " 503 " in e.args[0] or "Infringing" in e.args[0]:
logger.debug(f"Failed to get instant availability for {infohash}: [503] Infringing Torrent or Service Unavailable")
elif " 429 " in e.args[0] or "Rate Limit Exceeded" in e.args[0]:
logger.debug(f"Failed to get instant availability for {infohash}: [429] Rate Limit Exceeded")
elif " 404 " in e.args[0] or "Torrent Not Found" in e.args[0]:
logger.debug(f"Failed to get instant availability for {infohash}: [404] Torrent Not Found or Service Unavailable")
elif " 400 " in e.args[0] or "Torrent file is not valid" in e.args[0]:
logger.debug(f"Failed to get instant availability for {infohash}: [400] Torrent file is not valid")
else:
logger.error(f"Failed to get instant availability for {infohash}: {e}")
finally:
if torrent_id is not None:
self.delete_torrent(torrent_id)
Expand Down Expand Up @@ -216,8 +226,23 @@ def add_torrent(self, infohash: str) -> str:
)
return response["id"]
except Exception as e:
logger.error(f"Failed to add torrent {infohash}: {e}")
raise
if len(e.args) > 0:
if " 503 " in e.args[0]:
logger.debug(f"Failed to add torrent {infohash}: [503] Infringing Torrent or Service Unavailable")
raise RealDebridError("Infringing Torrent or Service Unavailable")
elif " 429 " in e.args[0]:
logger.debug(f"Failed to add torrent {infohash}: [429] Rate Limit Exceeded")
raise RealDebridError("Rate Limit Exceeded")
elif " 404 " in e.args[0]:
logger.debug(f"Failed to add torrent {infohash}: [404] Torrent Not Found or Service Unavailable")
raise RealDebridError("Torrent Not Found or Service Unavailable")
elif " 400 " in e.args[0]:
logger.debug(f"Failed to add torrent {infohash}: [400] Torrent file is not valid. Magnet: {magnet}")
raise RealDebridError("Torrent file is not valid")
else:
logger.debug(f"Failed to add torrent {infohash}: {e}")

raise RealDebridError(f"Failed to add torrent {infohash}: {e}")

def select_files(self, torrent_id: str, ids: List[int] = None) -> None:
"""Select files from a torrent"""
Expand Down
5 changes: 0 additions & 5 deletions src/program/services/indexers/trakt.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,3 @@ def _add_seasons_to_show(self, show: Show, imdb_id: str):
if episode_item:
season_item.add_episode(episode_item)
show.add_season(season_item)





6 changes: 3 additions & 3 deletions src/program/services/scrapers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,9 @@ def can_we_scrape(cls, item: MediaItem) -> bool:
if not item.is_released:
logger.debug(f"Cannot scrape {item.log_string}: Item is not released")
return False
if item.active_stream:
logger.debug(f"Cannot scrape {item.log_string}: Item was already downloaded by another session")
return False
if not cls.should_submit(item):
return False
return True
Expand All @@ -159,9 +162,6 @@ def should_submit(item: MediaItem) -> bool:
if not item.is_released:
logger.debug(f"Cannot scrape {item.log_string}: Item is not released")
return False
if item.active_stream:
logger.debug(f"Cannot scrape {item.log_string}: Item was already downloaded by another session")
return False
if item.is_parent_blocked():
logger.debug(f"Cannot scrape {item.log_string}: Item is blocked or blocked by a parent item")
return False
Expand Down
30 changes: 26 additions & 4 deletions src/program/services/scrapers/shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,11 +54,12 @@ def _parse_results(item: MediaItem, results: Dict[str, str], log_msg: bool = Tru
torrents: Set[Torrent] = set()
processed_infohashes: Set[str] = set()
correct_title: str = item.get_top_title()
needed_seasons: list[int] = []

logger.log("SCRAPER", f"Processing {len(results)} results for {item.log_string}")

if item.type in ["show", "season", "episode"]:
needed_seasons: list[int] = _get_needed_seasons(item) or []
needed_seasons = _get_needed_seasons(item)

for infohash, raw_title in results.items():
if infohash in processed_infohashes:
Expand Down Expand Up @@ -103,14 +104,33 @@ def _parse_results(item: MediaItem, results: Dict[str, str], log_msg: bool = Tru
else:
if parse_debug:
logger.debug(f"Skipping show pack torrent '{raw_title}' for {item.log_string} due to insufficient seasons. Required: {len(needed_seasons)}, Found: {len(torrent.data.seasons)}")
elif not torrent.data.seasons and not torrent.data.episodes:
# keep torrents that have no seasons or episodes
torrents.add(torrent)
elif not torrent.data.seasons and len(torrent.data.episodes) >= 12 and len(needed_seasons) == 1:
# keep torrents that have no seasons but at least 12 episodes
# and the item is a show with only one season
torrents.add(torrent)
elif torrent.data.seasons and torrent.data.episodes:
# skip torrents that have both seasons and episodes
if parse_debug:
logger.debug(f"Skipping torrent with season(s) and episode(s) for {item.log_string}, wanted show pack: {raw_title}")
elif not torrent.data.seasons and torrent.data.episodes:
# skip torrents that have episodes but no seasons
if parse_debug:
logger.debug(f"Skipping torrent with episodes but no seasons for {item.log_string}, wanted show pack: {raw_title}")

elif item.type == "season":
# If the torrent has the needed seasons and no episodes, we can add it
if any(season in torrent.data.seasons for season in needed_seasons) and not torrent.data.episodes:
if not torrent.data.episodes and any(season in torrent.data.seasons for season in needed_seasons):
# If the torrent has the needed seasons and no episodes, we can add it
torrents.add(torrent)
elif not torrent.data.seasons and len(torrent.data.episodes) >= 12 and len(needed_seasons) == 1:
# keep torrents that have no seasons but at least 12 episodes
# and the item is a show with only one season. These are typically season 1 packs.
torrents.add(torrent)
else:
if parse_debug:
logger.debug(f"Skipping torrent for incorrect season with {item.log_string}: {raw_title}")
logger.debug(f"Skipping torrent for incorrect season with {item.log_string}, wanted season pack: {raw_title}")

elif item.type == "episode":
# If the torrent has the season and episode numbers, we can add it
Expand Down Expand Up @@ -138,10 +158,12 @@ def _parse_results(item: MediaItem, results: Dict[str, str], log_msg: bool = Tru
# so we'll just ignore them.
if parse_debug and log_msg:
logger.debug(f"Skipping unparseable torrent: '{raw_title}' - {e}")
processed_infohashes.add(infohash)
continue
except GarbageTorrent as e:
if parse_debug and log_msg:
logger.debug(f"GarbageTorrent: '{raw_title}' - {e}")
processed_infohashes.add(infohash)
continue

if torrents:
Expand Down
13 changes: 10 additions & 3 deletions src/program/symlink.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,8 +106,15 @@ def run(self, item: Union[Movie, Show, Season, Episode]):
yield (item, next_attempt)
try:
for _item in items:
self._symlink(_item)
logger.log("SYMLINKER", f"Symlinks created for {item.log_string}")
symlinked = False
if self._symlink(_item):
symlinked = True
if symlinked:
logger.log("SYMLINKER", f"Symlinks created for {_item.log_string}")
else:
logger.log("SYMLINKER", f"No symlinks created for {_item.log_string}")
_item.blacklist_active_stream()
_item.reset()
except Exception as e:
logger.error(f"Exception thrown when creating symlink for {item.log_string}: {e}")
yield item
Expand Down Expand Up @@ -154,7 +161,7 @@ def _symlink(self, item: Union[Movie, Episode]) -> bool:

source = _get_item_path(item)
if not source:
logger.error(f"Could not find path for {item.log_string}, cannot create symlink.")
logger.error(f"Could not find path for {item.log_string} in rclone path, cannot create symlink.")
return False

filename = self._determine_file_name(item)
Expand Down
11 changes: 9 additions & 2 deletions src/routers/secure/items.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@ async def get_item(_: Request, id: str, use_tmdb_id: Optional[bool] = False) ->
with db.Session() as session:
query = select(MediaItem)
if use_tmdb_id:
query = query.where(MediaItem.tmdb_id == id)
query = query.where(MediaItem.id == id).where(MediaItem.type.in_(["movie", "show"]))
else:
query = query.where(MediaItem.id == id)
try:
Expand All @@ -238,7 +238,14 @@ async def get_item(_: Request, id: str, use_tmdb_id: Optional[bool] = False) ->
logger.debug(f"Item with ID {id} not found in database")
raise HTTPException(status_code=404, detail="Item not found")
except Exception as e:
logger.error(f"Error fetching item with ID {id}: {str(e)}")
if "Multiple rows were found when one or none was required" in str(e):
duplicate_ids = set()
items = session.execute(query).unique().scalars().all()
for item in items:
duplicate_ids.add(item.id)
logger.debug(f"Multiple items found with ID {id}: {duplicate_ids}")
else:
logger.error(f"Error fetching item with ID {id}: {str(e)}")
raise HTTPException(status_code=500, detail=str(e)) from e

@router.get(
Expand Down