florian 2025-11-03 18:24:45 +01:00
parent 3bba8f2331
commit c205779956
2 changed files with 128 additions and 54 deletions

View File

@ -14,7 +14,7 @@ PODCAST_AUTHORIZATION_TOKEN = os.getenv("PODCAST_AUTHORIZATION_TOKEN")
 PODCAST_API_URL = os.getenv("PODCAST_API_URL")

-def get_audiobookshelf_data()->tuple[int | None, str | None]:
+def get_audiobookshelf_data() -> tuple[int | None, str | None]:
     """
     Fetches the latest episode data from the Audiobookshelf API.
@ -43,27 +43,34 @@ def get_audiobookshelf_data()->tuple[int | None, str | None]:
         audiobookshelf_title = result["media"]["episodes"][-1]["audioFile"]["metaTags"]["tagTitle"]
         logger.debug(f"[Audiobookshelf] Fetched Audiobookshelf data: track={audiobookshelf_track}, title={audiobookshelf_title}")
-        return audiobookshelf_track, audiobookshelf_title
+        return (audiobookshelf_track, audiobookshelf_title)
     except requests.exceptions.ConnectionError as e:
         logger.warning(f"[Audiobookshelf] Connection error, will retry: {e}")
-        return None
+        return (None, None)
     except requests.exceptions.Timeout as e:
         logger.warning(f"[Audiobookshelf] Request timed out, will retry: {e}")
-        return None
+        return (None, None)
     except requests.exceptions.HTTPError as e:
         status = e.response.status_code
         if status in {500, 502, 503, 504}:
             logger.warning(f"[Audiobookshelf] Server error {status}, will retry: {e}")
-            return None
+            return (None, None)
         else:
             logger.error(f"[Audiobookshelf] HTTP error {status}, not retrying: {e}")
             raise

-def check_until_new_episode_gets_released():
+def check_until_new_episode_gets_released() -> tuple[int | None, dict | None, str | None]:
     """
     Polls YouTube every hour for a new episode and compares it to the available episode on Audiobookshelf.
     Stops after 72 hours.
+
+    Returns:
+        tuple[int | None, dict | None, str | None]:
+            - Track number from Audiobookshelf
+            - Episode info dictionary from YouTube
+            - Episode URL
+        Returns (None, None, None) if no new episode found within timeout
     """
     CHECK_INTERVAL_HOURS = 1
     MAX_HOURS = 72
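Note: switching the bare `return None` statements to `return (None, None)` brings the retry paths in line with the `tuple[int | None, str | None]` annotation, so callers can always unpack two values. A minimal sketch of such a call site (hypothetical code, not part of this commit):

    # Assumes only the (track, title) / (None, None) contract above.
    audiobookshelf_track, audiobookshelf_title = get_audiobookshelf_data()
    if audiobookshelf_track is None:
        # Retryable failure (connection error, timeout, or 5xx): back off and retry.
        time.sleep(3600)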
@ -77,43 +84,66 @@ def check_until_new_episode_gets_released():
             continue

         episode_url = get_url_for_latest_video()
+        if episode_url is None:
+            logger.warning("[EpisodeCheck] Unable to fetch latest video URL, retrying in 1 hour.")
+            time.sleep(CHECK_INTERVAL_HOURS * 3600)
+            continue
+
         episode_info = get_youtube_data(episode_url)
+        if not episode_info:
+            logger.warning("[EpisodeCheck] Unable to fetch video metadata, retrying in 1 hour.")
+            time.sleep(CHECK_INTERVAL_HOURS * 3600)
+            continue
+
         if audiobookshelf_title != episode_info["title"]:
             logger.info(f"[EpisodeCheck] Latest YouTube episode: {episode_info['title']}")
-            return audiobookshelf_track,episode_info,episode_url
+            return (audiobookshelf_track,episode_info,episode_url)
         logger.debug("[EpisodeCheck] No new episode found, retrying in 1 hour.")
         time.sleep(CHECK_INTERVAL_HOURS * 3600)
     logger.warning("[EpisodeCheck] No new episode found after maximum attempts.")
-    return None, None, None
+    return (None, None, None)

-def wait_for_sponsorblock_segments_to_be_added():
+def wait_for_sponsorblock_segments_to_be_added(episode_url) -> bool:
     """
     Polls SponsorBlock for segments on the current video until found or until max attempts.
+
+    Args:
+        episode_url: YouTube video URL to check for SponsorBlock segments
+
+    Returns:
+        True if segments found, False otherwise
     """
     CHECK_INTERVAL_HOURS = 1
     MAX_HOURS = 24
     for attempt in range(1, MAX_HOURS + 1):
         logger.debug(f"[SponsorBlock] Waiting for SponsorBlock to be added, attempt: {attempt}/{MAX_HOURS} ")
-        segments = check_for_sponsorblock_segments()
+        segments = check_for_sponsorblock_segments(episode_url)
         if segments:
-            logger.debug("[SponsorBlock] Segments found, existing loop.")
+            logger.debug("[SponsorBlock] Segments found, exiting loop.")
             return True
         logger.debug("[SponsorBlock] No SponsorBlock segments found yet, retrying in 1 hour.")
         time.sleep(CHECK_INTERVAL_HOURS * 3600)
     logger.warning("[SponsorBlock] Segments not found after maximum attempts.")
-    return None
+    return False

-def download_episode():
+def download_episode() -> None:
+    """
+    Main workflow: Check for new episode, download it, upload via SFTP, and send notification.
+    """
     logger.info("[App] Starting Perun")
     try:
         audiobookshelf_track,episode_info,episode_url = check_until_new_episode_gets_released()
+        if audiobookshelf_track is None or episode_info is None or episode_url is None:
+            logger.error("[App] Failed to find new episode within timeout period")
+            return
         logger.info("[App] New episode found")
     except Exception as e:
         logger.error(f"[App] Failed to fetch new episode info: {e}", exc_info=True)
@ -123,7 +153,7 @@ def download_episode():
         episode_description = episode_info.get("description", "")
         if "sponsored" in episode_description.lower():
             logger.debug(f"[App] Sponsored segments found in description, waiting for SponsorBlock")
-            wait_for_sponsorblock_segments_to_be_added()
+            wait_for_sponsorblock_segments_to_be_added(episode_url)
         else:
             logger.debug(f"[App] No sponsored segments found in description")
     except Exception as e:
@ -131,8 +161,8 @@ def download_episode():
     try:
         track = str(int(audiobookshelf_track) + 1).zfill(4)
-    except Exception as e:
-        logger.error(f"[App] Invalid Audiobookshelf track number: {audiobookshelf_track}, error: {e}")
+    except (ValueError,TypeError) as e:
+        logger.warning(f"[App] Failed incrementing audiobookshelf track: {e}", exc_info=True)
         return

     try:
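For reference, `str(int(audiobookshelf_track) + 1).zfill(4)` produces the next zero-padded four-digit track label, and `ValueError`/`TypeError` are exactly the two ways that expression can fail, which is what the narrowed `except` now catches:

    str(int("0041") + 1).zfill(4)  # -> "0042"
    int("n/a")                     # raises ValueError (non-numeric string)
    int(None)                      # raises TypeError (not a string or number)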

View File

@ -4,6 +4,7 @@ from dotenv import load_dotenv
 from json import dumps
 from logger_handler import setup_logger
 import time
+import shlex

 logger = setup_logger(__name__)
 load_dotenv()
@ -12,9 +13,19 @@ REMOTE_PATH = os.getenv("REMOTE_PATH")
 BACKEND_API_URL = os.getenv("BACKEND_API_URL")
 BACKEND_API_KEY= os.getenv("BACKEND_API_KEY")

-def load_ssh_config(host_alias):
+def load_ssh_config(host_alias:str) -> tuple[str, int, str, str]:
     """
     Load SSH connection details from ~/.ssh/config for the given alias.
+
+    Args:
+        host_alias: The SSH host alias to look up
+
+    Returns:
+        Tuple of (hostname, port, username, keyfile)
+
+    Raises:
+        FileNotFoundError: If SSH config file doesn't exist
+        ValueError: If SSH configuration is incomplete
     """
     logger.debug(f"[SSH] Loading SSH configuration for host alias '{host_alias}'")
     ssh_config = paramiko.SSHConfig()
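The lookup itself is elided by this hunk; with Paramiko's stock `SSHConfig` API it is presumably along these lines (a sketch inferred from the surrounding code, not the commit's actual body):

    # Hypothetical reconstruction of the elided lookup.
    with open(os.path.expanduser("~/.ssh/config")) as f:  # FileNotFoundError if missing
        ssh_config.parse(f)
    cfg = ssh_config.lookup(host_alias)           # dict-like, keys lowercased
    hostname = cfg.get("hostname")
    port = int(cfg.get("port", 22))
    username = cfg.get("user")
    keyfile = cfg.get("identityfile", [None])[0]  # identityfile is a list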
@ -41,9 +52,21 @@ def load_ssh_config(host_alias):
     return hostname, port, username, keyfile

-def create_ssh_client(hostname, port, username, keyfile):
+def create_ssh_client(hostname: str, port: int, username: str, keyfile: str)-> paramiko.SSHClient:
     """
     Create and return a connected Paramiko SSHClient instance.
+
+    Args:
+        hostname: Remote hostname
+        port: SSH port
+        username: SSH username
+        keyfile: Path to SSH private key file
+
+    Returns:
+        Connected SSHClient instance (caller must close it)
+
+    Raises:
+        Exception: If SSH connection fails
     """
     logger.debug("[SSH] Creating SSH client")
     try:
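The connect call inside this `try` is likewise elided; with Paramiko it would typically look like the following (a sketch under that assumption, not the commit's body):

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect(hostname, port=port, username=username, key_filename=keyfile)
    return client  # caller must close it, per the docstring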
@ -60,9 +83,15 @@ def create_ssh_client(hostname, port, username, keyfile):
         raise

-def upload_via_sftp(filename):
+def upload_via_sftp(filename) -> None:
     """
     Upload a file to the remote host via SFTP using SSH credentials.
+
+    Args:
+        filename: Local file path to upload
+
+    Raises:
+        Exception: If upload fails
     """
     logger.info(f"[SFTP] Preparing to upload file '{filename}' via SFTP")
     try:
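The upload body is elided here as well; Paramiko's SFTP interface reduces it to roughly this (a sketch, assuming REMOTE_PATH is the target directory):

    sftp = ssh.open_sftp()
    sftp.put(filename, f"{REMOTE_PATH}/{os.path.basename(filename)}")
    sftp.close()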
@ -86,51 +115,66 @@ def upload_via_sftp(filename):
         raise

-def send_notification_via_ssh(notification_title, notification_info):
+def send_notification_via_ssh(notification_title, notification_info) -> None:
     """
     Send a JSON-formatted notification payload via SSH to the backend.
+
+    Args:
+        notification_title: Title of the notification
+        notification_info: Body/content of the notification
+
+    Raises:
+        Exception: If notification sending fails
     """
     logger.info(f"[Notification] Sending SSH notification: {notification_title}")
+    ssh = None
     try:
         hostname, port, username, keyfile = load_ssh_config(REMOTE_HOSTNAME)
-        with create_ssh_client(hostname, port, username, keyfile) as ssh:
-            data = {
-                "receipent_user_id": 1,
-                "message": {
-                    "title": notification_title,
-                    "body": notification_info,
-                    "category": "podcasts",
-                    "timestamp": int(time.time())
-                }
-            }
-            json_payload = dumps(data)
-            logger.debug(f"[Notification] Notification payload: {json_payload}")
-
-            notification_cmd = (
-                f"API_KEY=$(head -n1) && "
-                f"curl -s -X POST '{BACKEND_API_URL}' "
-                f"-H 'Content-Type: application/json' "
-                f"-H \"X-API-Key-Internal: $API_KEY\" "
-                f"-d '{json_payload}'"
-            )
-
-            stdin, stdout, stderr = ssh.exec_command(notification_cmd)
-            stdin.write(f"{BACKEND_API_KEY}\n")
-            stdin.flush()
-            stdin.channel.shutdown_write()
-
-            exit_status = stdout.channel.recv_exit_status()
-            response_output = stdout.read().decode()
-
-            if exit_status == 0:
-                logger.info("[Notification] Notification sent successfully")
-                logger.debug(f"[Notification] Response: {response_output}")
-            else:
-                error_output = stderr.read().decode()
-                logger.warning(f"[Notification] Notification command exited with {exit_status}")
-                logger.warning(f"[Notification] Error: {error_output}")
-                logger.warning(f"[Notification] Response: {response_output}")
+        ssh = create_ssh_client(hostname, port, username, keyfile)
+
+        data = {
+            "receipent_user_id": 1,
+            "message": {
+                "title": notification_title,
+                "body": notification_info,
+                "category": "podcasts",
+                "timestamp": int(time.time())
+            }
+        }
+        json_payload = dumps(data)
+        logger.debug(f"[Notification] Notification payload: {json_payload}")
+
+        escaped_payload = shlex.quote(json_payload)
+        escaped_url = shlex.quote(BACKEND_API_URL)
+
+        notification_cmd = (
+            f"API_KEY=$(head -n1) && "
+            f"curl -s -X POST {escaped_url} "
+            f"-H 'Content-Type: application/json' "
+            f"-H \"X-API-Key-Internal: $API_KEY\" "
+            f"-d {escaped_payload}"
+        )
+
+        stdin, stdout, stderr = ssh.exec_command(notification_cmd)
+        stdin.write(f"{BACKEND_API_KEY}\n")
+        stdin.flush()
+        stdin.channel.shutdown_write()
+
+        exit_status = stdout.channel.recv_exit_status()
+        response_output = stdout.read().decode()
+
+        if exit_status == 0:
+            logger.info("[Notification] Notification sent successfully")
+            logger.debug(f"[Notification] Response: {response_output}")
+        else:
+            error_output = stderr.read().decode()
+            logger.warning(f"[Notification] Notification command exited with {exit_status}")
+            logger.warning(f"[Notification] Error: {error_output}")
+            logger.warning(f"[Notification] Response: {response_output}")
     except Exception as e:
         logger.error(f"[Notification] Failed to send SSH notification: {e}", exc_info=True)
         raise
+    finally:
+        if ssh:
+            ssh.close()
+            logger.debug("[Notification] SSH connection closed")
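The `shlex.quote` change is the substantive fix in this file: the old command spliced `json_payload` into hand-written single quotes, so any `'` in the notification title or body would terminate the quoting and let the remainder of the payload execute as shell code on the remote host. A small illustration (hypothetical payload, same quoting logic as above):

    import shlex
    payload = '{"title": "it\'s live"}'
    f"-d '{payload}'"             # -d '{"title": "it's live"}'      <- quote broken mid-string
    f"-d {shlex.quote(payload)}"  # -d '{"title": "it'"'"'s live"}'  <- one safe shell word

Reading `BACKEND_API_KEY` via `head -n1` on stdin, rather than interpolating it into the command, keeps the key out of the remote host's process list for the same reason.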