Imported the RoyalRoad chapter checker and changed it to use FastAPI with metrics, reading RoyalRoad's RSS feed instead of scraping with the FanFicFare module

This commit is contained in:
florian 2025-10-09 21:42:38 +02:00
parent 4ebf3c1df8
commit 36a8767349
7 changed files with 263 additions and 0 deletions

21
requirements.txt Normal file
View File

@ -0,0 +1,21 @@
annotated-types==0.7.0
anyio==4.11.0
certifi==2025.10.5
charset-normalizer==3.4.3
click==8.3.0
fastapi==0.118.2
feedparser==6.0.12
h11==0.16.0
idna==3.10
mysql-connector-python==9.4.0
prometheus_client==0.23.1
pydantic==2.12.0
pydantic_core==2.41.1
requests==2.32.5
sgmllib3k==1.0.0
sniffio==1.3.1
starlette==0.48.0
typing-inspection==0.4.2
typing_extensions==4.15.0
urllib3==2.5.0
uvicorn==0.37.0

79
src/db.py Normal file
View File

@ -0,0 +1,79 @@
import mysql.connector
import threading
from secret_handler import return_credentials
import os
import time
import sys
# Credentials are mounted as secret files (e.g. Docker/Kubernetes secrets);
# return_credentials() exits the process if a file is missing.
db_username = return_credentials("/etc/secrets/db_username")
db_password = return_credentials("/etc/secrets/db_password")
# Host/database come from the environment, with local-dev defaults.
db_host = os.getenv("SERVICE_RR_DB_HOST","localhost")
# NOTE(review): the env var name reuses the _HOST prefix
# (SERVICE_RR_DB_HOST_DATABASE) — presumably intentional; confirm against
# the deployment configuration.
db_database = os.getenv("SERVICE_RR_DB_HOST_DATABASE","app")
# Retry policy for creating the connection pool.
MAX_RETRIES = 5
RETRY_DELAY = 5  # seconds between attempts
MYSQL_CONFIG = {
    "host": db_host,
    "user": db_username,
    "password": db_password,
    "database": db_database
}
# Guards lazy creation/teardown of the single module-wide pool.
_pool_lock = threading.Lock()
_connection_pool = None
def create_connection_pool():
    """Build the shared MySQL connection pool, retrying on failure.

    Tries up to MAX_RETRIES times with RETRY_DELAY seconds between
    attempts; terminates the process if every attempt fails.
    """
    global _connection_pool
    attempt = 0
    while attempt < MAX_RETRIES:
        attempt += 1
        print(f"[MySQL] Attempt {attempt} to connect...")
        try:
            new_pool = mysql.connector.pooling.MySQLConnectionPool(
                pool_name="mypool",
                pool_size=5,
                pool_reset_session=True,
                **MYSQL_CONFIG,
            )
        except mysql.connector.Error as e:
            print(f"[MySQL] Attempt {attempt} failed: {e}")
            if attempt < MAX_RETRIES:
                time.sleep(RETRY_DELAY)
            continue
        # Publish the pool under the lock so readers never see a half-set value.
        with _pool_lock:
            _connection_pool = new_pool
        print("[MySQL] Connection pool created successfully.")
        return
    print(f"[MySQL] Failed to connect after {MAX_RETRIES} attempts — exiting.")
    sys.exit(1)
def close_connection_pool():
    """Drop the module-level pool reference; no-op if there is no pool."""
    global _connection_pool
    with _pool_lock:
        if _connection_pool is None:
            return
        _connection_pool = None
        print("[MySQL] Connection pool closed.")
def get_connection_pool():
    """Return the shared pool, creating it lazily on first use.

    BUG FIX: the original called create_connection_pool() while holding
    _pool_lock, but that function re-acquires the same non-reentrant
    threading.Lock to publish the pool — a guaranteed deadlock whenever
    the pool was missing. We now read under the lock but create outside it.

    NOTE: two threads may race to create the pool simultaneously; the
    second creation simply replaces the first, which is harmless here.
    """
    with _pool_lock:
        pool = _connection_pool
    if pool is None:
        create_connection_pool()  # exits the process if it cannot connect
        with _pool_lock:
            pool = _connection_pool
    return pool
def get_db():
    """FastAPI dependency: yield a pooled MySQL connection, closing it after.

    If obtaining or validating a connection fails, the pool is rebuilt once
    and a fresh connection is drawn before giving up.
    """
    pool = get_connection_pool()
    try:
        conn = pool.get_connection()
        # A pooled connection may have gone stale; verify and reconnect.
        if not conn.is_connected():
            conn.reconnect(attempts=MAX_RETRIES, delay=RETRY_DELAY)
    except Exception:
        # Pool (or its connections) is unusable — rebuild it and retry once.
        create_connection_pool()
        pool = get_connection_pool()
        conn = pool.get_connection()
    try:
        yield conn
    finally:
        # For pooled connections close() presumably returns the connection
        # to the pool rather than tearing it down — confirm against the
        # mysql-connector pooling docs.
        conn.close()

26
src/feed_handler.py Normal file
View File

@ -0,0 +1,26 @@
import feedparser
import re
def extract_number(s: str)->int | None:
match = re.search(r"\d+", s)
if match:
return int(match.group())
else:
return None
def grab_latest_chapter_information(id: str) -> tuple[int | None, str, str]:
    """Fetch the newest chapter number, its link, and the story title from
    RoyalRoad's RSS feed for fiction *id*.

    Returns:
        (chapter_number, chapter_link, title); chapter_number is None when
        the entry title contains no digits.

    Raises:
        ValueError: if the feed has no entries.
    """
    url = f"https://www.royalroad.com/fiction/syndication/{id}"
    feed = feedparser.parse(url)
    if not feed.entries:
        raise ValueError(f"No entries found for feed {id}")
    # assumes entries are newest-first in RoyalRoad's feed — TODO confirm
    latest_chapter_data = feed["entries"][0]
    chapter_number = extract_number(latest_chapter_data["title"])
    chapter_link = latest_chapter_data["link"]
    # BUG FIX: the story title lives on the channel element; feedparser's
    # documented access path is d["feed"]["title"], not d["title"] (the
    # top-level result only exposes keys like "feed"/"entries"/"bozo").
    title = feed["feed"]["title"]
    return chapter_number, chapter_link, title
if __name__ == "__main__":
    # Manual smoke test against a known fiction id.
    print(grab_latest_chapter_information("118891"))

80
src/main.py Normal file
View File

@ -0,0 +1,80 @@
from fastapi import FastAPI, Depends, HTTPException, Response, Request
import uvicorn
from contextlib import asynccontextmanager
from db import get_db, create_connection_pool, close_connection_pool
import logging
from feed_handler import grab_latest_chapter_information
from send_notification import send_notification
from metrics_server import REQUEST_COUNTER
import asyncio
# Module-level logger for this app.
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: build the MySQL pool on startup, drop it on shutdown."""
    logger.info("Starting application...")
    logger.info("Creating MySQL connection pool...")
    create_connection_pool()
    yield  # application serves requests while suspended here
    logger.info("Closing MySQL connection pool...")
    close_connection_pool()
# BUG FIX: the title/description were copy-paste leftovers from the
# Docker-image checker this service was adapted from ("Docker Repository
# Query" / "Queries Dockerhub and GHCR..."); corrected to describe what
# this service actually does (check RoyalRoad RSS feeds for new chapters).
api = FastAPI(
    title="RoyalRoad Chapter Checker",
    description="Checks RoyalRoad RSS feeds for newly released chapters",
    version="1.0.0",
    lifespan=lifespan
)
@api.middleware("http")
async def prometheus_middleware(request: Request, call_next):
    """Count every HTTP request (method, path, status) in REQUEST_COUNTER.

    The counter is incremented in ``finally`` so requests are counted even
    when the handler raises; *status* stays 500 in that case and the
    exception propagates unchanged.
    """
    status = 500  # assume failure until a response is produced
    try:
        response = await call_next(request)
        status = response.status_code
    finally:
        # (The original also had `except Exception: raise`, which is a
        # no-op and has been removed.)
        REQUEST_COUNTER.labels(request.method, request.url.path, status).inc()
    return response
@api.get("/health")
def return_health():
    """Liveness probe: always replies 200 with an empty body."""
    return Response(status_code=200)
@api.get("/royalroad")
def get_chapters(
    request: Request,
    db = Depends(get_db)
):
    """Check every active story for a newly released chapter.

    For each active story, fetches the latest chapter from its RSS feed;
    when the chapter number exceeds the stored one, updates the row and
    sends a notification.

    Raises:
        HTTPException: 500 with the original error message on any failure.
    """
    try:
        logger.info("Checking for new Royalroad chapters")
        cursor = db.cursor()
        try:
            cursor.execute("SELECT id,royalroadId,lastChapter FROM stories where active=1")
            for id, royalroadId, last_chapter_db in cursor.fetchall():
                chapter_number, chapter_link, title = grab_latest_chapter_information(royalroadId)
                if chapter_number is None:
                    # BUG FIX: extract_number() returns None when the entry
                    # title has no digits; comparing None > int raises
                    # TypeError, so skip such stories instead.
                    logger.warning("No chapter number parsed for story %s", royalroadId)
                    continue
                if chapter_number > last_chapter_db:
                    cursor.execute("UPDATE stories SET lastChapter = %s WHERE id = %s",
                                   (chapter_number, id))
                    db.commit()
                    send_notification(title, chapter_number, chapter_link)
        finally:
            # BUG FIX: release the cursor even when a feed/DB error occurs.
            cursor.close()
        return {"status": "checked"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
async def start_servers():
    """Run the main API (port 5000) and the metrics app (port 9000)
    concurrently in one event loop."""
    servers = [
        uvicorn.Server(uvicorn.Config("main:api", host="0.0.0.0", port=5000, log_level="info")),
        uvicorn.Server(uvicorn.Config("metrics_server:metrics_api", host="0.0.0.0", port=9000, log_level="info")),
    ]
    await asyncio.gather(*(server.serve() for server in servers))
if __name__ == "__main__":
    # Entry point: both servers share one asyncio event loop.
    asyncio.run(start_servers())

10
src/metrics_server.py Normal file
View File

@ -0,0 +1,10 @@
from fastapi import FastAPI, Response
from prometheus_client import generate_latest, CONTENT_TYPE_LATEST, Counter
# Separate FastAPI app so metrics can be served on their own port.
metrics_api = FastAPI(title="Metrics Server", description="Prometheus metrics endpoint")
# Labelled request counter; incremented by the HTTP middleware in main.py.
REQUEST_COUNTER = Counter("http_requests_total", "Total HTTP Requests", ["method", "endpoint", "status"])
@metrics_api.get("/metrics")
async def metrics():
    """Expose every registered Prometheus metric in the text exposition format."""
    payload = generate_latest()
    return Response(payload, media_type=CONTENT_TYPE_LATEST)

12
src/secret_handler.py Normal file
View File

@ -0,0 +1,12 @@
import sys
def return_credentials(path: str) -> str:
    """Read a secret from *path* and return it stripped of surrounding whitespace.

    Exits the process with status 1 if the file is missing or unreadable,
    since the service cannot run without its secrets.
    """
    try:
        # Explicit encoding so secret decoding doesn't depend on the
        # container's locale (the original used the platform default).
        with open(path, encoding="utf-8") as file:
            return file.read().strip()
    except FileNotFoundError:
        print(f"[FATAL] Secret file not found: {path}")
        sys.exit(1)
    except Exception as e:
        print(f"[FATAL] Failed to read secret file {path}: {e}")
        sys.exit(1)

35
src/send_notification.py Normal file
View File

@ -0,0 +1,35 @@
import requests
from requests.exceptions import RequestException, Timeout, ConnectionError, HTTPError
from secret_handler import return_credentials
import os
# Backend endpoint that receives notifications; BACKEND_API_URL overrides
# the local default in deployment.
# BUG FIX: requests requires an explicit scheme — the previous default
# ("localhost:8101/...") made requests.post fail with MissingSchema.
backend_api_url = os.getenv("BACKEND_API_URL", "http://localhost:8101/internal/receive-notifications")
# Internal API key, mounted as a secret file.
api_key = return_credentials("/etc/secrets/api_key")
def send_notification(title: str, chapter: int, link: str):
    """POST a new-chapter notification to the backend API.

    All request errors are logged and swallowed: a failed notification
    must not abort the caller's chapter-checking loop.
    """
    headers = {
        # BUG FIX: the original sent backend_api_url as the API key;
        # api_key (loaded above) is what belongs in this header.
        "X-API-Key-Internal": api_key,
        "Content-Type": "application/json"
    }
    data = {
        # NOTE(review): "receipent_user_id" is misspelled but is presumably
        # the key the backend expects — confirm before renaming.
        "receipent_user_id": 1,
        "message": {
            "title": title,
            "info": f"Chapter {chapter} has been released",
            "link": link
        }}
    try:
        # Timeout so a hung backend can't block this thread forever — the
        # Timeout handler below was unreachable without one.
        response = requests.post(backend_api_url, headers=headers, json=data, timeout=10)
        response.raise_for_status()
        print("Success: Notification sent")
    except Timeout:
        print("Error: request timed out")
    except ConnectionError:
        print("Error: connection failed")
    except HTTPError as e:
        print(f"HTTP error: {e.response.status_code} - {e.response.text}")
    except RequestException as e:
        print("Request failed:", str(e))