Commit ccf805f2 authored by Marco Schmiedel's avatar Marco Schmiedel

fix

parent 8e9dcfe4
......@@ -3,6 +3,7 @@
"./work/"
],
"exclude": [
"./work/notebooks",
"Workbench.mwb.bak",
"__pycache__",
".ipynb_checkpoints",
......
......@@ -2,11 +2,11 @@
"fileId": "01509001-bd4e-4462-93ad-dc5066fd729a",
"originalPath": "work/models/deal_deal.py",
"currentPath": "work/models/deal_deal.py",
"hash": "4d15256bbe0040ef1797a5cbf973ceac39d41c8bb583e11e98f60e9a3824d7a5",
"hash": "f98ec3d5e7fa72f4907d6079628bf18a610e5229aca15ea7bfdca1008370f090",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"lastCheckedTimestamp": 1744805220640,
"lastFileModificationTimestamp": 1744787042633.7546,
"lastCheckedTimestamp": 1746694069667,
"lastFileModificationTimestamp": 1746694022329.5034,
"flaggedForCopy": false
}
......@@ -5,7 +5,13 @@
"hash": "a5d54b395c9edf435ee5ec0a2f3637058b54bab058827175487268f4dc07304a",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"comments": [
{
"commentId": "04836786-70b7-402d-b03a-21a3f77ef524",
"text": "Documentation is still in german. Must be translated some day.",
"timestamp": 1746694260026
}
],
"lastCheckedTimestamp": 1745314592645,
"lastFileModificationTimestamp": 1745314218432.6475
}
......@@ -5,7 +5,13 @@
"hash": "3626bb3b00d9142ba6d471dfdd2d7a0f54d72afca4b908843c9cf6d483d66754",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"comments": [
{
"commentId": "f73dfc3c-e689-4909-8eb4-52479881432f",
"text": "Documentation is still in german. Must be translated some day.",
"timestamp": 1746694257567
}
],
"lastCheckedTimestamp": 1745314595210,
"lastFileModificationTimestamp": 1745314341957.45
}
......@@ -3,7 +3,7 @@
"originalPath": "work/config/MauiConfig.py",
"currentPath": "work/config/MauiConfig.py",
"hash": "6e627f3800fd413c6dbde92ad2e274d5e3047af0f906de4d75fc826cc129631e",
"docContent": "<p>In this configuration, you’ll find the credentials required to log in to Freenet-Maui.</p>",
"docContent": "<p><br></p>",
"checkedStatus": "todo",
"comments": [
{
......@@ -12,6 +12,6 @@
"timestamp": 1744614418809
}
],
"lastCheckedTimestamp": 1745314578450,
"lastCheckedTimestamp": 1746694114141,
"lastFileModificationTimestamp": 1745313945182.1555
}
......@@ -2,11 +2,11 @@
"fileId": "24784b38-54dc-4000-9d2a-f59082ebbc1c",
"originalPath": "work/models/base_base.py",
"currentPath": "work/models/base_base.py",
"hash": "722215d7b7ca6bc285f74f96c5096fdae09a7d3ec477e319031640e3100ad5b4",
"hash": "4b4953aec18780e96b30745c6e4806eb9b5c19a1bf015ceb3465e38f85062b5c",
"docContent": "<p><br></p>",
"checkedStatus": "changed",
"checkedStatus": "done",
"comments": [],
"lastCheckedTimestamp": 1745314601207,
"lastFileModificationTimestamp": 1746440397236.4856,
"lastCheckedTimestamp": 1746694072323,
"lastFileModificationTimestamp": 1746694051656.7517,
"flaggedForCopy": false
}
......@@ -4,8 +4,14 @@
"currentPath": "work/workbench/Workbench.mwb",
"hash": "d53db9e9d211116d4aafc32106a7e0c05a86c062af72f21a37420853a1c4eacc",
"docContent": "<p><br></p>",
"checkedStatus": "changed",
"comments": [],
"lastCheckedTimestamp": 1746433397717,
"checkedStatus": "done",
"comments": [
{
"commentId": "9eaabff2-a6a7-4fc9-b57e-c47dc019890e",
"text": "All green for now...",
"timestamp": 1746693753181
}
],
"lastCheckedTimestamp": 1746693747974,
"lastFileModificationTimestamp": 1746440499172.53
}
{
"fileId": "38da158f-ad91-433f-8b7b-60ff4949d7ff",
"originalPath": "work/config/_CronConfig.txt",
"currentPath": "work/config/_CronConfig.txt",
"hash": "1e8e88f1aa7f699241bbf7d99bb8fa225443b0bf86d7366a7fa7b1e6d5bde8ac",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"lastCheckedTimestamp": 1746694217163,
"lastFileModificationTimestamp": 1746694212093.9915
}
{
"fileId": "4c784f14-4710-4694-bf73-f5665baab43f",
"originalPath": "work/cron.sh",
"currentPath": "work/cron.sh",
"hash": "4c6e694c417005a79207a32c26609e0e2701f17d6484a536bd8188bf8dcceb93",
"docContent": "<p><br></p>",
"checkedStatus": "todo",
"comments": [
{
"commentId": "e5f40597-ae51-440f-886a-44f06dbe8e96",
"text": "This is copy & paste from obsidian. The documentation must be translated.",
"timestamp": 1746693690181
}
],
"lastCheckedTimestamp": 1746693667833,
"lastFileModificationTimestamp": 1746448049902.1914
}
{
"fileId": "58307c8c-416a-4c24-adc9-7ed6324d1f8a",
"originalPath": "work/manager/WebManager.py",
"currentPath": "work/manager/WebManager.py",
"hash": "66f022cdc155ded9c47e49a893ed3070099faa960a72aa01d23929a1c02a8657",
"docContent": "<p><br></p>",
"checkedStatus": "changed",
"comments": [],
"lastCheckedTimestamp": 1746694408088,
"lastFileModificationTimestamp": 1746696251620.3523
}
{
"fileId": "647ff9a8-a56f-486e-ba2a-8ff77e4514d4",
"originalPath": "work/Dockerfile",
"currentPath": "work/Dockerfile",
"hash": "d885a8a45174b2f425d3c0201b797754c69f6ff798dabd51f0e53af17b047964",
"docContent": "<p><br></p>",
"checkedStatus": "changed",
"comments": [
{
"commentId": "2a07c637-2149-4d5a-870d-94870f78945d",
"text": "This is copy & paste from obsidian. This should do the job for now but there are much more packages installed than needed. Actually not a big problem but i must clean this some day. Also the documentation is still in german.",
"timestamp": 1746693591017
}
],
"lastCheckedTimestamp": 1746693552978,
"lastFileModificationTimestamp": 1746694865448.947
}
......@@ -2,11 +2,17 @@
"fileId": "766dc461-001e-4901-8faf-263820ad96cd",
"originalPath": "work/manager/MysqlManager.py",
"currentPath": "work/manager/MysqlManager.py",
"hash": "0506c7ebbfff68bce628902018e66ba50936a52e08aa595cadc8d5324c48d46f",
"hash": "27129c35df4b6b0e4d5fcb7a77c8e1c19d1b74f80d5c3ec822cdc26701124a68",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"checkedStatus": "changed",
"comments": [
{
"commentId": "7227a7a0-99bc-47b4-a725-3547eb56015d",
"text": "Documentation is still in german. Must be translated some day.",
"timestamp": 1746694262639
}
],
"lastCheckedTimestamp": 1745314589383,
"lastFileModificationTimestamp": 1745314105678.9277,
"lastFileModificationTimestamp": 1746696474493.3755,
"flaggedForCopy": false
}
......@@ -5,7 +5,13 @@
"hash": "5a6654cb1cd77f8d531fcc1541d31261ea02c4e8cb126f2cc43a217c9c6920aa",
"docContent": "<p><br></p>",
"checkedStatus": "todo",
"comments": [],
"comments": [
{
"commentId": "3c070677-67c2-458d-8ad9-1ef595c16e0e",
"text": "This data is currently stored statically and should be dynamically linked to the Docker container at the appropriate time.",
"timestamp": 1746694106055
}
],
"lastCheckedTimestamp": 1745314580866,
"lastFileModificationTimestamp": 1745311719614.9841
}
......@@ -2,11 +2,11 @@
"fileId": "858c1430-9b46-4bb8-bb8d-4e5380be9c0d",
"originalPath": "work/models/provisiongroup_pgro.py",
"currentPath": "work/models/provisiongroup_pgro.py",
"hash": "3c4c4f905da78503daf856aa9f9ab1b0bac3e8c959ad23ec6f260f3964c8f98f",
"hash": "6fbbebce9ecb5a0562748d7e4269285e2f00fe0f97e2714724281529feb2d871",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"lastCheckedTimestamp": 1744626248304,
"lastFileModificationTimestamp": 1744625950442.8225,
"lastCheckedTimestamp": 1746694064123,
"lastFileModificationTimestamp": 1746693990977.7754,
"flaggedForCopy": false
}
{
"fileId": "986eeb57-8634-4f40-a4ea-a2eae9d87e71",
"originalPath": "work/readme.md",
"currentPath": "work/readme.md",
"hash": "3e2bf4db6ad284fb011128f2ac0d3cf7849268068a39b160418173f0230ba4bd",
"docContent": "<p><br></p>",
"checkedStatus": "changed",
"comments": [
{
"commentId": "574b8332-b3c0-4afa-9f2b-8a632e910e0d",
"text": "I need to insert the AWS-ECR-Uplink-Data.",
"timestamp": 1746693537936
}
],
"lastCheckedTimestamp": 1746693903209,
"lastFileModificationTimestamp": 1746694946510.8994
}
{
"fileId": "caf03c7b-60d8-4a77-ac21-0eccabeae4a2",
"originalPath": "work/boot.sh",
"currentPath": "work/boot.sh",
"hash": "d665dba2f614cbf283cf1900c259bea8472f31353be894740e06535e6c3936c3",
"docContent": "<p><br></p>",
"checkedStatus": "todo",
"comments": [
{
"commentId": "6ba2875c-14b5-4444-a34e-52295efd65bc",
"text": "This is copy & paste from obsidian. The documentation must be translated.",
"timestamp": 1746693713037
}
],
"lastCheckedTimestamp": 1746693711224,
"lastFileModificationTimestamp": 1746447735575.9163
}
......@@ -5,7 +5,13 @@
"hash": "f29307970d124e55c7066e71ddf682f55e043d4f925195bdf320ff9da1311e27",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"comments": [
{
"commentId": "f79831c7-fa9e-42e7-a1c5-60a083ae9c6c",
"text": "Documentation is still in german. Must be translated some day.",
"timestamp": 1746694255236
}
],
"lastCheckedTimestamp": 1745314597597,
"lastFileModificationTimestamp": 1745314531223.019
}
......@@ -2,11 +2,11 @@
"fileId": "e146d34c-9d63-4ea0-b8b8-5f8d503f34f6",
"originalPath": "work/models/option_opti.py",
"currentPath": "work/models/option_opti.py",
"hash": "b2da20cf0b8a0d239fc042773ecacca52777230bb7b2c418fe6d45d55fc3e37b",
"hash": "389c9070a2271f186f35fac6f51490b3ec7b5a4483b0769af7309b36aac0a163",
"docContent": "<p><br></p>",
"checkedStatus": "done",
"comments": [],
"lastCheckedTimestamp": 1744626245976,
"lastFileModificationTimestamp": 1744625945396.1123,
"lastCheckedTimestamp": 1746694066635,
"lastFileModificationTimestamp": 1746694007962.1345,
"flaggedForCopy": false
}
......@@ -4,8 +4,14 @@
"currentPath": "work/config/OpenAiConfig.py",
"hash": "50c0f7d96f9ea76aa069a0a24137e898dbd4fc3c4af867565c90468981bf6ff5",
"docContent": "<p><br></p>",
"checkedStatus": "changed",
"comments": [],
"lastCheckedTimestamp": 1745314586062,
"checkedStatus": "todo",
"comments": [
{
"commentId": "1b2c6a64-0a75-4763-9613-12634d96bed2",
"text": "This data is currently stored statically and should be dynamically linked to the Docker container at the appropriate time.",
"timestamp": 1746694100919
}
],
"lastCheckedTimestamp": 1746694087733,
"lastFileModificationTimestamp": 1746437070245.503
}
......@@ -74,6 +74,7 @@ COPY config /maui/config
COPY manager /maui/manager
COPY commands /maui/commands
COPY models /maui/models
COPY routes /maui/routes
COPY boot.sh /maui/boot.sh
RUN chmod +x /maui/boot.sh
......
......@@ -182,7 +182,10 @@ for p in plan_rows:
#
# In diesem Schritt werden doppelte Deal- und Optionszeilen anhand ihrer Schlüsselwerte entfernt.
deal_rows_insert = list({(r["base_deal"], r["providercode_deal"]): r for r in deal_rows_insert}.values())
opt_rows_insert = list({(r["base_opti"], r["providercode_opti"]): r for r in opt_rows_insert }.values())
opt_rows_insert = list({
(r["base_opti"], r["providercode_opti"], r["providercategory_opti"]): r
for r in opt_rows_insert
}.values())
#
# Diese Listen sammeln Datensätze, deren Status auf gestoppt oder reaktiviert gesetzt werden muss.
......
0 4 * * * /maui/cron.sh downloadDataFromMaui.py
30 5 * * * /maui/cron.sh importCacheToDatabase.py
0 6 * * * /maui/cron.sh uploadCacheToAwsS3.py
0 6 * * * /maui/cron.sh calculateTarifDetailsWithGpt.py
# STEP 1 - IMPORT TARIFS FROM MAUI
0 3 * * * /maui/cron.sh downloadDataFromMaui.py
# STEP 2 - IMPORT DUMP TO DATABASE
30 4 * * * /maui/cron.sh importCacheToDatabase.py
# STEP 3 - UPLOAD FLYER TO AWS S3
0 5 * * * /maui/cron.sh uploadCacheToAwsS3.py
# STEP 4 - GENERATE TARIF-DETAILS WITH GPT
0 5 * * * /maui/cron.sh calculateTarifDetailsWithGpt.py
import sys; sys.path.append("..")
import sys
sys.path.append("..")
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import QueuePool
import config.MysqlConfig as DatabaseConfig
from sshtunnel import SSHTunnelForwarder
# In dieser Klasse wird die Verwaltung einer MySQL-Verbindung umgesetzt.
# Diese Klasse verwaltet die MySQL‑Verbindung und nutzt optional einen SSH‑Tunnel.
# Die Konfiguration stammt weiterhin aus config.MysqlConfig; es wird lediglich
# der erweiterte Engine‑Teil (QueuePool, Timeouts u. a.) integriert.
class MysqlManager:
# In diesem Konstruktor werden die Verbindungskonfigurationen aus dem Config-Modul geladen und der SSH-Tunnel bei Bedarf gestartet.
def __init__(self):
# In dieser Variablen werden die Konfigurationsdaten für den Datenbankzugriff gesammelt.
# ───────────────────────────────────────────────────────
# Konfiguration aus dem Modul laden
# ───────────────────────────────────────────────────────
self.dbConfig = {
"host": DatabaseConfig.MYSQL_HOST,
"user": DatabaseConfig.MYSQL_USER,
"password": DatabaseConfig.MYSQL_PASSWORD,
"database": DatabaseConfig.MYSQL_DATABASE,
"port": DatabaseConfig.MYSQL_PORT
"port": DatabaseConfig.MYSQL_PORT,
}
# In dieser Abzweigung wird geprüft, ob ein SSH-Tunnel verwendet werden soll.
# ───────────────────────────────────────────────────────
# Optionalen SSH‑Tunnel aufbauen
# ───────────────────────────────────────────────────────
if getattr(DatabaseConfig, "USE_SSH_TUNNEL", False):
# In dieser Variablen wird ein SSH-Tunnel erstellt, der den Datenverkehr zu einem lokalen Port umleitet.
self.sshTunnel = SSHTunnelForwarder(
(DatabaseConfig.SSH_HOST, DatabaseConfig.SSH_PORT),
ssh_username=DatabaseConfig.SSH_USERNAME,
ssh_password=DatabaseConfig.SSH_PASSWORD,
remote_bind_address=(self.dbConfig["host"], self.dbConfig["port"])
remote_bind_address=(self.dbConfig["host"], self.dbConfig["port"]),
)
# Hier wird der SSH-Tunnel gestartet, damit die Weiterleitung aktiv wird.
self.sshTunnel.start()
# In diesen Variablen werden Host und Port auf die lokalen Tunnel-Daten gesetzt.
dbHost = "127.0.0.1"
dbPort = self.sshTunnel.local_bind_port
db_host = "127.0.0.1"
db_port = self.sshTunnel.local_bind_port
else:
# In dieser Abzweigung wird kein SSH-Tunnel verwendet.
self.sshTunnel = None
dbHost = self.dbConfig["host"]
dbPort = self.dbConfig["port"]
db_host = self.dbConfig["host"]
db_port = self.dbConfig["port"]
# In dieser Variablen wird das SQLAlchemy-Engine-Objekt mit den Verbindungsdaten erzeugt.
dbEngine = create_engine(
f"mysql+pymysql://{self.dbConfig['user']}:{self.dbConfig['password']}@{dbHost}:{dbPort}/{self.dbConfig['database']}",
echo=False
# ───────────────────────────────────────────────────────
# SQLAlchemy‑Engine mit QueuePool & Timeouts erstellen
# ───────────────────────────────────────────────────────
self.engine = create_engine(
f"mysql+pymysql://{self.dbConfig['user']}:{self.dbConfig['password']}@{db_host}:{db_port}/{self.dbConfig['database']}",
echo=False,
poolclass=QueuePool,
pool_size=getattr(DatabaseConfig, "POOL_SIZE", 1),
max_overflow=getattr(DatabaseConfig, "MAX_OVERFLOW", 0),
pool_recycle=getattr(DatabaseConfig, "POOL_RECYCLE", 3600), # Sekunden
pool_pre_ping=True,
connect_args={"connect_timeout": getattr(DatabaseConfig, "CONNECT_TIMEOUT", 30)},
)
# In dieser Variablen wird eine neue SessionFactory erzeugt und eine Session erstellt.
self.dbSession = sessionmaker(bind=dbEngine)()
# Session Factory sofort initialisieren
self.dbSession = sessionmaker(bind=self.engine)()
# In dieser Methode wird eine Session-Instanz zurückgegeben, um Datenbankaktionen durchzuführen.
# Gibt die aktuelle Session zurück
def getSession(self):
return self.dbSession
# In dieser Methode wird die bestehende Session geschlossen und der SSH-Tunnel (falls vorhanden) gestoppt.
# Schließt Session und SSH‑Tunnel (falls vorhanden)
def close(self):
self.dbSession.close()
if self.sshTunnel:
......
import sys
sys.path.append("..")
# Leertaste vor dem Kommentar.
# Dieses Modul stellt Datums- und Zeitfunktionen bereit, die für diverse Berechnungen benötigt werden.
from datetime import datetime, date, timedelta, time, timezone
# Leertaste vor dem Kommentar.
# Dieses Modul stellt Funktionen für den Zugriff auf das Betriebssystem bereit.
import os
"""
Hauptanwendung (Manager-Kontext)
# Leertaste vor dem Kommentar.
# Dieses Modul dient zur Verarbeitung von XML-Daten.
import xml.etree.ElementTree as ET
Startet die Flask-App, erzeugt einen WebManager und registriert
alle Blueprints zentral.
# Leertaste vor dem Kommentar.
# Dieses Modul stellt Klassen und Funktionen für eine Flask-Webanwendung bereit.
from flask import Flask, Response, request, jsonify
**Neu**
Alle Endpunkte verlangen jetzt zwingend den Query-Parameter
?token=12345
Fehlt der Parameter oder stimmt der Wert nicht, erhält der Client
# Leertaste vor dem Kommentar.
# Dieses Modul erzeugt RSS-Feeds aus Datenstrukturen.
from feedgen.feed import FeedGenerator
HTTP/1.1 401 Unauthorized
{"status": "NOK", "message": "Ungültiger oder fehlender Token."}
"""
# Leertaste vor dem Kommentar.
# Diese Klasse stellt eine Verbindung zur MySQL-Datenbank her und verwaltet Sessions.
from manager.MysqlManager import MysqlManager
from __future__ import annotations
# Leertaste vor dem Kommentar.
# Diese Klasse repräsentiert Rohdaten von News, die aus Obsidian stammen.
from models.rawnewsdata_rane import RawNewsDataRane
import sys
sys.path.append("..") # Projekt-Root im Suchpfad registrieren
# Leertaste vor dem Kommentar.
# Diese Klasse repräsentiert wirtschaftliche News, die ausgewertet werden.
from models.economicnews_ecne import EconomicNewsEcne
from flask import Flask, request, jsonify
from routes.HealtCheckRouter import blueprint as health_router
from routes.BaseRouter import blueprint as tarifs_router
from routes.EeccxRouter import blueprint as eeccx_router
# Leertaste vor dem Kommentar.
# Diese Klasse repräsentiert einen Wirtschaftsdatenkalender.
from models.economiccalendar_ecca import EconomicCalendarEcca
# --------------------------------------------------------------------------- #
# WebManager: registriert sämtliche Blueprints
# --------------------------------------------------------------------------- #
class WebManager:
"""Registriert Blueprints und bündelt weitere Infrastruktur."""
# Leertaste vor dem Kommentar.
# Diese Klasse repräsentiert KI-generierte News-Daten.
from models.ainewsdata_aine import AinewsDataAine
def __init__(self, app: Flask) -> None:
self.app = app
self._register_blueprints()
# Leertaste vor dem Kommentar.
# Diese Klasse repräsentiert einen Puffer zur Priorisierung von News.
from models.prioritybuffer_prbu import PrioritybufferPrbu
def _register_blueprints(self) -> None:
"""Alle Blueprint-Objekte an der App anmelden."""
for bp in (health_router, tarifs_router, eeccx_router):
self.app.register_blueprint(bp)
# Leertaste vor dem Kommentar.
# Diese Klasse stellt Reporting-Funktionen zur Verfügung, um HTML-Berichte zu erzeugen.
from manager.ReportingManager import ReportingManager
# Leertaste vor dem Kommentar.
# Diese Funktionen stellen Operatoren bereit, um komplexe SQL-Filter zu bauen.
from sqlalchemy import or_, and_
# --------------------------------------------------------------------------- #
# App-Instanz & globale Token-Prüfung
# --------------------------------------------------------------------------- #
TOKEN_VALUE = "12345" # Erlaubter Token-Wert
# Leertaste vor dem Kommentar.
# Diese Variable erzeugt eine neue Flask-Anwendung, die als Webserver agiert.
app = Flask(__name__)
# Leertaste vor dem Kommentar.
# Diese Abfrage startet den Flask-Webserver nur, wenn dieses Skript direkt ausgeführt wird.
if __name__ == '__main__':
# Leertaste vor dem Kommentar.
# Diese Zeile startet die Flask-Anwendung auf allen Schnittstellen und Port 80.
app.run(host='0.0.0.0', port=80)
WebManager(app)
@app.before_request
def _require_token():
"""
Globale Pre-Request-Hook:
Schlägt fehl, wenn der Query-Parameter ?token=12345
nicht exakt vorhanden ist.
"""
if request.endpoint == "static":
# Flask-static-Files nicht schützen
return None
token = request.args.get("token")
if token != TOKEN_VALUE:
return (
jsonify({"status": "ERROR", "message": "Please enter a valid token."}),
401,
)
# --------------------------------------------------------------------------- #
# Startpunkt
# --------------------------------------------------------------------------- #
if __name__ == "__main__":
# Server auf allen Interfaces, Port 80 starten
app.run(host="0.0.0.0", port=80)
......@@ -3,17 +3,52 @@ from sqlalchemy.orm import relationship
from models._system import Base
from models.option_opti import OptionOpti
class BaseBase(Base):
__tablename__ = 'base_base'
id_base = Column(Integer, primary_key=True, autoincrement=True)
provider_base = Column(String(255), nullable=False)
providercode_base= Column(String(255))
name_base = Column(String(255), nullable=False)
alias_base = Column(String(255))
flyerurl_base = Column(String(255))
piburl_base = Column(String(255))
details_base = Column(JSON) # enthält das von GPT extrahierte Tarif‑JSON
created_base = Column(DateTime, nullable=False)
updated_base = Column(DateTime, nullable=False)
deals = relationship("DealDeal", back_populates="base")
options = relationship("OptionOpti", back_populates="base")
id_base = Column(
Integer,
primary_key=True,
autoincrement=True
)
provider_base = Column(
String(255),
nullable=False
)
providercode_base = Column(
String(255)
)
name_base = Column(
String(255),
nullable=False
)
alias_base = Column(
String(255)
)
flyerurl_base = Column(
String(255)
)
piburl_base = Column(
String(255)
)
details_base = Column(
JSON
)
created_base = Column(
DateTime,
nullable=False
)
updated_base = Column(
DateTime,
nullable=False
)
deals = relationship(
"DealDeal",
back_populates="base"
)
options = relationship(
"OptionOpti",
back_populates="base"
)
......@@ -3,23 +3,79 @@ from sqlalchemy.orm import relationship
from models._system import Base
from models.provisiongroup_pgro import ProvisiongroupPgro
class DealDeal(Base):
__tablename__ = 'deal_deal'
__table_args__ = {"mysql_engine": "InnoDB"}
id_deal = Column(Integer, primary_key=True, autoincrement=True)
provisiongroup_deal = Column(Integer, ForeignKey('provisiongroup_pgro.id_pgro'))
base_deal = Column(Integer, ForeignKey('base_base.id_base'), nullable=False)
providercode_deal = Column(String(255))
name_deal = Column(String(255))
alias_deal = Column(String(255))
price_deal = Column(Numeric(8, 5), nullable=False)
starts_deal = Column(DateTime, nullable=False)
stops_deal = Column(DateTime)
provision1_deal = Column(Numeric(10, 5), nullable=False, default=0.00000)
provision2_deal = Column(Numeric(10, 5), nullable=False, default=0.00000)
provision3_deal = Column(Numeric(10, 5), nullable=False, default=0.00000)
provision4_deal = Column(Numeric(10, 5), nullable=False, default=0.00000)
created_deal = Column(DateTime, nullable=False)
updated_deal = Column(DateTime, nullable=False)
base = relationship("BaseBase", back_populates="deals")
provisiongroup = relationship("ProvisiongroupPgro", back_populates="deals")
__table_args__ = {'mysql_engine': 'InnoDB'}
id_deal = Column(
Integer,
primary_key=True,
autoincrement=True
)
provisiongroup_deal = Column(
Integer,
ForeignKey('provisiongroup_pgro.id_pgro')
)
base_deal = Column(
Integer,
ForeignKey('base_base.id_base'),
nullable=False
)
providercode_deal = Column(
String(255)
)
name_deal = Column(
String(255)
)
alias_deal = Column(
String(255)
)
price_deal = Column(
Numeric(8, 5),
nullable=False
)
starts_deal = Column(
DateTime,
nullable=False
)
stops_deal = Column(
DateTime
)
provision1_deal = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
provision2_deal = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
provision3_deal = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
provision4_deal = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
created_deal = Column(
DateTime,
nullable=False
)
updated_deal = Column(
DateTime,
nullable=False
)
base = relationship(
"BaseBase",
back_populates="deals"
)
provisiongroup = relationship(
"ProvisiongroupPgro",
back_populates="deals"
)
......@@ -3,24 +3,83 @@ from sqlalchemy.orm import relationship
from models._system import Base
from models.provisiongroup_pgro import ProvisiongroupPgro
class OptionOpti(Base):
__tablename__ = 'option_opti'
__table_args__ = {"mysql_engine": "InnoDB"}
id_opti = Column(Integer, primary_key=True, autoincrement=True)
provisiongroup_opti = Column(Integer, ForeignKey('provisiongroup_pgro.id_pgro'))
base_opti = Column(Integer, ForeignKey('base_base.id_base'), nullable=False)
providercode_opti = Column(String(255))
providercategory_opti = Column(String(255))
name_opti = Column(String(255), nullable=False)
alias_opti = Column(String(255))
price_opti = Column(Numeric(8, 5), nullable=False)
starts_opti = Column(DateTime, nullable=False)
stops_opti = Column(DateTime)
provision1_opti = Column(Numeric(10, 5), nullable=False, default=0.00000)
provision2_opti = Column(Numeric(10, 5), nullable=False, default=0.00000)
provision3_opti = Column(Numeric(10, 5), nullable=False, default=0.00000)
provision4_opti = Column(Numeric(10, 5), nullable=False, default=0.00000)
created_opti = Column(DateTime, nullable=False)
updated_opti = Column(DateTime, nullable=False)
base = relationship("BaseBase", back_populates="options")
provisiongroup = relationship("ProvisiongroupPgro", back_populates="options")
__table_args__ = {'mysql_engine': 'InnoDB'}
id_opti = Column(
Integer,
primary_key=True,
autoincrement=True
)
provisiongroup_opti = Column(
Integer,
ForeignKey('provisiongroup_pgro.id_pgro')
)
base_opti = Column(
Integer,
ForeignKey('base_base.id_base'),
nullable=False
)
providercode_opti = Column(
String(255)
)
providercategory_opti = Column(
String(255)
)
name_opti = Column(
String(255),
nullable=False
)
alias_opti = Column(
String(255)
)
price_opti = Column(
Numeric(8, 5),
nullable=False
)
starts_opti = Column(
DateTime,
nullable=False
)
stops_opti = Column(
DateTime
)
provision1_opti = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
provision2_opti = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
provision3_opti = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
provision4_opti = Column(
Numeric(10, 5),
nullable=False,
default=0.00000
)
created_opti = Column(
DateTime,
nullable=False
)
updated_opti = Column(
DateTime,
nullable=False
)
base = relationship(
"BaseBase",
back_populates="options"
)
provisiongroup = relationship(
"ProvisiongroupPgro",
back_populates="options"
)
......@@ -2,12 +2,38 @@ from sqlalchemy import Column, Integer, String, Numeric, DateTime
from sqlalchemy.orm import relationship
from models._system import Base
class ProvisiongroupPgro(Base):
__tablename__ = 'provisiongroup_pgro'
id_pgro = Column(Integer, primary_key=True, autoincrement=True)
name_pgro = Column(String(255), nullable=False)
percent_pgro = Column(Numeric(5, 2), nullable=False, default=0.00)
created_pgro = Column(DateTime, nullable=False)
updated_pgro = Column(DateTime)
deals = relationship("DealDeal", back_populates="provisiongroup")
options = relationship("OptionOpti", back_populates="provisiongroup")
id_pgro = Column(
Integer,
primary_key=True,
autoincrement=True
)
name_pgro = Column(
String(255),
nullable=False
)
percent_pgro = Column(
Numeric(5, 2),
nullable=False,
default=0.00
)
created_pgro = Column(
DateTime,
nullable=False
)
updated_pgro = Column(
DateTime,
nullable=True
)
deals = relationship(
"DealDeal",
back_populates="provisiongroup"
)
options = relationship(
"OptionOpti",
back_populates="provisiongroup"
)
This diff is collapsed.
......@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 3,
"id": "531d8b07-8f2f-4ef6-b92a-d4bf3364e166",
"metadata": {},
"outputs": [
......@@ -10,12 +10,12 @@
"name": "stdout",
"output_type": "stream",
"text": [
"eyJraWQiOiJpZUFmc2p0UDJLdDhVM2F2VHlGVEkiLCJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX3R5cGUiOiJhcHBsaWNhdGlvbiIsIm9yZyI6Ik1EIiwiY2xpZW50X2lkIjoiOFZ5amJRWnlUVmR4MlQyVU82bUEzWlRFZWlvZEhjcC0iLCJjdXN0b21fZGF0YSI6eyJyZXNwb25zaWJsZVRlYW0iOiJCQ1MgTW9iaWxlIn0sImF6cCI6IjhWeWpiUVp5VFZkeDJUMlVPNm1BM1pURWVpb2RIY3AtIiwic2NvcGUiOiJhZ3JlZW1lbnRUZXJtU2hlZXRzOndyaXRlIG1hdWkudnZpOnJlYWQgY3VzdG9tZXJQcm9kdWN0OnJlYWQgcHJvZHVjdE9mZmVyaW5nOnJlYWQgemFwLmNvbnRyYWN0OnJlYWQgemFwLmNvbnRyYWN0OndyaXRlIHN0cyIsImlhdCI6MTc0NjAxOTcwOCwic3ViIjoiOFZ5amJRWnlUVmR4MlQyVU82bUEzWlRFZWlvZEhjcC0iLCJpc3MiOiJodHRwczovL3N0cy5tZC5kZS92MS9vaWRjLyIsImp0aSI6InJ1OF9GaUYtZi1GUS00eFdIV2JuSyIsImV4cCI6MTc0NjEwNjEwOH0.BiISXTvunrU_v6KR98pzQ4GZLpEl2f_fSNfaIYDLWq1BsKTeNS7f4hUsBLK91yLGIvyY1vfOuTOeLJeYodX_5CFpQOvx97nKlg46_4g9uqiHYiyd6ehGyAnEp4W-DT_mwJ8PgIqprFeN9Tw5_a5WYMmCZKfMwPP940werC90r0iabClML_J-56_DT8NWN_bm_EUPNrpyWRUb65WCphmbutqbFMc8wBCF4xtwI5VD9v2Nd9sXbFs8TevvX-Weg8WdIVQeQimiigvP1uEEiVimybcK8_Lav3PcDkVcQJ5YbtAx8INIGImAQckeCR7Bvd8mdwdrqc9YWa_oEPg_X89uoA\n",
"eyJraWQiOiJpZUFmc2p0UDJLdDhVM2F2VHlGVEkiLCJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX3R5cGUiOiJhcHBsaWNhdGlvbiIsIm9yZyI6Ik1EIiwiY2xpZW50X2lkIjoiOFZ5amJRWnlUVmR4MlQyVU82bUEzWlRFZWlvZEhjcC0iLCJjdXN0b21fZGF0YSI6eyJyZXNwb25zaWJsZVRlYW0iOiJCQ1MgTW9iaWxlIn0sImF6cCI6IjhWeWpiUVp5VFZkeDJUMlVPNm1BM1pURWVpb2RIY3AtIiwic2NvcGUiOiJhZ3JlZW1lbnRUZXJtU2hlZXRzOndyaXRlIG1hdWkudnZpOnJlYWQgY3VzdG9tZXJQcm9kdWN0OnJlYWQgcHJvZHVjdE9mZmVyaW5nOnJlYWQgemFwLmNvbnRyYWN0OnJlYWQgemFwLmNvbnRyYWN0OndyaXRlIHN0cyIsImlhdCI6MTc0NjY5ODQ3MSwic3ViIjoiOFZ5amJRWnlUVmR4MlQyVU82bUEzWlRFZWlvZEhjcC0iLCJpc3MiOiJodHRwczovL3N0cy5tZC5kZS92MS9vaWRjLyIsImp0aSI6Im5QNnJTVE1UcEY3WUlHdkt5QnZjMyIsImV4cCI6MTc0Njc4NDg3MX0.CFY4CqnV3sYyY-JtGg8RE8Z6xqs4cVqzySf3w8DTo61Yh9uFXvcNxWLDCoRSO94IIQq6G6YVNaerb5lnlNBx40448liIkqsRdSeJv4JmgFgBUkclYwKZxCDmA4YeU5SyREXln-_K7EDI349FsCdrzbOXUj39GZ4xkgJIulfuhp5dSfuHkSIePNAmoqag3ihy0Z6lCugx5fQgbpucd8lMbHRrOQVbbuUAgS1qNtJL9WJEJdDAX9ciEqkwfr-5R47BSVwNdW5Tg_zDPcTR8sSLp_GrD7vjWw5qSZRiBdcQXyJbFy3gIKUojobqTzsEMQYmHMTQ3Ouqyajw9nPErzQIxw\n",
"---\n",
"{'error': [], 'pciId': '0DRR-IY89-V1MT', 'pciPdf': 'https://media.mdm.freenet-group.de/downloads/vorvertragliche-dokumente/0DRR-IY89-V1MT/152831_vertragsinformationen.pdf', 'pcsPdf': 'https://media.mdm.freenet-group.de/downloads/vorvertragliche-dokumente/0DRR-IY89-V1MT/152831_vertragszusammenfassung.pdf'}\n",
"{'error': ['Unerlaubter Wert in Parameter service_code.'], 'pciId': '', 'pciPdf': '', 'pcsPdf': ''}\n",
"---\n",
"https://media.mdm.freenet-group.de/downloads/vorvertragliche-dokumente/0DRR-IY89-V1MT/152831_vertragsinformationen.pdf\n",
"https://media.mdm.freenet-group.de/downloads/vorvertragliche-dokumente/0DRR-IY89-V1MT/152831_vertragszusammenfassung.pdf\n"
"\n",
"\n"
]
}
],
......@@ -50,7 +50,7 @@
"exampleTarifId = \"3877325\"\n",
"\n",
"# Die Liste 'exampleServiceCodes' enthält eine Sammlung von Beispiel-Service-Codes für die API-Anfrage.\n",
"exampleServiceCodes = [\"G343\", \"O3729\", \"G4\", \"G19\", \"O1173\", \"G396\", \"O13744\", \"G179\", \"G3181655\", \"O3150921\"]\n",
"exampleServiceCodes = [\"G343\", \"O3729\", \"G4\", \"G19\", \"O1173\", \"G396\", \"O13744\", \"G179\", \"G3181655\", \"O3150921s\"]\n",
"\n",
"# Die Variable 'hdlNr' speichert die Händlernummer, welche als Parameter in der API-Anfrage dient.\n",
"hdlNr = 27502648\n",
......@@ -141,7 +141,7 @@
"output_type": "stream",
"text": [
"---\n",
"PCS PDF erfolgreich heruntergeladen und gespeichert unter: ../cache/0DRR-IY89-V1MT_3877325_pcs.pdf\n",
"PCS PDF erfolgreich heruntergeladen und gespeichert unter: ../cache/YJ7W-3R4K-BPF3_3877325_pcs.pdf\n",
"---\n"
]
}
......
# MAUI Data Toolkit
## JupyterLab
jupyter lab
docker build --platform linux/amd64 -t maui:latest .
# dev
docker run -it -v ./commands:/maui/commands -v ./cache:/maui/cache -v ./config:/maui/config -v ./manager:/maui/manager -v ./models:/maui/models -p 80:80 maui:latest /bin/bash
# ecr
aws ecr get-login-password --region eu-central-1 | docker login --username AWS --password-stdin 181802255479.dkr.ecr.eu-central-1.amazonaws.com
docker tag obsidian:latest 181802255479.dkr.ecr.eu-central-1.amazonaws.com/obsidian:latest
docker push 181802255479.dkr.ecr.eu-central-1.amazonaws.com/obsidian:latest
docker pull 181802255479.dkr.ecr.eu-central-1.amazonaws.com/obsidian:latest
docker run -it -d --restart always -p 2000:80 181802255479.dkr.ecr.eu-central-1.amazonaws.com/obsidian:latest
####
To further develop or test this project, use Jupyter Lab. Note that the "notebooks" are concept drafts only, executed with your PC as the Python environment. Production code runs only inside the Docker environment (stored in e.g. the "commands" or "manager" folder).
```bash
jupyter lab
```
## Docker & ECR
Use Docker to deploy this package in a production environment. Log in to Amazon ECR with the AWS CLI:
```bash
aws ecr get-login-password --region eu-central-1 | docker login --username AWS --password-stdin ???
```
Build, tag, and push the image:
```bash
docker build --platform linux/amd64 -t maui:latest .
docker tag maui:latest ???
docker push ???
```
# MAUI Data Toolkit
To pull and run the container, use:
## Jupyter Lab
```bash
docker pull ???
docker run -it -d --restart always -p 80:80 ???
```
You can use "jupyter lab" to further develop or test this project.
Alternatively, for local development with mounted volumes:
```bash
jupyter lab
docker run -it \
-v ./commands:/maui/commands \
-v ./cache:/maui/cache \
-v ./config:/maui/config \
-v ./manager:/maui/manager \
-v ./routes:/maui/routes \
-v ./models:/maui/models \
-p 80:80 \
maui:latest \
/bin/bash
```
## Sidekick Documentation
Please use Sidekick for extended documentation and for maintaining the data structure.
Use Sidekick for extended documentation and to maintain the data structure:
```bash
docker run -it --rm -p 2000:8888 -v .:/app/work ceetrox/sidekick:latest
docker run -it --rm \
-p 2000:8888 \
-v .:/app/work \
ceetrox/sidekick:latest
```
This diff is collapsed.
"""
Health-Check-Router (Caching & PDF-Rückgabe)
Stellt **einen** Endpunkt bereit
--------------------------------
GET /eeccx/<id>?options=A,B,C
GET /eeccx/<id>?options=A&options=B…
Workflow
--------
1. ID + Options → SHA-256-Hash → ./cache/<hash>.pdf
• Datei vorhanden ⇒ PDF sofort senden.
2. OAuth-Token holen (client-credentials).
3. Partner-API aufrufen.
• Enthält die Antwort ein Feld **error** → Fehlerbotschaft(en)
per JSON an den Client weitergeben.
• Enthält die Antwort kein *pcsPdf/pciPdf* → Fehler aus Antwort
oder Standardmeldung zurückgeben.
4. PDF herunterladen, cachen, ausliefern.
5. Jeder andere Fehler liefert immer
`{"status":"ERROR","message":"…"}`
– **ohne** HTML-Escaping/Unicode-Escapes („ä“, „ü“ usw. bleiben sichtbar).
Hinweis
-------
• SSL-Verifikation ist zu Demo-Zwecken deaktiviert (`verify=False`).
• Die Secrets stammen 1-zu-1 aus Deinem Beispielskript.
"""
from __future__ import annotations
import sys
sys.path.append("..")
import hashlib
import io
import json
import os
from typing import List, Tuple
import requests
import urllib3
from flask import Blueprint, Response, request, send_file
# --------------------------------------------------------------------------- #
#  Suppress warnings about insecure HTTPS requests (development only)         #
# --------------------------------------------------------------------------- #
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# --------------------------------------------------------------------------- #
#  Blueprint                                                                  #
# --------------------------------------------------------------------------- #
# Blueprint name = module name without the package prefix (dots are not
# allowed in blueprint names and would raise a ValueError).
blueprint = Blueprint(__name__.rsplit(".", 1)[-1], __name__)
# --------------------------------------------------------------------------- #
#  Configuration / constants                                                  #
# --------------------------------------------------------------------------- #
# SECURITY NOTE(review): OAuth and Cloudflare Access credentials are
# hard-coded below. They should be moved to environment variables or a
# secrets store before any production use.
TOKEN_URL = "https://sts.md.de/v1/oidc/token"
API_URL = "https://partner-api.md.de/vertragserfassung/ftpOption2Pci.php"
CLIENT_ID = "8VyjbQZyTVdx2T2UO6mA3ZTEeiodHcp-"
CLIENT_SECRET = "FTE3y3Hj3TXeYrR8JbbO9yRRJ5ZGmPPTHH4HpyLFd9_X6wOx"
CF_CLIENT_ID = "e42d165bde7363f8478a157b57425fd5.access"
CF_CLIENT_SECRET = "de141ba4a6fbf9c29f51ba86fce9e81d3479797ff010a7ba43cde01977eac565"
# Dealer numbers used for every partner-API request.
HDL_NR = 28009594
PROV_HDL_NR = 28009594
PRODUKT_KATEGORIE = "O"
# PDF cache directory, relative to the routes package.
CACHE_DIR = "./../cache"
# --------------------------------------------------------------------------- #
# Hilfsfunktionen #
# --------------------------------------------------------------------------- #
def _json_error(message: str, status_code: int = 500) -> Response:
    """Build a JSON ERROR response.

    Serialized with ``ensure_ascii=False`` so that umlauts and other
    non-ASCII characters are **not** escaped as ``\\uXXXX`` sequences.
    """
    body = {"status": "ERROR", "message": message}
    return Response(
        json.dumps(body, ensure_ascii=False),
        status=status_code,
        mimetype="application/json",
    )
def _extract_options() -> List[str]:
    """Parse the »options« query parameter into a list.

    Accepts both forms:
        ?options=A,B,C
        ?options=A&options=B...
    """
    values = request.args.getlist("options")
    # A single value containing commas is treated as a comma-separated list.
    if len(values) == 1 and "," in values[0]:
        parts = (p.strip() for p in values[0].split(","))
        return [p for p in parts if p]
    return [v for v in values if v]
def _hash_id_options(tarif_id: str, options: List[str]) -> str:
key = f"{tarif_id}:{','.join(sorted(options))}".encode("utf-8")
return hashlib.sha256(key).hexdigest()
def _ensure_cache():
    """Create the PDF cache directory (including parents) if missing."""
    os.makedirs(CACHE_DIR, exist_ok=True)
# ---------------------------- OAuth-Token ---------------------------------- #
def _get_token() -> Tuple[str | None, str | None]:
    """Fetch an OAuth access token via the client-credentials grant.

    Returns ``(token, None)`` on success or ``(None, error_message)`` on
    any request failure or when the response lacks an access token.
    """
    form = {
        "grant_type": "client_credentials",
        "client_id": CLIENT_ID,
        "client_secret": CLIENT_SECRET,
    }
    try:
        response = requests.post(TOKEN_URL, data=form, verify=False, timeout=10)
        response.raise_for_status()
    except requests.exceptions.RequestException as exc:
        return None, f"Token-Abruf fehlgeschlagen: {exc}"
    access_token = response.json().get("access_token")
    if access_token:
        return access_token, None
    return None, "Kein access_token im Token-Response."
# ------------------------- Partner-API-Aufruf ------------------------------ #
def _format_api_error(err_val) -> str:
    """Normalize the partner API's ``error`` field into a single string.

    The field may be a list, a dict, a plain string, or a string that
    itself contains another JSON level.
    """
    if isinstance(err_val, list):
        return "; ".join(str(e) for e in err_val)
    if isinstance(err_val, dict):
        return "; ".join(f"{k}: {v}" for k, v in err_val.items())
    if isinstance(err_val, str):
        try:
            decoded = json.loads(err_val)
        except ValueError:
            return err_val
        if isinstance(decoded, dict):
            return decoded.get("message", str(decoded))
        return str(decoded)
    return str(err_val)
def _partner_api(token: str, tarif_id: str, options: List[str]) -> Tuple[dict | None, str | None]:
    """Call the partner API for the given tariff id and service options.

    Returns ``(json_payload, None)`` on success, or ``(None, message)``
    when the request fails, the body is not JSON, or the API reports an
    error in its ``error`` field.
    """
    request_headers = {
        "Authorization": f"Bearer {token}",
        "CF-Access-Client-Id": CF_CLIENT_ID,
        "CF-Access-Client-Secret": CF_CLIENT_SECRET,
        "Content-Type": "application/json",
    }
    body = {
        "hdl_nr": HDL_NR,
        "prov_hdl_nr": PROV_HDL_NR,
        "tarif_id": tarif_id,
        "produkt_kategorie": PRODUKT_KATEGORIE,
        "service_code": options,
    }
    try:
        response = requests.put(API_URL, headers=request_headers, json=body, verify=False, timeout=30)
        response.raise_for_status()
    except requests.exceptions.RequestException as exc:
        return None, f"API-Aufruf fehlgeschlagen: {exc}"
    try:
        data = response.json()
    except ValueError:
        return None, "Antwort der Partner-API ist kein JSON."
    # Evaluate an error reported by the partner API itself.
    err_val = data.get("error")
    if err_val:
        return None, _format_api_error(err_val)
    return data, None
# ----------------------------- PDF-Download ------------------------------- #
def _download_pdf(url: str) -> Tuple[bytes | None, str | None]:
    """Download a PDF and return ``(content, None)`` or ``(None, error)``."""
    try:
        response = requests.get(url, stream=True, verify=False, timeout=30)
        response.raise_for_status()
    except requests.exceptions.RequestException as exc:
        return None, f"PDF-Download fehlgeschlagen: {exc}"
    return response.content, None
# --------------------------------------------------------------------------- #
# Route #
# --------------------------------------------------------------------------- #
@blueprint.route("/freenet-eeccx/<string:tarif_id>", methods=["GET"])
def eeccx_pdf(tarif_id: str):
    """
    Deliver the (cached) EECC-X PDF for a tariff id.

    Examples:
        /freenet-eeccx/3877325?options=G343,O3729
        /freenet-eeccx/3877325?options=G343&options=O3729
    """
    options = _extract_options()
    _ensure_cache()
    cache_file = os.path.join(CACHE_DIR, f"{_hash_id_options(tarif_id, options)}.pdf")
    # 1) Cache hit -> send the PDF immediately.
    if os.path.isfile(cache_file):
        return send_file(cache_file, mimetype="application/pdf")
    # 2) OAuth token
    token, err = _get_token()
    if err:
        return _json_error(err, 502)
    # 3) Partner API
    api_json, err = _partner_api(token, tarif_id, options)
    if err:
        return _json_error(err, 502)
    # 4) PDF URL -- either field may carry the download link.
    pdf_url: str | None = api_json.get("pcsPdf") or api_json.get("pciPdf")
    if not pdf_url:
        # If the API supplies a message, forward that to the client.
        msg = api_json.get("message") or "Keine PDF-URL in der API-Antwort."
        return _json_error(msg, 502)
    # 5) Download the PDF
    pdf_bytes, err = _download_pdf(pdf_url)
    if err:
        return _json_error(err, 502)
    # 6) Write to cache (a cache failure is not fatal)
    try:
        with open(cache_file, "wb") as fh:
            fh.write(pdf_bytes)
    except OSError:
        pass  # Ignore; the PDF is delivered anyway.
    # 7) Send the PDF
    return send_file(
        io.BytesIO(pdf_bytes),
        mimetype="application/pdf",
        as_attachment=False,
        download_name=f"{tarif_id}.pdf",
    )
"""
Health-Check-Router
Kapselt den Endpunkt / für den System-Gesundheitscheck.
"""
from flask import Blueprint, jsonify
# Blueprint-Name = Dateiname ohne Punkte; verhindert ValueError
blueprint = Blueprint(__name__.rsplit(".", 1)[-1], __name__)
@blueprint.route("/", methods=["GET"])
def index():
    """GET / -- return a simple JSON status payload."""
    payload = {"status": "ok", "message": "The API is working."}
    return jsonify(payload)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment