1
This commit is contained in:
10
00_Globale_Richtlinien/Entworfener_Code/app/src/__init__.py
Normal file
10
00_Globale_Richtlinien/Entworfener_Code/app/src/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""
Package for runtime modules (config/logging/etc.) of the reference app.
"""

from __future__ import annotations

from .config_loader import Settings, load_runtime_config
from .logging_setup import init_logging

# Re-exported public API of the package.
__all__ = ["Settings", "load_runtime_config", "init_logging"]
|
||||
239
00_Globale_Richtlinien/Entworfener_Code/app/src/config_loader.py
Normal file
239
00_Globale_Richtlinien/Entworfener_Code/app/src/config_loader.py
Normal file
@@ -0,0 +1,239 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Union, cast
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
@dataclass
class Settings:
    """
    Universal settings wrapper around the YAML configuration loaded at runtime.

    Supports two schemas in order to stay compatible with the requirements:

    1) "Simple variable schema" (requested by the user):
        global:
          variablenname:
            beschreibung: <text>
            wert: <any>
        modulname:
          variablenname:
            beschreibung: <text>
            wert: <any>

    2) "Nested schema" (optional, later extension):
        global:
          logging:
            max_log_size: 10
            retention_days: 14
        modules:
          webserver:
            port: 8300

    Lookup rules:
    - Settings.value("global", "max_log_size")
        - Checks the simple schema first: global.max_log_size.wert
        - Then the nested one: global.max_log_size
        - Alternatively via dot path: value("global", "logging.max_log_size")
    - For modules:
        - module_value("webserver", "port") or module_value("webserver", "logging.max_log_size")

    Notes:
    - This class deliberately validates only minimally, to work without extra
      dependencies. Detailed validation (e.g. via pydantic) can be added later.
    """

    raw: Dict[str, Any]   # parsed top-level YAML mapping
    base_dir: Path        # base directory for resolving relative paths (app/)

    # ---------- Public API ----------

    def section(self, name: str) -> Dict[str, Any]:
        """Return top-level section *name* as a dict ({} if missing or not a mapping)."""
        return _ensure_dict(self.raw.get(name, {}))

    def value(self, section: str, key: str, default: Any = None) -> Any:
        """
        Read a value from a section.

        - Supports dot paths (e.g. "logging.max_log_size").
        - Supports the "wert" wrapper format (variablenname: {beschreibung, wert}).
        """
        node = self.section(section)
        if not node:
            return default

        # 1) Plain variable name without a dot path.
        #    (Fix: the original returned the identical expression in both the
        #    `if key in node:` branch and its fall-through — dead code.)
        if "." not in key:
            return _unwrap_variable(node.get(key), default)

        # 2) Dot path (nested schema).
        current: Any = node
        for part in key.split("."):
            if isinstance(current, dict) and part in current:
                current = cast(Dict[str, Any], current)[part]
            else:
                return default
        return _unwrap_variable(current, default)

    def module_value(self, module: str, key: str, default: Any = None) -> Any:
        """
        Read a value from a module section. The following places are checked:
        - raw[module]
        - raw.get("modules", {}).get(module)
        """
        # Direct module section (simple schema).
        direct_section = _ensure_dict(self.raw.get(module, {}))
        if direct_section:
            out = _value_from_node(direct_section, key, default)
            if out is not _MISSING:
                return out

        # Nested "modules" schema.
        modules_section = _ensure_dict(self.raw.get("modules", {}))
        module_section = _ensure_dict(modules_section.get(module, {}))
        if module_section:
            out = _value_from_node(module_section, key, default)
            if out is not _MISSING:
                return out

        return default

    def get_logging_params(self) -> Dict[str, Any]:
        """
        Return commonly used logging parameters with sensible defaults.

        The following keys are tried (in this order):

        - level: global.level, global.logging.level
        - max_log_size: global.max_log_size (MB), global.logging.max_log_size
        - retention_days: global.retention_days, global.logging.retention_days
        - log_dir: global.log_dir, global.paths.log_dir
        """

        def _first_set(*candidates: Any, fallback: Any) -> Any:
            # Fix: the original chained the candidates with `or`, which
            # silently replaced legitimate falsy config values (e.g.
            # `retention_days: 0` meant to disable retention) with the
            # default. Only None counts as "not configured" here.
            for candidate in candidates:
                if candidate is not None:
                    return candidate
            return fallback

        level = _first_set(
            self.value("global", "level"),
            self.value("global", "logging.level"),
            fallback="INFO",
        )
        max_log_size_mb = _first_set(
            self.value("global", "max_log_size"),
            self.value("global", "logging.max_log_size"),
            fallback=10,
        )
        retention_days = _first_set(
            self.value("global", "retention_days"),
            self.value("global", "logging.retention_days"),
            fallback=7,
        )
        log_dir = _first_set(
            self.value("global", "log_dir"),
            self.value("global", "paths.log_dir"),
            fallback="logs",
        )

        return {
            "level": str(level),
            "max_log_size_mb": int(max_log_size_mb),
            "retention_days": int(retention_days),
            "log_dir": str(log_dir),
        }

    def resolve_path(self, p: Union[str, Path]) -> Path:
        """
        Resolve relative paths against base_dir (app/); absolute paths pass through.
        """
        pp = Path(p)
        if pp.is_absolute():
            return pp
        return (self.base_dir / pp).resolve()

    # ---------- Constructors ----------

    @classmethod
    def from_file(cls, path: Path, base_dir: Optional[Path] = None) -> "Settings":
        """Load settings from a YAML file; base_dir defaults to the file's directory."""
        data = _load_yaml_safe(path)
        return cls(raw=data, base_dir=base_dir or path.parent)

    @classmethod
    def empty(cls, base_dir: Path) -> "Settings":
        """Return settings with an empty raw structure (e.g. missing config file)."""
        return cls(raw={}, base_dir=base_dir)
||||
|
||||
|
||||
# ---------- Module-level helpers ----------
|
||||
|
||||
def load_runtime_config(config_path: Optional[Path] = None, base_dir: Optional[Path] = None) -> Settings:
    """
    Load the runtime configuration.

    Defaults:
    - base_dir = app/ (the directory that holds start.py)
    - config_path = base_dir / "runtime_config.yaml"

    Returns Settings with an empty raw structure if the file is missing.
    """
    # Derive app/ as the base: .../app/src/config_loader.py -> parents[1] = app/
    app_dir = base_dir or Path(__file__).resolve().parents[1]
    cfg = config_path or (app_dir / "runtime_config.yaml")

    if not cfg.exists():
        return Settings.empty(base_dir=app_dir)
    return Settings.from_file(cfg, base_dir=app_dir)
|
||||
|
||||
|
||||
def _ensure_dict(v: Any) -> Dict[str, Any]:
|
||||
return cast(Dict[str, Any], v) if isinstance(v, dict) else {}
|
||||
|
||||
|
||||
def _unwrap_variable(v: Any, default: Any = None) -> Any:
|
||||
"""
|
||||
Falls v ein Mapping nach dem Muster {beschreibung, wert} ist, gib 'wert' zurück.
|
||||
Andernfalls gib v (als Any) selbst oder default zurück.
|
||||
"""
|
||||
if isinstance(v, dict):
|
||||
mv: Dict[str, Any] = cast(Dict[str, Any], v)
|
||||
if "wert" in mv:
|
||||
return mv.get("wert", default)
|
||||
# Pylance: explizit als Any zurückgeben, um Unknown-Typen zu vermeiden
|
||||
return cast(Any, v) if v is not None else default
|
||||
|
||||
|
||||
_MISSING = object()
|
||||
|
||||
|
||||
def _value_from_node(node: Dict[str, Any], key: str, default: Any) -> Any:
|
||||
"""
|
||||
Holt 'key' aus node. Unterstützt Dot-Pfade und das {beschreibung, wert}-Muster.
|
||||
Gibt _MISSING zurück, wenn der Schlüssel nicht existiert (damit Aufrufer fallbacken kann).
|
||||
"""
|
||||
# Direkter Schlüssel ohne Dot
|
||||
if "." not in key:
|
||||
if key in node:
|
||||
return _unwrap_variable(node.get(key), default)
|
||||
return _MISSING
|
||||
|
||||
# Dot-Pfad
|
||||
current: Any = node
|
||||
for part in key.split("."):
|
||||
if isinstance(current, dict) and part in current:
|
||||
current = cast(Dict[str, Any], current)[part]
|
||||
else:
|
||||
return _MISSING
|
||||
return _unwrap_variable(current, default)
|
||||
|
||||
|
||||
def _load_yaml_safe(path: Path) -> Dict[str, Any]:
    """
    Parse *path* as YAML and return the top-level mapping.

    Returns {} for empty documents and — fix — also for non-mapping roots
    (a YAML list or scalar): the original `data or {}` leaked such values to
    callers that assume a dict (Settings.raw is typed Dict[str, Any]).
    yaml.safe_load is used deliberately so untrusted input cannot construct
    arbitrary Python objects.
    """
    with path.open("r", encoding="utf-8") as f:
        data = yaml.safe_load(f)
    return data if isinstance(data, dict) else {}
|
||||
|
||||
|
||||
# Explicit public API of this module; "_"-prefixed helpers stay private.
__all__ = [
    "Settings",
    "load_runtime_config",
]
|
||||
198
00_Globale_Richtlinien/Entworfener_Code/app/src/logging_setup.py
Normal file
198
00_Globale_Richtlinien/Entworfener_Code/app/src/logging_setup.py
Normal file
@@ -0,0 +1,198 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Iterable, List, Tuple
|
||||
|
||||
from .config_loader import Settings
|
||||
|
||||
|
||||
# Single-line log layout: timestamp | LEVEL | logger name | file:line | message.
DEFAULT_FORMAT = "%(asctime)s | %(levelname)s | %(name)s | %(filename)s:%(lineno)d | %(message)s"
# Timestamp format used by the formatter above.
DEFAULT_DATEFMT = "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
|
||||
def init_logging(settings: Settings) -> None:
    """
    Initialize logging according to the runtime settings.

    Fulfils the requirements:
    - One new file per day under logs/ (TimedRotatingFileHandler -> midnight).
    - Configurable retention period (retention_days).
    - Configurable maximum log size (interpreted as the maximum total size of
      the log directory); oldest files are removed when the limit is exceeded.

    Notes:
    - This function resets root logging (removes existing handlers) to ensure
      consistent behaviour against any previously applied dictConfig.
    - A console handler is always installed in addition to the file handler.
    """
    params = settings.get_logging_params()
    configured_level: str = params.get("level", "INFO")
    effective_level = getattr(logging, configured_level.upper(), logging.INFO)

    # Prepare the log directory.
    target_dir: Path = settings.resolve_path(params.get("log_dir", "logs"))
    target_dir.mkdir(parents=True, exist_ok=True)

    # Reset and reconfigure the root logger.
    root_logger = logging.getLogger()
    _reset_logger_handlers(root_logger)
    root_logger.setLevel(effective_level)

    # One shared formatter for both handlers.
    line_formatter = logging.Formatter(fmt=DEFAULT_FORMAT, datefmt=DEFAULT_DATEFMT)

    # Console handler.
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(effective_level)
    stream_handler.setFormatter(line_formatter)
    root_logger.addHandler(stream_handler)

    # File handler: daily rotation at midnight.
    file_handler = logging.handlers.TimedRotatingFileHandler(
        filename=str(target_dir / "app.log"),
        when="midnight",
        interval=1,
        backupCount=int(params.get("retention_days", 7)),
        encoding="utf-8",
        utc=False,
    )
    file_handler.setLevel(effective_level)
    file_handler.setFormatter(line_formatter)
    root_logger.addHandler(file_handler)

    # Apply cleanup rules (total-size cap plus additional day-based retention).
    try:
        _cleanup_logs(
            log_dir=target_dir,
            max_total_mb=int(params.get("max_log_size_mb", params.get("max_log_size", 10))),
            retention_days=int(params.get("retention_days", 7)),
            file_prefix="app",
            extensions=(".log",),
        )
    except Exception:  # guard: cleanup failures must never abort startup
        logging.getLogger(__name__).exception("Fehler beim Log-Cleanup ignoriert.")
|
||||
|
||||
|
||||
def _reset_logger_handlers(logger: logging.Logger) -> None:
|
||||
"""Entfernt alle existierenden Handler vom Logger."""
|
||||
for h in list(logger.handlers):
|
||||
try:
|
||||
logger.removeHandler(h)
|
||||
try:
|
||||
h.close()
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def _cleanup_logs(
|
||||
log_dir: Path,
|
||||
max_total_mb: int,
|
||||
retention_days: int,
|
||||
file_prefix: str = "app",
|
||||
extensions: Tuple[str, ...] = (".log",),
|
||||
) -> None:
|
||||
"""
|
||||
Bereinigt das Log-Verzeichnis nach zwei Regeln:
|
||||
|
||||
1) Zeitbasierte Aufbewahrung (retention_days):
|
||||
- Löscht Dateien, deren Änderungszeitpunkt älter als retention_days ist.
|
||||
|
||||
2) Größenlimit (max_total_mb):
|
||||
- Sicherstellt, dass die Gesamtgröße aller Log-Dateien (passend zu extensions/prefix)
|
||||
das Limit nicht überschreitet. Bei Überschreitung werden die ältesten Dateien
|
||||
entfernt, bis das Limit eingehalten wird.
|
||||
|
||||
Diese Regeln ergänzen den TimedRotatingFileHandler (backupCount), der ohnehin
|
||||
nur eine feste Anzahl von Backups behält. Mit diesem zusätzlichen Cleanup
|
||||
wird explizit die Aufbewahrungsdauer (in Tagen) und die Gesamtgröße kontrolliert.
|
||||
"""
|
||||
if max_total_mb <= 0 and retention_days <= 0:
|
||||
return
|
||||
|
||||
candidates: List[Path] = _collect_log_files(log_dir, file_prefix, extensions)
|
||||
|
||||
# 1) Zeitbasierte Aufbewahrung (zusätzlich zu backupCount)
|
||||
if retention_days > 0:
|
||||
cutoff = datetime.now() - timedelta(days=retention_days)
|
||||
for f in candidates:
|
||||
try:
|
||||
mtime = datetime.fromtimestamp(f.stat().st_mtime)
|
||||
if mtime < cutoff:
|
||||
_safe_unlink(f)
|
||||
except FileNotFoundError:
|
||||
# Wurde zwischenzeitlich rotiert/gelöscht
|
||||
continue
|
||||
|
||||
# Refresh nach Löschungen
|
||||
candidates = _collect_log_files(log_dir, file_prefix, extensions)
|
||||
|
||||
# 2) Größenlimit
|
||||
if max_total_mb > 0:
|
||||
limit_bytes = max_total_mb * 1024 * 1024
|
||||
# Sortiere nach mtime aufsteigend (älteste zuerst)
|
||||
sorted_by_age = sorted(
|
||||
candidates,
|
||||
key=lambda p: (p.stat().st_mtime if p.exists() else float("inf")),
|
||||
)
|
||||
total_size = _total_size(sorted_by_age)
|
||||
|
||||
for f in sorted_by_age:
|
||||
if total_size <= limit_bytes:
|
||||
break
|
||||
try:
|
||||
size_before = f.stat().st_size if f.exists() else 0
|
||||
_safe_unlink(f)
|
||||
total_size -= size_before
|
||||
except FileNotFoundError:
|
||||
# Bereits gelöscht
|
||||
continue
|
||||
|
||||
|
||||
def _collect_log_files(log_dir: Path, prefix: str, exts: Tuple[str, ...]) -> List[Path]:
|
||||
if not log_dir.exists():
|
||||
return []
|
||||
result: List[Path] = []
|
||||
try:
|
||||
for item in log_dir.iterdir():
|
||||
if not item.is_file():
|
||||
continue
|
||||
if not item.suffix.lower() in exts:
|
||||
continue
|
||||
# Akzeptiere Standard-Dateinamen und Rotationssuffixe (z. B. app.log, app.log.2025-11-13, ...)
|
||||
name = item.name
|
||||
if name.startswith(prefix):
|
||||
result.append(item)
|
||||
except Exception:
|
||||
# Defensive: bei Problemen einfach keine Kandidaten liefern
|
||||
return []
|
||||
return result
|
||||
|
||||
|
||||
def _total_size(files: Iterable[Path]) -> int:
|
||||
s = 0
|
||||
for f in files:
|
||||
try:
|
||||
s += f.stat().st_size
|
||||
except FileNotFoundError:
|
||||
continue
|
||||
return s
|
||||
|
||||
|
||||
def _safe_unlink(p: Path) -> None:
|
||||
try:
|
||||
p.unlink(missing_ok=True) # py3.8+: attribute missing_ok in 3.8? Actually 3.8+: No, use exists() check
|
||||
except TypeError:
|
||||
# Fallback für Python-Versionen ohne missing_ok
|
||||
if p.exists():
|
||||
os.remove(p)
|
||||
|
||||
|
||||
__all__ = ["init_logging"]
|
||||
Reference in New Issue
Block a user