refactor: update logger import paths to use app.services.logging

This commit is contained in:
grillazz
2026-02-16 10:25:55 +01:00
parent ab887ad163
commit 4f0dadc936
13 changed files with 130 additions and 14 deletions

View File

@@ -2,7 +2,7 @@ from typing import Annotated
from fastapi import APIRouter, Depends, Query, Request, status
from pydantic import EmailStr
from rotoger import get_logger
from app.services.logging import get_logger
from starlette.concurrency import run_in_threadpool
from app.services.smtp import SMTPEmailService

View File

@@ -2,7 +2,7 @@ from typing import Annotated
from fastapi import APIRouter, Depends, Form
from fastapi.responses import StreamingResponse
from rotoger import get_logger
from app.services.logging import get_logger
from app.services.llm import get_llm_service

View File

@@ -1,5 +1,5 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from rotoger import get_logger
from app.services.logging import get_logger
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession

View File

@@ -1,7 +1,7 @@
from typing import Annotated
from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
from rotoger import get_logger
from app.services.logging import get_logger
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db

View File

@@ -1,7 +1,7 @@
from collections.abc import AsyncGenerator
from fastapi.exceptions import ResponseValidationError
from rotoger import get_logger
from app.services.logging import get_logger
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

View File

@@ -1,7 +1,7 @@
import orjson
from attrs import define, field
from fastapi import Request
from rotoger import get_logger
from app.services.logging import get_logger
logger = get_logger()

View File

@@ -5,7 +5,7 @@ import asyncpg
from fastapi import Depends, FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from rotoger import get_logger
from app.services.logging import get_logger
from starlette.middleware import Middleware
from starlette.middleware.gzip import GZipMiddleware
@@ -21,12 +21,13 @@ from app.middleware.profiler import ProfilingMiddleware
from app.redis import get_redis
from app.services.auth import AuthBearer
logger = get_logger()
# logger = get_logger()
templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")
@asynccontextmanager
async def lifespan(app: FastAPI):
app.logger = get_logger()
app.redis = await get_redis()
postgres_dsn = global_settings.postgres_url.unicode_string()
try:
@@ -35,12 +36,12 @@ async def lifespan(app: FastAPI):
min_size=5,
max_size=20,
)
await logger.ainfo(
await app.logger.ainfo(
"Postgres pool created", idle_size=app.postgres_pool.get_idle_size()
)
yield
except Exception as e:
await logger.aerror("Error during app startup", error=repr(e))
await app.logger.aerror("Error during app startup", error=repr(e))
raise
finally:
await app.redis.close()

View File

@@ -2,7 +2,7 @@ from typing import Any
from asyncpg import UniqueViolationError
from fastapi import HTTPException, status
from rotoger import get_logger
from app.services.logging import get_logger
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, declared_attr

19
app/server.py Normal file
View File

@@ -0,0 +1,19 @@
"""Standalone Granian server entry point for the ASGI app in ``main:app``."""

from granian import Granian


def startup():
    # Lifecycle hook: runs once before the server begins accepting requests.
    print("Server starting up...")


def shutdown():
    # Lifecycle hook: runs once as the server is stopping.
    print("Server shutting down...")


def main():
    """Build and run the Granian ASGI server (blocks until shutdown)."""
    server = Granian(
        "main:app",
        host="0.0.0.0",  # Bind to all interfaces
        port=8000,
        workers=4,
        interface="asgi",
        blocking_threads=8,  # Optional: threads per worker for blocking ops
    )
    server.on_startup(startup)
    server.on_shutdown(shutdown)
    # NOTE(review): confirm against the installed granian version that
    # `serve_forever` / `on_startup` / `on_shutdown` exist — recent granian
    # releases expose `serve()` as the blocking entry point.
    server.serve_forever()


# Guard so importing app.server (e.g. from tests) does not start the server;
# previously these calls ran at module level as an import side effect.
if __name__ == "__main__":
    main()

View File

@@ -3,7 +3,7 @@ import time
import jwt
from fastapi import HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from rotoger import get_logger
from app.services.logging import get_logger
from app.config import settings as global_settings
from app.models.user import User

96
app/services/logging.py Normal file
View File

@@ -0,0 +1,96 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path
import orjson
import structlog
from whenever._whenever import Instant
def _configure_logger() -> structlog.BoundLogger:
    """
    Configure and return a structlog logger backed by a rotating file handler.

    Environment variables:
        ROTOGER_LOG_PATH: directory for log files (default: current directory).
        ROTOGER_LOG_MAX_BYTES: max bytes per file before rotation (default 10 MiB).
        ROTOGER_LOG_BACKUP_COUNT: rotated files to keep (default 5).

    Records are rendered as JSON (via orjson) into a per-day, per-process
    file named ``YYYYMMDD_<pid>.log``.

    Returns:
        The configured ``structlog.BoundLogger`` instance.
    """
    log_dir = Path(os.environ.get("ROTOGER_LOG_PATH", "."))
    log_dir.mkdir(parents=True, exist_ok=True)

    # One file per calendar day and per worker process, so concurrent
    # workers never interleave writes into the same file.
    log_date = Instant.now().py_datetime().strftime("%Y%m%d")
    log_path = log_dir / f"{log_date}_{os.getpid()}.log"

    # int() ensures env var values (always strings) are correctly typed.
    max_bytes = int(os.environ.get("ROTOGER_LOG_MAX_BYTES", 10 * 1024 * 1024))
    backup_count = int(os.environ.get("ROTOGER_LOG_BACKUP_COUNT", 5))

    handler = RotatingFileHandler(
        filename=log_path,
        maxBytes=max_bytes,
        backupCount=backup_count,
        encoding="utf-8",
    )

    # Use structlog's standard-library integration so plain stdlib loggers
    # (uvicorn, sqlalchemy, ...) share the same JSON output pipeline.
    structlog.configure(
        processors=[
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso", utc=True),
            structlog.processors.format_exc_info,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # Configure the underlying standard-library formatter.
    formatter = structlog.stdlib.ProcessorFormatter(
        # foreign_pre_chain runs on records emitted by non-structlog stdlib
        # loggers before the final JSON rendering below.
        foreign_pre_chain=[
            structlog.contextvars.merge_contextvars,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso", utc=True),
            structlog.processors.format_exc_info,
            structlog.stdlib.add_logger_name,
        ],
        processor=structlog.processors.JSONRenderer(
            serializer=lambda *args, **kwargs: orjson.dumps(*args, **kwargs).decode()
        ),
    )
    handler.setFormatter(formatter)

    def _attach(logger: logging.Logger, level: int) -> None:
        # Route this logger through the rotating JSON handler only;
        # propagate=False prevents duplicate records via ancestor loggers.
        logger.addHandler(handler)
        logger.propagate = False
        logger.setLevel(level)

    # BUGFIX: logging.getLogger() (no argument) is the actual root logger.
    # logging.getLogger("root") creates an unrelated logger *named* "root",
    # so records propagating to the real root never reached the handler.
    _attach(logging.getLogger(), logging.INFO)
    _attach(logging.getLogger("uvicorn"), logging.INFO)
    _attach(logging.getLogger("sqlalchemy"), logging.WARNING)
    # Set SQLAlchemy engine logger level specifically if needed:
    # logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

    return structlog.get_logger()
# Module-level singleton instance, created once at import time so every
# importer of this module shares one configured logger.
_logger_instance = _configure_logger()
def get_logger() -> structlog.BoundLogger:
    """
    Return the configured singleton logger instance.

    Modules call this instead of configuring logging themselves, so the
    whole process shares one handler setup (see ``_configure_logger``).
    """
    return _logger_instance

View File

@@ -3,7 +3,7 @@ from datetime import datetime
from apscheduler import AsyncScheduler
from apscheduler.triggers.interval import IntervalTrigger
from attrs import define
from rotoger import get_logger
from app.services.logging import get_logger
from sqlalchemy import text
from starlette.types import ASGIApp, Receive, Scope, Send

View File

@@ -5,7 +5,7 @@ from email.mime.text import MIMEText
from attrs import define, field
from fastapi.templating import Jinja2Templates
from pydantic import EmailStr
from rotoger import get_logger
from app.services.logging import get_logger
from app.config import settings as global_settings
from app.utils.singleton import SingletonMetaNoArgs