wip: add structlog

This commit is contained in:
grillazz 2025-06-18 08:49:32 +02:00
parent 3f09b5701e
commit c09c338b37
5 changed files with 72 additions and 80 deletions

View File

@ -3,9 +3,8 @@ from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from app.config import settings as global_settings
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
from app.utils.logging import setup_structlog
logger = setup_structlog()
engine = create_async_engine(
global_settings.asyncpg_url.unicode_string(),

View File

@ -1,19 +1,14 @@
import logging
import os
from contextlib import asynccontextmanager
from logging.handlers import RotatingFileHandler
from pathlib import Path
import asyncpg
import orjson
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
import structlog
from fastapi import Depends, FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from whenever._whenever import Instant
from app.api.health import router as health_router
from app.api.ml import router as ml_router
@ -22,70 +17,42 @@ from app.api.shakespeare import router as shakespeare_router
from app.api.stuff import router as stuff_router
from app.api.user import router as user_router
from app.config import settings as global_settings
# from app.database import engine
from app.redis import get_redis
from app.services.auth import AuthBearer
from whenever._whenever import Instant
from app.utils.logging import setup_structlog
# from app.services.scheduler import SchedulerMiddleware
import structlog
log_date = Instant.now().py_datetime().strftime("%Y%m%d")
structlog.configure(
cache_logger_on_first_use=True,
wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
processors=[
structlog.contextvars.merge_contextvars,
structlog.processors.add_log_level,
structlog.processors.format_exc_info,
structlog.processors.TimeStamper(fmt="iso", utc=True),
structlog.processors.JSONRenderer(serializer=orjson.dumps),
],
# log per day and per process?
logger_factory=structlog.BytesLoggerFactory(
file=Path(f"cuul_{log_date}_{str(os.getpid())}").with_suffix(".log").open("wb")
)
)
logger = structlog.get_logger()
logger = setup_structlog()
templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")
@asynccontextmanager
async def lifespan(_app: FastAPI):
# Load the redis connection
_app.redis = await get_redis()
_postgres_dsn = global_settings.postgres_url.unicode_string()
async def lifespan(app: FastAPI):
app.redis = await get_redis()
postgres_dsn = global_settings.postgres_url.unicode_string()
try:
# TODO: cache with the redis connection
# Initialize the postgres connection pool
_app.postgres_pool = await asyncpg.create_pool(
dsn=_postgres_dsn,
app.postgres_pool = await asyncpg.create_pool(
dsn=postgres_dsn,
min_size=5,
max_size=20,
)
logger.info("Postgres pool created", _app.postgres_pool.get_idle_size())
logger.info("Postgres pool created", idle_size=app.postgres_pool.get_idle_size())
yield
finally:
# close redis connection and release the resources
await _app.redis.close()
# close postgres connection pool and release the resources
await _app.postgres_pool.close()
app = FastAPI(title="Stuff And Nonsense API", version="0.19.0", lifespan=lifespan)
await app.redis.close()
await app.postgres_pool.close()
def create_app() -> FastAPI:
app = FastAPI(
title="Stuff And Nonsense API",
version="0.19.0",
lifespan=lifespan,
)
app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
app.include_router(
health_router,
@ -94,21 +61,24 @@ app.include_router(
dependencies=[Depends(AuthBearer())],
)
@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request})
return app
app = create_app()
# --- Unused/experimental code and TODOs ---
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
# from app.database import engine
# from app.services.scheduler import SchedulerMiddleware
# _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler")
# _scheduler_event_broker = RedisEventBroker(
# client_or_url=global_settings.redis_url.unicode_string()
# )
# _scheduler_event_broker = RedisEventBroker(client_or_url=global_settings.redis_url.unicode_string())
# _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker)
#
# app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself)
# TODO: every not GET meth should reset cache
# TODO: every scheduler task which needs to act on database should have access to connection pool via request - maybe ?
# TODO: every non-GET method should reset cache
# TODO: scheduler tasks needing DB should access connection pool via request
# TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru

View File

@ -5,6 +5,12 @@ from rich.logging import RichHandler
from app.utils.singleton import SingletonMeta
import logging
import os
import orjson
import structlog
from whenever._whenever import Instant
from pathlib import Path
class AppLogger(metaclass=SingletonMeta):
_logger = None
@ -22,3 +28,22 @@ class RichConsoleHandler(RichHandler):
console=Console(color_system="256", width=width, style=style, stderr=True),
**kwargs,
)
def setup_structlog() -> structlog.BoundLogger:
    """Configure structlog for JSON file logging and return a bound logger.

    Events are serialized with ``orjson.dumps`` (bytes) and written through a
    ``BytesLoggerFactory`` to a file named ``cuul_<YYYYMMDD>_<pid>.log`` — one
    file per day and per worker process, so concurrent workers never share a
    handle.

    This helper is imported (and called) from several modules; re-running
    ``structlog.configure`` on every call would open a fresh file descriptor
    each time and leak the previous one, so configuration is performed only
    once per process, guarded by ``structlog.is_configured()``.

    Returns:
        A structlog bound logger obtained via ``structlog.get_logger()``.
    """
    if not structlog.is_configured():
        log_date = Instant.now().py_datetime().strftime("%Y%m%d")
        log_path = Path(f"cuul_{log_date}_{os.getpid()}.log")
        structlog.configure(
            cache_logger_on_first_use=True,
            wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
            processors=[
                structlog.contextvars.merge_contextvars,
                structlog.processors.add_log_level,
                structlog.processors.format_exc_info,
                structlog.processors.TimeStamper(fmt="iso", utc=True),
                # orjson.dumps returns bytes, matching BytesLoggerFactory below
                structlog.processors.JSONRenderer(serializer=orjson.dumps),
            ],
            # NOTE: the handle stays open for the life of the process (normal
            # for a log sink); the is_configured() guard prevents duplicates.
            logger_factory=structlog.BytesLoggerFactory(
                file=log_path.open("wb")
            ),
        )
    return structlog.get_logger()

View File

@ -10,7 +10,6 @@ services:
- .secrets
command: bash -c "
uvicorn app.main:app
--log-config ./logging-uvicorn.json
--host 0.0.0.0 --port 8080
--lifespan=on --use-colors --loop uvloop --http httptools
--reload --log-level debug

View File

@ -12,7 +12,6 @@ services:
granian --interface asgi
--host 0.0.0.0 --port 8080
app.main:app --access-log --log-level debug
--log-config ./logging-granian.json
"
volumes:
- ./app:/panettone/app