wip: add structlog

This commit is contained in:
grillazz 2025-06-18 08:49:32 +02:00
parent 3f09b5701e
commit c09c338b37
5 changed files with 72 additions and 80 deletions

View File

@ -3,9 +3,8 @@ from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from app.config import settings as global_settings from app.config import settings as global_settings
from app.utils.logging import AppLogger from app.utils.logging import setup_structlog
logger = setup_structlog()
logger = AppLogger().get_logger()
engine = create_async_engine( engine = create_async_engine(
global_settings.asyncpg_url.unicode_string(), global_settings.asyncpg_url.unicode_string(),

View File

@ -1,19 +1,14 @@
import logging import logging
import os import os
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from logging.handlers import RotatingFileHandler
from pathlib import Path from pathlib import Path
import asyncpg import asyncpg
import orjson import orjson
import structlog
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
from fastapi import Depends, FastAPI, Request from fastapi import Depends, FastAPI, Request
from fastapi.responses import HTMLResponse from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates from fastapi.templating import Jinja2Templates
from whenever._whenever import Instant
from app.api.health import router as health_router from app.api.health import router as health_router
from app.api.ml import router as ml_router from app.api.ml import router as ml_router
@ -22,93 +17,68 @@ from app.api.shakespeare import router as shakespeare_router
from app.api.stuff import router as stuff_router from app.api.stuff import router as stuff_router
from app.api.user import router as user_router from app.api.user import router as user_router
from app.config import settings as global_settings from app.config import settings as global_settings
# from app.database import engine
from app.redis import get_redis from app.redis import get_redis
from app.services.auth import AuthBearer from app.services.auth import AuthBearer
from whenever._whenever import Instant
from app.utils.logging import setup_structlog
# from app.services.scheduler import SchedulerMiddleware
import structlog
log_date = Instant.now().py_datetime().strftime("%Y%m%d")
structlog.configure(
cache_logger_on_first_use=True,
wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
processors=[
structlog.contextvars.merge_contextvars,
structlog.processors.add_log_level,
structlog.processors.format_exc_info,
structlog.processors.TimeStamper(fmt="iso", utc=True),
structlog.processors.JSONRenderer(serializer=orjson.dumps),
],
# log per day and per process?
logger_factory=structlog.BytesLoggerFactory(
file=Path(f"cuul_{log_date}_{str(os.getpid())}").with_suffix(".log").open("wb")
)
)
logger = structlog.get_logger()
logger = setup_structlog()
templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates") templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")
@asynccontextmanager @asynccontextmanager
async def lifespan(_app: FastAPI): async def lifespan(app: FastAPI):
# Load the redis connection app.redis = await get_redis()
_app.redis = await get_redis() postgres_dsn = global_settings.postgres_url.unicode_string()
_postgres_dsn = global_settings.postgres_url.unicode_string()
try: try:
# TODO: cache with the redis connection app.postgres_pool = await asyncpg.create_pool(
# Initialize the postgres connection pool dsn=postgres_dsn,
_app.postgres_pool = await asyncpg.create_pool(
dsn=_postgres_dsn,
min_size=5, min_size=5,
max_size=20, max_size=20,
) )
logger.info("Postgres pool created", _app.postgres_pool.get_idle_size()) logger.info("Postgres pool created", idle_size=app.postgres_pool.get_idle_size())
yield yield
finally: finally:
# close redis connection and release the resources await app.redis.close()
await _app.redis.close() await app.postgres_pool.close()
# close postgres connection pool and release the resources
await _app.postgres_pool.close()
def create_app() -> FastAPI:
app = FastAPI(title="Stuff And Nonsense API", version="0.19.0", lifespan=lifespan) app = FastAPI(
title="Stuff And Nonsense API",
app.include_router(stuff_router) version="0.19.0",
app.include_router(nonsense_router) lifespan=lifespan,
app.include_router(shakespeare_router) )
app.include_router(user_router) app.include_router(stuff_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"]) app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"]) app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
app.include_router( app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
app.include_router(
health_router, health_router,
prefix="/v1/health", prefix="/v1/health",
tags=["Health, Bearer"], tags=["Health, Bearer"],
dependencies=[Depends(AuthBearer())], dependencies=[Depends(AuthBearer())],
) )
@app.get("/index", response_class=HTMLResponse)
@app.get("/index", response_class=HTMLResponse) def get_index(request: Request):
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request}) return templates.TemplateResponse("index.html", {"request": request})
return app
app = create_app()
# --- Unused/experimental code and TODOs ---
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
# from app.database import engine
# from app.services.scheduler import SchedulerMiddleware
# _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler") # _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler")
# _scheduler_event_broker = RedisEventBroker( # _scheduler_event_broker = RedisEventBroker(client_or_url=global_settings.redis_url.unicode_string())
# client_or_url=global_settings.redis_url.unicode_string()
# )
# _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker) # _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker)
#
# app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself) # app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself)
# TODO: every non-GET method should reset cache
# TODO: scheduler tasks needing DB should access connection pool via request
# TODO: every non-GET method should reset the cache
# TODO: any scheduler task that acts on the database should access the connection pool via the request — confirm approach
# TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru # TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru

View File

@ -5,6 +5,12 @@ from rich.logging import RichHandler
from app.utils.singleton import SingletonMeta from app.utils.singleton import SingletonMeta
import logging
import os
import orjson
import structlog
from whenever._whenever import Instant
from pathlib import Path
class AppLogger(metaclass=SingletonMeta): class AppLogger(metaclass=SingletonMeta):
_logger = None _logger = None
@ -22,3 +28,22 @@ class RichConsoleHandler(RichHandler):
console=Console(color_system="256", width=width, style=style, stderr=True), console=Console(color_system="256", width=width, style=style, stderr=True),
**kwargs, **kwargs,
) )
def setup_structlog() -> structlog.BoundLogger:
    """Configure structlog for JSON-lines file logging and return a logger.

    Records are rendered to JSON via orjson and written as bytes to a file
    named ``cuul_<YYYYMMDD>_<pid>.log`` in the current working directory, so
    each worker process gets its own daily log file. Filtering is bound at
    INFO level. NOTE(review): the file handle opened here stays open for the
    life of the process — confirm that is intended.
    """
    today = Instant.now().py_datetime().strftime("%Y%m%d")
    sink = Path(f"cuul_{today}_{os.getpid()}.log")

    # Processor chain: contextvars first, then level/exc enrichment,
    # ISO-8601 UTC timestamps, and finally the JSON renderer.
    chain = [
        structlog.contextvars.merge_contextvars,
        structlog.processors.add_log_level,
        structlog.processors.format_exc_info,
        structlog.processors.TimeStamper(fmt="iso", utc=True),
        structlog.processors.JSONRenderer(serializer=orjson.dumps),
    ]

    structlog.configure(
        cache_logger_on_first_use=True,
        wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
        processors=chain,
        logger_factory=structlog.BytesLoggerFactory(file=sink.open("wb")),
    )
    return structlog.get_logger()

View File

@ -10,7 +10,6 @@ services:
- .secrets - .secrets
command: bash -c " command: bash -c "
uvicorn app.main:app uvicorn app.main:app
--log-config ./logging-uvicorn.json
--host 0.0.0.0 --port 8080 --host 0.0.0.0 --port 8080
--lifespan=on --use-colors --loop uvloop --http httptools --lifespan=on --use-colors --loop uvloop --http httptools
--reload --log-level debug --reload --log-level debug

View File

@ -12,7 +12,6 @@ services:
granian --interface asgi granian --interface asgi
--host 0.0.0.0 --port 8080 --host 0.0.0.0 --port 8080
app.main:app --access-log --log-level debug app.main:app --access-log --log-level debug
--log-config ./logging-granian.json
" "
volumes: volumes:
- ./app:/panettone/app - ./app:/panettone/app