Compare commits

..

22 Commits

Author SHA1 Message Date
Ordinary Hobbit
5630196739
Merge pull request #211 from grillazz/12-add-json-field-example
12-add-json-field-example
2025-07-29 17:34:59 +02:00
grillazz
93f2e66bd0 format code 2025-07-29 17:33:17 +02:00
grillazz
060bdb65fe format code 2025-07-29 17:27:56 +02:00
grillazz
7aace85eeb lint 2025-07-29 17:26:54 +02:00
grillazz
f14c586389 add json filed example 2025-07-29 17:26:37 +02:00
Ordinary Hobbit
72bb711227
Merge pull request #210 from grillazz/198-add-simple-caching 2025-07-28 05:44:00 +02:00
grillazz
353ef0da95 lint code 2025-07-27 20:14:51 +02:00
grillazz
289883cf2e Merge remote-tracking branch 'origin/198-add-simple-caching' into 198-add-simple-caching 2025-07-27 20:09:42 +02:00
grillazz
a8c645ad95 switch logger to rotoger 2025-07-27 20:09:30 +02:00
Ordinary Hobbit
6f82883612
Merge pull request #209 from grillazz/198-add-simple-caching
add structure file logging with log files rotating
2025-07-26 19:36:40 +02:00
Ordinary Hobbit
63859e8305
Update app/utils/logging.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-26 19:33:13 +02:00
grillazz
ffccf8fda0 lint code 2025-07-26 19:28:56 +02:00
grillazz
a99a0e780b wip: async logging 2025-07-26 19:25:46 +02:00
grillazz
6ec8a3ce0a wip: add RotatingBytesLogger 2025-07-26 18:59:28 +02:00
Ordinary Hobbit
6c54aee57b
Merge pull request #208 from grillazz/198-add-simple-caching
198 add structlog
2025-07-19 20:41:34 +02:00
grillazz
8e7692bd32 wip: reimplement AppStructLogger with attrs lib 2025-06-29 21:22:46 +02:00
grillazz
1098e39f71 wip: replace AppLogger with AppStructLogger 2025-06-29 21:19:30 +02:00
grillazz
d0d26687df wip: lint 2025-06-29 08:59:06 +02:00
grillazz
7e0024876c wip: BytesToTextIOWrapper wraps a text handler and encodes bytes to text. 2025-06-28 22:05:45 +02:00
grillazz
9716a0b54c Merge remote-tracking branch 'origin/198-add-simple-caching' into 198-add-simple-caching 2025-06-18 08:49:45 +02:00
grillazz
c09c338b37 wip: add structlog 2025-06-18 08:49:32 +02:00
grillazz
3f09b5701e add structlog 2025-06-17 20:02:04 +02:00
20 changed files with 884 additions and 691 deletions

View File

@@ -56,8 +56,6 @@ COPY /templates/ templates/
COPY .env app/
COPY alembic.ini /panettone/alembic.ini
COPY /alembic/ /panettone/alembic/
COPY logging-uvicorn.json /panettone/logging-uvicorn.json
COPY logging-granian.json /panettone/logging-granian.json
COPY pyproject.toml /panettone/pyproject.toml
RUN python -V

View File

@@ -21,7 +21,7 @@ docker-apply-db-migrations: ## apply alembic migrations to database/schema
docker compose run --rm app alembic upgrade head
.PHONY: docker-create-db-migration
docker-create-db-migration: ## Create new alembic database migration aka database revision.
docker-create-db-migration: ## Create new alembic database migration aka database revision. Example: make docker-create-db-migration msg="add users table"
docker compose up -d db | true
docker compose run --no-deps app alembic revision --autogenerate -m "$(msg)"

View File

@@ -0,0 +1,37 @@
"""add json chaos
Revision ID: d021bd4763a5
Revises: 0c69050b5a3e
Create Date: 2025-07-29 15:21:19.415583
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd021bd4763a5'
down_revision = '0c69050b5a3e'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('random_stuff',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('chaos', postgresql.JSON(astext_type=sa.Text()), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        schema='happy_hog'
    )
    op.create_unique_constraint(None, 'nonsense', ['name'], schema='happy_hog')
    op.create_unique_constraint(None, 'stuff', ['name'], schema='happy_hog')
    # ### end Alembic commands ###

def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'stuff', schema='happy_hog', type_='unique')
    op.drop_constraint(None, 'nonsense', schema='happy_hog', type_='unique')
    op.drop_table('random_stuff', schema='happy_hog')
    # ### end Alembic commands ###
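
Note: as the Alembic banner says, autogenerated commands need adjusting. op.create_unique_constraint(None, ...) is fine at upgrade time (Postgres assigns a default name), but op.drop_constraint(None, ...) fails at downgrade time, since dropping a constraint requires its name. A minimal fix, assuming Postgres's default <table>_<column>_key naming, would be:

def downgrade():
    # Names below assume Postgres's default unique-constraint naming;
    # adjust if a MetaData naming convention is configured.
    op.drop_constraint('stuff_name_key', 'stuff', schema='happy_hog', type_='unique')
    op.drop_constraint('nonsense_name_key', 'nonsense', schema='happy_hog', type_='unique')
    op.drop_table('random_stuff', schema='happy_hog')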

View File

@@ -1,14 +1,13 @@
import logging
from typing import Annotated
from fastapi import APIRouter, Depends, Query, Request, status
from pydantic import EmailStr
from rotoger import AppStructLogger
from starlette.concurrency import run_in_threadpool
from app.services.smtp import SMTPEmailService
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
router = APIRouter()
@@ -34,7 +33,7 @@ async def redis_check(request: Request):
try:
redis_info = await redis_client.info()
except Exception as e:
logging.error(f"Redis error: {e}")
await logger.aerror(f"Redis error: {e}")
return redis_info
@@ -88,7 +87,7 @@ async def smtp_check(
"subject": subject,
}
logger.info("Sending email with data: %s", email_data)
await logger.ainfo("Sending email.", email_data=email_data)
await run_in_threadpool(
smtp.send_email,

View File

@@ -2,11 +2,11 @@ from typing import Annotated
from fastapi import APIRouter, Depends, Form
from fastapi.responses import StreamingResponse
from rotoger import AppStructLogger
from app.services.llm import get_llm_service
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
router = APIRouter()

View File

@@ -1,17 +1,27 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from rotoger import AppStructLogger
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.models.stuff import Stuff
from app.models.stuff import RandomStuff, Stuff
from app.schemas.stuff import RandomStuff as RandomStuffSchema
from app.schemas.stuff import StuffResponse, StuffSchema
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
router = APIRouter(prefix="/v1/stuff")
@router.post("/random", status_code=status.HTTP_201_CREATED)
async def create_random_stuff(
payload: RandomStuffSchema, db_session: AsyncSession = Depends(get_db)
) -> dict[str, str]:
random_stuff = RandomStuff(**payload.model_dump())
await random_stuff.save(db_session)
return {"id": str(random_stuff.id)}
@router.post("/add_many", status_code=status.HTTP_201_CREATED)
async def create_multi_stuff(
payload: list[StuffSchema], db_session: AsyncSession = Depends(get_db)
@@ -21,13 +31,13 @@ async def create_multi_stuff(
db_session.add_all(stuff_instances)
await db_session.commit()
except SQLAlchemyError as ex:
logger.error(f"Error inserting instances of Stuff: {repr(ex)}")
await logger.aerror(f"Error inserting instances of Stuff: {repr(ex)}")
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
) from ex
else:
logger.info(
f"{len(stuff_instances)} instances of Stuff inserted into database."
await logger.ainfo(
f"{len(stuff_instances)} Stuff instances inserted into the database."
)
return True
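
A quick way to exercise the new /v1/stuff/random endpoint, as a sketch (assumes the stack from compose.yml is listening on localhost:8080 and that httpx is available):

import asyncio

import httpx

async def main() -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8080") as client:
        # "chaos" accepts arbitrary JSON, per the RandomStuff schema
        resp = await client.post(
            "/v1/stuff/random",
            json={"chaos": {"answer": 42, "tags": ["a", "b"]}},
        )
        resp.raise_for_status()
        print(resp.json())  # e.g. {"id": "<some uuid>"}

asyncio.run(main())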

View File

@@ -1,15 +1,15 @@
from typing import Annotated
from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
from rotoger import AppStructLogger
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.models.user import User
from app.schemas.user import TokenResponse, UserLogin, UserResponse, UserSchema
from app.services.auth import create_access_token
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
router = APIRouter(prefix="/v1/user")
@@ -18,7 +18,7 @@ router = APIRouter(prefix="/v1/user")
async def create_user(
payload: UserSchema, request: Request, db_session: AsyncSession = Depends(get_db)
):
logger.info(f"Creating user: {payload}")
await logger.ainfo(f"Creating user: {payload}")
_user: User = User(**payload.model_dump())
await _user.save(db_session)

View File

@@ -1,11 +1,11 @@
from collections.abc import AsyncGenerator
from rotoger import AppStructLogger
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from app.config import settings as global_settings
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
engine = create_async_engine(
global_settings.asyncpg_url.unicode_string(),
@@ -29,5 +29,5 @@ async def get_db() -> AsyncGenerator:
try:
yield session
except Exception as e:
logger.error(f"Error getting database session: {e}")
await logger.aerror(f"Error getting database session: {e}")
raise

View File

@@ -2,13 +2,10 @@ from contextlib import asynccontextmanager
from pathlib import Path
import asyncpg
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
from fastapi import Depends, FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from rotoger import AppStructLogger
from app.api.health import router as health_router
from app.api.ml import router as ml_router
@@ -17,75 +14,72 @@ from app.api.shakespeare import router as shakespeare_router
from app.api.stuff import router as stuff_router
from app.api.user import router as user_router
from app.config import settings as global_settings
# from app.database import engine
from app.redis import get_redis
from app.services.auth import AuthBearer
# from app.services.scheduler import SchedulerMiddleware
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")
@asynccontextmanager
async def lifespan(_app: FastAPI):
# Load the redis connection
_app.redis = await get_redis()
_postgres_dsn = global_settings.postgres_url.unicode_string()
async def lifespan(app: FastAPI):
app.redis = await get_redis()
postgres_dsn = global_settings.postgres_url.unicode_string()
try:
# TODO: cache with the redis connection
# Initialize the postgres connection pool
_app.postgres_pool = await asyncpg.create_pool(
dsn=_postgres_dsn,
app.postgres_pool = await asyncpg.create_pool(
dsn=postgres_dsn,
min_size=5,
max_size=20,
)
logger.info(f"Postgres pool created: {_app.postgres_pool.get_idle_size()=}")
await logger.ainfo(
"Postgres pool created", idle_size=app.postgres_pool.get_idle_size()
)
yield
finally:
# close redis connection and release the resources
await _app.redis.close()
# close postgres connection pool and release the resources
await _app.postgres_pool.close()
await app.redis.close()
await app.postgres_pool.close()
app = FastAPI(title="Stuff And Nonsense API", version="0.19.0", lifespan=lifespan)
def create_app() -> FastAPI:
app = FastAPI(
title="Stuff And Nonsense API",
version="0.19.0",
lifespan=lifespan,
)
app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
app.include_router(
health_router, prefix="/v1/public/health", tags=["Health, Public"]
)
app.include_router(
health_router,
prefix="/v1/health",
tags=["Health, Bearer"],
dependencies=[Depends(AuthBearer())],
)
app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request})
return app
app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
app.include_router(
health_router,
prefix="/v1/health",
tags=["Health, Bearer"],
dependencies=[Depends(AuthBearer())],
)
@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request})
app = create_app()
# --- Unused/experimental code and TODOs ---
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
# from app.database import engine
# from app.services.scheduler import SchedulerMiddleware
# _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler")
# _scheduler_event_broker = RedisEventBroker(
# client_or_url=global_settings.redis_url.unicode_string()
# )
# _scheduler_event_broker = RedisEventBroker(client_or_url=global_settings.redis_url.unicode_string())
# _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker)
#
# app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself)
# TODO: every not GET meth should reset cache
# TODO: every scheduler task which needs to act on database should have access to connection pool via request - maybe ?
# TODO: every non-GET method should reset cache
# TODO: scheduler tasks needing DB should access connection pool via request
# TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru
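
The create_app() factory also makes the application easy to construct in tests; a minimal sketch (the lifespan still connects to Redis and Postgres on startup, so those services must be reachable for this to pass):

from fastapi.testclient import TestClient

from app.main import create_app

# Entering the context manager runs the lifespan (Redis client + asyncpg pool).
with TestClient(create_app()) as client:
    response = client.get("/index")  # the HTML route registered inside create_app
    assert response.status_code == 200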

View File

@@ -2,13 +2,12 @@ from typing import Any
from asyncpg import UniqueViolationError
from fastapi import HTTPException, status
from rotoger import AppStructLogger
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, declared_attr
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
class Base(DeclarativeBase):
@@ -28,9 +27,11 @@ class Base(DeclarativeBase):
"""
try:
db_session.add(self)
return await db_session.commit()
await db_session.commit()
await db_session.refresh(self)
return self
except SQLAlchemyError as ex:
logger.error(f"Error inserting instance of {self}: {repr(ex)}")
await logger.aerror(f"Error inserting instance of {self}: {repr(ex)}")
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
) from ex
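
Returning the refreshed instance (instead of the None returned by commit()) lets callers read server-generated values immediately; for example (a sketch, assuming Stuff accepts a name field):

stuff = Stuff(name="widget")
saved = await stuff.save(db_session)
print(saved.id)  # populated by the refresh after commit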

View File

@@ -1,7 +1,7 @@
import uuid
from sqlalchemy import ForeignKey, String, select
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Mapped, joinedload, mapped_column, relationship
@@ -10,6 +10,16 @@ from app.models.nonsense import Nonsense
from app.utils.decorators import compile_sql_or_scalar
class RandomStuff(Base):
    __tablename__ = "random_stuff"
    __table_args__ = ({"schema": "happy_hog"},)
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), default=uuid.uuid4, primary_key=True
    )
    chaos: Mapped[dict] = mapped_column(JSON)
class Stuff(Base):
__tablename__ = "stuff"
__table_args__ = ({"schema": "happy_hog"},)

View File

@@ -1,3 +1,4 @@
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
@@ -5,6 +6,10 @@ from pydantic import BaseModel, ConfigDict, Field
config = ConfigDict(from_attributes=True)
class RandomStuff(BaseModel):
    chaos: dict[str, Any] = Field(..., description="JSON data for chaos field")
class StuffSchema(BaseModel):
name: str = Field(
title="",

View File

@@ -3,12 +3,12 @@ import time
import jwt
from fastapi import HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from rotoger import AppStructLogger
from app.config import settings as global_settings
from app.models.user import User
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
async def get_from_redis(request: Request, key: str):
@@ -40,7 +40,7 @@ class AuthBearer(HTTPBearer):
raise HTTPException(
status_code=403, detail="Invalid token or expired token."
)
logger.info(f"Token verified: {credentials.credentials}")
await logger.ainfo(f"Token verified: {credentials.credentials}")
return credentials.credentials

View File

@@ -15,9 +15,9 @@ logger = AppLogger().get_logger()
async def tick():
async with AsyncSessionFactory() as session:
stmt = text("select 1;")
logger.info(f">>>> Be or not to be...{datetime.now()}")
await logger.ainfo(f">>>> Be or not to be...{datetime.now()}")
result = await session.execute(stmt)
logger.info(f">>>> Result: {result.scalar()}")
await logger.ainfo(f">>>> Result: {result.scalar()}")
return True

View File

@@ -5,12 +5,12 @@ from email.mime.text import MIMEText
from attrs import define, field
from fastapi.templating import Jinja2Templates
from pydantic import EmailStr
from rotoger import AppStructLogger
from app.config import settings as global_settings
from app.utils.logging import AppLogger
from app.utils.singleton import SingletonMetaNoArgs
logger = AppLogger().get_logger()
logger = AppStructLogger().get_logger()
@define

View File

@@ -1,24 +1,98 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path
from rich.console import Console
from rich.logging import RichHandler
import orjson
import structlog
from attrs import define, field
from whenever._whenever import Instant
from app.utils.singleton import SingletonMeta
from app.utils.singleton import SingletonMetaNoArgs
class AppLogger(metaclass=SingletonMeta):
_logger = None
class RotatingBytesLogger:
"""Logger that respects RotatingFileHandler's rotation capabilities."""
def __init__(self):
self._logger = logging.getLogger(__name__)
def __init__(self, handler):
self.handler = handler
def get_logger(self):
return self._logger
def msg(self, message):
"""Process a message and pass it through the handler's emit method."""
if isinstance(message, bytes):
message = message.decode("utf-8")
class RichConsoleHandler(RichHandler):
def __init__(self, width=200, style=None, **kwargs):
super().__init__(
console=Console(color_system="256", width=width, style=style, stderr=True),
**kwargs,
# Create a log record that will trigger rotation checks
record = logging.LogRecord(
name="structlog",
level=logging.INFO,
pathname="",
lineno=0,
msg=message.rstrip("\n"),
args=(),
exc_info=None,
)
# Check if rotation is needed before emitting
if self.handler.shouldRollover(record):
self.handler.doRollover()
# Emit the record through the handler
self.handler.emit(record)
# Required methods to make it compatible with structlog
def debug(self, message):
self.msg(message)
def info(self, message):
self.msg(message)
def warning(self, message):
self.msg(message)
def error(self, message):
self.msg(message)
def critical(self, message):
self.msg(message)
class RotatingBytesLoggerFactory:
"""Factory that creates loggers that respect file rotation."""
def __init__(self, handler):
self.handler = handler
def __call__(self, *args, **kwargs):
return RotatingBytesLogger(self.handler)
@define
class AppStructLogger(metaclass=SingletonMetaNoArgs):
    _logger: structlog.BoundLogger = field(init=False)

    def __attrs_post_init__(self):
        _log_date = Instant.now().py_datetime().strftime("%Y%m%d")
        _log_path = Path(f"{_log_date}_{os.getpid()}.log")
        _handler = RotatingFileHandler(
            filename=_log_path,
            maxBytes=10 * 1024 * 1024,  # 10MB
            backupCount=5,
            encoding="utf-8",
        )
        structlog.configure(
            cache_logger_on_first_use=True,
            wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
            processors=[
                structlog.contextvars.merge_contextvars,
                structlog.processors.add_log_level,
                structlog.processors.format_exc_info,
                structlog.processors.TimeStamper(fmt="iso", utc=True),
                structlog.processors.JSONRenderer(serializer=orjson.dumps),
            ],
            logger_factory=RotatingBytesLoggerFactory(_handler),
        )
        self._logger = structlog.get_logger()

    def get_logger(self) -> structlog.BoundLogger:
        return self._logger
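
Call sites use the singleton much as they did AppLogger, except log calls are awaited via structlog's async methods (ainfo, aerror, and so on) and extra context is passed as keyword arguments; a sketch of the pattern used throughout this diff:

from rotoger import AppStructLogger  # call sites import from the rotoger package

logger = AppStructLogger().get_logger()

async def handle_payload(size: int) -> None:
    # keyword arguments become fields in the orjson-rendered JSON log line
    await logger.ainfo("payload received", size=size)
    try:
        1 / 0
    except ZeroDivisionError as exc:
        await logger.aerror("processing failed", error=repr(exc))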

View File

@@ -10,7 +10,6 @@ services:
- .secrets
command: bash -c "
uvicorn app.main:app
--log-config ./logging-uvicorn.json
--host 0.0.0.0 --port 8080
--lifespan=on --use-colors --loop uvloop --http httptools
--reload --log-level debug
@@ -19,6 +18,7 @@ services:
- ./app:/panettone/app
- ./tests:/panettone/tests
- ./templates:/panettone/templates
- ./alembic:/panettone/alembic
ports:
- "8080:8080"
depends_on:

View File

@@ -12,7 +12,6 @@ services:
granian --interface asgi
--host 0.0.0.0 --port 8080
app.main:app --access-log --log-level debug
--log-config ./logging-granian.json
"
volumes:
- ./app:/panettone/app

View File

@@ -29,6 +29,7 @@ dependencies = [
"polyfactory>=2.21.0",
"granian>=2.3.2",
"apscheduler[redis,sqlalchemy]>=4.0.0a6",
"rotoger",
]
[tool.uv]

uv.lock (generated): 1237 lines changed

File diff suppressed because it is too large.