Compare commits

56301967391e783849a949a8b4730fce4c160de7..9cdc85a1e5eba05ad59b106e46f4e26130cb50ef

No commits in common. "56301967391e783849a949a8b4730fce4c160de7" and "9cdc85a1e5eba05ad59b106e46f4e26130cb50ef" have entirely different histories.

20 changed files with 692 additions and 885 deletions

View File

@@ -56,6 +56,8 @@ COPY /templates/ templates/
COPY .env app/
COPY alembic.ini /panettone/alembic.ini
COPY /alembic/ /panettone/alembic/
COPY logging-uvicorn.json /panettone/logging-uvicorn.json
COPY logging-granian.json /panettone/logging-granian.json
COPY pyproject.toml /panettone/pyproject.toml
RUN python -V

View File

@@ -21,7 +21,7 @@ docker-apply-db-migrations: ## apply alembic migrations to database/schema
docker compose run --rm app alembic upgrade head
.PHONY: docker-create-db-migration
docker-create-db-migration: ## Create new alembic database migration aka database revision. Example: make docker-create-db-migration msg="add users table"
docker-create-db-migration: ## Create new alembic database migration aka database revision.
docker compose up -d db | true
docker compose run --no-deps app alembic revision --autogenerate -m "$(msg)"
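
The example invocation lives in the comment being removed above; it still applies to the reworded target (msg is the variable the recipe passes to alembic):

make docker-create-db-migration msg="add users table"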

View File

@@ -1,37 +0,0 @@
"""add json chaos
Revision ID: d021bd4763a5
Revises: 0c69050b5a3e
Create Date: 2025-07-29 15:21:19.415583
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'd021bd4763a5'
down_revision = '0c69050b5a3e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('random_stuff',
sa.Column('id', sa.UUID(), nullable=False),
sa.Column('chaos', postgresql.JSON(astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint('id'),
schema='happy_hog'
)
op.create_unique_constraint(None, 'nonsense', ['name'], schema='happy_hog')
op.create_unique_constraint(None, 'stuff', ['name'], schema='happy_hog')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'stuff', schema='happy_hog', type_='unique')
op.drop_constraint(None, 'nonsense', schema='happy_hog', type_='unique')
op.drop_table('random_stuff', schema='happy_hog')
# ### end Alembic commands ###
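
Two notes on this removed migration: the None constraint names that autogenerate emits would make downgrade()'s op.drop_constraint(None, ...) calls fail at runtime (dropping a constraint requires its real name or a configured naming convention), and the random_stuff table pairs with the RandomStuff model removed in the models diff further down:

class RandomStuff(Base):
    __tablename__ = "random_stuff"
    __table_args__ = ({"schema": "happy_hog"},)
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), default=uuid.uuid4, primary_key=True
    )
    chaos: Mapped[dict] = mapped_column(JSON)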

View File

@@ -1,13 +1,14 @@
import logging
from typing import Annotated
from fastapi import APIRouter, Depends, Query, Request, status
from pydantic import EmailStr
from rotoger import AppStructLogger
from starlette.concurrency import run_in_threadpool
from app.services.smtp import SMTPEmailService
from app.utils.logging import AppLogger
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
router = APIRouter()
@@ -33,7 +34,7 @@ async def redis_check(request: Request):
try:
redis_info = await redis_client.info()
except Exception as e:
await logger.aerror(f"Redis error: {e}")
logging.error(f"Redis error: {e}")
return redis_info
@@ -87,7 +88,7 @@ async def smtp_check(
"subject": subject,
}
await logger.ainfo("Sending email.", email_data=email_data)
logger.info("Sending email with data: %s", email_data)
await run_in_threadpool(
smtp.send_email,

View File

@@ -2,11 +2,11 @@ from typing import Annotated
from fastapi import APIRouter, Depends, Form
from fastapi.responses import StreamingResponse
from rotoger import AppStructLogger
from app.services.llm import get_llm_service
from app.utils.logging import AppLogger
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
router = APIRouter()

View File

@@ -1,27 +1,17 @@
from fastapi import APIRouter, Depends, HTTPException, Request, status
from rotoger import AppStructLogger
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.models.stuff import RandomStuff, Stuff
from app.schemas.stuff import RandomStuff as RandomStuffSchema
from app.models.stuff import Stuff
from app.schemas.stuff import StuffResponse, StuffSchema
from app.utils.logging import AppLogger
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
router = APIRouter(prefix="/v1/stuff")
@router.post("/random", status_code=status.HTTP_201_CREATED)
async def create_random_stuff(
payload: RandomStuffSchema, db_session: AsyncSession = Depends(get_db)
) -> dict[str, str]:
random_stuff = RandomStuff(**payload.model_dump())
await random_stuff.save(db_session)
return {"id": str(random_stuff.id)}
@router.post("/add_many", status_code=status.HTTP_201_CREATED)
async def create_multi_stuff(
payload: list[StuffSchema], db_session: AsyncSession = Depends(get_db)
@@ -31,13 +21,13 @@ async def create_multi_stuff(
db_session.add_all(stuff_instances)
await db_session.commit()
except SQLAlchemyError as ex:
await logger.aerror(f"Error inserting instances of Stuff: {repr(ex)}")
logger.error(f"Error inserting instances of Stuff: {repr(ex)}")
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
) from ex
else:
await logger.ainfo(
f"{len(stuff_instances)} Stuff instances inserted into the database."
logger.info(
f"{len(stuff_instances)} instances of Stuff inserted into database."
)
return True

View File

@@ -1,15 +1,15 @@
from typing import Annotated
from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
from rotoger import AppStructLogger
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.models.user import User
from app.schemas.user import TokenResponse, UserLogin, UserResponse, UserSchema
from app.services.auth import create_access_token
from app.utils.logging import AppLogger
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
router = APIRouter(prefix="/v1/user")
@@ -18,7 +18,7 @@ router = APIRouter(prefix="/v1/user")
async def create_user(
payload: UserSchema, request: Request, db_session: AsyncSession = Depends(get_db)
):
await logger.ainfo(f"Creating user: {payload}")
logger.info(f"Creating user: {payload}")
_user: User = User(**payload.model_dump())
await _user.save(db_session)

View File

@@ -1,11 +1,11 @@
from collections.abc import AsyncGenerator
from rotoger import AppStructLogger
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from app.config import settings as global_settings
from app.utils.logging import AppLogger
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
engine = create_async_engine(
global_settings.asyncpg_url.unicode_string(),
@@ -29,5 +29,5 @@ async def get_db() -> AsyncGenerator:
try:
yield session
except Exception as e:
await logger.aerror(f"Error getting database session: {e}")
logger.error(f"Error getting database session: {e}")
raise
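
The sessionmaker wiring between these two hunks is not shown. Given the async_sessionmaker import above and the AsyncSessionFactory name used in the scheduler diff below, it is presumably close to this sketch (expire_on_commit=False is an assumption, not a visible line):

AsyncSessionFactory = async_sessionmaker(
    engine,
    expire_on_commit=False,  # assumed; allows reading attributes after commit
)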

View File

@@ -2,10 +2,13 @@ from contextlib import asynccontextmanager
from pathlib import Path
import asyncpg
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
from fastapi import Depends, FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from rotoger import AppStructLogger
from app.api.health import router as health_router
from app.api.ml import router as ml_router
@@ -14,72 +17,75 @@ from app.api.shakespeare import router as shakespeare_router
from app.api.stuff import router as stuff_router
from app.api.user import router as user_router
from app.config import settings as global_settings
# from app.database import engine
from app.redis import get_redis
from app.services.auth import AuthBearer
logger = AppStructLogger().get_logger()
# from app.services.scheduler import SchedulerMiddleware
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")
@asynccontextmanager
async def lifespan(app: FastAPI):
app.redis = await get_redis()
postgres_dsn = global_settings.postgres_url.unicode_string()
async def lifespan(_app: FastAPI):
# Load the redis connection
_app.redis = await get_redis()
_postgres_dsn = global_settings.postgres_url.unicode_string()
try:
app.postgres_pool = await asyncpg.create_pool(
dsn=postgres_dsn,
# TODO: cache with the redis connection
# Initialize the postgres connection pool
_app.postgres_pool = await asyncpg.create_pool(
dsn=_postgres_dsn,
min_size=5,
max_size=20,
)
await logger.ainfo(
"Postgres pool created", idle_size=app.postgres_pool.get_idle_size()
)
logger.info(f"Postgres pool created: {_app.postgres_pool.get_idle_size()=}")
yield
finally:
await app.redis.close()
await app.postgres_pool.close()
# close redis connection and release the resources
await _app.redis.close()
# close postgres connection pool and release the resources
await _app.postgres_pool.close()
def create_app() -> FastAPI:
app = FastAPI(
title="Stuff And Nonsense API",
version="0.19.0",
lifespan=lifespan,
)
app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
app.include_router(
health_router, prefix="/v1/public/health", tags=["Health, Public"]
)
app.include_router(
app = FastAPI(title="Stuff And Nonsense API", version="0.19.0", lifespan=lifespan)
app.include_router(stuff_router)
app.include_router(nonsense_router)
app.include_router(shakespeare_router)
app.include_router(user_router)
app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
app.include_router(
health_router,
prefix="/v1/health",
tags=["Health, Bearer"],
dependencies=[Depends(AuthBearer())],
)
)
@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
@app.get("/index", response_class=HTMLResponse)
def get_index(request: Request):
return templates.TemplateResponse("index.html", {"request": request})
return app
app = create_app()
# --- Unused/experimental code and TODOs ---
# from apscheduler import AsyncScheduler
# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
# from apscheduler.eventbrokers.redis import RedisEventBroker
# from app.database import engine
# from app.services.scheduler import SchedulerMiddleware
# _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler")
# _scheduler_event_broker = RedisEventBroker(client_or_url=global_settings.redis_url.unicode_string())
# _scheduler_event_broker = RedisEventBroker(
# client_or_url=global_settings.redis_url.unicode_string()
# )
# _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker)
#
# app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself)
# TODO: every non-GET method should reset cache
# TODO: scheduler tasks needing DB should access connection pool via request
# TODO: every not GET meth should reset cache
# TODO: every scheduler task which needs to act on database should have access to connection pool via request - maybe ?
# TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru
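
Because lifespan() stores the asyncpg pool on the application object, any handler can reach it through the request; a minimal sketch (the route path and query are illustrative, not part of this compare):

@app.get("/pool-demo")  # hypothetical route, for illustration only
async def pool_demo(request: Request):
    # acquire a connection from the pool created in lifespan()
    async with request.app.postgres_pool.acquire() as conn:
        value = await conn.fetchval("SELECT 1")
    return {"value": value}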

View File

@@ -2,12 +2,13 @@ from typing import Any
from asyncpg import UniqueViolationError
from fastapi import HTTPException, status
from rotoger import AppStructLogger
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, declared_attr
logger = AppStructLogger().get_logger()
from app.utils.logging import AppLogger
logger = AppLogger().get_logger()
class Base(DeclarativeBase):
@@ -27,11 +28,9 @@ class Base(DeclarativeBase):
"""
try:
db_session.add(self)
await db_session.commit()
await db_session.refresh(self)
return self
return await db_session.commit()
except SQLAlchemyError as ex:
await logger.aerror(f"Error inserting instance of {self}: {repr(ex)}")
logger.error(f"Error inserting instance of {self}: {repr(ex)}")
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
) from ex
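
Reassembled from this hunk, the new save() body reads as below (the signature is inferred from call sites such as _user.save(db_session) elsewhere in this compare). Note that AsyncSession.commit() returns None, so the method now returns None where the old side returned the refreshed instance:

async def save(self, db_session: AsyncSession):
    try:
        db_session.add(self)
        return await db_session.commit()
    except SQLAlchemyError as ex:
        logger.error(f"Error inserting instance of {self}: {repr(ex)}")
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
        ) from ex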

View File

@@ -1,7 +1,7 @@
import uuid
from sqlalchemy import ForeignKey, String, select
from sqlalchemy.dialects.postgresql import JSON, UUID
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Mapped, joinedload, mapped_column, relationship
@@ -10,16 +10,6 @@ from app.models.nonsense import Nonsense
from app.utils.decorators import compile_sql_or_scalar
class RandomStuff(Base):
__tablename__ = "random_stuff"
__table_args__ = ({"schema": "happy_hog"},)
id: Mapped[uuid.UUID] = mapped_column(
UUID(as_uuid=True), default=uuid.uuid4, primary_key=True
)
chaos: Mapped[dict] = mapped_column(JSON)
class Stuff(Base):
__tablename__ = "stuff"
__table_args__ = ({"schema": "happy_hog"},)

View File

@@ -1,4 +1,3 @@
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
@@ -6,10 +5,6 @@ from pydantic import BaseModel, ConfigDict, Field
config = ConfigDict(from_attributes=True)
class RandomStuff(BaseModel):
chaos: dict[str, Any] = Field(..., description="JSON data for chaos field")
class StuffSchema(BaseModel):
name: str = Field(
title="",

View File

@@ -3,12 +3,12 @@ import time
import jwt
from fastapi import HTTPException, Request
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from rotoger import AppStructLogger
from app.config import settings as global_settings
from app.models.user import User
from app.utils.logging import AppLogger
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
async def get_from_redis(request: Request, key: str):
@@ -40,7 +40,7 @@ class AuthBearer(HTTPBearer):
raise HTTPException(
status_code=403, detail="Invalid token or expired token."
)
await logger.ainfo(f"Token verified: {credentials.credentials}")
logger.info(f"Token verified: {credentials.credentials}")
return credentials.credentials
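
create_access_token is imported by the user router above, but its body is not in this compare. Given the jwt (PyJWT) and time imports shown here, a generic encode/decode pair would look like this (a hypothetical sketch; the secret source, claim set, and HS256 algorithm are all assumptions):

SECRET = "change-me"  # stand-in; the project presumably reads this from settings

def create_access_token_sketch(sub: str, ttl: int = 3600) -> str:
    return jwt.encode({"sub": sub, "exp": int(time.time()) + ttl}, SECRET, algorithm="HS256")

def decode_token_sketch(token: str) -> dict:
    # raises jwt.ExpiredSignatureError / jwt.InvalidTokenError on bad or expired tokens
    return jwt.decode(token, SECRET, algorithms=["HS256"])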

View File

@@ -15,9 +15,9 @@ logger = AppLogger().get_logger()
async def tick():
async with AsyncSessionFactory() as session:
stmt = text("select 1;")
await logger.ainfo(f">>>> Be or not to be...{datetime.now()}")
logger.info(f">>>> Be or not to be...{datetime.now()}")
result = await session.execute(stmt)
await logger.ainfo(f">>>> Result: {result.scalar()}")
logger.info(f">>>> Result: {result.scalar()}")
return True

View File

@@ -5,12 +5,12 @@ from email.mime.text import MIMEText
from attrs import define, field
from fastapi.templating import Jinja2Templates
from pydantic import EmailStr
from rotoger import AppStructLogger
from app.config import settings as global_settings
from app.utils.logging import AppLogger
from app.utils.singleton import SingletonMetaNoArgs
logger = AppStructLogger().get_logger()
logger = AppLogger().get_logger()
@define

View File

@@ -1,98 +1,24 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from pathlib import Path
import orjson
import structlog
from attrs import define, field
from whenever._whenever import Instant
from rich.console import Console
from rich.logging import RichHandler
from app.utils.singleton import SingletonMetaNoArgs
from app.utils.singleton import SingletonMeta
class RotatingBytesLogger:
"""Logger that respects RotatingFileHandler's rotation capabilities."""
class AppLogger(metaclass=SingletonMeta):
_logger = None
def __init__(self, handler):
self.handler = handler
def __init__(self):
self._logger = logging.getLogger(__name__)
def msg(self, message):
"""Process a message and pass it through the handler's emit method."""
if isinstance(message, bytes):
message = message.decode("utf-8")
# Create a log record that will trigger rotation checks
record = logging.LogRecord(
name="structlog",
level=logging.INFO,
pathname="",
lineno=0,
msg=message.rstrip("\n"),
args=(),
exc_info=None,
)
# Check if rotation is needed before emitting
if self.handler.shouldRollover(record):
self.handler.doRollover()
# Emit the record through the handler
self.handler.emit(record)
# Required methods to make it compatible with structlog
def debug(self, message):
self.msg(message)
def info(self, message):
self.msg(message)
def warning(self, message):
self.msg(message)
def error(self, message):
self.msg(message)
def critical(self, message):
self.msg(message)
class RotatingBytesLoggerFactory:
"""Factory that creates loggers that respect file rotation."""
def __init__(self, handler):
self.handler = handler
def __call__(self, *args, **kwargs):
return RotatingBytesLogger(self.handler)
@define
class AppStructLogger(metaclass=SingletonMetaNoArgs):
_logger: structlog.BoundLogger = field(init=False)
def __attrs_post_init__(self):
_log_date = Instant.now().py_datetime().strftime("%Y%m%d")
_log_path = Path(f"{_log_date}_{os.getpid()}.log")
_handler = RotatingFileHandler(
filename=_log_path,
maxBytes=10 * 1024 * 1024, # 10MB
backupCount=5,
encoding="utf-8",
)
structlog.configure(
cache_logger_on_first_use=True,
wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
processors=[
structlog.contextvars.merge_contextvars,
structlog.processors.add_log_level,
structlog.processors.format_exc_info,
structlog.processors.TimeStamper(fmt="iso", utc=True),
structlog.processors.JSONRenderer(serializer=orjson.dumps),
],
logger_factory=RotatingBytesLoggerFactory(_handler),
)
self._logger = structlog.get_logger()
def get_logger(self) -> structlog.BoundLogger:
def get_logger(self):
return self._logger
class RichConsoleHandler(RichHandler):
def __init__(self, width=200, style=None, **kwargs):
super().__init__(
console=Console(color_system="256", width=width, style=style, stderr=True),
**kwargs,
)
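
The interleaved hunk above is hard to read as a whole. Keeping only the lines on the new side, the 24-line replacement module reduces to a stdlib-backed singleton plus the Rich console handler (a reconstruction from the lines visible above, nothing added):

import logging

from rich.console import Console
from rich.logging import RichHandler

from app.utils.singleton import SingletonMeta


class AppLogger(metaclass=SingletonMeta):
    _logger = None

    def __init__(self):
        self._logger = logging.getLogger(__name__)

    def get_logger(self):
        return self._logger


class RichConsoleHandler(RichHandler):
    def __init__(self, width=200, style=None, **kwargs):
        super().__init__(
            console=Console(color_system="256", width=width, style=style, stderr=True),
            **kwargs,
        )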

View File

@@ -10,6 +10,7 @@ services:
- .secrets
command: bash -c "
uvicorn app.main:app
--log-config ./logging-uvicorn.json
--host 0.0.0.0 --port 8080
--lifespan=on --use-colors --loop uvloop --http httptools
--reload --log-level debug
@@ -18,7 +19,6 @@ services:
- ./app:/panettone/app
- ./tests:/panettone/tests
- ./templates:/panettone/templates
- ./alembic:/panettone/alembic
ports:
- "8080:8080"
depends_on:
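
The file referenced by --log-config is copied in the Dockerfile hunk above, but its contents are not part of this compare. Uvicorn parses a .json log config with logging.config.dictConfig, so a minimal file would look something like this (an assumption, not the project's actual config; the granian service below adds an analogous logging-granian.json):

{
  "version": 1,
  "disable_existing_loggers": false,
  "formatters": {
    "default": {"format": "%(asctime)s %(levelname)s %(name)s %(message)s"}
  },
  "handlers": {
    "console": {"class": "logging.StreamHandler", "formatter": "default"}
  },
  "loggers": {
    "uvicorn": {"handlers": ["console"], "level": "DEBUG", "propagate": false}
  }
}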

View File

@@ -12,6 +12,7 @@ services:
granian --interface asgi
--host 0.0.0.0 --port 8080
app.main:app --access-log --log-level debug
--log-config ./logging-granian.json
"
volumes:
- ./app:/panettone/app

View File

@@ -29,7 +29,6 @@ dependencies = [
"polyfactory>=2.21.0",
"granian>=2.3.2",
"apscheduler[redis,sqlalchemy]>=4.0.0a6",
"rotoger",
]
[tool.uv]

uv.lock generated (1237 changed lines)

File diff suppressed because it is too large.