Mirror of https://github.com/grillazz/fastapi-sqlalchemy-asyncpg.git (synced 2025-08-26 16:40:40 +03:00)

Compare commits: 5630196739 ... 9cdc85a1e5

No commits in common. "56301967391e783849a949a8b4730fce4c160de7" and "9cdc85a1e5eba05ad59b106e46f4e26130cb50ef" have entirely different histories. Old lines below are prefixed "-", new lines "+".
Dockerfile
@@ -56,6 +56,8 @@ COPY /templates/ templates/
 COPY .env app/
 COPY alembic.ini /panettone/alembic.ini
 COPY /alembic/ /panettone/alembic/
+COPY logging-uvicorn.json /panettone/logging-uvicorn.json
+COPY logging-granian.json /panettone/logging-granian.json
 COPY pyproject.toml /panettone/pyproject.toml

 RUN python -V
Makefile (2 changes)
@@ -21,7 +21,7 @@ docker-apply-db-migrations: ## apply alembic migrations to database/schema
	docker compose run --rm app alembic upgrade head

.PHONY: docker-create-db-migration
-docker-create-db-migration: ## Create new alembic database migration aka database revision. Example: make docker-create-db-migration msg="add users table"
+docker-create-db-migration: ## Create new alembic database migration aka database revision.
	docker compose up -d db | true
	docker compose run --no-deps app alembic revision --autogenerate -m "$(msg)"
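The help text dropped on the new side doubled as the usage example for this target; the invocation itself is unchanged:

    make docker-create-db-migration msg="add users table"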
alembic migration d021bd4763a5 "add json chaos" (deleted file; its path is not shown in this view)
@@ -1,37 +0,0 @@
-"""add json chaos
-
-Revision ID: d021bd4763a5
-Revises: 0c69050b5a3e
-Create Date: 2025-07-29 15:21:19.415583
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = 'd021bd4763a5'
-down_revision = '0c69050b5a3e'
-branch_labels = None
-depends_on = None
-
-
-def upgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('random_stuff',
-    sa.Column('id', sa.UUID(), nullable=False),
-    sa.Column('chaos', postgresql.JSON(astext_type=sa.Text()), nullable=False),
-    sa.PrimaryKeyConstraint('id'),
-    schema='happy_hog'
-    )
-    op.create_unique_constraint(None, 'nonsense', ['name'], schema='happy_hog')
-    op.create_unique_constraint(None, 'stuff', ['name'], schema='happy_hog')
-    # ### end Alembic commands ###
-
-
-def downgrade():
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint(None, 'stuff', schema='happy_hog', type_='unique')
-    op.drop_constraint(None, 'nonsense', schema='happy_hog', type_='unique')
-    op.drop_table('random_stuff', schema='happy_hog')
-    # ### end Alembic commands ###
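A note for readers of this hunk: passing None as the constraint name in op.create_unique_constraint() defers naming to the metadata's naming convention, and the mirrored op.drop_constraint(None, ...) in downgrade() cannot resolve a name unless such a convention is configured. A minimal sketch of the explicit alternative (the constraint names below are illustrative, not from the repo):

    # in upgrade():
    op.create_unique_constraint("uq_nonsense_name", "nonsense", ["name"], schema="happy_hog")
    # ...and the matching call in downgrade():
    op.drop_constraint("uq_nonsense_name", "nonsense", schema="happy_hog", type_="unique")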
app/api/health.py
@@ -1,13 +1,14 @@
+import logging
 from typing import Annotated

 from fastapi import APIRouter, Depends, Query, Request, status
 from pydantic import EmailStr
-from rotoger import AppStructLogger
 from starlette.concurrency import run_in_threadpool

 from app.services.smtp import SMTPEmailService
+from app.utils.logging import AppLogger

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()

 router = APIRouter()
@@ -33,7 +34,7 @@ async def redis_check(request: Request):
     try:
         redis_info = await redis_client.info()
     except Exception as e:
-        await logger.aerror(f"Redis error: {e}")
+        logging.error(f"Redis error: {e}")
     return redis_info
@@ -87,7 +88,7 @@ async def smtp_check(
         "subject": subject,
     }

-    await logger.ainfo("Sending email.", email_data=email_data)
+    logger.info("Sending email with data: %s", email_data)

     await run_in_threadpool(
         smtp.send_email,
app/api/ml.py
@@ -2,11 +2,11 @@ from typing import Annotated

 from fastapi import APIRouter, Depends, Form
 from fastapi.responses import StreamingResponse
-from rotoger import AppStructLogger

 from app.services.llm import get_llm_service
+from app.utils.logging import AppLogger

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()

 router = APIRouter()
app/api/stuff.py
@@ -1,27 +1,17 @@
 from fastapi import APIRouter, Depends, HTTPException, Request, status
-from rotoger import AppStructLogger
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.ext.asyncio import AsyncSession

 from app.database import get_db
-from app.models.stuff import RandomStuff, Stuff
+from app.models.stuff import Stuff
-from app.schemas.stuff import RandomStuff as RandomStuffSchema
 from app.schemas.stuff import StuffResponse, StuffSchema
+from app.utils.logging import AppLogger

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()

 router = APIRouter(prefix="/v1/stuff")


-@router.post("/random", status_code=status.HTTP_201_CREATED)
-async def create_random_stuff(
-    payload: RandomStuffSchema, db_session: AsyncSession = Depends(get_db)
-) -> dict[str, str]:
-    random_stuff = RandomStuff(**payload.model_dump())
-    await random_stuff.save(db_session)
-    return {"id": str(random_stuff.id)}
-
-
 @router.post("/add_many", status_code=status.HTTP_201_CREATED)
 async def create_multi_stuff(
     payload: list[StuffSchema], db_session: AsyncSession = Depends(get_db)
@@ -31,13 +21,13 @@ async def create_multi_stuff(
         db_session.add_all(stuff_instances)
         await db_session.commit()
     except SQLAlchemyError as ex:
-        await logger.aerror(f"Error inserting instances of Stuff: {repr(ex)}")
+        logger.error(f"Error inserting instances of Stuff: {repr(ex)}")
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
         ) from ex
     else:
-        await logger.ainfo(
+        logger.info(
-            f"{len(stuff_instances)} Stuff instances inserted into the database."
+            f"{len(stuff_instances)} instances of Stuff inserted into database."
         )
         return True
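For orientation, a sketch of exercising this bulk endpoint once the app is up on port 8080 (field values are illustrative; StuffSchema fields beyond name are not shown in this view):

    import httpx

    payload = [{"name": "first"}, {"name": "second"}]
    r = httpx.post("http://localhost:8080/v1/stuff/add_many", json=payload)
    assert r.status_code == 201  # the handler returns True in the body on success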
app/api/user.py
@@ -1,15 +1,15 @@
 from typing import Annotated

 from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
-from rotoger import AppStructLogger
 from sqlalchemy.ext.asyncio import AsyncSession

 from app.database import get_db
 from app.models.user import User
 from app.schemas.user import TokenResponse, UserLogin, UserResponse, UserSchema
 from app.services.auth import create_access_token
+from app.utils.logging import AppLogger

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()

 router = APIRouter(prefix="/v1/user")
@@ -18,7 +18,7 @@ router = APIRouter(prefix="/v1/user")
 async def create_user(
     payload: UserSchema, request: Request, db_session: AsyncSession = Depends(get_db)
 ):
-    await logger.ainfo(f"Creating user: {payload}")
+    logger.info(f"Creating user: {payload}")
     _user: User = User(**payload.model_dump())
     await _user.save(db_session)
app/database.py
@@ -1,11 +1,11 @@
 from collections.abc import AsyncGenerator

-from rotoger import AppStructLogger
 from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

 from app.config import settings as global_settings
+from app.utils.logging import AppLogger

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()

 engine = create_async_engine(
     global_settings.asyncpg_url.unicode_string(),
@@ -29,5 +29,5 @@ async def get_db() -> AsyncGenerator:
         try:
             yield session
         except Exception as e:
-            await logger.aerror(f"Error getting database session: {e}")
+            logger.error(f"Error getting database session: {e}")
             raise
app/main.py (92 changes)
@@ -2,10 +2,13 @@ from contextlib import asynccontextmanager
 from pathlib import Path

 import asyncpg

+# from apscheduler import AsyncScheduler
+# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
+# from apscheduler.eventbrokers.redis import RedisEventBroker
 from fastapi import Depends, FastAPI, Request
 from fastapi.responses import HTMLResponse
 from fastapi.templating import Jinja2Templates
-from rotoger import AppStructLogger

 from app.api.health import router as health_router
 from app.api.ml import router as ml_router
@@ -14,72 +17,75 @@ from app.api.shakespeare import router as shakespeare_router
 from app.api.stuff import router as stuff_router
 from app.api.user import router as user_router
 from app.config import settings as global_settings

+# from app.database import engine
 from app.redis import get_redis
 from app.services.auth import AuthBearer

-logger = AppStructLogger().get_logger()
+# from app.services.scheduler import SchedulerMiddleware
+from app.utils.logging import AppLogger

+logger = AppLogger().get_logger()

 templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")


 @asynccontextmanager
-async def lifespan(app: FastAPI):
-    app.redis = await get_redis()
-    postgres_dsn = global_settings.postgres_url.unicode_string()
+async def lifespan(_app: FastAPI):
+    # Load the redis connection
+    _app.redis = await get_redis()

+    _postgres_dsn = global_settings.postgres_url.unicode_string()

     try:
-        app.postgres_pool = await asyncpg.create_pool(
-            dsn=postgres_dsn,
+        # TODO: cache with the redis connection
+        # Initialize the postgres connection pool
+        _app.postgres_pool = await asyncpg.create_pool(
+            dsn=_postgres_dsn,
             min_size=5,
             max_size=20,
         )
-        await logger.ainfo(
-            "Postgres pool created", idle_size=app.postgres_pool.get_idle_size()
-        )
+        logger.info(f"Postgres pool created: {_app.postgres_pool.get_idle_size()=}")
         yield
     finally:
-        await app.redis.close()
-        await app.postgres_pool.close()
+        # close redis connection and release the resources
+        await _app.redis.close()
+        # close postgres connection pool and release the resources
+        await _app.postgres_pool.close()


-def create_app() -> FastAPI:
-    app = FastAPI(
-        title="Stuff And Nonsense API",
-        version="0.19.0",
-        lifespan=lifespan,
-    )
-    app.include_router(stuff_router)
-    app.include_router(nonsense_router)
-    app.include_router(shakespeare_router)
-    app.include_router(user_router)
-    app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
-    app.include_router(
-        health_router, prefix="/v1/public/health", tags=["Health, Public"]
-    )
-    app.include_router(
-        health_router,
-        prefix="/v1/health",
-        tags=["Health, Bearer"],
-        dependencies=[Depends(AuthBearer())],
-    )
-
-    @app.get("/index", response_class=HTMLResponse)
-    def get_index(request: Request):
-        return templates.TemplateResponse("index.html", {"request": request})
-
-    return app
-
-
-app = create_app()
-
-# --- Unused/experimental code and TODOs ---
-# from apscheduler import AsyncScheduler
-# from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
-# from apscheduler.eventbrokers.redis import RedisEventBroker
-# from app.database import engine
-# from app.services.scheduler import SchedulerMiddleware
+app = FastAPI(title="Stuff And Nonsense API", version="0.19.0", lifespan=lifespan)
+
+app.include_router(stuff_router)
+app.include_router(nonsense_router)
+app.include_router(shakespeare_router)
+app.include_router(user_router)
+app.include_router(ml_router, prefix="/v1/ml", tags=["ML"])
+
+app.include_router(health_router, prefix="/v1/public/health", tags=["Health, Public"])
+app.include_router(
+    health_router,
+    prefix="/v1/health",
+    tags=["Health, Bearer"],
+    dependencies=[Depends(AuthBearer())],
+)
+
+
+@app.get("/index", response_class=HTMLResponse)
+def get_index(request: Request):
+    return templates.TemplateResponse("index.html", {"request": request})

 # _scheduler_data_store = SQLAlchemyDataStore(engine, schema="scheduler")
-# _scheduler_event_broker = RedisEventBroker(client_or_url=global_settings.redis_url.unicode_string())
+# _scheduler_event_broker = RedisEventBroker(
+#     client_or_url=global_settings.redis_url.unicode_string()
+# )
 # _scheduler_himself = AsyncScheduler(_scheduler_data_store, _scheduler_event_broker)
+#
 # app.add_middleware(SchedulerMiddleware, scheduler=_scheduler_himself)
-# TODO: every non-GET method should reset cache
-# TODO: scheduler tasks needing DB should access connection pool via request
+# TODO: every not GET meth should reset cache
+# TODO: every scheduler task which needs to act on database should have access to connection pool via request - maybe ?
 # TODO: https://stackoverflow.com/questions/16053364/make-sure-only-one-worker-launches-the-apscheduler-event-in-a-pyramid-web-app-ru
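On both sides the asyncpg pool is attached to the application object, so request handlers reach it through request.app. A minimal sketch of that access pattern (the route below is illustrative, not part of the diff):

    from fastapi import Request

    @app.get("/pg-now")
    async def pg_now(request: Request):
        # borrow a connection from the pool created in lifespan()
        async with request.app.postgres_pool.acquire() as conn:
            return {"now": str(await conn.fetchval("select now();"))}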
app/models/base.py
@@ -2,12 +2,13 @@ from typing import Any

 from asyncpg import UniqueViolationError
 from fastapi import HTTPException, status
-from rotoger import AppStructLogger
 from sqlalchemy.exc import IntegrityError, SQLAlchemyError
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import DeclarativeBase, declared_attr

-logger = AppStructLogger().get_logger()
+from app.utils.logging import AppLogger
+
+logger = AppLogger().get_logger()


 class Base(DeclarativeBase):
@@ -27,11 +28,9 @@ class Base(DeclarativeBase):
         """
         try:
             db_session.add(self)
-            await db_session.commit()
-            await db_session.refresh(self)
-            return self
+            return await db_session.commit()
         except SQLAlchemyError as ex:
-            await logger.aerror(f"Error inserting instance of {self}: {repr(ex)}")
+            logger.error(f"Error inserting instance of {self}: {repr(ex)}")
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=repr(ex)
             ) from ex
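Worth noting when reading the new side: AsyncSession.commit() returns None, so save() no longer returns the refreshed instance. A caller that needs server-generated fields would have to refresh explicitly; a minimal sketch (model and session names are illustrative):

    stuff = Stuff(name="pg")
    await stuff.save(db_session)     # new side: returns None
    await db_session.refresh(stuff)  # re-load generated values such as the primary key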
app/models/stuff.py
@@ -1,7 +1,7 @@
 import uuid

 from sqlalchemy import ForeignKey, String, select
-from sqlalchemy.dialects.postgresql import JSON, UUID
+from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import Mapped, joinedload, mapped_column, relationship

@@ -10,16 +10,6 @@ from app.models.nonsense import Nonsense
 from app.utils.decorators import compile_sql_or_scalar


-class RandomStuff(Base):
-    __tablename__ = "random_stuff"
-    __table_args__ = ({"schema": "happy_hog"},)
-
-    id: Mapped[uuid.UUID] = mapped_column(
-        UUID(as_uuid=True), default=uuid.uuid4, primary_key=True
-    )
-    chaos: Mapped[dict] = mapped_column(JSON)
-
-
 class Stuff(Base):
     __tablename__ = "stuff"
     __table_args__ = ({"schema": "happy_hog"},)
app/schemas/stuff.py
@@ -1,4 +1,3 @@
-from typing import Any
 from uuid import UUID

 from pydantic import BaseModel, ConfigDict, Field
@@ -6,10 +5,6 @@ from pydantic import BaseModel, ConfigDict, Field
 config = ConfigDict(from_attributes=True)


-class RandomStuff(BaseModel):
-    chaos: dict[str, Any] = Field(..., description="JSON data for chaos field")
-
-
 class StuffSchema(BaseModel):
     name: str = Field(
         title="",
app/services/auth.py
@@ -3,12 +3,12 @@ import time
 import jwt
 from fastapi import HTTPException, Request
 from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
-from rotoger import AppStructLogger

 from app.config import settings as global_settings
 from app.models.user import User
+from app.utils.logging import AppLogger

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()


 async def get_from_redis(request: Request, key: str):
@@ -40,7 +40,7 @@ class AuthBearer(HTTPBearer):
             raise HTTPException(
                 status_code=403, detail="Invalid token or expired token."
             )
-        await logger.ainfo(f"Token verified: {credentials.credentials}")
+        logger.info(f"Token verified: {credentials.credentials}")
         return credentials.credentials
app/services/scheduler.py
@@ -15,9 +15,9 @@ logger = AppLogger().get_logger()
 async def tick():
     async with AsyncSessionFactory() as session:
         stmt = text("select 1;")
-        await logger.ainfo(f">>>> Be or not to be...{datetime.now()}")
+        logger.info(f">>>> Be or not to be...{datetime.now()}")
         result = await session.execute(stmt)
-        await logger.ainfo(f">>>> Result: {result.scalar()}")
+        logger.info(f">>>> Result: {result.scalar()}")
         return True
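tick() is a plain coroutine, so it could be registered with the APScheduler 4.x API referenced in app/main.py's commented-out block. A minimal sketch, assuming the default in-memory data store rather than the SQLAlchemy/Redis wiring from those comments:

    from apscheduler import AsyncScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    async def main():
        async with AsyncScheduler() as scheduler:
            # run tick() every 10 seconds until the scheduler is stopped
            await scheduler.add_schedule(tick, IntervalTrigger(seconds=10))
            await scheduler.run_until_stopped()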
app/services/smtp.py
@@ -5,12 +5,12 @@ from email.mime.text import MIMEText
 from attrs import define, field
 from fastapi.templating import Jinja2Templates
 from pydantic import EmailStr
-from rotoger import AppStructLogger

 from app.config import settings as global_settings
+from app.utils.logging import AppLogger
 from app.utils.singleton import SingletonMetaNoArgs

-logger = AppStructLogger().get_logger()
+logger = AppLogger().get_logger()


 @define
app/utils/logging.py
@@ -1,98 +1,24 @@
 import logging
-import os
-from logging.handlers import RotatingFileHandler
-from pathlib import Path

-import orjson
-import structlog
-from attrs import define, field
-from whenever._whenever import Instant
+from rich.console import Console
+from rich.logging import RichHandler

-from app.utils.singleton import SingletonMetaNoArgs
+from app.utils.singleton import SingletonMeta


-class RotatingBytesLogger:
-    """Logger that respects RotatingFileHandler's rotation capabilities."""
+class AppLogger(metaclass=SingletonMeta):
+    _logger = None

-    def __init__(self, handler):
-        self.handler = handler
+    def __init__(self):
+        self._logger = logging.getLogger(__name__)

-    def msg(self, message):
-        """Process a message and pass it through the handler's emit method."""
-        if isinstance(message, bytes):
-            message = message.decode("utf-8")
-
-        # Create a log record that will trigger rotation checks
-        record = logging.LogRecord(
-            name="structlog",
-            level=logging.INFO,
-            pathname="",
-            lineno=0,
-            msg=message.rstrip("\n"),
-            args=(),
-            exc_info=None,
-        )
-
-        # Check if rotation is needed before emitting
-        if self.handler.shouldRollover(record):
-            self.handler.doRollover()
-
-        # Emit the record through the handler
-        self.handler.emit(record)
-
-    # Required methods to make it compatible with structlog
-    def debug(self, message):
-        self.msg(message)
-
-    def info(self, message):
-        self.msg(message)
-
-    def warning(self, message):
-        self.msg(message)
-
-    def error(self, message):
-        self.msg(message)
-
-    def critical(self, message):
-        self.msg(message)
-
-
-class RotatingBytesLoggerFactory:
-    """Factory that creates loggers that respect file rotation."""
-
-    def __init__(self, handler):
-        self.handler = handler
-
-    def __call__(self, *args, **kwargs):
-        return RotatingBytesLogger(self.handler)
-
-
-@define
-class AppStructLogger(metaclass=SingletonMetaNoArgs):
-    _logger: structlog.BoundLogger = field(init=False)
-
-    def __attrs_post_init__(self):
-        _log_date = Instant.now().py_datetime().strftime("%Y%m%d")
-        _log_path = Path(f"{_log_date}_{os.getpid()}.log")
-        _handler = RotatingFileHandler(
-            filename=_log_path,
-            maxBytes=10 * 1024 * 1024,  # 10MB
-            backupCount=5,
-            encoding="utf-8",
-        )
-        structlog.configure(
-            cache_logger_on_first_use=True,
-            wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
-            processors=[
-                structlog.contextvars.merge_contextvars,
-                structlog.processors.add_log_level,
-                structlog.processors.format_exc_info,
-                structlog.processors.TimeStamper(fmt="iso", utc=True),
-                structlog.processors.JSONRenderer(serializer=orjson.dumps),
-            ],
-            logger_factory=RotatingBytesLoggerFactory(_handler),
-        )
-        self._logger = structlog.get_logger()
-
-    def get_logger(self) -> structlog.BoundLogger:
+    def get_logger(self):
         return self._logger
+
+
+class RichConsoleHandler(RichHandler):
+    def __init__(self, width=200, style=None, **kwargs):
+        super().__init__(
+            console=Console(color_system="256", width=width, style=style, stderr=True),
+            **kwargs,
+        )
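RichConsoleHandler builds its own Console, which suggests it is meant to be instantiated from a logging config file such as the logging-uvicorn.json / logging-granian.json added above (their contents are not shown in this view). A minimal dictConfig-style sketch with illustrative keys:

    import logging.config

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "handlers": {
            # "()" tells dictConfig to call this factory to construct the handler
            "rich": {"()": "app.utils.logging.RichConsoleHandler"},
        },
        "root": {"handlers": ["rich"], "level": "DEBUG"},
    })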
docker compose file (uvicorn app service)
@@ -10,6 +10,7 @@ services:
       - .secrets
     command: bash -c "
       uvicorn app.main:app
+      --log-config ./logging-uvicorn.json
       --host 0.0.0.0 --port 8080
       --lifespan=on --use-colors --loop uvloop --http httptools
       --reload --log-level debug
@@ -18,7 +19,6 @@ services:
       - ./app:/panettone/app
       - ./tests:/panettone/tests
       - ./templates:/panettone/templates
-      - ./alembic:/panettone/alembic
     ports:
       - "8080:8080"
     depends_on:
docker compose file (granian app service)
@@ -12,6 +12,7 @@ services:
       granian --interface asgi
       --host 0.0.0.0 --port 8080
       app.main:app --access-log --log-level debug
+      --log-config ./logging-granian.json
       "
     volumes:
       - ./app:/panettone/app
pyproject.toml
@@ -29,7 +29,6 @@ dependencies = [
     "polyfactory>=2.21.0",
     "granian>=2.3.2",
     "apscheduler[redis,sqlalchemy]>=4.0.0a6",
-    "rotoger",
 ]

 [tool.uv]