diff --git a/app/api/health.py b/app/api/health.py
index 4990281..518bd31 100644
--- a/app/api/health.py
+++ b/app/api/health.py
@@ -2,7 +2,7 @@ from typing import Annotated
 
 from fastapi import APIRouter, Depends, Query, Request, status
 from pydantic import EmailStr
-from rotoger import get_logger
+from app.services.logging import get_logger
 from starlette.concurrency import run_in_threadpool
 
 from app.services.smtp import SMTPEmailService
diff --git a/app/api/ml.py b/app/api/ml.py
index 21abde2..f1af2b1 100644
--- a/app/api/ml.py
+++ b/app/api/ml.py
@@ -2,7 +2,7 @@ from typing import Annotated
 
 from fastapi import APIRouter, Depends, Form
 from fastapi.responses import StreamingResponse
-from rotoger import get_logger
+from app.services.logging import get_logger
 
 from app.services.llm import get_llm_service
 
diff --git a/app/api/stuff.py b/app/api/stuff.py
index 2f4743d..6da3c5f 100644
--- a/app/api/stuff.py
+++ b/app/api/stuff.py
@@ -1,5 +1,5 @@
 from fastapi import APIRouter, Depends, HTTPException, Request, status
-from rotoger import get_logger
+from app.services.logging import get_logger
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.ext.asyncio import AsyncSession
 
diff --git a/app/api/user.py b/app/api/user.py
index 5b5ee33..c589a7b 100644
--- a/app/api/user.py
+++ b/app/api/user.py
@@ -1,7 +1,7 @@
 from typing import Annotated
 
 from fastapi import APIRouter, Depends, Form, HTTPException, Request, status
-from rotoger import get_logger
+from app.services.logging import get_logger
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from app.database import get_db
diff --git a/app/database.py b/app/database.py
index e1003d7..50f26f7 100644
--- a/app/database.py
+++ b/app/database.py
@@ -1,7 +1,7 @@
 from collections.abc import AsyncGenerator
 
 from fastapi.exceptions import ResponseValidationError
-from rotoger import get_logger
+from app.services.logging import get_logger
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
 
diff --git a/app/exception_handlers/base.py b/app/exception_handlers/base.py
index b8357f6..7535f58 100644
--- a/app/exception_handlers/base.py
+++ b/app/exception_handlers/base.py
@@ -1,7 +1,7 @@
 import orjson
 from attrs import define, field
 from fastapi import Request
-from rotoger import get_logger
+from app.services.logging import get_logger
 
 logger = get_logger()
 
diff --git a/app/main.py b/app/main.py
index 801393d..7bc0ed8 100644
--- a/app/main.py
+++ b/app/main.py
@@ -5,7 +5,7 @@ import asyncpg
 from fastapi import Depends, FastAPI, Request
 from fastapi.responses import HTMLResponse
 from fastapi.templating import Jinja2Templates
-from rotoger import get_logger
+from app.services.logging import get_logger
 from starlette.middleware import Middleware
 from starlette.middleware.gzip import GZipMiddleware
 
@@ -21,12 +21,12 @@ from app.middleware.profiler import ProfilingMiddleware
 from app.redis import get_redis
 from app.services.auth import AuthBearer
 
-logger = get_logger()
 templates = Jinja2Templates(directory=Path(__file__).parent.parent / "templates")
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
+    app.logger = get_logger()
     app.redis = await get_redis()
     postgres_dsn = global_settings.postgres_url.unicode_string()
     try:
@@ -35,12 +35,12 @@ async def lifespan(app: FastAPI):
             min_size=5,
             max_size=20,
         )
-        await logger.ainfo(
+        await app.logger.ainfo(
             "Postgres pool created", idle_size=app.postgres_pool.get_idle_size()
         )
         yield
     except Exception as e:
-        await logger.aerror("Error during app startup", error=repr(e))
+        await app.logger.aerror("Error during app startup", error=repr(e))
         raise
     finally:
         await app.redis.close()
diff --git a/app/models/base.py b/app/models/base.py
index 54b96d2..ff617e5 100644
--- a/app/models/base.py
+++ b/app/models/base.py
@@ -2,7 +2,7 @@ from typing import Any
 
 from asyncpg import UniqueViolationError
 from fastapi import HTTPException, status
-from rotoger import get_logger
+from app.services.logging import get_logger
 from sqlalchemy.exc import IntegrityError, SQLAlchemyError
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import DeclarativeBase, declared_attr
diff --git a/app/server.py b/app/server.py
new file mode 100644
index 0000000..40d297f
--- /dev/null
+++ b/app/server.py
@@ -0,0 +1,19 @@
+from granian import Granian
+
+def startup():
+    print("Server starting up...")
+
+def shutdown():
+    print("Server shutting down...")
+
+server = Granian(
+    "main:app",  # NOTE(review): resolved relative to CWD -- use "app.main:app" when launching from the repo root
+    address="0.0.0.0",  # Bind to all interfaces (Granian's constructor takes `address`, not `host`)
+    port=8000,
+    workers=4,
+    interface="asgi",
+    blocking_threads=8  # Optional: threads per worker for blocking ops
+)
+server.on_startup(startup)  # NOTE(review): confirm the installed Granian version exposes these lifecycle hooks
+server.on_shutdown(shutdown)
+server.serve()  # Granian's blocking entrypoint is serve(), not serve_forever()
diff --git a/app/services/auth.py b/app/services/auth.py
index 144384e..71b6129 100644
--- a/app/services/auth.py
+++ b/app/services/auth.py
@@ -3,7 +3,7 @@ import time
 import jwt
 from fastapi import HTTPException, Request
 from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
-from rotoger import get_logger
+from app.services.logging import get_logger
 
 from app.config import settings as global_settings
 from app.models.user import User
diff --git a/app/services/logging.py b/app/services/logging.py
new file mode 100644
index 0000000..593629b
--- /dev/null
+++ b/app/services/logging.py
@@ -0,0 +1,96 @@
+import logging
+import os
+from logging.handlers import RotatingFileHandler
+from pathlib import Path
+
+import orjson
+import structlog
+from whenever import Instant
+
+
+def _configure_logger() -> structlog.BoundLogger:
+    """
+    Configures and returns a structlog logger with a rotating file handler.
+
+    The logger is configured using environment variables for path, file size,
+    and backup count. It formats logs as JSON.
+    """
+    log_dir = Path(os.environ.get("ROTOGER_LOG_PATH", "."))
+    log_dir.mkdir(parents=True, exist_ok=True)
+    log_date = Instant.now().py_datetime().strftime("%Y%m%d")
+    log_path = log_dir / f"{log_date}_{os.getpid()}.log"
+
+    # Use int() to ensure env var values are correctly typed
+    max_bytes = int(os.environ.get("ROTOGER_LOG_MAX_BYTES", 10 * 1024 * 1024))
+    backup_count = int(os.environ.get("ROTOGER_LOG_BACKUP_COUNT", 5))
+
+    handler = RotatingFileHandler(
+        filename=log_path,
+        maxBytes=max_bytes,
+        backupCount=backup_count,
+        encoding="utf-8",
+
+    )
+
+    # Use structlog's standard library integration
+    structlog.configure(
+        processors=[
+            structlog.contextvars.merge_contextvars,
+            structlog.stdlib.add_log_level,
+            structlog.stdlib.PositionalArgumentsFormatter(),
+            structlog.processors.TimeStamper(fmt="iso", utc=True),
+            structlog.processors.format_exc_info,
+            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
+            # structlog.stdlib.add_logger_name,
+        ],
+        logger_factory=structlog.stdlib.LoggerFactory(),
+        wrapper_class=structlog.stdlib.BoundLogger,
+        cache_logger_on_first_use=True,
+    )
+
+    # Configure the underlying standard logger
+    formatter = structlog.stdlib.ProcessorFormatter(
+        # These run after the processors defined in structlog.configure
+        foreign_pre_chain=[
+            structlog.contextvars.merge_contextvars,
+            structlog.stdlib.add_log_level,
+            structlog.stdlib.PositionalArgumentsFormatter(),
+            structlog.processors.TimeStamper(fmt="iso", utc=True),
+            structlog.processors.format_exc_info,
+            structlog.stdlib.add_logger_name,
+        ],
+        processor=structlog.processors.JSONRenderer(
+            serializer=lambda *args, **kwargs: orjson.dumps(*args, **kwargs).decode()
+        ),
+    )
+    handler.setFormatter(formatter)
+    root_logger = logging.getLogger()  # The real root logger; getLogger("root") would return a child *named* "root"
+    root_logger.addHandler(handler)
+    root_logger.propagate = False  # No-op on the root logger itself; kept for symmetry with the loggers below
+    root_logger.setLevel(logging.INFO)
+
+    uvicorn_logger = logging.getLogger("uvicorn")  # Uvicorn's server/access logger
+    uvicorn_logger.addHandler(handler)
+    uvicorn_logger.propagate = False  # Avoid double emission through the root logger's handler
+    uvicorn_logger.setLevel(logging.INFO)
+
+    sa_logger = logging.getLogger("sqlalchemy")  # Parent of the SQLAlchemy logger hierarchy
+    sa_logger.addHandler(handler)
+    sa_logger.propagate = False  # Avoid double emission through the root logger's handler
+    sa_logger.setLevel(logging.WARNING)
+
+    # Set SQLAlchemy engine logger level specifically if needed
+    # logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
+    return structlog.get_logger()
+
+
+
+# Module-level singleton instance
+_logger_instance = _configure_logger()
+
+
+def get_logger() -> structlog.BoundLogger:
+    """
+    Returns the configured singleton logger instance.
+    """
+    return _logger_instance
diff --git a/app/services/scheduler.py b/app/services/scheduler.py
index 6352fb6..43a7bf9 100644
--- a/app/services/scheduler.py
+++ b/app/services/scheduler.py
@@ -3,7 +3,7 @@ from datetime import datetime
 from apscheduler import AsyncScheduler
 from apscheduler.triggers.interval import IntervalTrigger
 from attrs import define
-from rotoger import get_logger
+from app.services.logging import get_logger
 from sqlalchemy import text
 from starlette.types import ASGIApp, Receive, Scope, Send
 
diff --git a/app/services/smtp.py b/app/services/smtp.py
index bb5ea24..086cd3c 100644
--- a/app/services/smtp.py
+++ b/app/services/smtp.py
@@ -5,7 +5,7 @@ from email.mime.text import MIMEText
 from attrs import define, field
 from fastapi.templating import Jinja2Templates
 from pydantic import EmailStr
-from rotoger import get_logger
+from app.services.logging import get_logger
 
 from app.config import settings as global_settings
 from app.utils.singleton import SingletonMetaNoArgs