Mirror of https://github.com/Balshgit/gpt_chat_bot.git, synced 2026-02-04 16:50:38 +03:00
add database and migration logic (#27)
* update chat_microservice
* reformat logger_conf
* add database
* add service and repository logic
* fix constants gpt base url
* add models endpoints
0  bot_microservice/infra/database/__init__.py  Normal file
34  bot_microservice/infra/database/base.py  Normal file
@@ -0,0 +1,34 @@
from sqlalchemy import Table, inspect
from sqlalchemy.orm import as_declarative, declared_attr

from infra.database.meta import meta


@as_declarative(metadata=meta)
class Base:
    """
    Base for all models.

    It has some type definitions to enhance autocompletion.
    """

    # Generate __tablename__ automatically
    @declared_attr
    def __tablename__(self) -> str:
        return self.__name__.lower()

    __table__: Table

    @classmethod
    def get_real_column_name(cls, attr_name: str) -> str:
        return getattr(inspect(cls).c, attr_name).name  # type: ignore

    def __str__(self) -> str:
        return self.__repr__()

    def __repr__(self) -> str:
        try:
            return f"{self.__class__.__name__}(id={self.id})"  # type: ignore[attr-defined]
        except AttributeError:
            return super().__repr__()
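The concrete models are not part of this diff, but the data migration below imports core.bot.models.chat_gpt.ChatGpt. A minimal sketch of what such a model built on this Base could look like, assuming the column layout from the "initial commit" migration (the real file may differ):

# Hypothetical sketch of core/bot/models/chat_gpt.py; the column types are taken
# from the initial migration below, the real model is not included in this diff.
from sqlalchemy import INTEGER, SMALLINT, VARCHAR, Column

from infra.database.base import Base


class ChatGpt(Base):
    # __tablename__ is generated by the declared_attr on Base: "chatgpt"
    id = Column(INTEGER, primary_key=True, autoincrement=True)
    model = Column(VARCHAR(256), unique=True, nullable=False)
    priority = Column(SMALLINT, nullable=False)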
102  bot_microservice/infra/database/db_adapter.py  Normal file
@@ -0,0 +1,102 @@
import os
import pkgutil
from asyncio import current_task
from contextlib import asynccontextmanager
from pathlib import Path
from typing import AsyncGenerator

from loguru import logger
from sqlalchemy.ext.asyncio import (
    AsyncEngine,
    AsyncSession,
    async_scoped_session,
    async_sessionmaker,
    create_async_engine,
)

from settings.config import AppSettings


class Database:
    def __init__(self, settings: AppSettings) -> None:
        self.db_connect_url = settings.db_url
        self.echo_logs = settings.DB_ECHO
        self.db_file = settings.DB_FILE
        self._engine: AsyncEngine = create_async_engine(
            str(settings.db_url),
            echo=settings.DB_ECHO,
            execution_options={"isolation_level": "AUTOCOMMIT"},
        )
        self._async_session_factory = async_scoped_session(
            async_sessionmaker(
                autoflush=False,
                class_=AsyncSession,
                expire_on_commit=False,
                bind=self._engine,
            ),
            scopefunc=current_task,
        )

    @asynccontextmanager
    async def session(self) -> AsyncGenerator[AsyncSession, None]:
        session: AsyncSession = self._async_session_factory()

        async with session:
            try:
                yield session
            except Exception:
                await session.rollback()
                raise

    @asynccontextmanager
    async def get_transaction_session(self) -> AsyncGenerator[AsyncSession, None]:
        async with self._async_session_factory() as session, session.begin():
            try:
                yield session
            except Exception as error:
                await session.rollback()
                raise error

    async def create_database(self) -> None:
        """
        Create the database schema if the SQLite file does not exist yet.

        Imports all models so their tables are registered on the shared
        metadata, then creates the tables on the engine.
        """
        if not self.db_file.exists():
            from infra.database.meta import meta

            load_all_models()
            try:
                async with self._engine.begin() as connection:
                    await connection.run_sync(meta.create_all)

                logger.info("all migrations are applied")
            except Exception as err:
                logger.error("Can't run migrations", err=err)

    async def drop_database(self) -> None:
        """Drop the current database by removing the SQLite file."""
        if self.db_file.exists():
            os.remove(self.db_file)


def load_all_models() -> None:
    """Import every models package under core so the tables register on the metadata."""
    package_dir = Path(__file__).resolve().parent.parent
    package_dir = package_dir.joinpath("core")
    modules = pkgutil.walk_packages(path=[str(package_dir)], prefix="core.")
    models_packages = [module for module in modules if module.ispkg and "models" in module.name]
    for module in models_packages:
        model_pkgs = pkgutil.walk_packages(
            path=[os.path.join(str(module.module_finder.path), "models")], prefix=f"{module.name}."  # type: ignore
        )
        for model_pkg in model_pkgs:
            __import__(model_pkg.name)
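A short usage sketch of the adapter (not part of this commit); it reuses the ChatGpt model that the data migration below imports and assumes the configured settings object from settings.config:

# Usage sketch only; the call sites in the real service may look different.
from sqlalchemy import select

from core.bot.models.chat_gpt import ChatGpt
from infra.database.db_adapter import Database
from settings.config import settings


async def example() -> None:
    database = Database(settings)
    await database.create_database()  # creates the SQLite file and tables on first run

    # Plain session: commits are left to the caller, exceptions trigger a rollback.
    async with database.session() as session:
        models = (await session.execute(select(ChatGpt))).scalars().all()

    # Transactional session: session.begin() commits on success, rolls back on error.
    async with database.get_transaction_session() as session:
        session.add(ChatGpt(model="example-model", priority=0))  # placeholder values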
20  bot_microservice/infra/database/deps.py  Normal file
@@ -0,0 +1,20 @@
from typing import AsyncGenerator

from sqlalchemy.ext.asyncio import AsyncSession
from starlette.requests import Request


async def get_db_session(request: Request) -> AsyncGenerator[AsyncSession, None]:
    """
    Create and get database session.

    :param request: current request.
    :yield: database session.
    """
    session: AsyncSession = request.app.state.db_session_factory()

    try:
        yield session
    finally:
        await session.commit()
        await session.close()
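A sketch of how this dependency could be wired into an endpoint, assuming a FastAPI app whose startup stores the session factory on app.state.db_session_factory; the route path and query are illustrative, not the endpoints added in this commit:

# Illustrative wiring only; FastAPI resolves the Request parameter of
# get_db_session automatically when it is used as a dependency.
from fastapi import APIRouter, Depends
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from core.bot.models.chat_gpt import ChatGpt
from infra.database.deps import get_db_session

router = APIRouter()


@router.get("/models")  # hypothetical route path
async def list_models(session: AsyncSession = Depends(get_db_session)) -> list[str]:
    result = await session.execute(select(ChatGpt.model).order_by(ChatGpt.priority.desc()))
    return list(result.scalars().all())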
3  bot_microservice/infra/database/meta.py  Normal file
@@ -0,0 +1,3 @@
from sqlalchemy import MetaData

meta = MetaData()
1  bot_microservice/infra/database/migrations/__init__.py  Normal file
@@ -0,0 +1 @@
"""Alembic migrations."""
81  bot_microservice/infra/database/migrations/env.py  Normal file
@@ -0,0 +1,81 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from sqlalchemy.ext.asyncio.engine import create_async_engine
from sqlalchemy.future import Connection

from infra.database.db_adapter import load_all_models
from infra.database.meta import meta
from settings.config import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# for 'autogenerate' support: from myapp import mymodel
load_all_models()

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
target_metadata = meta


async def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    context.configure(
        url=str(settings.db_url),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """
    Run actual sync migrations.

    :param connection: connection to the database.
    """
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_online() -> None:
    """
    Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = create_async_engine(str(settings.db_url))

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)


if context.is_offline_mode():
    asyncio.run(run_migrations_offline())
else:
    asyncio.run(run_migrations_online())
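With this env.py in place, migrations run through Alembic as usual. A minimal sketch of applying them programmatically, assuming an alembic.ini whose script_location points at infra/database/migrations (the ini file and its location are not part of this diff):

# Sketch only: the path to alembic.ini is an assumption.
from alembic import command
from alembic.config import Config


def apply_migrations() -> None:
    alembic_cfg = Config("alembic.ini")
    command.upgrade(alembic_cfg, "head")  # dispatches to run_migrations_online() via env.py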
24  bot_microservice/infra/database/migrations/script.py.mako  Normal file
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,34 @@
"""initial commit

Revision ID: eb78565abec7
Revises:
Create Date: 2023-10-05 18:28:30.915361

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "eb78565abec7"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "chatgpt",
        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column("model", sa.VARCHAR(length=256), nullable=False),
        sa.Column("priority", sa.SMALLINT(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("model"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("chatgpt")
    # ### end Alembic commands ###
@@ -0,0 +1,49 @@
"""create chat gpt models

Revision ID: c2e443941930
Revises: eb78565abec7
Create Date: 2025-10-05 20:44:05.414977

"""

from sqlalchemy import create_engine, delete, select
from sqlalchemy.orm import sessionmaker

from constants import ChatGptModelsEnum
from core.bot.models.chat_gpt import ChatGpt
from settings.config import settings

# revision identifiers, used by Alembic.
revision = "c2e443941930"
down_revision = "eb78565abec7"
branch_labels: str | None = None
depends_on: str | None = None

engine = create_engine(str(settings.db_url), echo=settings.DB_ECHO)
session_factory = sessionmaker(engine)


def upgrade() -> None:
    # Data migration: seed the chatgpt table only if it is still empty.
    with session_factory() as session:
        query = select(ChatGpt)
        results = session.execute(query)
        existing_models = results.scalars().all()

        if existing_models:
            return None
        models = []
        for model in ChatGptModelsEnum:
            priority = 1 if model.value == "gpt-3.5-turbo-stream-FreeGpt" else 0
            fields = {"model": model.value, "priority": priority}
            models.append(ChatGpt(**fields))
        session.add_all(models)
        session.commit()


def downgrade() -> None:
    with session_factory() as session:
        # SQLite has no TRUNCATE TABLE; delete the seeded rows instead.
        session.execute(delete(ChatGpt))
        session.commit()


engine.dispose()
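ChatGptModelsEnum itself is not included in this diff; the upgrade step above assumes its members carry the model-name strings. A hypothetical sketch of such an enum, where only the one name that appears in this migration is known to exist:

# Hypothetical sketch of constants.ChatGptModelsEnum; the real enum is defined
# elsewhere in the repository and may contain different members.
from enum import Enum


class ChatGptModelsEnum(str, Enum):
    GPT_3_5_TURBO_STREAM_FREEGPT = "gpt-3.5-turbo-stream-FreeGpt"
    # ...additional provider-backed model names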