Mirror of https://github.com/Balshgit/gpt_chat_bot.git (synced 2025-12-16 21:20:39 +03:00)
add database and migration logic (#27)
* update chat_microservice
* reformat logger_conf
* add database
* add service and repository logic
* fix constants gpt base url
* add models endpoints
bot_microservice/infra/database/migrations/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Alembic migrations."""
bot_microservice/infra/database/migrations/env.py (new file, 81 lines)
@@ -0,0 +1,81 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from sqlalchemy.ext.asyncio.engine import create_async_engine
from sqlalchemy.future import Connection

from infra.database.db_adapter import load_all_models
from infra.database.meta import meta
from settings.config import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# for 'autogenerate' support from myapp import mymodel
load_all_models()

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
target_metadata = meta


async def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    context.configure(
        url=str(settings.db_url),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """
    Run actual sync migrations.

    :param connection: connection to the database.
    """
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_online() -> None:
    """
    Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = create_async_engine(str(settings.db_url))

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)


if context.is_offline_mode():
    asyncio.run(run_migrations_offline())
else:
    asyncio.run(run_migrations_online())
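env.py imports load_all_models() from infra.database.db_adapter and the shared meta object from infra.database.meta, neither of which is part of this diff. Below is a minimal sketch of what those helpers commonly look like in this layout; the bodies and the core.bot.models.chat_gpt import are assumptions, not code from the commit.

# infra/database/meta.py (hypothetical sketch): one shared MetaData that
# every model attaches its table to, so autogenerate can see them all.
import sqlalchemy as sa

meta = sa.MetaData()


# infra/database/db_adapter.py (hypothetical sketch)
def load_all_models() -> None:
    # Importing a models module is enough: declarative models register
    # their tables on the shared metadata at import time, before Alembic
    # inspects target_metadata.
    import core.bot.models.chat_gpt  # noqa: F401

With those helpers in place, running the standard `alembic upgrade head` next to the project's alembic.ini applies the revisions that follow.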
bot_microservice/infra/database/migrations/script.py.mako (new file, 24 lines)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
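New revision files are rendered from this template, either via the Alembic CLI (`alembic revision --autogenerate -m "message"`) or its programmatic equivalent. A minimal sketch of the programmatic form follows; the "alembic.ini" path is an assumption about the project layout.

from alembic import command
from alembic.config import Config

# Render script.py.mako into a new file under migrations/versions/, with
# upgrade()/downgrade() bodies autogenerated from the model metadata.
alembic_cfg = Config("alembic.ini")
command.revision(alembic_cfg, message="initial commit", autogenerate=True)

The two revisions below are examples of the rendered output.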
(new file, path not shown in this view; initial migration eb78565abec7)
@@ -0,0 +1,34 @@
"""initial commit

Revision ID: eb78565abec7
Revises:
Create Date: 2023-10-05 18:28:30.915361

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "eb78565abec7"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "chatgpt",
        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column("model", sa.VARCHAR(length=256), nullable=False),
        sa.Column("priority", sa.SMALLINT(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("model"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("chatgpt")
    # ### end Alembic commands ###
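The data migration that follows imports ChatGpt from core.bot.models.chat_gpt, which is not part of this diff. Below is a minimal sketch consistent with the chatgpt table created above; the declarative Base and the SQLAlchemy 2.0-style mapping are assumptions.

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    """Stand-in for the project's declarative base (assumed to share `meta`)."""


class ChatGpt(Base):
    # Mirrors the "chatgpt" table created by revision eb78565abec7.
    __tablename__ = "chatgpt"

    id: Mapped[int] = mapped_column(sa.INTEGER(), primary_key=True, autoincrement=True)
    model: Mapped[str] = mapped_column(sa.VARCHAR(length=256), unique=True, nullable=False)
    priority: Mapped[int] = mapped_column(sa.SMALLINT(), nullable=False)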
(new file, path not shown in this view; data migration c2e443941930)
@@ -0,0 +1,49 @@
"""create chat gpt models

Revision ID: c2e443941930
Revises: eb78565abec7
Create Date: 2025-10-05 20:44:05.414977

"""

from sqlalchemy import create_engine, select, text
from sqlalchemy.orm import sessionmaker

from constants import ChatGptModelsEnum
from core.bot.models.chat_gpt import ChatGpt
from settings.config import settings

# revision identifiers, used by Alembic.
revision = "c2e443941930"
down_revision = "eb78565abec7"
branch_labels: str | None = None
depends_on: str | None = None


engine = create_engine(str(settings.db_url), echo=settings.DB_ECHO)
session_factory = sessionmaker(engine)


def upgrade() -> None:
    with session_factory() as session:
        query = select(ChatGpt)
        results = session.execute(query)
        models = results.scalars().all()

        if models:
            return None
        models = []
        for model in ChatGptModelsEnum:
            priority = 0 if model != "gpt-3.5-turbo-stream-FreeGpt" else 1
            fields = {"model": model, "priority": priority}
            models.append(ChatGpt(**fields))
        session.add_all(models)
        session.commit()


def downgrade() -> None:
    with session_factory() as session:
        session.execute(text(f"""TRUNCATE TABLE {ChatGpt.__tablename__}"""))
        session.commit()


engine.dispose()
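After upgrade() has seeded the table, the service and repository layers mentioned in the commit message presumably read these rows back. A minimal sketch of such a query against the chatgpt table, reusing the same session-factory pattern as the migration; this is an illustration, not code from the commit.

from sqlalchemy import create_engine, select
from sqlalchemy.orm import sessionmaker

from core.bot.models.chat_gpt import ChatGpt
from settings.config import settings

engine = create_engine(str(settings.db_url))
session_factory = sessionmaker(engine)

with session_factory() as session:
    # Highest-priority model first; priority 1 was assigned to
    # "gpt-3.5-turbo-stream-FreeGpt" by the data migration above.
    models = session.execute(
        select(ChatGpt).order_by(ChatGpt.priority.desc())
    ).scalars().all()
    for chat_gpt_model in models:
        print(chat_gpt_model.model, chat_gpt_model.priority)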