Mirror of https://github.com/Balshgit/gpt_chat_bot.git (synced 2025-09-10 17:20:41 +03:00)
add question count table (#73)
* update admin url
* update log level
* add user question count table
* rename ChatGpt to ChatGptModels
* change user to root in ci tests
* add chatgpt_shared volume
parent f17a0a72e4
commit d1ae7f2281
@@ -20,3 +20,10 @@ class AccessToken(SQLAlchemyBaseAccessTokenTable[Mapped[int]], Base):
     @declared_attr
     def user_id(cls) -> Mapped[int]:
         return mapped_column(INTEGER, ForeignKey("users.id", ondelete="cascade"), nullable=False)
+
+
+class UserQuestionCount(Base):
+    __tablename__ = "user_question_count"  # type: ignore[assignment]
+
+    user_id: Mapped[int] = mapped_column(INTEGER, ForeignKey("users.id", ondelete="cascade"), primary_key=True)
+    question_count: Mapped[int] = mapped_column(INTEGER, default=0, nullable=False)
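A minimal sketch (not part of this commit) of how the new user_question_count table could be bumped on every question, reusing the async session pattern of ChatGPTRepository shown further down; the method name increment_user_question_count is hypothetical:

    from sqlalchemy.dialects.sqlite import insert

    async def increment_user_question_count(self, user_id: int) -> None:
        # Upsert: insert a row with count 1, or bump the existing counter for this user.
        query = (
            insert(UserQuestionCount)
            .values({UserQuestionCount.user_id: user_id, UserQuestionCount.question_count: 1})
            .on_conflict_do_update(
                index_elements=[UserQuestionCount.user_id],
                set_={"question_count": UserQuestionCount.question_count + 1},
            )
        )
        async with self.db.get_transaction_session() as session:
            await session.execute(query)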
@@ -3,10 +3,12 @@ from sqlalchemy.orm import Mapped, mapped_column
 
 from infra.database.base import Base
 
-__slots__ = ("ChatGpt",)
+__slots__ = ("ChatGptModels",)
 
 
-class ChatGpt(Base):
+class ChatGptModels(Base):
     __tablename__ = "chatgpt_models"  # type: ignore[assignment]
 
     id: Mapped[int] = mapped_column("id", INTEGER(), primary_key=True, autoincrement=True)
     model: Mapped[str] = mapped_column("model", VARCHAR(length=256), nullable=False, unique=True)
     priority: Mapped[int] = mapped_column("priority", SMALLINT(), default=0)
@@ -10,7 +10,7 @@ from sqlalchemy import delete, desc, select, update
 from sqlalchemy.dialects.sqlite import insert
 
 from constants import INVALID_GPT_REQUEST_MESSAGES
-from core.bot.models.chat_gpt import ChatGpt
+from core.bot.models.chatgpt import ChatGptModels
 from infra.database.db_adapter import Database
 from settings.config import AppSettings
 
@@ -20,29 +20,29 @@ class ChatGPTRepository:
     settings: AppSettings
     db: Database
 
-    async def get_chatgpt_models(self) -> Sequence[ChatGpt]:
-        query = select(ChatGpt).order_by(desc(ChatGpt.priority))
+    async def get_chatgpt_models(self) -> Sequence[ChatGptModels]:
+        query = select(ChatGptModels).order_by(desc(ChatGptModels.priority))
 
         async with self.db.session() as session:
             result = await session.execute(query)
             return result.scalars().all()
 
     async def change_chatgpt_model_priority(self, model_id: int, priority: int) -> None:
-        query = update(ChatGpt).values(priority=priority).filter(ChatGpt.id == model_id)
+        query = update(ChatGptModels).values(priority=priority).filter(ChatGptModels.id == model_id)
         async with self.db.get_transaction_session() as session:
             await session.execute(query)
 
     async def reset_all_chatgpt_models_priority(self) -> None:
-        query = update(ChatGpt).values(priority=0)
+        query = update(ChatGptModels).values(priority=0)
 
         async with self.db.session() as session:
             await session.execute(query)
 
     async def add_chatgpt_model(self, model: str, priority: int) -> dict[str, str | int]:
         query = (
-            insert(ChatGpt)
+            insert(ChatGptModels)
             .values(
-                {ChatGpt.model: model, ChatGpt.priority: priority},
+                {ChatGptModels.model: model, ChatGptModels.priority: priority},
             )
             .prefix_with("OR IGNORE")
         )
@@ -52,13 +52,13 @@ class ChatGPTRepository:
         return {"model": model, "priority": priority}
 
     async def delete_chatgpt_model(self, model_id: int) -> None:
-        query = delete(ChatGpt).filter_by(id=model_id)
+        query = delete(ChatGptModels).filter_by(id=model_id)
 
         async with self.db.session() as session:
             await session.execute(query)
 
     async def get_current_chatgpt_model(self) -> str:
-        query = select(ChatGpt.model).order_by(desc(ChatGpt.priority)).limit(1)
+        query = select(ChatGptModels.model).order_by(desc(ChatGptModels.priority)).limit(1)
 
         async with self.db.session() as session:
             result = await session.execute(query)
@@ -15,7 +15,7 @@ from speech_recognition import (
 )
 
 from constants import AUDIO_SEGMENT_DURATION
-from core.bot.models.chat_gpt import ChatGpt
+from core.bot.models.chatgpt import ChatGptModels
 from core.bot.repository import ChatGPTRepository
 from infra.database.db_adapter import Database
 from settings.config import settings
@@ -90,7 +90,7 @@ class SpeechToTextService:
 class ChatGptService:
     repository: ChatGPTRepository
 
-    async def get_chatgpt_models(self) -> Sequence[ChatGpt]:
+    async def get_chatgpt_models(self) -> Sequence[ChatGptModels]:
         return await self.repository.get_chatgpt_models()
 
     async def request_to_chatgpt(self, question: str | None) -> str:
@@ -3,16 +3,16 @@ from typing import TYPE_CHECKING
 
 from sqladmin import Admin, ModelView
 
-from core.bot.models.chat_gpt import ChatGpt
+from core.bot.models.chatgpt import ChatGptModels
 from settings.config import settings
 
 if TYPE_CHECKING:
     from main import Application
 
 
-class ChatGptAdmin(ModelView, model=ChatGpt):
-    column_list = [ChatGpt.id, ChatGpt.model, ChatGpt.priority]
-    column_sortable_list = [ChatGpt.priority]
+class ChatGptAdmin(ModelView, model=ChatGptModels):
+    column_list = [ChatGptModels.id, ChatGptModels.model, ChatGptModels.priority]
+    column_sortable_list = [ChatGptModels.priority]
     column_default_sort = ("priority", True)
     form_widget_args = {"model": {"readonly": True}}
 
@@ -21,11 +21,12 @@ class ChatGptAdmin(ModelView, model=ChatGpt):
 
 
 def create_admin(application: "Application") -> Admin:
+    base_url = os.path.join(settings.URL_PREFIX, "admin")
     admin = Admin(
         title="Chat GPT admin",
         app=application.fastapi_app,
         engine=application.db.async_engine,
-        base_url=os.path.join(settings.URL_PREFIX, "admin"),
+        base_url=base_url if base_url.startswith("/") else "/" + base_url,
         authentication_backend=None,
     )
     admin.add_view(ChatGptAdmin)
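For context on the base_url change (plain stdlib behaviour, not project code): os.path.join only returns an absolute path when one of its parts already starts with "/", which is exactly what the added startswith("/") guard compensates for:

    import os.path

    os.path.join("/gpt", "admin")  # "/gpt/admin" -> already absolute, used as-is
    os.path.join("/", "admin")     # "/admin"     -> already absolute, used as-is
    os.path.join("", "admin")      # "admin"      -> guard prepends "/" so sqladmin gets a valid base_url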
@@ -8,6 +8,8 @@ Create Date: 2023-10-05 18:28:30.915361
 import sqlalchemy as sa
 from alembic import op
 
+from core.bot.models.chatgpt import ChatGptModels
+
 # revision identifiers, used by Alembic.
 revision = "0001_create_chatgpt_table"
 down_revision = None
@@ -18,7 +20,7 @@ depends_on = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     op.create_table(
-        "chatgpt",
+        ChatGptModels.__tablename__,
         sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
         sa.Column("model", sa.VARCHAR(length=256), nullable=False),
         sa.Column("priority", sa.SMALLINT(), nullable=False),
@@ -30,5 +32,5 @@ def upgrade() -> None:
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table("chatgpt")
+    op.drop_table(ChatGptModels.__tablename__)
     # ### end Alembic commands ###
@@ -1,7 +1,7 @@
 """create_auth_tables
 
-Revision ID: 0003_create_users_table
-Revises: 0002_create_chatgpt_models
+Revision ID: 0002_create_users_table
+Revises: 0001_create_chatgpt_table
 Create Date: 2023-11-28 00:58:01.984654
 
 """
@@ -17,8 +17,8 @@ from infra.database.deps import get_sync_session
 from settings.config import settings
 
 # revision identifiers, used by Alembic.
-revision = "0003_create_auth_tables"
-down_revision = "0002_create_chatgpt_models"
+revision = "0002_create_auth_tables"
+down_revision = "0001_create_chatgpt_table"
 branch_labels = None
 depends_on = None
 
@@ -0,0 +1,33 @@
+"""add_user_question_count_table
+
+Revision ID: 0003_create_user_question_count_table
+Revises: 0002_create_auth_tables
+Create Date: 2023-12-28 13:24:42.667724
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "0003_create_user_question_count_table"
+down_revision = "0002_create_auth_tables"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "user_question_count",
+        sa.Column("user_id", sa.INTEGER(), nullable=False),
+        sa.Column("question_count", sa.INTEGER(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="cascade"),
+        sa.PrimaryKeyConstraint("user_id"),
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table("user_question_count")
+    # ### end Alembic commands ###
@@ -1,7 +1,7 @@
 """create chatgpt models
 
-Revision ID: 0002_create_chatgpt_models
-Revises: 0001_create_chatgpt_table
+Revision ID: 0004_add_chatgpt_models
+Revises: 0003_create_user_question_count_table
 Create Date: 2025-10-05 20:44:05.414977
 
 """
@@ -9,19 +9,19 @@ from loguru import logger
 from sqlalchemy import select, text
 
 from constants import ChatGptModelsEnum
-from core.bot.models.chat_gpt import ChatGpt
+from core.bot.models.chatgpt import ChatGptModels
 from infra.database.deps import get_sync_session
 
 # revision identifiers, used by Alembic.
-revision = "0002_create_chatgpt_models"
-down_revision = "0001_create_chatgpt_table"
+revision = "0004_add_chatgpt_models"
+down_revision = "0003_create_user_question_count_table"
 branch_labels: str | None = None
 depends_on: str | None = None
 
 
 def upgrade() -> None:
     with get_sync_session() as session:
-        query = select(ChatGpt)
+        query = select(ChatGptModels)
         results = session.execute(query)
         models = results.scalars().all()
 
@@ -29,13 +29,13 @@ def upgrade() -> None:
             return
         models = []
         for data in ChatGptModelsEnum.base_models_priority():
-            models.append(ChatGpt(**data))
+            models.append(ChatGptModels(**data))
         session.add_all(models)
         session.commit()
 
 
 def downgrade() -> None:
-    chatgpt_table_name = ChatGpt.__tablename__
+    chatgpt_table_name = ChatGptModels.__tablename__
     with get_sync_session() as session:
         # Truncate doesn't exists for SQLite
         session.execute(text(f"""DELETE FROM {chatgpt_table_name}"""))  # noqa: S608
@@ -5,7 +5,6 @@ import sentry_sdk
 from fastapi import FastAPI
 from fastapi.responses import UJSONResponse
 
-from constants import LogLevelEnum
 from core.bot.app import BotApplication, BotQueue
 from core.bot.handlers import bot_event_handlers
 from core.lifetime import shutdown, startup
@@ -41,7 +40,7 @@ class Application:
         self.app.include_router(api_router)
         self.configure_bot_hooks()
         configure_logging(
-            level=LogLevelEnum.INFO,
+            level=settings.LOG_LEVEL,
             enable_json_logs=settings.ENABLE_JSON_LOGS,
             enable_sentry_logs=settings.ENABLE_SENTRY_LOGS,
             log_to_file=settings.LOG_TO_FILE,
@@ -22,6 +22,7 @@ SENTRY_TRACES_SAMPLE_RATE="0.95"
 DEPLOY_ENVIRONMENT="stage"
 
 # ==== logs ====:
+LOG_LEVEL="info"  # "debug", "info", "warning", "error", "critical"
 ENABLE_JSON_LOGS="true"
 ENABLE_SENTRY_LOGS="false"
 
@@ -38,7 +39,7 @@ START_WITH_WEBHOOK="false"
 
 # ==== domain settings ====
 DOMAIN="https://mydomain.com"
-URL_PREFIX="/gpt"
+URL_PREFIX="/"
 CHAT_PREFIX="/chat"
 
 # ==== gpt settings ====
@@ -8,7 +8,7 @@ from pydantic import SecretStr, model_validator
 from pydantic_settings import BaseSettings
 from yarl import URL
 
-from constants import API_PREFIX, CHATGPT_BASE_URI
+from constants import API_PREFIX, CHATGPT_BASE_URI, LogLevelEnum
 
 BASE_DIR = Path(__file__).parent.parent
 SHARED_DIR = BASE_DIR.resolve().joinpath("shared")
@@ -39,6 +39,7 @@ class SentrySettings(BaseSettings):
 
 
 class LoggingSettings(BaseSettings):
+    LOG_LEVEL: LogLevelEnum = LogLevelEnum.INFO
     ENABLE_JSON_LOGS: bool = True
     ENABLE_SENTRY_LOGS: bool = False
 
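LogLevelEnum itself is not part of this diff; judging from the values documented in the .env sample above, it presumably maps the same lowercase names, roughly like this (a guess, not the project's actual definition):

    from enum import Enum

    class LogLevelEnum(str, Enum):  # hypothetical reconstruction of constants.LogLevelEnum
        DEBUG = "debug"
        INFO = "info"
        WARNING = "warning"
        ERROR = "error"
        CRITICAL = "critical"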
@@ -5,7 +5,7 @@ from httpx import AsyncClient
 from sqlalchemy import desc
 from sqlalchemy.orm import Session
 
-from core.bot.models.chat_gpt import ChatGpt
+from core.bot.models.chatgpt import ChatGptModels
 from tests.integration.factories.bot import ChatGptModelFactory
 
 pytestmark = [
@@ -58,7 +58,7 @@ async def test_change_chatgpt_model_priority(
     response = await rest_client.put(url=f"/api/chatgpt/models/{model2.id}/priority", json={"priority": priority})
     assert response.status_code == 202
 
-    upd_model1, upd_model2 = dbsession.query(ChatGpt).order_by(ChatGpt.priority).all()
+    upd_model1, upd_model2 = dbsession.query(ChatGptModels).order_by(ChatGptModels.priority).all()
 
     assert model1.model == upd_model1.model
     assert model1.priority == upd_model1.priority
@@ -76,10 +76,10 @@ async def test_reset_chatgpt_models_priority(
     response = await rest_client.put(url="/api/chatgpt/models/priority/reset")
     assert response.status_code == 202
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
     assert len(models) == 5
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
 
     for model in models:
         assert model.priority == 0
@@ -96,7 +96,7 @@ async def test_create_new_chatgpt_model(
     model_name = "new-gpt-model"
     model_priority = faker.random_int(min=1, max=5)
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
     assert len(models) == 3
 
     response = await rest_client.post(
@@ -108,10 +108,10 @@ async def test_create_new_chatgpt_model(
     )
     assert response.status_code == 201
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
     assert len(models) == 4
 
-    latest_model = dbsession.query(ChatGpt).order_by(desc(ChatGpt.id)).limit(1).one()
+    latest_model = dbsession.query(ChatGptModels).order_by(desc(ChatGptModels.id)).limit(1).one()
     assert latest_model.model == model_name
     assert latest_model.priority == model_priority
 
@@ -132,7 +132,7 @@ async def test_add_existing_chatgpt_model(
     model_name = model.model
     model_priority = faker.random_int(min=1, max=5)
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
    assert len(models) == 3
 
     response = await rest_client.post(
@@ -144,7 +144,7 @@ async def test_add_existing_chatgpt_model(
     )
     assert response.status_code == 201
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
     assert len(models) == 3
 
 
@@ -155,13 +155,13 @@ async def test_delete_chatgpt_model(
     ChatGptModelFactory.create_batch(size=2)
     model = ChatGptModelFactory(priority=42)
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
     assert len(models) == 3
 
     response = await rest_client.delete(url=f"/api/chatgpt/models/{model.id}")
     assert response.status_code == 204
 
-    models = dbsession.query(ChatGpt).all()
+    models = dbsession.query(ChatGptModels).all()
     assert len(models) == 2
 
     assert model not in models
@@ -6,7 +6,7 @@ import factory.fuzzy
 from faker import Faker
 
 from constants import BotStagesEnum
-from core.bot.models.chat_gpt import ChatGpt
+from core.bot.models.chatgpt import ChatGptModels
 from tests.integration.factories.utils import BaseModelFactory
 
 faker = Faker("ru_RU")
@@ -58,7 +58,7 @@ class ChatGptModelFactory(BaseModelFactory):
     priority = factory.Faker("random_int", min=0, max=42)
 
     class Meta:
-        model = ChatGpt
+        model = ChatGptModels
 
 
 class BotInfoFactory(factory.DictFactory):
@@ -1,5 +1,9 @@
 version: '3.9'
 
+volumes:
+  chatgpt_shared:
+    name: chatgpt_shared
+
 networks:
   chatgpt-network:
     name:
@@ -25,6 +29,7 @@ services:
       - bot_microservice/settings/.env
     volumes:
       - ./bot_microservice/settings:/app/settings:ro
+      - chatgpt_shared:/app/shared
       - /etc/localtime:/etc/localtime:ro
     networks:
       chatgpt-network:
@@ -2,7 +2,12 @@
 
 set -e
 
-alembic upgrade "head"
+if [ -f shared/${DB_NAME:-chatgpt.db} ]
+then
+    alembic downgrade -1 && alembic upgrade "head"
+else
+    alembic upgrade "head"
+fi
 
 echo "starting the bot"
 