add database and migration logic (#27)

* update chat_microservice

* reformat logger_conf

* add database

* add service and repository logic

* fix constants gpt base url

* add models endpoints
This commit is contained in:
Dmitry Afanasyev
2023-10-07 00:04:12 +03:00
committed by GitHub
parent c401e1006c
commit 23031b0777
37 changed files with 1785 additions and 487 deletions

View File

@@ -1,7 +1,17 @@
from fastapi import APIRouter, Request
from fastapi import APIRouter, Body, Depends, Path
from starlette import status
from starlette.responses import Response
from starlette.responses import JSONResponse, Response
from telegram import Update
from api.bot.serializers import (
ChatGptModelSerializer,
ChatGptModelsPrioritySerializer,
GETChatGptModelsSerializer,
LightChatGptModel,
)
from api.deps import get_bot_queue, get_chatgpt_service, get_update_from_request
from core.bot.app import BotQueue
from core.bot.services import ChatGptService
from settings.config import settings
router = APIRouter()
@@ -15,5 +25,74 @@ router = APIRouter()
summary="process bot updates",
include_in_schema=False,
)
async def process_bot_updates(request: Request) -> None:
await request.app.state.queue.put_updates_on_queue(request)
async def process_bot_updates(
    tg_update: Update = Depends(get_update_from_request),
    queue: BotQueue = Depends(get_bot_queue),
) -> None:
    """Accept an incoming Telegram webhook update and enqueue it.

    The update is deserialized from the request body by the
    ``get_update_from_request`` dependency; the handler only hands it to the
    bot queue, so the HTTP response returns before the update is processed.
    """
    await queue.put_updates_on_queue(tg_update)
@router.get(
    "/models",
    name="bot:models_list",
    response_class=JSONResponse,
    response_model=list[ChatGptModelSerializer],
    status_code=status.HTTP_200_OK,
    summary="list of models",
)
async def models_list(
    chatgpt_service: ChatGptService = Depends(get_chatgpt_service),
) -> JSONResponse:
    """Return every ChatGPT model known to the service.

    NOTE(review): ``response_model`` declares a bare list, but the payload is
    wrapped as ``{"data": [...]}`` by ``GETChatGptModelsSerializer`` — confirm
    which shape API clients actually expect.
    """
    models = await chatgpt_service.get_chatgpt_models()
    payload = GETChatGptModelsSerializer(data=models).model_dump()  # type: ignore
    return JSONResponse(content=payload, status_code=status.HTTP_200_OK)
@router.post(
    "/models/{model_id}/priority",
    name="bot:change_model_priority",
    response_class=Response,
    status_code=status.HTTP_202_ACCEPTED,
    summary="change gpt model priority",
)
async def change_model_priority(
    # fix typo in the OpenAPI description: "приореитета" -> "приоритета"
    model_id: int = Path(..., gt=0, description="Id модели для обновления приоритета"),
    chatgpt_service: ChatGptService = Depends(get_chatgpt_service),
    gpt_model: ChatGptModelsPrioritySerializer = Body(...),
) -> None:
    """Change a model's priority in the ranking.

    :param model_id: positive id of the model to update
    :param gpt_model: request body carrying the new non-negative priority
    """
    # Persistence is delegated to the service layer; 202 is returned on success.
    await chatgpt_service.change_chatgpt_model_priority(model_id=model_id, priority=gpt_model.priority)
@router.post(
    "/models",
    name="bot:add_new_model",
    response_model=ChatGptModelSerializer,
    status_code=status.HTTP_201_CREATED,
    summary="add new model",
)
async def add_new_model(
    chatgpt_service: ChatGptService = Depends(get_chatgpt_service),
    gpt_model: LightChatGptModel = Body(...),
) -> JSONResponse:
    """Create a new GPT model entry and return it with HTTP 201.

    NOTE(review): ``JSONResponse`` requires JSON-serializable content —
    confirm ``add_chatgpt_model`` returns a plain dict rather than a model
    or ORM instance.
    """
    created = await chatgpt_service.add_chatgpt_model(gpt_model=gpt_model.model, priority=gpt_model.priority)
    return JSONResponse(content=created, status_code=status.HTTP_201_CREATED)
@router.delete(
    "/models/{model_id}",
    name="bot:delete_gpt_model",
    response_class=Response,
    status_code=status.HTTP_204_NO_CONTENT,
    summary="delete gpt model",
)
async def delete_model(
    model_id: int = Path(..., gt=0, description="Id модели для удаления"),
    chatgpt_service: ChatGptService = Depends(get_chatgpt_service),
) -> None:
    """Delete the GPT model with the given id; respond with 204 on success.

    :param model_id: positive id of the model to remove
    """
    # Actual removal is performed by the service layer.
    await chatgpt_service.delete_chatgpt_model(model_id=model_id)

View File

@@ -1,13 +0,0 @@
from fastapi import Depends
from starlette.requests import Request
from core.bot.services import ChatGptService
from settings.config import AppSettings
def get_settings(request: Request) -> AppSettings:
    """Return the application settings stored on the app state."""
    app_state = request.app.state
    return app_state.settings
def get_chat_gpt_service(settings: AppSettings = Depends(get_settings)) -> ChatGptService:
    """Build a ChatGptService configured with the GPT model from settings."""
    model_name = settings.GPT_MODEL
    return ChatGptService(model_name)

View File

@@ -0,0 +1,24 @@
from pydantic import BaseModel, ConfigDict, Field
class LightChatGptModel(BaseModel):
    """Input payload for creating a GPT model: name plus optional priority."""

    # Model name is required; priority defaults to 0 and must be non-negative.
    model: str = Field(..., title="Chat Gpt model")
    priority: int = Field(default=0, ge=0, title="Приоритет модели")
class ChatGptModelsPrioritySerializer(BaseModel):
    """Request body for the change-priority endpoint."""

    # New priority value; defaults to 0 and must be non-negative.
    priority: int = Field(default=0, ge=0, title="Приоритет модели")
class ChatGptModelSerializer(BaseModel):
    """Full representation of a stored GPT model (id, name, priority)."""

    id: int = Field(..., gt=0, title="Id модели")
    model: str = Field(..., title="Chat Gpt model")
    priority: int = Field(..., ge=0, title="Приоритет модели")
    # from_attributes allows building the serializer directly from ORM rows.
    model_config = ConfigDict(from_attributes=True)
class GETChatGptModelsSerializer(BaseModel):
    """Envelope for the models-list response: wraps the list under ``data``."""

    data: list[ChatGptModelSerializer] = Field(..., title="Список всех моделей")
    # from_attributes allows construction from attribute-bearing objects.
    model_config = ConfigDict(from_attributes=True)

View File

@@ -0,0 +1,50 @@
from fastapi import Depends
from starlette.requests import Request
from telegram import Update
from core.bot.app import BotApplication, BotQueue
from core.bot.repository import ChatGPTRepository
from core.bot.services import ChatGptService, SpeechToTextService
from infra.database.db_adapter import Database
from settings.config import AppSettings
def get_settings(request: Request) -> AppSettings:
    """Return the application settings attached to the app state."""
    state = request.app.state
    return state.settings
def get_bot_app(request: Request) -> BotApplication:
    """Return the bot application stored on the app state."""
    state = request.app.state
    return state.bot_app
def get_bot_queue(request: Request) -> BotQueue:
    """Return the shared bot queue stored on the app state."""
    state = request.app.state
    return state.queue
async def get_update_from_request(request: Request, bot_app: BotApplication = Depends(get_bot_app)) -> Update | None:
    """Deserialize the raw request body into a telegram ``Update``.

    Returns ``None`` when ``Update.de_json`` cannot parse the payload.
    """
    payload = await request.json()
    return Update.de_json(payload, bot_app.bot)
def get_database(settings: AppSettings = Depends(get_settings)) -> Database:
    """Construct a database adapter from the application settings."""
    database = Database(settings=settings)
    return database
def get_chat_gpt_repository(
    db: Database = Depends(get_database), settings: AppSettings = Depends(get_settings)
) -> ChatGPTRepository:
    """Build the ChatGPT repository on top of the database and settings."""
    repository = ChatGPTRepository(settings=settings, db=db)
    return repository
def get_speech_to_text_service() -> SpeechToTextService:
    """Create a fresh speech-to-text service instance."""
    service = SpeechToTextService()
    return service
def new_bot_queue(bot_app: BotApplication = Depends(get_bot_app)) -> BotQueue:
    """Create a ``BotQueue`` bound to the bot application."""
    queue = BotQueue(bot_app=bot_app)
    return queue
def get_chatgpt_service(
    chat_gpt_repository: ChatGPTRepository = Depends(get_chat_gpt_repository),
) -> ChatGptService:
    """Assemble the ChatGptService from its repository dependency."""
    service = ChatGptService(repository=chat_gpt_repository)
    return service

View File

@@ -3,7 +3,7 @@ from fastapi.responses import ORJSONResponse
from starlette import status
from starlette.responses import Response
from api.bot.deps import get_chat_gpt_service
from api.deps import get_chatgpt_service
from api.exceptions import BaseAPIException
from constants import INVALID_GPT_REQUEST_MESSAGES
from core.bot.services import ChatGptService
@@ -33,12 +33,11 @@ async def healthcheck() -> ORJSONResponse:
)
async def gpt_healthcheck(
response: Response,
chatgpt_service: ChatGptService = Depends(get_chat_gpt_service),
chatgpt_service: ChatGptService = Depends(get_chatgpt_service),
) -> Response:
data = chatgpt_service.build_request_data("Привет!")
response.status_code = status.HTTP_200_OK
try:
chatgpt_response = await chatgpt_service.do_request(data)
chatgpt_response = await chatgpt_service.request_to_chatgpt_microservice(question="Привет!")
if chatgpt_response.status_code != status.HTTP_200_OK:
response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
for message in INVALID_GPT_REQUEST_MESSAGES: