From e465d71320017bbdea774daeddf0b41d1f69d3f6 Mon Sep 17 00:00:00 2001 From: Dmitry Afanasyev <71835315+Balshgit@users.noreply.github.com> Date: Tue, 3 Oct 2023 09:19:33 +0300 Subject: [PATCH] add graylog config (#23) * add additional chat gpt request error * add graylog config --- README.md | 21 +++++++----- bot_microservice/api/bot/controllers.py | 26 --------------- bot_microservice/api/system/controllers.py | 32 +++++++++++++++++++ bot_microservice/constants.py | 2 +- bot_microservice/core/logging.py | 32 ++++++++++++++----- bot_microservice/core/utils.py | 11 ++++--- bot_microservice/settings/.env.template | 2 ++ bot_microservice/settings/config.py | 3 ++ .../tests/integration/test_system.py | 6 ++-- docker-compose.yml | 3 +- poetry.lock | 17 +++++++++- pyproject.toml | 1 + 12 files changed, 103 insertions(+), 53 deletions(-) diff --git a/README.md b/README.md index 5f1bc7c..aa98618 100644 --- a/README.md +++ b/README.md @@ -65,9 +65,9 @@ methods: ## Chat: ```shell -cd bot_microservice -python3 run.py +docker run -p 8858:8858 -it --name freegpt --rm -e CHAT_PATH=/chat balshdocker/freegpt:latest ``` +Open http://localhost:8858/chat/ ```bash @@ -85,11 +85,13 @@ gunicorn main:create_app --workers 10 --bind 0.0.0.0:8083 --worker-class uvicorn ### Run local tests: ```bash -poetry run pytest +cd bot_microservice +STAGE=runtests poetry run pytest ``` ### Run tests in docker compose: ```bash +cd bot_microservice STAGE=runtests docker compose run bot bash -c "coverage run -m pytest -vv --exitfirst && poetry run coverage report" ``` @@ -101,14 +103,17 @@ Docs can be found at on local start can be found at http://localhost/gpt/api/docs +prod docs https://bot.mywistr.ru/gpt/api/docs/ + ## Help article -[Пишем асинхронного Телеграм-бота](https://habr.com/ru/company/kts/blog/598575/) +[Следить за обновлениями этого репозитория](https://github.com/fantasy-peak/cpp-freegpt-webui) ## TODO -- [x] Добавить очередь сообщений -- [x] Исправить запуск локально -- [x] Добавить тестов 
-- [x] Close connection +- [ ] Добавить базу данных с моделями +- [ ] Добавить миграции через alembic +- [ ] Добавить веса моделей и их смену +- [ ] Добавить тестов +- [ ] Добавить сентри diff --git a/bot_microservice/api/bot/controllers.py b/bot_microservice/api/bot/controllers.py index 3e4c8d1..f07609d 100644 --- a/bot_microservice/api/bot/controllers.py +++ b/bot_microservice/api/bot/controllers.py @@ -2,8 +2,6 @@ from fastapi import APIRouter, Request from starlette import status from starlette.responses import Response -from constants import INVALID_GPT_MODEL_MESSAGE -from core.utils import ChatGptService from settings.config import settings router = APIRouter() @@ -19,27 +17,3 @@ router = APIRouter() ) async def process_bot_updates(request: Request) -> None: await request.app.state.queue.put_updates_on_queue(request) - - -@router.get( - "/bot-healthcheck", - name="bot:gpt_healthcheck", - response_class=Response, - summary="bot healthcheck", - responses={ - status.HTTP_500_INTERNAL_SERVER_ERROR: {"description": "Request to chat gpt not success"}, - status.HTTP_200_OK: {"description": "Successful Response"}, - }, -) -async def gpt_healthcheck(response: Response) -> Response: - chatgpt_service = ChatGptService(chat_gpt_model=settings.GPT_MODEL) - data = chatgpt_service.build_request_data("Привет!") - response.status_code = status.HTTP_200_OK - try: - chatgpt_response = await chatgpt_service.do_request(data) - if chatgpt_response.status_code != status.HTTP_200_OK or chatgpt_response.text == INVALID_GPT_MODEL_MESSAGE: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - except Exception: - response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR - - return Response(status_code=response.status_code, content=None) diff --git a/bot_microservice/api/system/controllers.py b/bot_microservice/api/system/controllers.py index 16a3204..cd371de 100644 --- a/bot_microservice/api/system/controllers.py +++ b/bot_microservice/api/system/controllers.py @@ -1,6 +1,11 @@ 
from fastapi import APIRouter from fastapi.responses import ORJSONResponse from starlette import status +from starlette.responses import Response + +from constants import INVALID_GPT_REQUEST_MESSAGES +from core.utils import ChatGptService +from settings.config import settings router = APIRouter() @@ -13,3 +18,30 @@ router = APIRouter() ) async def healthcheck() -> ORJSONResponse: return ORJSONResponse(content=None, status_code=status.HTTP_200_OK) + + +@router.get( + "/bot-healthcheck", + name="system:gpt_healthcheck", + response_class=Response, + summary="Проверяет доступность моделей и если они недоступны, то возвращает код ответа 500", + responses={ + status.HTTP_500_INTERNAL_SERVER_ERROR: {"description": "Request to chat gpt not success"}, + status.HTTP_200_OK: {"description": "Successful Response"}, + }, +) +async def gpt_healthcheck(response: Response) -> Response: + chatgpt_service = ChatGptService(chat_gpt_model=settings.GPT_MODEL) + data = chatgpt_service.build_request_data("Привет!") + response.status_code = status.HTTP_200_OK + try: + chatgpt_response = await chatgpt_service.do_request(data) + if chatgpt_response.status_code != status.HTTP_200_OK: + response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + for message in INVALID_GPT_REQUEST_MESSAGES: + if message in chatgpt_response.text: + response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + except Exception: + response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + + return Response(status_code=response.status_code, content=None) diff --git a/bot_microservice/constants.py b/bot_microservice/constants.py index 5483a06..5e8a4c2 100644 --- a/bot_microservice/constants.py +++ b/bot_microservice/constants.py @@ -4,7 +4,7 @@ AUDIO_SEGMENT_DURATION = 120 * 1000 API_PREFIX = "/api" CHAT_GPT_BASE_URI = "/backend-api/v2/conversation" -INVALID_GPT_MODEL_MESSAGE = "Invalid request model" +INVALID_GPT_REQUEST_MESSAGES = ("Invalid request model", "return unexpected http status code") class 
BotStagesEnum(StrEnum): diff --git a/bot_microservice/core/logging.py b/bot_microservice/core/logging.py index 2900ae9..cc3a810 100644 --- a/bot_microservice/core/logging.py +++ b/bot_microservice/core/logging.py @@ -3,6 +3,7 @@ import sys from types import FrameType from typing import TYPE_CHECKING, Any, cast +import graypy from loguru import logger from sentry_sdk.integrations.logging import EventHandler @@ -40,20 +41,35 @@ def configure_logging(*, level: LogLevelEnum, enable_json_logs: bool, enable_sen intercept_handler = InterceptHandler() - logging.basicConfig(handlers=[intercept_handler], level=logging_level) - formatter = _json_formatter if enable_json_logs else _text_formatter - logger.configure( - handlers=[ + + base_config_handlers = [intercept_handler] + + loguru_handlers = [ + { + "sink": sys.stdout, + "level": logging_level, + "serialize": enable_json_logs, + "format": formatter, + "colorize": True, + } + ] + + if settings.GRAYLOG_HOST: + graylog_handler = graypy.GELFTCPHandler(settings.GRAYLOG_HOST, 12201) + base_config_handlers.append(graylog_handler) + loguru_handlers.append( { - "sink": sys.stdout, + "sink": graylog_handler, "level": logging_level, "serialize": enable_json_logs, "format": formatter, - "colorize": True, + "colorize": False, } - ], - ) + ) + + logging.basicConfig(handlers=base_config_handlers, level=logging_level) + logger.configure(handlers=loguru_handlers) # sentry sdk не умеет из коробки работать с loguru, нужно добавлять хандлер # https://github.com/getsentry/sentry-python/issues/653#issuecomment-788854865 diff --git a/bot_microservice/core/utils.py b/bot_microservice/core/utils.py index 21d11a7..a23a04c 100644 --- a/bot_microservice/core/utils.py +++ b/bot_microservice/core/utils.py @@ -20,7 +20,7 @@ from speech_recognition import ( from constants import ( AUDIO_SEGMENT_DURATION, CHAT_GPT_BASE_URI, - INVALID_GPT_MODEL_MESSAGE, + INVALID_GPT_REQUEST_MESSAGES, ) from settings.config import settings @@ -124,10 +124,11 @@ class 
ChatGptService: try: response = await self.do_request(chat_gpt_request) status = response.status_code - if response.text == INVALID_GPT_MODEL_MESSAGE: - message = f"{INVALID_GPT_MODEL_MESSAGE}: {settings.GPT_MODEL}" - logger.info(message, data=chat_gpt_request) - return message + for message in INVALID_GPT_REQUEST_MESSAGES: + if message in response.text: + message = f"{message}: {settings.GPT_MODEL}" + logger.info(message, data=chat_gpt_request) + return message if status != httpx.codes.OK: logger.info(f"got response status: {status} from chat api", data=chat_gpt_request) return "Что-то пошло не так, попробуйте еще раз или обратитесь к администратору" diff --git a/bot_microservice/settings/.env.template b/bot_microservice/settings/.env.template index 1826721..c025a26 100644 --- a/bot_microservice/settings/.env.template +++ b/bot_microservice/settings/.env.template @@ -6,6 +6,8 @@ APP_PORT="8000" # SENTRY_DSN= SENTRY_TRACES_SAMPLE_RATE="0.95" +# GRAYLOG_HOST= + USER="web" TZ="Europe/Moscow" diff --git a/bot_microservice/settings/config.py b/bot_microservice/settings/config.py index d33e94e..2b13707 100644 --- a/bot_microservice/settings/config.py +++ b/bot_microservice/settings/config.py @@ -48,8 +48,11 @@ class AppSettings(SentrySettings, BaseSettings): DOMAIN: str = "https://localhost" URL_PREFIX: str = "" + GRAYLOG_HOST: str | None = None + GPT_MODEL: str = "gpt-3.5-turbo-stream-AItianhuSpace" GPT_BASE_HOST: str = "http://chat_service:8858" + # quantity of workers for uvicorn WORKERS_COUNT: int = 1 # Enable uvicorn reloading diff --git a/bot_microservice/tests/integration/test_system.py b/bot_microservice/tests/integration/test_system.py index e9e1e61..7bbbde3 100644 --- a/bot_microservice/tests/integration/test_system.py +++ b/bot_microservice/tests/integration/test_system.py @@ -32,13 +32,13 @@ async def test_bot_healthcheck_is_ok( assert response.status_code == httpx.codes.OK +@pytest.mark.parametrize("text", ["Invalid request model", "return unexpected http 
status code"]) async def test_bot_healthcheck_invalid_request_model( - rest_client: AsyncClient, - test_settings: AppSettings, + rest_client: AsyncClient, test_settings: AppSettings, text: str ) -> None: with mocked_ask_question_api( host=test_settings.GPT_BASE_HOST, - return_value=Response(status_code=httpx.codes.OK, text="Invalid request model"), + return_value=Response(status_code=httpx.codes.OK, text=text), ): response = await rest_client.get("/api/bot-healthcheck") assert response.status_code == httpx.codes.INTERNAL_SERVER_ERROR diff --git a/docker-compose.yml b/docker-compose.yml index 5a65fd5..f531f71 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -19,11 +19,12 @@ services: dockerfile: deploy/Dockerfile target: bot-service args: - STAGE: ${STAGE} + STAGE: ${STAGE:-production} restart: unless-stopped env_file: - bot_microservice/settings/.env volumes: + - ./bot_microservice/settings/.env:/app/settings/.env:ro - /etc/localtime:/etc/localtime:ro networks: chat-gpt-network: diff --git a/poetry.lock b/poetry.lock index 21150b9..63a1b74 100644 --- a/poetry.lock +++ b/poetry.lock @@ -999,6 +999,21 @@ gitdb = ">=4.0.1,<5" [package.extras] test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"] +[[package]] +name = "graypy" +version = "2.1.0" +description = "Python logging handlers that send messages in the Graylog Extended Log Format (GELF)." 
+optional = false +python-versions = "*" +files = [ + {file = "graypy-2.1.0-py2.py3-none-any.whl", hash = "sha256:5df0102ed52fdaa24dd579bc1e4904480c2c9bbb98917a0b3241ecf510c94207"}, + {file = "graypy-2.1.0.tar.gz", hash = "sha256:fd8dc4a721de1278576d92db10ac015e99b4e480cf1b18892e79429fd9236e16"}, +] + +[package.extras] +amqp = ["amqplib (==1.0.2)"] +docs = ["sphinx (>=2.1.2,<3.0.0)", "sphinx-autodoc-typehints (>=1.6.0,<2.0.0)", "sphinx-rtd-theme (>=0.4.3,<1.0.0)"] + [[package]] name = "greenlet" version = "2.0.2" @@ -3110,4 +3125,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "f8faa71d22eb911772b7607eb35d2feb1e5dbe0b0bf2c602373b1e31bffaf820" +content-hash = "ab644b9882ee200392911afc6b71bf87fdb413e4fdd9f06a460ce33da98687d7" diff --git a/pyproject.toml b/pyproject.toml index a4ab99a..255d4eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ sentry-sdk = "^1.31.0" SpeechRecognition = "^3.8" pydub = "^0.25" greenlet = "^2.0.2" +graypy = "^2.1.0" [tool.poetry.dev-dependencies]