Mirror of https://github.com/Balshgit/gpt_chat_bot.git, synced 2025-09-11 22:30:41 +03:00
add more tests (#19)
Commit 90ec8ccec1 (parent 7cfda281f7)
@@ -1,21 +1,21 @@
-from enum import IntEnum, StrEnum, auto
+from enum import StrEnum
 
 AUDIO_SEGMENT_DURATION = 120 * 1000
 
 API_PREFIX = "/api"
-CHAT_GPT_BASE_URL = "http://chat_service:8858/backend-api/v2/conversation"
+CHAT_GPT_BASE_URI = "backend-api/v2/conversation"
 
 
-class BotStagesEnum(IntEnum):
-    about_me = auto()
-    website = auto()
-    help = auto()
-    about_bot = auto()
+class BotStagesEnum(StrEnum):
+    about_me = "about_me"
+    website = "website"
+    help = "help"
+    about_bot = "about_bot"
 
 
-class BotEntryPoints(IntEnum):
-    start_routes = auto()
-    end = auto()
+class BotEntryPoints(StrEnum):
+    start_routes = "start_routes"
+    end = "end"
 
 
 class LogLevelEnum(StrEnum):
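Note: moving BotStagesEnum and BotEntryPoints from IntEnum/auto() to StrEnum means every member is already a plain string, so callback-data patterns and handler return values no longer need str() wrapping. A minimal sketch of the behaviour this relies on (the BotStages class below is a hypothetical stand-in for the real enum):

    from enum import StrEnum

    class BotStages(StrEnum):
        about_me = "about_me"

    # StrEnum members are real str instances, so they concatenate and compare directly
    pattern = "^" + BotStages.about_me + "$"
    assert pattern == "^about_me$"
    assert BotStages.about_me == "about_me"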
@@ -8,7 +8,7 @@ from typing import Any
 
 from fastapi import Request, Response
 from loguru import logger
-from telegram import Update
+from telegram import Bot, Update
 from telegram.ext import Application
 
 from settings.config import AppSettings
@@ -19,9 +19,8 @@ class BotApplication:
         self,
         settings: AppSettings,
         handlers: list[Any],
-        application: Application | None = None,  # type: ignore[type-arg]
     ) -> None:
-        self.application: Application = application or (  # type: ignore
+        self.application: Application = (  # type: ignore[type-arg]
             Application.builder().token(token=settings.TELEGRAM_API_TOKEN).build()
         )
         self.handlers = handlers
@@ -29,6 +28,10 @@ class BotApplication:
         self.start_with_webhook = settings.START_WITH_WEBHOOK
         self._add_handlers()
 
+    @property
+    def bot(self) -> Bot:
+        return self.application.bot
+
     async def set_webhook(self) -> None:
         _, webhook_info = await asyncio.gather(self.application.initialize(), self.application.bot.get_webhook_info())
         if not webhook_info.url:
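Note: BotApplication no longer accepts a pre-built Application; it always builds one from the settings token, and the new bot property exposes the underlying python-telegram-bot Bot so callers do not have to reach into self.application. A rough usage sketch, assuming a syntactically valid TELEGRAM_API_TOKEN is configured in the settings:

    from core.bot import BotApplication
    from settings.config import get_settings

    # sketch only: building the Application requires a well-formed token
    bot_app = BotApplication(settings=get_settings(), handlers=[])
    assert bot_app.bot is bot_app.application.bot  # the property simply proxies application.bot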
@@ -10,13 +10,13 @@ from loguru import logger
 from telegram import InlineKeyboardMarkup, Update
 from telegram.ext import ContextTypes
 
-from constants import CHAT_GPT_BASE_URL, BotEntryPoints
+from constants import CHAT_GPT_BASE_URI, BotEntryPoints
 from core.keyboards import main_keyboard
 from core.utils import SpeechToTextService
 from settings.config import settings
 
 
-async def main_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> int:
+async def main_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> str:
     """Send message on `/start`."""
     if not update.message:
         return BotEntryPoints.end
@@ -29,7 +29,7 @@ async def about_me(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
     if not update.effective_message:
         return None
     await update.effective_message.reply_text(
-        'Автор бота: *Дмитрий Афанасьев*\n\nTg nickname: *Balshtg*', parse_mode='MarkdownV2'
+        "Автор бота: *Дмитрий Афанасьев*\n\nTg nickname: *Balshtg*", parse_mode="MarkdownV2"
     )
 
 
@@ -40,7 +40,7 @@ async def about_bot(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
         "Бот использует бесплатную модель Chat-GPT3.5 для ответов на вопросы. "
         "Принимает запросы на разных языках. \n\nБот так же умеет переводить голосовые сообщения в текст. "
         "Просто пришлите голосовуху и получите поток сознания без запятых в виде текста",
-        parse_mode='Markdown',
+        parse_mode="Markdown",
     )
 
 
@@ -88,12 +88,12 @@ async def ask_question(update: Update, context: ContextTypes.DEFAULT_TYPE) -> No
     }
 
     transport = AsyncHTTPTransport(retries=3)
-    async with AsyncClient(transport=transport, timeout=50) as client:
+    async with AsyncClient(base_url=settings.GPT_BASE_HOST, transport=transport, timeout=50) as client:
         try:
-            response = await client.post(CHAT_GPT_BASE_URL, json=chat_gpt_request, timeout=50)
+            response = await client.post(CHAT_GPT_BASE_URI, json=chat_gpt_request, timeout=50)
             status = response.status_code
             if status != httpx.codes.OK:
-                logger.info(f'got response status: {status} from chat api', data=chat_gpt_request)
+                logger.info(f"got response status: {status} from chat api", data=chat_gpt_request)
                 await update.message.reply_text(
                     "Что-то пошло не так, попробуйте еще раз или обратитесь к администратору"
                 )
@@ -117,7 +117,7 @@ async def voice_recognize(update: Update, context: ContextTypes.DEFAULT_TYPE) ->
     with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
         tmpfile.write(sound_bytes)
 
-    logger.info('file has been saved', filename=tmpfile.name)
+    logger.info("file has been saved", filename=tmpfile.name)
 
     speech_to_text_service = SpeechToTextService(filename=tmpfile.name)
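Note: the full chat URL is split into settings.GPT_BASE_HOST plus the relative CHAT_GPT_BASE_URI, with the host handed to the client as base_url; that is presumably what lets the tests aim the same handler code at a respx-mocked localhost. A standalone sketch of the httpx behaviour being relied on (URLs are illustrative):

    import httpx

    # httpx joins base_url with a relative request path
    client = httpx.AsyncClient(base_url="http://chat_service:8858")
    request = client.build_request("POST", "backend-api/v2/conversation", json={"question": "hi"})
    assert str(request.url) == "http://chat_service:8858/backend-api/v2/conversation"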
@@ -39,16 +39,16 @@ bot_event_handlers.add_handler(
         entry_points=[CommandHandler("start", main_command)],
         states={
             BotEntryPoints.start_routes: [
-                CallbackQueryHandler(about_me, pattern="^" + str(BotStagesEnum.about_me) + "$"),
-                CallbackQueryHandler(website, pattern="^" + str(BotStagesEnum.website) + "$"),
-                CallbackQueryHandler(help_command, pattern="^" + str(BotStagesEnum.help) + "$"),
-                CallbackQueryHandler(about_bot, pattern="^" + str(BotStagesEnum.about_bot) + "$"),
+                CallbackQueryHandler(about_me, pattern="^" + BotStagesEnum.about_me + "$"),
+                CallbackQueryHandler(website, pattern="^" + BotStagesEnum.website + "$"),
+                CallbackQueryHandler(help_command, pattern="^" + BotStagesEnum.help + "$"),
+                CallbackQueryHandler(about_bot, pattern="^" + BotStagesEnum.about_bot + "$"),
             ],
         },
         fallbacks=[CommandHandler("start", main_command)],
     )
 )
-bot_event_handlers.add_handler(CallbackQueryHandler(about_me, pattern="^" + str(BotStagesEnum.about_me) + "$"))
-bot_event_handlers.add_handler(CallbackQueryHandler(website, pattern="^" + str(BotStagesEnum.website) + "$"))
-bot_event_handlers.add_handler(CallbackQueryHandler(help_command, pattern="^" + str(BotStagesEnum.help) + "$"))
-bot_event_handlers.add_handler(CallbackQueryHandler(about_bot, pattern="^" + str(BotStagesEnum.about_bot) + "$"))
+bot_event_handlers.add_handler(CallbackQueryHandler(about_me, pattern="^" + BotStagesEnum.about_me + "$"))
+bot_event_handlers.add_handler(CallbackQueryHandler(website, pattern="^" + BotStagesEnum.website + "$"))
+bot_event_handlers.add_handler(CallbackQueryHandler(help_command, pattern="^" + BotStagesEnum.help + "$"))
+bot_event_handlers.add_handler(CallbackQueryHandler(about_bot, pattern="^" + BotStagesEnum.about_bot + "$"))
@@ -2,13 +2,13 @@ from telegram import InlineKeyboardButton
 
 from constants import BotStagesEnum
 
-main_keyboard = [
-    [
+main_keyboard = (
+    (
         InlineKeyboardButton("Обо мне", callback_data=str(BotStagesEnum.about_me)),
         InlineKeyboardButton("Веб версия", callback_data=str(BotStagesEnum.website)),
-    ],
-    [
+    ),
+    (
         InlineKeyboardButton("Помощь", callback_data=str(BotStagesEnum.help)),
         InlineKeyboardButton("О боте", callback_data=str(BotStagesEnum.about_bot)),
-    ],
-]
+    ),
+)
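Note: main_keyboard becomes a tuple of tuples instead of a list of lists, i.e. an immutable module-level constant; python-telegram-bot normalises inline_keyboard to nested tuples either way, which the new test_help_command equality check on reply_markup leans on. A small sketch of that normalisation (assuming python-telegram-bot v20):

    from telegram import InlineKeyboardButton, InlineKeyboardMarkup

    rows_as_lists = [[InlineKeyboardButton("Помощь", callback_data="help")]]
    rows_as_tuples = ((InlineKeyboardButton("Помощь", callback_data="help"),),)

    # both forms end up as the same tuple-of-tuples inside the markup object
    assert InlineKeyboardMarkup(rows_as_lists) == InlineKeyboardMarkup(rows_as_tuples)
    assert InlineKeyboardMarkup(rows_as_tuples).inline_keyboard == rows_as_tuples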
@@ -31,7 +31,7 @@ class InterceptHandler(logging.Handler):
 
         logger.opt(depth=depth, exception=record.exc_info).log(
             level,
-            record.getMessage().replace(settings.TELEGRAM_API_TOKEN, "TELEGRAM_API_TOKEN".center(24, '*')),
+            record.getMessage().replace(settings.TELEGRAM_API_TOKEN, "TELEGRAM_API_TOKEN".center(24, "*")),
         )
 
 
@@ -55,7 +55,7 @@ class SpeechToTextService:
         self._convert_audio_to_text()
 
     def _convert_audio_to_text(self) -> None:
-        wav_filename = f'{self.filename}.wav'
+        wav_filename = f"{self.filename}.wav"
 
         speech = AudioSegment.from_wav(wav_filename)
         speech_duration = len(speech)
@@ -82,8 +82,8 @@ class SpeechToTextService:
             logger.error("error temps files not deleted", error=error, filenames=[self.filename, self.filename])
 
     def _convert_file_to_wav(self) -> None:
-        new_filename = self.filename + '.wav'
-        cmd = ['ffmpeg', '-loglevel', 'quiet', '-i', self.filename, '-vn', new_filename]
+        new_filename = self.filename + ".wav"
+        cmd = ["ffmpeg", "-loglevel", "quiet", "-i", self.filename, "-vn", new_filename]
         try:
             subprocess.run(args=cmd)  # noqa: S603
             logger.info("file has been converted to wav", filename=new_filename)
@@ -96,7 +96,7 @@ class SpeechToTextService:
         with AudioFile(tmp_filename) as source:
             audio_text = self.recognizer.listen(source)
         try:
-            text = self.recognizer.recognize_google(audio_text, language='ru-RU')
+            text = self.recognizer.recognize_google(audio_text, language="ru-RU")
             os.remove(tmp_filename)
             return text
         except SpeechRecognizerError as error:
@@ -12,6 +12,8 @@ TELEGRAM_API_TOKEN="123456789:AABBCCDDEEFFaabbccddeeff-1234567890"
 DOMAIN="http://localhost"
 URL_PREFIX=
 
+GPT_BASE_HOST="http://localhost"
+
 # set to true to start with webhook. Else bot will start on polling method
 START_WITH_WEBHOOK="false"
@@ -12,6 +12,8 @@ TELEGRAM_API_TOKEN="123456789:AABBCCDDEEFFaabbccddeeff-1234567890"
 DOMAIN="http://localhost"
 URL_PREFIX=
 
+GPT_BASE_HOST="http://localhost"
+
 # set to true to start with webhook. Else bot will start on polling method
 START_WITH_WEBHOOK="false"
@@ -49,6 +49,7 @@ class AppSettings(SentrySettings, BaseSettings):
     URL_PREFIX: str = ""
 
     GPT_MODEL: str = "gpt-3.5-turbo-stream-AItianhuSpace"
+    GPT_BASE_HOST: str = "http://chat_service:8858"
     # quantity of workers for uvicorn
     WORKERS_COUNT: int = 1
     # Enable uvicorn reloading
@@ -4,23 +4,25 @@ pytest framework. A common change is to allow monkeypatching of the class member
 enforcing slots in the subclasses."""
 import asyncio
 from asyncio import AbstractEventLoop
+from contextlib import contextmanager
 from datetime import tzinfo
-from typing import Any, AsyncGenerator
+from typing import Any, AsyncGenerator, Iterator
 
 import pytest
 import pytest_asyncio
-from fastapi import FastAPI
-from httpx import AsyncClient
+import respx
+from httpx import AsyncClient, Response
 from pytest_asyncio.plugin import SubRequest
 from telegram import Bot, User
 from telegram.ext import Application, ApplicationBuilder, Defaults, ExtBot
 
+from constants import CHAT_GPT_BASE_URI
 from core.bot import BotApplication
 from core.handlers import bot_event_handlers
 from main import Application as AppApplication
 from settings.config import AppSettings, get_settings
 from tests.integration.bot.networking import NonchalantHttpxRequest
-from tests.integration.factories.bot import BotInfoFactory
+from tests.integration.factories.bot import BotInfoFactory, BotUserFactory
 
 
 @pytest.fixture(scope="session")
@@ -123,6 +125,7 @@ def bot_info() -> dict[str, Any]:
 async def bot_application(bot_info: dict[str, Any]) -> AsyncGenerator[Any, None]:
     # We build a new bot each time so that we use `app` in a context manager without problems
     application = ApplicationBuilder().bot(make_bot(bot_info)).application_class(PytestApplication).build()
+    await application.initialize()
     yield application
     if application.running:
         await application.stop()
@@ -226,27 +229,41 @@ def provider_token(bot_info: dict[str, Any]) -> str:
 @pytest_asyncio.fixture(scope="session")
 async def main_application(
     bot_application: PytestApplication, test_settings: AppSettings
-) -> AsyncGenerator[FastAPI, None]:
+) -> AsyncGenerator[AppApplication, None]:
     bot_app = BotApplication(
-        application=bot_application,
         settings=test_settings,
         handlers=bot_event_handlers.handlers,
     )
-    fast_api_app = AppApplication(settings=test_settings, bot_app=bot_app).fastapi_app
+    bot_app.application._initialized = True
+    bot_app.application.bot = make_bot(BotInfoFactory())
+    bot_app.application.bot._bot_user = BotUserFactory()
+    fast_api_app = AppApplication(settings=test_settings, bot_app=bot_app)
     yield fast_api_app
 
 
 @pytest_asyncio.fixture()
 async def rest_client(
-    main_application: FastAPI,
+    main_application: AppApplication,
 ) -> AsyncGenerator[AsyncClient, None]:
     """
     Default http client. Use to test unauthorized requests, public endpoints
     or special authorization methods.
     """
     async with AsyncClient(
-        app=main_application,
+        app=main_application.fastapi_app,
         base_url="http://test",
         headers={"Content-Type": "application/json"},
     ) as client:
         yield client
 
 
+@contextmanager
+def mocked_ask_question_api(host: str) -> Iterator[respx.MockRouter]:
+    with respx.mock(
+        assert_all_mocked=True,
+        assert_all_called=True,
+        base_url=host,
+    ) as respx_mock:
+        ask_question_route = respx_mock.post(url=CHAT_GPT_BASE_URI, name="ask_question")
+        ask_question_route.return_value = Response(status_code=200, text="Привет! Как я могу помочь вам сегодня?")
+        yield respx_mock
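Note: mocked_ask_question_api mounts a respx router on the GPT host so the ask_question handler never performs a real HTTP call, and assert_all_called=True makes the context manager itself fail a test if the route is never hit. A hedged usage sketch (the host value is illustrative):

    import httpx
    from tests.integration.bot.conftest import mocked_ask_question_api

    # any request to the mocked host is intercepted by respx while the context is active
    with mocked_ask_question_api(host="http://localhost"):
        reply = httpx.post("http://localhost/backend-api/v2/conversation", json={"question": "hi"})

    assert reply.status_code == 200
    assert reply.text == "Привет! Как я могу помочь вам сегодня?"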
@@ -1,24 +1,30 @@
 import asyncio
-import time
 from asyncio import AbstractEventLoop
-from typing import Any
+from unittest import mock
 
 import pytest
+import telegram
 from assertpy import assert_that
 from faker import Faker
 from httpx import AsyncClient
+from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update
 
+from constants import BotStagesEnum
 from core.bot import BotApplication, BotQueue
 from main import Application
+from settings.config import AppSettings
+from tests.integration.bot.conftest import mocked_ask_question_api
 from tests.integration.bot.networking import MockedRequest
 from tests.integration.factories.bot import (
-    BotChatFactory,
-    BotEntitleFactory,
-    BotUserFactory,
+    BotCallBackQueryFactory,
+    BotMessageFactory,
+    BotUpdateFactory,
+    CallBackFactory,
 )
 
 pytestmark = [
     pytest.mark.asyncio,
+    pytest.mark.enable_socket,
 ]
@@ -34,10 +40,10 @@ async def test_bot_webhook_endpoint(
     rest_client: AsyncClient,
     main_application: Application,
 ) -> None:
-    bot_update = create_bot_update()
+    bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
     response = await rest_client.post(url="/api/123456789:AABBCCDDEEFFaabbccddeeff-1234567890", json=bot_update)
     assert response.status_code == 202
-    update = await main_application.state._state["queue"].queue.get()  # type: ignore[attr-defined]
+    update = await main_application.fastapi_app.state._state["queue"].queue.get()
     update = update.to_dict()
     assert update["update_id"] == bot_update["update_id"]
     assert_that(update["message"]).is_equal_to(
@@ -51,22 +57,124 @@ async def test_bot_queue(
 ) -> None:
     bot_queue = BotQueue(bot_app=bot)
     event_loop.create_task(bot_queue.get_updates_from_queue())
-    bot_update = create_bot_update()
+
+    bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
+
     mocked_request = MockedRequest(bot_update)
     await bot_queue.put_updates_on_queue(mocked_request)  # type: ignore
     await asyncio.sleep(1)
     assert bot_queue.queue.empty()
 
 
-def create_bot_update() -> dict[str, Any]:
-    bot_update: dict[str, Any] = {}
-    bot_update["update_id"] = faker.random_int(min=10**8, max=10**9 - 1)
-    bot_update["message"] = {
-        "message_id": faker.random_int(min=10**8, max=10**9 - 1),
-        "from": BotUserFactory()._asdict(),
-        "chat": BotChatFactory()._asdict(),
-        "date": time.time(),
-        "text": "/chatid",
-        "entities": [BotEntitleFactory()],
-    }
-    return bot_update
+async def test_help_command(
+    main_application: Application,
+    test_settings: AppSettings,
+) -> None:
+    with mock.patch.object(
+        telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
+    ) as mocked_send_message:
+        bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
+
+        await main_application.bot_app.application.process_update(
+            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
+        )
+
+        assert_that(mocked_send_message.call_args.kwargs).is_equal_to(
+            {
+                "text": "Help!",
+                "api_kwargs": {"text": "Список основных команд:"},
+                "chat_id": bot_update["message"]["chat"]["id"],
+                "reply_markup": InlineKeyboardMarkup(
+                    inline_keyboard=(
+                        (
+                            InlineKeyboardButton(callback_data="about_me", text="Обо мне"),
+                            InlineKeyboardButton(callback_data="website", text="Веб версия"),
+                        ),
+                        (
+                            InlineKeyboardButton(callback_data="help", text="Помощь"),
+                            InlineKeyboardButton(callback_data="about_bot", text="О боте"),
+                        ),
+                    )
+                ),
+            },
+            include=["text", "api_kwargs", "chat_id", "reply_markup"],
+        )
+
+
+async def test_about_me_callback_action(
+    main_application: Application,
+    test_settings: AppSettings,
+) -> None:
+    with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
+        bot_update = BotCallBackQueryFactory(
+            message=BotMessageFactory.create_instance(text="Список основных команд:"),
+            callback_query=CallBackFactory(data=BotStagesEnum.about_me),
+        )
+
+        await main_application.bot_app.application.process_update(
+            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
+        )
+
+        assert mocked_reply_text.call_args.args == ("Автор бота: *Дмитрий Афанасьев*\n\nTg nickname: *Balshtg*",)
+        assert mocked_reply_text.call_args.kwargs == {"parse_mode": "MarkdownV2"}
+
+
+async def test_about_bot_callback_action(
+    main_application: Application,
+    test_settings: AppSettings,
+) -> None:
+    with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
+        bot_update = BotCallBackQueryFactory(
+            message=BotMessageFactory.create_instance(text="Список основных команд:"),
+            callback_query=CallBackFactory(data=BotStagesEnum.about_bot),
+        )
+
+        await main_application.bot_app.application.process_update(
+            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
+        )
+
+        assert mocked_reply_text.call_args.args == (
+            "Бот использует бесплатную модель Chat-GPT3.5 для ответов на вопросы. Принимает запросы на разных языках. "
+            "\n\nБот так же умеет переводить голосовые сообщения в текст. Просто пришлите голосовуху и получите поток "
+            "сознания без запятых в виде текста",
+        )
+        assert mocked_reply_text.call_args.kwargs == {"parse_mode": "Markdown"}
+
+
+async def test_website_callback_action(
+    main_application: Application,
+    test_settings: AppSettings,
+) -> None:
+    with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
+        bot_update = BotCallBackQueryFactory(
+            message=BotMessageFactory.create_instance(text="Список основных команд:"),
+            callback_query=CallBackFactory(data=BotStagesEnum.website),
+        )
+
+        await main_application.bot_app.application.process_update(
+            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
+        )
+
+        assert mocked_reply_text.call_args.args == ("Веб версия: http://localhost/chat/",)
+
+
+async def test_ask_question_action(
+    main_application: Application,
+    test_settings: AppSettings,
+) -> None:
+    with mock.patch.object(
+        telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
+    ) as mocked_send_message, mocked_ask_question_api(host=test_settings.GPT_BASE_HOST):
+        bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="Привет!"))
+        bot_update["message"].pop("entities")
+
+        await main_application.bot_app.application.process_update(
+            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
+        )
+        assert_that(mocked_send_message.call_args.kwargs).is_equal_to(
+            {
+                "text": "Привет! Как я могу помочь вам сегодня?",
+                "chat_id": bot_update["message"]["chat"]["id"],
+            },
+            include=["text", "chat_id"],
+        )
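Note: the hand-rolled create_bot_update() helper is gone; updates are now produced by the factories, which makes the message text parameterisable per test. A small sketch of the payload shape the factories produce (all values other than the overridden text are randomised by Faker):

    from tests.integration.factories.bot import BotMessageFactory, BotUpdateFactory

    bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
    assert bot_update["message"]["text"] == "/help"
    assert {"message_id", "from", "chat", "date", "entities"} <= bot_update["message"].keys()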
@@ -1,8 +1,12 @@
 import string
+import time
+from typing import Any
 
 import factory
+import factory.fuzzy
 from faker import Faker
 
+from constants import BotStagesEnum
 from tests.integration.factories.models import Chat, User
 
 faker = Faker("ru_RU")
@@ -55,3 +59,38 @@ class BotEntitleFactory(factory.DictFactory):
     type = "bot_command"
     offset = 0
     length = 7
+
+
+class BotMessageFactory(factory.DictFactory):
+    message_id = factory.Faker("random_int", min=10**8, max=10**9 - 1)
+    chat = factory.LazyFunction(lambda: BotChatFactory()._asdict())
+    date = time.time()
+    text = factory.Faker("text")
+    entities = factory.LazyFunction(lambda: [BotEntitleFactory()])
+
+    @classmethod
+    def create_instance(cls, **kwargs: Any) -> dict[str, Any]:
+        data = {**cls.build(**kwargs), "from": BotUserFactory()._asdict()}
+        return data
+
+
+class BotUpdateFactory(factory.DictFactory):
+    update_id = factory.Faker("random_int", min=10**8, max=10**9 - 1)
+    message = factory.LazyFunction(lambda: BotMessageFactory.create_instance())
+
+
+class CallBackFactory(factory.DictFactory):
+    id = factory.Faker("bothify", text="###################")
+    chat_instance = factory.Faker("bothify", text="###################")
+    message = factory.LazyFunction(lambda: BotMessageFactory.create_instance())
+    data = factory.fuzzy.FuzzyChoice(BotStagesEnum)
+
+    @classmethod
+    def create_instance(cls, **kwargs: Any) -> dict[str, Any]:
+        data = {**cls.build(**kwargs), "from": BotUserFactory()._asdict()}
+        return data
+
+
+class BotCallBackQueryFactory(factory.DictFactory):
+    update_id = factory.Faker("random_int", min=10**8, max=10**9 - 1)
+    callback_query = factory.LazyFunction(lambda: BotMessageFactory.create_instance())
poetry.lock (generated, 28 lines changed)
@@ -583,13 +583,13 @@ python-dateutil = ">=2.4"
 
 [[package]]
 name = "fastapi"
-version = "0.103.1"
+version = "0.103.2"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "fastapi-0.103.1-py3-none-any.whl", hash = "sha256:5e5f17e826dbd9e9b5a5145976c5cd90bcaa61f2bf9a69aca423f2bcebe44d83"},
-    {file = "fastapi-0.103.1.tar.gz", hash = "sha256:345844e6a82062f06a096684196aaf96c1198b25c06b72c1311b882aa2d8a35d"},
+    {file = "fastapi-0.103.2-py3-none-any.whl", hash = "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e"},
+    {file = "fastapi-0.103.2.tar.gz", hash = "sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653"},
 ]
 
 [package.dependencies]
@@ -2567,6 +2567,20 @@ urllib3 = ">=1.21.1,<3"
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
 use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
+[[package]]
+name = "respx"
+version = "0.20.2"
+description = "A utility for mocking out the Python HTTPX and HTTP Core libraries."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"},
+    {file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"},
+]
+
+[package.dependencies]
+httpx = ">=0.21.0"
+
 [[package]]
 name = "rich"
 version = "13.5.3"
@@ -3027,13 +3041,13 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
 
 [[package]]
 name = "wcwidth"
-version = "0.2.6"
+version = "0.2.7"
 description = "Measures the displayed width of unicode strings in a terminal"
 optional = false
 python-versions = "*"
 files = [
-    {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"},
-    {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
+    {file = "wcwidth-0.2.7-py2.py3-none-any.whl", hash = "sha256:fabf3e32999d9b0dab7d19d845149f326f04fe29bac67709ee071dbd92640a36"},
+    {file = "wcwidth-0.2.7.tar.gz", hash = "sha256:1b6d30a98ddd5ce9bbdb33658191fd2423fc9da203fe3ef1855407dcb7ee4e26"},
 ]
 
 [[package]]
@@ -3096,4 +3110,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "10d088f7f2f03522ba0b5e205cdd0d33ba2c41ae59a25fcf580a6c871fd967a1"
+content-hash = "f8faa71d22eb911772b7607eb35d2feb1e5dbe0b0bf2c602373b1e31bffaf820"
@@ -63,6 +63,7 @@ pytest-split = "^0.8"
 pytest-freezegun = "^0.4"
 pytest-socket = "^0.6"
 assertpy = "^1.1"
+respx = "^0.20"
 
 coverage = "^7.3"