add testing database and chatgpt factories (#28)

* add testing database and chatgpt factories

* include lint job to develop stage

* reformat audioconverter save files to tmp directory

* add api tests

* update README.md
This commit is contained in:
Dmitry Afanasyev
2023-10-08 04:43:24 +03:00
committed by GitHub
parent 23031b0777
commit beb32fb0b9
25 changed files with 434 additions and 255 deletions

View File

@@ -1,10 +1,10 @@
from typing import Any, Callable, Optional
from typing import Any
import pytest
from httpx import AsyncClient, Response
from telegram._utils.defaultvalue import DEFAULT_NONE
from telegram._utils.types import ODVInput
from telegram.error import BadRequest, RetryAfter, TimedOut
from telegram.error import RetryAfter, TimedOut
from telegram.request import HTTPXRequest, RequestData
@@ -17,7 +17,7 @@ class NonchalantHttpxRequest(HTTPXRequest):
self,
url: str,
method: str,
request_data: Optional[RequestData] = None,
request_data: RequestData | None = None,
read_timeout: ODVInput[float] = DEFAULT_NONE,
write_timeout: ODVInput[float] = DEFAULT_NONE,
connect_timeout: ODVInput[float] = DEFAULT_NONE,
@@ -39,29 +39,6 @@ class NonchalantHttpxRequest(HTTPXRequest):
pytest.xfail(f"Ignoring TimedOut error: {e}")
async def expect_bad_request(func: Callable[..., Any], message: str, reason: str) -> Any:
    """Run *func*, converting an anticipated ``BadRequest`` into an XFAIL.

    Wrapper for testing bot functions expected to result in a
    :class:`telegram.error.BadRequest`. Makes the test XFAIL if the
    specified error message is present; any other ``BadRequest`` is
    re-raised unchanged.

    Args:
        func: The awaitable to be executed.
        message: The expected substring of the bad-request error text. If
            another message is present, the error will be re-raised.
        reason: Explanation for the XFAIL.

    Returns:
        On success, the return value of ``func``. (The original annotation
        ``Callable[..., Any]`` was wrong: the awaited result is returned,
        not a callable.)
    """
    try:
        return await func()
    except BadRequest as e:
        if message in str(e):
            # Anticipated failure: mark XFAIL instead of erroring the test.
            pytest.xfail(f"{reason}. {e}")
        # Bare `raise` preserves the original traceback.
        raise
async def send_webhook_message(
ip: str,
port: int,

View File

@@ -0,0 +1,168 @@
import pytest
from assertpy import assert_that
from faker import Faker
from httpx import AsyncClient
from sqlalchemy import desc
from sqlalchemy.orm import Session
from core.bot.models.chat_gpt import ChatGpt
from tests.integration.factories.bot import ChatGptModelFactory
# Module-wide markers: run every test on the asyncio event loop and tag
# them with `enable_socket` (presumably paired with pytest-socket to allow
# real network access during these API tests — confirm against conftest).
pytestmark = [
    pytest.mark.asyncio,
    pytest.mark.enable_socket,
]
async def test_get_chatgpt_models(
    dbsession: Session,
    rest_client: AsyncClient,
) -> None:
    """Listing models returns them ordered by priority, highest first."""
    low_priority = ChatGptModelFactory(priority=0)
    top_priority = ChatGptModelFactory(priority=42)
    mid_priority = ChatGptModelFactory(priority=1)

    response = await rest_client.get(url="/api/chatgpt/models")
    assert response.status_code == 200

    # Expected order: 42, 1, 0 — strictly descending priority.
    expected_payload = [
        {"id": model.id, "model": model.model, "priority": model.priority}
        for model in (top_priority, mid_priority, low_priority)
    ]
    assert_that(response.json()["data"]).is_equal_to(expected_payload)
async def test_change_chagpt_model_priority(
    dbsession: Session,
    rest_client: AsyncClient,
    faker: Faker,
) -> None:
    """PUT .../priority updates only the targeted model's priority."""
    first_model = ChatGptModelFactory(priority=0)
    second_model = ChatGptModelFactory(priority=1)
    new_priority = faker.random_int(min=2, max=7)

    response = await rest_client.put(
        url=f"/api/chatgpt/models/{second_model.id}/priority", json={"priority": new_priority}
    )
    assert response.status_code == 202

    # Ordering by priority still puts first_model before second_model.
    refreshed_first, refreshed_second = dbsession.query(ChatGpt).order_by(ChatGpt.priority).all()
    assert first_model.model == refreshed_first.model
    assert second_model.model == refreshed_second.model

    # The targeted row now carries the requested priority.
    updated_row = dbsession.get(ChatGpt, second_model.id)
    assert updated_row.priority == new_priority  # type: ignore[union-attr]
async def test_reset_chatgpt_models_priority(
    dbsession: Session,
    rest_client: AsyncClient,
) -> None:
    """Resetting priorities zeroes every model, including the high-priority one."""
    ChatGptModelFactory.create_batch(size=4)
    ChatGptModelFactory(priority=42)

    response = await rest_client.put(url="/api/chatgpt/models/priority/reset")
    assert response.status_code == 202

    # One query serves both the count and the per-row checks; the original
    # redundantly ran the identical query twice.
    models = dbsession.query(ChatGpt).all()
    assert len(models) == 5
    for model in models:
        assert model.priority == 0
async def test_create_new_chatgpt_model(
    dbsession: Session,
    rest_client: AsyncClient,
    faker: Faker,
) -> None:
    """POST /api/chatgpt/models inserts a new row and echoes it back."""
    ChatGptModelFactory.create_batch(size=2)
    ChatGptModelFactory(priority=42)

    new_model_name = "new-gpt-model"
    new_model_priority = faker.random_int(min=1, max=5)

    assert len(dbsession.query(ChatGpt).all()) == 3

    payload = {
        "model": new_model_name,
        "priority": new_model_priority,
    }
    response = await rest_client.post(url="/api/chatgpt/models", json=payload)
    assert response.status_code == 201

    assert len(dbsession.query(ChatGpt).all()) == 4

    # The freshly inserted row has the largest id.
    created_row = dbsession.query(ChatGpt).order_by(desc(ChatGpt.id)).limit(1).one()
    assert created_row.model == new_model_name
    assert created_row.priority == new_model_priority

    # The API response mirrors the request payload.
    assert response.json() == payload
async def test_add_existing_chatgpt_model(
    dbsession: Session,
    rest_client: AsyncClient,
    faker: Faker,
) -> None:
    """Posting an already-present model name must not create a duplicate row."""
    ChatGptModelFactory.create_batch(size=2)
    existing_model = ChatGptModelFactory(priority=42)

    assert len(dbsession.query(ChatGpt).all()) == 3

    response = await rest_client.post(
        url="/api/chatgpt/models",
        json={
            "model": existing_model.model,
            "priority": faker.random_int(min=1, max=5),
        },
    )
    assert response.status_code == 201

    # Row count is unchanged: the existing model was reused, not duplicated.
    assert len(dbsession.query(ChatGpt).all()) == 3
async def test_delete_chatgpt_model(
    dbsession: Session,
    rest_client: AsyncClient,
) -> None:
    """DELETE removes exactly the targeted model and leaves the rest intact."""
    ChatGptModelFactory.create_batch(size=2)
    doomed_model = ChatGptModelFactory(priority=42)

    assert len(dbsession.query(ChatGpt).all()) == 3

    response = await rest_client.delete(url=f"/api/chatgpt/models/{doomed_model.id}")
    assert response.status_code == 204

    remaining = dbsession.query(ChatGpt).all()
    assert len(remaining) == 2
    assert doomed_model not in remaining

View File

@@ -8,18 +8,20 @@ import telegram
from assertpy import assert_that
from faker import Faker
from httpx import AsyncClient, Response
from sqlalchemy.orm import Session
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update
from constants import BotStagesEnum
from core.bot.app import BotApplication, BotQueue
from main import Application
from settings.config import AppSettings, settings
from settings.config import AppSettings
from tests.integration.bot.networking import MockedRequest
from tests.integration.factories.bot import (
BotCallBackQueryFactory,
BotMessageFactory,
BotUpdateFactory,
CallBackFactory,
ChatGptModelFactory,
)
from tests.integration.utils import mocked_ask_question_api
@@ -62,6 +64,22 @@ async def test_bot_queue(
assert bot_queue.queue.empty()
async def test_no_update_message(
    main_application: Application,
    test_settings: AppSettings,
) -> None:
    """An update whose message is absent must not trigger an outgoing reply."""
    send_message_patch = mock.patch.object(
        telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
    )
    with send_message_patch as mocked_send_message:
        empty_update = BotUpdateFactory(message=None)
        update = Update.de_json(data=empty_update, bot=main_application.bot_app.bot)
        await main_application.bot_app.application.process_update(update=update)

        # No message ⇒ the bot stays silent.
        assert mocked_send_message.called is False
async def test_help_command(
main_application: Application,
test_settings: AppSettings,
@@ -150,9 +168,12 @@ async def test_about_me_callback_action(
async def test_about_bot_callback_action(
dbsession: Session,
main_application: Application,
test_settings: AppSettings,
) -> None:
ChatGptModelFactory(priority=0)
model_with_highest_priority = ChatGptModelFactory(priority=1)
with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
bot_update = BotCallBackQueryFactory(
message=BotMessageFactory.create_instance(text="Список основных команд:"),
@@ -164,7 +185,7 @@ async def test_about_bot_callback_action(
)
assert mocked_reply_text.call_args.args == (
f"Бот использует бесплатную модель {settings.GPT_MODEL} для ответов на вопросы. "
f"Бот использует бесплатную модель {model_with_highest_priority.model} для ответов на вопросы. "
f"\nПринимает запросы на разных языках.\n\nБот так же умеет переводить русские голосовые сообщения "
f"в текст. Просто пришлите голосовуху и получите поток сознания в виде текста, но без знаков препинания",
)
@@ -189,9 +210,11 @@ async def test_website_callback_action(
async def test_ask_question_action(
dbsession: Session,
main_application: Application,
test_settings: AppSettings,
) -> None:
ChatGptModelFactory.create_batch(size=3)
with mock.patch.object(
telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
) as mocked_send_message, mocked_ask_question_api(
@@ -214,9 +237,11 @@ async def test_ask_question_action(
async def test_ask_question_action_not_success(
dbsession: Session,
main_application: Application,
test_settings: AppSettings,
) -> None:
ChatGptModelFactory.create_batch(size=3)
with mock.patch.object(
telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
) as mocked_send_message, mocked_ask_question_api(
@@ -238,9 +263,11 @@ async def test_ask_question_action_not_success(
async def test_ask_question_action_critical_error(
dbsession: Session,
main_application: Application,
test_settings: AppSettings,
) -> None:
ChatGptModelFactory.create_batch(size=3)
with mock.patch.object(
telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
) as mocked_send_message, mocked_ask_question_api(
@@ -260,19 +287,3 @@ async def test_ask_question_action_critical_error(
},
include=["text", "chat_id"],
)
async def test_no_update_message(
    main_application: Application,
    test_settings: AppSettings,
) -> None:
    """Verify that an update with no message produces no bot reply."""
    # Patch the low-level send_message so no real Telegram call is made.
    with mock.patch.object(
        telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
    ) as mocked_send_message:
        # Factory update with message=None simulates a messageless update.
        bot_update = BotUpdateFactory(message=None)
        await main_application.bot_app.application.process_update(
            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
        )
        # The bot must not attempt to answer a messageless update.
        assert mocked_send_message.called is False