add more tests (#19)

Dmitry Afanasyev 2023-09-29 13:54:57 +03:00 committed by GitHub
parent 7cfda281f7
commit 90ec8ccec1
15 changed files with 263 additions and 76 deletions

View File

@@ -1,21 +1,21 @@
from enum import IntEnum, StrEnum, auto
from enum import StrEnum
AUDIO_SEGMENT_DURATION = 120 * 1000
API_PREFIX = "/api"
CHAT_GPT_BASE_URL = "http://chat_service:8858/backend-api/v2/conversation"
CHAT_GPT_BASE_URI = "backend-api/v2/conversation"
class BotStagesEnum(IntEnum):
about_me = auto()
website = auto()
help = auto()
about_bot = auto()
class BotStagesEnum(StrEnum):
about_me = "about_me"
website = "website"
help = "help"
about_bot = "about_bot"
class BotEntryPoints(IntEnum):
start_routes = auto()
end = auto()
class BotEntryPoints(StrEnum):
start_routes = "start_routes"
end = "end"
class LogLevelEnum(StrEnum):
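
For context on the IntEnum → StrEnum switch: StrEnum members are str subclasses equal to their declared value, which is what lets the CallbackQueryHandler patterns later in this commit drop the str() wrapping. A small illustrative check, not part of the commit:

```python
from constants import BotStagesEnum

# StrEnum members behave as plain strings equal to their declared value
assert BotStagesEnum.about_me == "about_me"
assert "^" + BotStagesEnum.about_me + "$" == "^about_me$"  # the pattern passed to CallbackQueryHandler
```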

View File

@@ -8,7 +8,7 @@ from typing import Any
from fastapi import Request, Response
from loguru import logger
from telegram import Update
from telegram import Bot, Update
from telegram.ext import Application
from settings.config import AppSettings
@@ -19,9 +19,8 @@ class BotApplication:
self,
settings: AppSettings,
handlers: list[Any],
application: Application | None = None, # type: ignore[type-arg]
) -> None:
self.application: Application = application or ( # type: ignore
self.application: Application = ( # type: ignore[type-arg]
Application.builder().token(token=settings.TELEGRAM_API_TOKEN).build()
)
self.handlers = handlers
@@ -29,6 +28,10 @@ class BotApplication:
self.start_with_webhook = settings.START_WITH_WEBHOOK
self._add_handlers()
@property
def bot(self) -> Bot:
return self.application.bot
async def set_webhook(self) -> None:
_, webhook_info = await asyncio.gather(self.application.initialize(), self.application.bot.get_webhook_info())
if not webhook_info.url:

View File

@@ -10,13 +10,13 @@ from loguru import logger
from telegram import InlineKeyboardMarkup, Update
from telegram.ext import ContextTypes
from constants import CHAT_GPT_BASE_URL, BotEntryPoints
from constants import CHAT_GPT_BASE_URI, BotEntryPoints
from core.keyboards import main_keyboard
from core.utils import SpeechToTextService
from settings.config import settings
async def main_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> int:
async def main_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> str:
"""Send message on `/start`."""
if not update.message:
return BotEntryPoints.end
@@ -29,7 +29,7 @@ async def about_me(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
if not update.effective_message:
return None
await update.effective_message.reply_text(
'Автор бота: *Дмитрий Афанасьев*\n\nTg nickname: *Balshtg*', parse_mode='MarkdownV2'
"Автор бота: *Дмитрий Афанасьев*\n\nTg nickname: *Balshtg*", parse_mode="MarkdownV2"
)
@@ -40,7 +40,7 @@ async def about_bot(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
"Бот использует бесплатную модель Chat-GPT3.5 для ответов на вопросы. "
"Принимает запросы на разных языках. \n\nБот так же умеет переводить голосовые сообщения в текст. "
"Просто пришлите голосовуху и получите поток сознания без запятых в виде текста",
parse_mode='Markdown',
parse_mode="Markdown",
)
@@ -88,12 +88,12 @@ async def ask_question(update: Update, context: ContextTypes.DEFAULT_TYPE) -> No
}
transport = AsyncHTTPTransport(retries=3)
async with AsyncClient(transport=transport, timeout=50) as client:
async with AsyncClient(base_url=settings.GPT_BASE_HOST, transport=transport, timeout=50) as client:
try:
response = await client.post(CHAT_GPT_BASE_URL, json=chat_gpt_request, timeout=50)
response = await client.post(CHAT_GPT_BASE_URI, json=chat_gpt_request, timeout=50)
status = response.status_code
if status != httpx.codes.OK:
logger.info(f'got response status: {status} from chat api', data=chat_gpt_request)
logger.info(f"got response status: {status} from chat api", data=chat_gpt_request)
await update.message.reply_text(
"Что-то пошло не так, попробуйте еще раз или обратитесь к администратору"
)
@@ -117,7 +117,7 @@ async def voice_recognize(update: Update, context: ContextTypes.DEFAULT_TYPE) ->
with tempfile.NamedTemporaryFile(delete=False) as tmpfile:
tmpfile.write(sound_bytes)
logger.info('file has been saved', filename=tmpfile.name)
logger.info("file has been saved", filename=tmpfile.name)
speech_to_text_service = SpeechToTextService(filename=tmpfile.name)
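
ask_question now pairs a relative CHAT_GPT_BASE_URI with base_url on the AsyncClient instead of a single absolute URL. A minimal sketch of the httpx URL merging this relies on, using the GPT_BASE_HOST default from the settings diff further down; the snippet is illustrative, not part of the commit:

```python
from httpx import AsyncClient

# With base_url set on the client, relative request paths are merged onto it.
client = AsyncClient(base_url="http://chat_service:8858")
request = client.build_request("POST", "backend-api/v2/conversation", json={})
assert str(request.url) == "http://chat_service:8858/backend-api/v2/conversation"
```

Making the host a separate setting is also what lets the tests point the same client at a respx-mocked host.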

View File

@@ -39,16 +39,16 @@ bot_event_handlers.add_handler(
entry_points=[CommandHandler("start", main_command)],
states={
BotEntryPoints.start_routes: [
CallbackQueryHandler(about_me, pattern="^" + str(BotStagesEnum.about_me) + "$"),
CallbackQueryHandler(website, pattern="^" + str(BotStagesEnum.website) + "$"),
CallbackQueryHandler(help_command, pattern="^" + str(BotStagesEnum.help) + "$"),
CallbackQueryHandler(about_bot, pattern="^" + str(BotStagesEnum.about_bot) + "$"),
CallbackQueryHandler(about_me, pattern="^" + BotStagesEnum.about_me + "$"),
CallbackQueryHandler(website, pattern="^" + BotStagesEnum.website + "$"),
CallbackQueryHandler(help_command, pattern="^" + BotStagesEnum.help + "$"),
CallbackQueryHandler(about_bot, pattern="^" + BotStagesEnum.about_bot + "$"),
],
},
fallbacks=[CommandHandler("start", main_command)],
)
)
bot_event_handlers.add_handler(CallbackQueryHandler(about_me, pattern="^" + str(BotStagesEnum.about_me) + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(website, pattern="^" + str(BotStagesEnum.website) + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(help_command, pattern="^" + str(BotStagesEnum.help) + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(about_bot, pattern="^" + str(BotStagesEnum.about_bot) + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(about_me, pattern="^" + BotStagesEnum.about_me + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(website, pattern="^" + BotStagesEnum.website + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(help_command, pattern="^" + BotStagesEnum.help + "$"))
bot_event_handlers.add_handler(CallbackQueryHandler(about_bot, pattern="^" + BotStagesEnum.about_bot + "$"))

View File

@@ -2,13 +2,13 @@ from telegram import InlineKeyboardButton
from constants import BotStagesEnum
main_keyboard = [
[
main_keyboard = (
(
InlineKeyboardButton("Обо мне", callback_data=str(BotStagesEnum.about_me)),
InlineKeyboardButton("Веб версия", callback_data=str(BotStagesEnum.website)),
],
[
),
(
InlineKeyboardButton("Помощь", callback_data=str(BotStagesEnum.help)),
InlineKeyboardButton("О боте", callback_data=str(BotStagesEnum.about_bot)),
],
]
),
)

View File

@@ -31,7 +31,7 @@ class InterceptHandler(logging.Handler):
logger.opt(depth=depth, exception=record.exc_info).log(
level,
record.getMessage().replace(settings.TELEGRAM_API_TOKEN, "TELEGRAM_API_TOKEN".center(24, '*')),
record.getMessage().replace(settings.TELEGRAM_API_TOKEN, "TELEGRAM_API_TOKEN".center(24, "*")),
)

View File

@@ -55,7 +55,7 @@ class SpeechToTextService:
self._convert_audio_to_text()
def _convert_audio_to_text(self) -> None:
wav_filename = f'{self.filename}.wav'
wav_filename = f"{self.filename}.wav"
speech = AudioSegment.from_wav(wav_filename)
speech_duration = len(speech)
@@ -82,8 +82,8 @@ class SpeechToTextService:
logger.error("error temps files not deleted", error=error, filenames=[self.filename, self.filename])
def _convert_file_to_wav(self) -> None:
new_filename = self.filename + '.wav'
cmd = ['ffmpeg', '-loglevel', 'quiet', '-i', self.filename, '-vn', new_filename]
new_filename = self.filename + ".wav"
cmd = ["ffmpeg", "-loglevel", "quiet", "-i", self.filename, "-vn", new_filename]
try:
subprocess.run(args=cmd) # noqa: S603
logger.info("file has been converted to wav", filename=new_filename)
@@ -96,7 +96,7 @@ class SpeechToTextService:
with AudioFile(tmp_filename) as source:
audio_text = self.recognizer.listen(source)
try:
text = self.recognizer.recognize_google(audio_text, language='ru-RU')
text = self.recognizer.recognize_google(audio_text, language="ru-RU")
os.remove(tmp_filename)
return text
except SpeechRecognizerError as error:

View File

@@ -12,6 +12,8 @@ TELEGRAM_API_TOKEN="123456789:AABBCCDDEEFFaabbccddeeff-1234567890"
DOMAIN="http://localhost"
URL_PREFIX=
GPT_BASE_HOST="http://localhost"
# set to true to start with webhook. Else bot will start on polling method
START_WITH_WEBHOOK="false"
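
The new flag only appears in the env templates in this diff. A hypothetical sketch of how such a flag is typically consumed at startup; start_bot and its polling branch are assumptions, not code from this repository:

```python
from core.bot import BotApplication

async def start_bot(bot_app: BotApplication, start_with_webhook: bool) -> None:
    # Hypothetical wiring: the flag picks how Telegram updates reach the bot.
    if start_with_webhook:
        await bot_app.set_webhook()  # set_webhook() is shown in the BotApplication diff above
    else:
        await bot_app.application.updater.start_polling()  # pull updates via long polling instead
```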

View File

@@ -12,6 +12,8 @@ TELEGRAM_API_TOKEN="123456789:AABBCCDDEEFFaabbccddeeff-1234567890"
DOMAIN="http://localhost"
URL_PREFIX=
GPT_BASE_HOST="http://localhost"
# set to true to start with webhook. Else bot will start on polling method
START_WITH_WEBHOOK="false"

View File

@@ -49,6 +49,7 @@ class AppSettings(SentrySettings, BaseSettings):
URL_PREFIX: str = ""
GPT_MODEL: str = "gpt-3.5-turbo-stream-AItianhuSpace"
GPT_BASE_HOST: str = "http://chat_service:8858"
# quantity of workers for uvicorn
WORKERS_COUNT: int = 1
# Enable uvicorn reloading

View File

@@ -4,23 +4,25 @@ pytest framework. A common change is to allow monkeypatching of the class member
enforcing slots in the subclasses."""
import asyncio
from asyncio import AbstractEventLoop
from contextlib import contextmanager
from datetime import tzinfo
from typing import Any, AsyncGenerator
from typing import Any, AsyncGenerator, Iterator
import pytest
import pytest_asyncio
from fastapi import FastAPI
from httpx import AsyncClient
import respx
from httpx import AsyncClient, Response
from pytest_asyncio.plugin import SubRequest
from telegram import Bot, User
from telegram.ext import Application, ApplicationBuilder, Defaults, ExtBot
from constants import CHAT_GPT_BASE_URI
from core.bot import BotApplication
from core.handlers import bot_event_handlers
from main import Application as AppApplication
from settings.config import AppSettings, get_settings
from tests.integration.bot.networking import NonchalantHttpxRequest
from tests.integration.factories.bot import BotInfoFactory
from tests.integration.factories.bot import BotInfoFactory, BotUserFactory
@pytest.fixture(scope="session")
@@ -123,6 +125,7 @@ def bot_info() -> dict[str, Any]:
async def bot_application(bot_info: dict[str, Any]) -> AsyncGenerator[Any, None]:
# We build a new bot each time so that we use `app` in a context manager without problems
application = ApplicationBuilder().bot(make_bot(bot_info)).application_class(PytestApplication).build()
await application.initialize()
yield application
if application.running:
await application.stop()
@@ -226,27 +229,41 @@ def provider_token(bot_info: dict[str, Any]) -> str:
@pytest_asyncio.fixture(scope="session")
async def main_application(
bot_application: PytestApplication, test_settings: AppSettings
) -> AsyncGenerator[FastAPI, None]:
) -> AsyncGenerator[AppApplication, None]:
bot_app = BotApplication(
application=bot_application,
settings=test_settings,
handlers=bot_event_handlers.handlers,
)
fast_api_app = AppApplication(settings=test_settings, bot_app=bot_app).fastapi_app
bot_app.application._initialized = True
bot_app.application.bot = make_bot(BotInfoFactory())
bot_app.application.bot._bot_user = BotUserFactory()
fast_api_app = AppApplication(settings=test_settings, bot_app=bot_app)
yield fast_api_app
@pytest_asyncio.fixture()
async def rest_client(
main_application: FastAPI,
main_application: AppApplication,
) -> AsyncGenerator[AsyncClient, None]:
"""
Default http client. Use to test unauthorized requests, public endpoints
or special authorization methods.
"""
async with AsyncClient(
app=main_application,
app=main_application.fastapi_app,
base_url="http://test",
headers={"Content-Type": "application/json"},
) as client:
yield client
@contextmanager
def mocked_ask_question_api(host: str) -> Iterator[respx.MockRouter]:
with respx.mock(
assert_all_mocked=True,
assert_all_called=True,
base_url=host,
) as respx_mock:
ask_question_route = respx_mock.post(url=CHAT_GPT_BASE_URI, name="ask_question")
ask_question_route.return_value = Response(status_code=200, text="Привет! Как я могу помочь вам сегодня?")
yield respx_mock
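
mocked_ask_question_api mocks any POST to CHAT_GPT_BASE_URI on the given host and, via assert_all_called=True, insists every mocked route is hit before the context manager exits. A minimal usage sketch mirroring the tests below; test_settings stands for the settings fixture and the test body is elided:

```python
# Usage sketch (not part of the commit):
with mocked_ask_question_api(host=test_settings.GPT_BASE_HOST) as respx_mock:
    ...  # drive the ask_question handler so it POSTs to CHAT_GPT_BASE_URI
    assert respx_mock.routes["ask_question"].called
```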

View File

@@ -1,24 +1,30 @@
import asyncio
import time
from asyncio import AbstractEventLoop
from typing import Any
from unittest import mock
import pytest
import telegram
from assertpy import assert_that
from faker import Faker
from httpx import AsyncClient
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update
from constants import BotStagesEnum
from core.bot import BotApplication, BotQueue
from main import Application
from settings.config import AppSettings
from tests.integration.bot.conftest import mocked_ask_question_api
from tests.integration.bot.networking import MockedRequest
from tests.integration.factories.bot import (
BotChatFactory,
BotEntitleFactory,
BotUserFactory,
BotCallBackQueryFactory,
BotMessageFactory,
BotUpdateFactory,
CallBackFactory,
)
pytestmark = [
pytest.mark.asyncio,
pytest.mark.enable_socket,
]
@@ -34,10 +40,10 @@ async def test_bot_webhook_endpoint(
rest_client: AsyncClient,
main_application: Application,
) -> None:
bot_update = create_bot_update()
bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
response = await rest_client.post(url="/api/123456789:AABBCCDDEEFFaabbccddeeff-1234567890", json=bot_update)
assert response.status_code == 202
update = await main_application.state._state["queue"].queue.get() # type: ignore[attr-defined]
update = await main_application.fastapi_app.state._state["queue"].queue.get()
update = update.to_dict()
assert update["update_id"] == bot_update["update_id"]
assert_that(update["message"]).is_equal_to(
@@ -51,22 +57,124 @@ async def test_bot_queue(
) -> None:
bot_queue = BotQueue(bot_app=bot)
event_loop.create_task(bot_queue.get_updates_from_queue())
bot_update = create_bot_update()
bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
mocked_request = MockedRequest(bot_update)
await bot_queue.put_updates_on_queue(mocked_request) # type: ignore
await asyncio.sleep(1)
assert bot_queue.queue.empty()
def create_bot_update() -> dict[str, Any]:
bot_update: dict[str, Any] = {}
bot_update["update_id"] = faker.random_int(min=10**8, max=10**9 - 1)
bot_update["message"] = {
"message_id": faker.random_int(min=10**8, max=10**9 - 1),
"from": BotUserFactory()._asdict(),
"chat": BotChatFactory()._asdict(),
"date": time.time(),
"text": "/chatid",
"entities": [BotEntitleFactory()],
}
return bot_update
async def test_help_command(
main_application: Application,
test_settings: AppSettings,
) -> None:
with mock.patch.object(
telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
) as mocked_send_message:
bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
await main_application.bot_app.application.process_update(
update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
)
assert_that(mocked_send_message.call_args.kwargs).is_equal_to(
{
"text": "Help!",
"api_kwargs": {"text": "Список основных команд:"},
"chat_id": bot_update["message"]["chat"]["id"],
"reply_markup": InlineKeyboardMarkup(
inline_keyboard=(
(
InlineKeyboardButton(callback_data="about_me", text="Обо мне"),
InlineKeyboardButton(callback_data="website", text="Веб версия"),
),
(
InlineKeyboardButton(callback_data="help", text="Помощь"),
InlineKeyboardButton(callback_data="about_bot", text="О боте"),
),
)
),
},
include=["text", "api_kwargs", "chat_id", "reply_markup"],
)
async def test_about_me_callback_action(
main_application: Application,
test_settings: AppSettings,
) -> None:
with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
bot_update = BotCallBackQueryFactory(
message=BotMessageFactory.create_instance(text="Список основных команд:"),
callback_query=CallBackFactory(data=BotStagesEnum.about_me),
)
await main_application.bot_app.application.process_update(
update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
)
assert mocked_reply_text.call_args.args == ("Автор бота: *Дмитрий Афанасьев*\n\nTg nickname: *Balshtg*",)
assert mocked_reply_text.call_args.kwargs == {"parse_mode": "MarkdownV2"}
async def test_about_bot_callback_action(
main_application: Application,
test_settings: AppSettings,
) -> None:
with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
bot_update = BotCallBackQueryFactory(
message=BotMessageFactory.create_instance(text="Список основных команд:"),
callback_query=CallBackFactory(data=BotStagesEnum.about_bot),
)
await main_application.bot_app.application.process_update(
update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
)
assert mocked_reply_text.call_args.args == (
"Бот использует бесплатную модель Chat-GPT3.5 для ответов на вопросы. Принимает запросы на разных языках. "
"\n\nБот так же умеет переводить голосовые сообщения в текст. Просто пришлите голосовуху и получите поток "
"сознания без запятых в виде текста",
)
assert mocked_reply_text.call_args.kwargs == {"parse_mode": "Markdown"}
async def test_website_callback_action(
main_application: Application,
test_settings: AppSettings,
) -> None:
with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
bot_update = BotCallBackQueryFactory(
message=BotMessageFactory.create_instance(text="Список основных команд:"),
callback_query=CallBackFactory(data=BotStagesEnum.website),
)
await main_application.bot_app.application.process_update(
update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
)
assert mocked_reply_text.call_args.args == ("Веб версия: http://localhost/chat/",)
async def test_ask_question_action(
main_application: Application,
test_settings: AppSettings,
) -> None:
with mock.patch.object(
telegram._bot.Bot, "send_message", return_value=lambda *args, **kwargs: (args, kwargs)
) as mocked_send_message, mocked_ask_question_api(host=test_settings.GPT_BASE_HOST):
bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="Привет!"))
bot_update["message"].pop("entities")
await main_application.bot_app.application.process_update(
update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
)
assert_that(mocked_send_message.call_args.kwargs).is_equal_to(
{
"text": "Привет! Как я могу помочь вам сегодня?",
"chat_id": bot_update["message"]["chat"]["id"],
},
include=["text", "chat_id"],
)

View File

@@ -1,8 +1,12 @@
import string
import time
from typing import Any
import factory
import factory.fuzzy
from faker import Faker
from constants import BotStagesEnum
from tests.integration.factories.models import Chat, User
faker = Faker("ru_RU")
@@ -55,3 +59,38 @@ class BotEntitleFactory(factory.DictFactory):
type = "bot_command"
offset = 0
length = 7
class BotMessageFactory(factory.DictFactory):
message_id = factory.Faker("random_int", min=10**8, max=10**9 - 1)
chat = factory.LazyFunction(lambda: BotChatFactory()._asdict())
date = time.time()
text = factory.Faker("text")
entities = factory.LazyFunction(lambda: [BotEntitleFactory()])
@classmethod
def create_instance(cls, **kwargs: Any) -> dict[str, Any]:
data = {**cls.build(**kwargs), "from": BotUserFactory()._asdict()}
return data
class BotUpdateFactory(factory.DictFactory):
update_id = factory.Faker("random_int", min=10**8, max=10**9 - 1)
message = factory.LazyFunction(lambda: BotMessageFactory.create_instance())
class CallBackFactory(factory.DictFactory):
id = factory.Faker("bothify", text="###################")
chat_instance = factory.Faker("bothify", text="###################")
message = factory.LazyFunction(lambda: BotMessageFactory.create_instance())
data = factory.fuzzy.FuzzyChoice(BotStagesEnum)
@classmethod
def create_instance(cls, **kwargs: Any) -> dict[str, Any]:
data = {**cls.build(**kwargs), "from": BotUserFactory()._asdict()}
return data
class BotCallBackQueryFactory(factory.DictFactory):
update_id = factory.Faker("random_int", min=10**8, max=10**9 - 1)
callback_query = factory.LazyFunction(lambda: BotMessageFactory.create_instance())
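
These DictFactory subclasses produce plain dicts shaped like Telegram update payloads. A short, self-contained sketch of how they feed Update.de_json, mirroring the tests above; make_help_update is an illustrative helper, not part of the commit:

```python
from telegram import Bot, Update

from tests.integration.factories.bot import BotMessageFactory, BotUpdateFactory

def make_help_update(bot: Bot) -> Update:
    # Build a fake /help update dict and parse it into a real telegram.Update
    bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
    return Update.de_json(data=bot_update, bot=bot)
```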

poetry.lock generated
View File

@@ -583,13 +583,13 @@ python-dateutil = ">=2.4"
[[package]]
name = "fastapi"
version = "0.103.1"
version = "0.103.2"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.7"
files = [
{file = "fastapi-0.103.1-py3-none-any.whl", hash = "sha256:5e5f17e826dbd9e9b5a5145976c5cd90bcaa61f2bf9a69aca423f2bcebe44d83"},
{file = "fastapi-0.103.1.tar.gz", hash = "sha256:345844e6a82062f06a096684196aaf96c1198b25c06b72c1311b882aa2d8a35d"},
{file = "fastapi-0.103.2-py3-none-any.whl", hash = "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e"},
{file = "fastapi-0.103.2.tar.gz", hash = "sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653"},
]
[package.dependencies]
@@ -2567,6 +2567,20 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "respx"
version = "0.20.2"
description = "A utility for mocking out the Python HTTPX and HTTP Core libraries."
optional = false
python-versions = ">=3.7"
files = [
{file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"},
{file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"},
]
[package.dependencies]
httpx = ">=0.21.0"
[[package]]
name = "rich"
version = "13.5.3"
@@ -3027,13 +3041,13 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
[[package]]
name = "wcwidth"
version = "0.2.6"
version = "0.2.7"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
files = [
{file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"},
{file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
{file = "wcwidth-0.2.7-py2.py3-none-any.whl", hash = "sha256:fabf3e32999d9b0dab7d19d845149f326f04fe29bac67709ee071dbd92640a36"},
{file = "wcwidth-0.2.7.tar.gz", hash = "sha256:1b6d30a98ddd5ce9bbdb33658191fd2423fc9da203fe3ef1855407dcb7ee4e26"},
]
[[package]]
@@ -3096,4 +3110,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "10d088f7f2f03522ba0b5e205cdd0d33ba2c41ae59a25fcf580a6c871fd967a1"
content-hash = "f8faa71d22eb911772b7607eb35d2feb1e5dbe0b0bf2c602373b1e31bffaf820"

View File

@@ -63,6 +63,7 @@ pytest-split = "^0.8"
pytest-freezegun = "^0.4"
pytest-socket = "^0.6"
assertpy = "^1.1"
respx = "^0.20"
coverage = "^7.3"