Mirror of https://github.com/Balshgit/gpt_chat_bot.git (synced 2025-09-10 17:20:41 +03:00)

light refactoring (#35)

* update poetry lock
* simple refactoring
* move gpt-3.5-turbo-stream-aivvm to deprecated provider

Parent: 7ef8d6e19d
Commit: 94b50f1b7c
@@ -46,7 +46,6 @@ class ChatGptModelsEnum(StrEnum):
    gpt_3_stream_binjie = "gpt-3-stream-binjie"
    gpt_3_5_turbo_stream_CodeLinkAva = "gpt-3.5-turbo-stream-CodeLinkAva"
    gpt_4_stream_ChatBase = "gpt-4-stream-ChatBase"
    gpt_3_5_turbo_stream_aivvm = "gpt-3.5-turbo-stream-aivvm"
    gpt_3_5_turbo_16k_stream_Ylokh = "gpt-3.5-turbo-16k-stream-Ylokh"
    gpt_3_5_turbo_stream_Vitalentum = "gpt-3.5-turbo-stream-Vitalentum"
    gpt_3_5_turbo_stream_GptGo = "gpt-3.5-turbo-stream-GptGo"
@@ -56,9 +55,7 @@ class ChatGptModelsEnum(StrEnum):
    gpt_4_stream_Chatgpt4Online = "gpt-4-stream-Chatgpt4Online"
    gpt_3_5_turbo_stream_gptalk = "gpt-3.5-turbo-stream-gptalk"
    gpt_3_5_turbo_stream_ChatgptDemo = "gpt-3.5-turbo-stream-ChatgptDemo"
    gpt_3_5_turbo_stream_H2o = "gpt-3.5-turbo-stream-H2o"
    gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove"
    gpt_3_5_turbo_ChatgptDuo = "gpt-3.5-turbo-ChatgptDuo"

    @classmethod
    def values(cls) -> set[str]:
@@ -66,4 +63,7 @@ class ChatGptModelsEnum(StrEnum):

    @staticmethod
    def _deprecated() -> set[str]:
        return {"gpt-3.5-turbo-stream-H2o", "gpt-3.5-turbo-stream-gptforlove", "gpt-3.5-turbo-ChatgptDuo"}
        return {
            "gpt-3.5-turbo-stream-gptforlove",
            "gpt-3.5-turbo-stream-aivvm",
        }
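Note: the body of values() is not part of this diff. A minimal sketch, assuming values() simply subtracts the deprecated names from the full set of enum values, of how the two methods fit together after this change; the two member names kept below are only a sample:

from enum import StrEnum

class ChatGptModelsEnum(StrEnum):
    gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove"
    gpt_3_5_turbo_stream_GptGo = "gpt-3.5-turbo-stream-GptGo"

    @classmethod
    def values(cls) -> set[str]:
        # assumed filtering step: deprecated providers never reach the bot's model list
        return {model.value for model in cls} - cls._deprecated()

    @staticmethod
    def _deprecated() -> set[str]:
        return {
            "gpt-3.5-turbo-stream-gptforlove",
            "gpt-3.5-turbo-stream-aivvm",
        }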
@@ -27,11 +27,12 @@ class Application:
            openapi_url="/" + "/".join([settings.api_prefix.strip("/"), "openapi.json"]),
            default_response_class=UJSONResponse,
        )
        self.app.state.settings = settings
        self.app.state.queue = BotQueue(bot_app=bot_app)
        self.app.state.bot_app = bot_app
        self.db = Database(settings)
        self.bot_app = bot_app
        self.db = Database(settings)
        self._bot_queue = BotQueue(bot_app=self.bot_app)
        self.app.state.settings = settings
        self.app.state.queue = self._bot_queue
        self.app.state.bot_app = self.bot_app

        self.app.on_event("startup")(startup(self.app, settings))
        self.app.on_event("shutdown")(shutdown(self.app))
@@ -57,6 +58,10 @@ class Application:
    def fastapi_app(self) -> FastAPI:
        return self.app

    @cached_property
    def bot_queue(self) -> BotQueue:
        return self._bot_queue

    def configure_hooks(self) -> None:
        if self.bot_app.start_with_webhook:
            self.app.add_event_handler("startup", self._bot_start_up)
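Note: a self-contained sketch of the wiring these two hunks introduce. The queue is created once, stored on a private attribute, shared with FastAPI's state, and exposed through a typed cached_property; a bare asyncio.Queue stands in for BotQueue, and MiniApplication stands in for Application.

import asyncio
from functools import cached_property

from fastapi import FastAPI

class MiniApplication:  # stand-in for Application
    def __init__(self) -> None:
        self.app = FastAPI()
        self._bot_queue: asyncio.Queue = asyncio.Queue()
        self.app.state.queue = self._bot_queue  # request handlers still reach it via app.state

    @cached_property
    def bot_queue(self) -> asyncio.Queue:
        return self._bot_queue

application = MiniApplication()
assert application.bot_queue is application.app.state.queue  # one shared queue object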
@@ -14,8 +14,8 @@ BASE_DIR = Path(__file__).parent.parent
SHARED_DIR = BASE_DIR.resolve().joinpath("shared")
SHARED_DIR.mkdir(exist_ok=True)

SHARED_DIR.joinpath("logs").mkdir(exist_ok=True)
DIR_LOGS = SHARED_DIR.joinpath("logs")
DIR_LOGS.mkdir(exist_ok=True)

env_path = f"{BASE_DIR}/settings/.env"
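Note: how DIR_LOGS is consumed is not shown in this diff. A hedged sketch of pointing a rotating file handler at it, reusing the DIR_LOGS constant defined above; the file name bot.log and the rotation limits are hypothetical:

import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler(DIR_LOGS / "bot.log", maxBytes=5 * 1024 * 1024, backupCount=3)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s"))
logging.getLogger().addHandler(handler)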
@@ -41,7 +41,7 @@ async def test_bot_webhook_endpoint(
    bot_update = BotUpdateFactory(message=BotMessageFactory.create_instance(text="/help"))
    response = await rest_client.post(url="/api/CDDEEFFaabbccdd", json=bot_update)
    assert response.status_code == 202
    update = await main_application.fastapi_app.state._state["queue"].queue.get()
    update = await main_application.bot_queue.queue.get()
    update = update.to_dict()
    assert update["update_id"] == bot_update["update_id"]
    assert_that(update["message"]).is_equal_to(
@@ -204,9 +204,7 @@ async def main_application(


@pytest_asyncio.fixture()
async def rest_client(
    main_application: AppApplication,
) -> AsyncGenerator[AsyncClient, None]:
async def rest_client(main_application: AppApplication) -> AsyncGenerator[AsyncClient, None]:
    """
    Default http client. Use to test unauthorized requests, public endpoints
    or special authorization methods.
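Note: a hedged usage sketch for the simplified fixture; the route and the asyncio marker below are placeholders (not taken from the diff), the point is only that public endpoints need no extra client setup:

import pytest
from httpx import AsyncClient

@pytest.mark.asyncio  # or whatever asyncio mode the project's pytest config uses
async def test_public_endpoint(rest_client: AsyncClient) -> None:
    response = await rest_client.get("/api/healthcheck")  # placeholder route
    assert response.status_code == 200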
@@ -1005,3 +1005,70 @@ boost::asio::awaitable<void> FreeGpt::chatGptDuo(std::shared_ptr<Channel> ch, nl
    }
    co_return;
}

boost::asio::awaitable<void> FreeGpt::aivvm(std::shared_ptr<Channel> ch, nlohmann::json json) {
    boost::system::error_code err{};
    ScopeExit auto_exit{[&] { ch->close(); }};

    constexpr std::string_view host = "chat.aivvm.com";
    constexpr std::string_view port = "443";

    constexpr std::string_view user_agent{
        R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:102.0) Gecko/20100101 Firefox/102.0)"};

    boost::asio::ssl::context ctx(boost::asio::ssl::context::tls);
    ctx.set_verify_mode(boost::asio::ssl::verify_none);

    auto client = co_await createHttpClient(ctx, host, port);
    if (!client.has_value()) {
        SPDLOG_ERROR("createHttpClient: {}", client.error());
        co_await ch->async_send(err, client.error(), use_nothrow_awaitable);
        co_return;
    }
    auto& stream_ = client.value();

    boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post, "/api/chat", 11};
    req.set(boost::beast::http::field::host, host);
    req.set(boost::beast::http::field::user_agent, user_agent);
    req.set("Accept", "*/*");
    req.set("accept-language", "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2");
    req.set("origin", "https://chat.aivvm.com");
    req.set("referer", "https://chat.aivvm.com/zh");
    req.set(boost::beast::http::field::content_type, "application/json");
    req.set("sec-fetch-dest", "empty");
    req.set("sec-fetch-mode", "cors");
    req.set("sec-fetch-site", "same-origin");
    req.set("DNT", "1");

    constexpr std::string_view json_str = R"({
        "model":{
            "id":"gpt-3.5-turbo",
            "name":"GPT-3.5",
            "maxLength":12000,
            "tokenLimit":4096
        },
        "messages":[
            {
                "role":"user",
                "content":"hello"
            }
        ],
        "key":"",
        "prompt":"You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
        "temperature":0.7
    })";
    nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);

    request["messages"] = getConversationJson(json);
    SPDLOG_INFO("{}", request.dump(2));

    req.body() = request.dump();
    req.prepare_payload();

    auto result = co_await sendRequestRecvChunk(ch, stream_, req, 200, [&ch](std::string str) {
        boost::system::error_code err{};
        if (!str.empty())
            ch->try_send(err, str);
    });
    co_return;
}
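Note: for readers on the Python side of the project, the same (now deprecated) provider call sketched with httpx. Host, path, headers and payload are taken from the C++ code above; the chunked streaming is simplified, and certificate verification is disabled only to match the C++ client:

import httpx

payload = {
    "model": {"id": "gpt-3.5-turbo", "name": "GPT-3.5", "maxLength": 12000, "tokenLimit": 4096},
    "messages": [{"role": "user", "content": "hello"}],
    "key": "",
    "prompt": (
        "You are ChatGPT, a large language model trained by OpenAI. "
        "Follow the user's instructions carefully. Respond using markdown."
    ),
    "temperature": 0.7,
}

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:102.0) Gecko/20100101 Firefox/102.0",
    "Accept": "*/*",
    "Origin": "https://chat.aivvm.com",
    "Referer": "https://chat.aivvm.com/zh",
    "Content-Type": "application/json",
}

with httpx.Client(verify=False) as client:  # the C++ code also disables certificate verification
    with client.stream("POST", "https://chat.aivvm.com/api/chat", json=payload, headers=headers) as response:
        for chunk in response.iter_text():
            if chunk:
                print(chunk, end="", flush=True)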
@@ -26,7 +26,6 @@ public:
    boost::asio::awaitable<void> you(std::shared_ptr<Channel>, nlohmann::json);
    boost::asio::awaitable<void> binjie(std::shared_ptr<Channel>, nlohmann::json);
    boost::asio::awaitable<void> chatBase(std::shared_ptr<Channel>, nlohmann::json);
    boost::asio::awaitable<void> aivvm(std::shared_ptr<Channel>, nlohmann::json);
    boost::asio::awaitable<void> ylokh(std::shared_ptr<Channel>, nlohmann::json);
    boost::asio::awaitable<void> vitalentum(std::shared_ptr<Channel>, nlohmann::json);
    boost::asio::awaitable<void> gptGo(std::shared_ptr<Channel>, nlohmann::json);
@@ -1471,73 +1471,6 @@ boost::asio::awaitable<void> FreeGpt::chatBase(std::shared_ptr<Channel> ch, nloh
    co_return;
}

boost::asio::awaitable<void> FreeGpt::aivvm(std::shared_ptr<Channel> ch, nlohmann::json json) {
    boost::system::error_code err{};
    ScopeExit auto_exit{[&] { ch->close(); }};

    constexpr std::string_view host = "chat.aivvm.com";
    constexpr std::string_view port = "443";

    constexpr std::string_view user_agent{
        R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:102.0) Gecko/20100101 Firefox/102.0)"};

    boost::asio::ssl::context ctx(boost::asio::ssl::context::tls);
    ctx.set_verify_mode(boost::asio::ssl::verify_none);

    auto client = co_await createHttpClient(ctx, host, port);
    if (!client.has_value()) {
        SPDLOG_ERROR("createHttpClient: {}", client.error());
        co_await ch->async_send(err, client.error(), use_nothrow_awaitable);
        co_return;
    }
    auto& stream_ = client.value();

    boost::beast::http::request<boost::beast::http::string_body> req{boost::beast::http::verb::post, "/api/chat", 11};
    req.set(boost::beast::http::field::host, host);
    req.set(boost::beast::http::field::user_agent, user_agent);
    req.set("Accept", "*/*");
    req.set("accept-language", "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2");
    req.set("origin", "https://chat.aivvm.com");
    req.set("referer", "https://chat.aivvm.com/zh");
    req.set(boost::beast::http::field::content_type, "application/json");
    req.set("sec-fetch-dest", "empty");
    req.set("sec-fetch-mode", "cors");
    req.set("sec-fetch-site", "same-origin");
    req.set("DNT", "1");

    constexpr std::string_view json_str = R"({
        "model":{
            "id":"gpt-3.5-turbo",
            "name":"GPT-3.5",
            "maxLength":12000,
            "tokenLimit":4096
        },
        "messages":[
            {
                "role":"user",
                "content":"hello"
            }
        ],
        "key":"",
        "prompt":"You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
        "temperature":0.7
    })";
    nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);

    request["messages"] = getConversationJson(json);
    SPDLOG_INFO("{}", request.dump(2));

    req.body() = request.dump();
    req.prepare_payload();

    auto result = co_await sendRequestRecvChunk(ch, stream_, req, 200, [&ch](std::string str) {
        boost::system::error_code err{};
        if (!str.empty())
            ch->try_send(err, str);
    });
    co_return;
}

boost::asio::awaitable<void> FreeGpt::ylokh(std::shared_ptr<Channel> ch, nlohmann::json json) {
    boost::system::error_code err{};
    ScopeExit auto_exit{[&] { ch->close(); }};
@@ -341,7 +341,6 @@ int main(int argc, char** argv) {
    ADD_METHOD("gpt-4-turbo-stream-you", FreeGpt::you);
    ADD_METHOD("gpt-3-stream-binjie", FreeGpt::binjie);
    ADD_METHOD("gpt-4-stream-ChatBase", FreeGpt::chatBase);
    ADD_METHOD("gpt-3.5-turbo-stream-aivvm", FreeGpt::aivvm);
    ADD_METHOD("gpt-3.5-turbo-16k-stream-Ylokh", FreeGpt::ylokh);
    ADD_METHOD("gpt-3.5-turbo-stream-Vitalentum", FreeGpt::vitalentum);
    ADD_METHOD("gpt-3.5-turbo-stream-GptGo", FreeGpt::gptGo);
@@ -1,16 +1,9 @@
FROM ubuntu:23.04
FROM rockylinux:9.2

RUN rm /bin/sh && ln -s /bin/bash /bin/sh
RUN apt-get update -y
RUN apt-get install -y python3/lunar python3.11-venv dbus-x11/lunar curl nodejs/lunar tree
RUN dnf upgrade --refresh -y

# install Chrome
# https://stackoverflow.com/questions/70955307/how-to-install-google-chrome-in-a-docker-container
RUN curl -LO https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
RUN apt-get install -y ./google-chrome-stable_current_amd64.deb
RUN rm google-chrome-stable_current_amd64.deb
# Check chrome version
RUN echo "Chrome: " && google-chrome --version
RUN dnf install https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-next-release-latest-9.noarch.rpm -y
RUN dnf install chromium -y

WORKDIR /app
@@ -1,44 +1,4 @@
altgraph==0.17.4
attrs==23.1.0
black==23.9.1
blinker==1.6.2
browser-cookie3==0.19.1
certifi==2023.7.22
charset-normalizer==3.3.0
click==8.1.7
docopt==0.6.2
Flask==3.0.0
Flask-Cors==4.0.0
h11==0.14.0
idna==3.4
itsdangerous==2.1.2
jeepney==0.8.0
Jinja2==3.1.2
Js2Py==0.74
lz4==4.3.2
MarkupSafe==2.1.3
mypy-extensions==1.0.0
outcome==1.2.0
packaging==23.2
pathspec==0.11.2
pipreqs==0.4.13
platformdirs==3.11.0
pycryptodomex==3.19.0
PyExecJS==1.5.1
pyinstaller==6.0.0
pyinstaller-hooks-contrib==2023.9
pyjsparser==2.7.1
PySocks==1.7.1
requests==2.31.0
selenium==4.13.0
six==1.16.0
sniffio==1.3.0
sortedcontainers==2.4.0
trio==0.22.2
trio-websocket==0.11.1
typing_extensions==4.8.0
tzlocal==5.1
urllib3==2.0.6
selenium==4.14.0
Werkzeug==3.0.0
wsproto==1.2.0
yarg==0.1.9
@@ -27,7 +27,7 @@ def deepai_refresh():
        driver.get("https://deepai.org")
        WebDriverWait(driver, 15)
        cookies = driver.get_cookies()
        print(cookies)
        print(cookies, flush=True)
    except Exception:
        traceback.print_exc()
    driver.quit()
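Note: flush=True matters because this script presumably runs inside a container (see the Dockerfile hunk above), where stdout is block-buffered when it is not a TTY, so unflushed print output can sit invisible in the buffer. Equivalent alternatives, for reference:

import sys

print("cookies refreshed", flush=True)       # flush a single call, as this diff does
sys.stdout.reconfigure(line_buffering=True)  # or switch the whole process to line buffering
# running the container with PYTHONUNBUFFERED=1 (or `python -u`) has the same effect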
@@ -60,6 +60,6 @@ if __name__ == "__main__":
    thread.start()
    port = os.getenv("PORT", "8860")
    ip = os.getenv("IP", "0.0.0.0")
    print(f"start zeus at {ip}:{port}")
    print(f"start zeus at {ip}:{port}", flush=True)
    server = ThreadedWSGIServer(ip, port, app)
    server.serve_forever()
poetry.lock (generated): 14 lines changed
@@ -618,13 +618,13 @@ doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"]

[[package]]
name = "faker"
version = "19.9.0"
version = "19.10.0"
description = "Faker is a Python package that generates fake data for you."
optional = false
python-versions = ">=3.8"
files = [
    {file = "Faker-19.9.0-py3-none-any.whl", hash = "sha256:85468e16d4a9a8712bfdb98ba55aaf17c60658266a76958d099aee6a18c0a6c5"},
    {file = "Faker-19.9.0.tar.gz", hash = "sha256:d75401c631a991b32d3595f26250f42c007cc32653ac3e522b626f3d80770571"},
    {file = "Faker-19.10.0-py3-none-any.whl", hash = "sha256:f321e657ed61616fbfe14dbb9ccc6b2e8282652bbcfcb503c1bd0231ff834df6"},
    {file = "Faker-19.10.0.tar.gz", hash = "sha256:63da90512d0cb3acdb71bd833bb3071cb8a196020d08b8567a01d232954f1820"},
]

[package.dependencies]
@@ -933,13 +933,13 @@ flake8-plugin-utils = ">=1.3.2,<2.0.0"

[[package]]
name = "flake8-simplify"
version = "0.20.0"
version = "0.21.0"
description = "flake8 plugin which checks for code that can be simplified"
optional = false
python-versions = ">=3.6.1"
files = [
    {file = "flake8_simplify-0.20.0-py3-none-any.whl", hash = "sha256:599a47824726c93fadcf0274e569daed45052e38cd906360d9080eaa3bd76d61"},
    {file = "flake8_simplify-0.20.0.tar.gz", hash = "sha256:7b8796bbea8aed45f56621c389d0556cc86f0afa5d992581139451240a8fbeca"},
    {file = "flake8_simplify-0.21.0-py3-none-any.whl", hash = "sha256:439391e762a9370b371208add0b5c5c40c3d25a98e1f5421d263215d08194183"},
    {file = "flake8_simplify-0.21.0.tar.gz", hash = "sha256:c95ff1dcc1de5949af47e0087cbf1164445881131b15bcd7a71252670f492f4d"},
]

[package.dependencies]
@@ -3517,4 +3517,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "6680823e54023a1bea0652422167d179651dcdaa63b72ed1d708490605ff5e1a"
content-hash = "1aa05c3271633f4ff4b5cbcb43eaba57c996c26fb426f5f4d9a1f30999948753"
@@ -1,9 +1,13 @@
[tool.poetry]
name = "chat_gpt_bot"
version = "0.8.0"
description = "Bot to integrated with chat-gpt"
version = "1.2.3"
description = "Bot to integrated with Chat gpt"
authors = ["Dmitry Afanasyev <Balshbox@gmail.com>"]

[build-system]
requires = ["poetry-core>=1.6.1"]
build-backend = "poetry.core.masonry.api"

[tool.poetry.dependencies]
python = "^3.11"
@@ -86,7 +90,7 @@ flake8-warnings = "^0.4"
flake8-debugger = "^4.1"
flake8-annotations-complexity = "^0.0.8"
flake8-fixme = "^1.1"
flake8-simplify = "^0.20"
flake8-simplify = "^0.21"
flake8-variables-names = "^0.0.6"
flake8-bandit = "^4.1"
flake8-tidy-imports = "^4.10"