mirror of https://github.com/Balshgit/gpt_chat_bot.git (synced 2025-09-11 22:30:41 +03:00)

add GitHub callback (#38)

* add gpt-3.5-turbo-stream-GptChatly provider
* add GitHub callback

parent b322e3c1da
commit 4c3c6039e3
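At a glance, the bot-side half of this commit follows the usual python-telegram-bot callback pattern: a new BotStagesEnum.github value backs a "GitHub" inline keyboard button in place of the old help button, and a CallbackQueryHandler routes presses of that button to the new github() command, which replies with a link to the repository. A minimal, self-contained sketch of that pattern follows; the token placeholder and the stand-alone Application setup are illustrative assumptions, not code from this commit.

# Minimal sketch of the callback wiring this commit introduces (assumes
# python-telegram-bot v20+; "TELEGRAM_TOKEN" and the single-file Application
# setup are placeholders — the real project registers handlers elsewhere).
from enum import StrEnum

from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update
from telegram.ext import Application, CallbackQueryHandler, CommandHandler, ContextTypes


class BotStagesEnum(StrEnum):
    github = "github"


async def start(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    # Inline keyboard with the new "GitHub" button; callback_data is the enum value.
    keyboard = InlineKeyboardMarkup(
        [[InlineKeyboardButton("GitHub", callback_data=str(BotStagesEnum.github))]]
    )
    await update.message.reply_text("Выберете команду:", reply_markup=keyboard)


async def github(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    # Mirrors the new command added in the diff: reply with the repository link.
    if not update.effective_message:
        return
    await update.effective_message.reply_text(
        "Проект на [GitHub](https://github.com/Balshgit/gpt_chat_bot)",
        parse_mode="Markdown",
    )


app = Application.builder().token("TELEGRAM_TOKEN").build()  # placeholder token
app.add_handler(CommandHandler("start", start))
# Route presses of the GitHub button to the github() callback, as the diff below does.
app.add_handler(CallbackQueryHandler(github, pattern="^" + BotStagesEnum.github + "$"))
app.run_polling()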
@@ -16,8 +16,8 @@ UTC_TZ = timezone.utc
 class BotStagesEnum(StrEnum):
     about_me = "about_me"
     website = "website"
-    help = "help"
     about_bot = "about_bot"
+    github = "github"


 class BotEntryPoints(StrEnum):
@@ -55,6 +55,7 @@ class ChatGptModelsEnum(StrEnum):
     gpt_4_stream_Chatgpt4Online = "gpt-4-stream-Chatgpt4Online"
     gpt_3_5_turbo_stream_gptalk = "gpt-3.5-turbo-stream-gptalk"
     llama2 = "llama2"
+    gpt_3_5_turbo_stream_GptChatly = "gpt-3.5-turbo-stream-GptChatly"
     gpt_3_5_turbo_stream_ChatgptDemo = "gpt-3.5-turbo-stream-ChatgptDemo"
     gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove"

@@ -18,6 +18,7 @@ async def main_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> st
         return BotEntryPoints.end
     reply_markup = InlineKeyboardMarkup(main_keyboard)
     await update.message.reply_text("Выберете команду:", reply_markup=reply_markup)
+    await update.message.reply_text("Список этих команд всегда можно получить набрав /help")
     return BotEntryPoints.start_routes


@@ -35,9 +36,9 @@ async def about_bot(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
     chatgpt_service = ChatGptService.build()
     model = await chatgpt_service.get_current_chatgpt_model()
     await update.effective_message.reply_text(
-        f"Бот использует бесплатную модель {model} для ответов на вопросы. "
-        f"\nПринимает запросы на разных языках.\n\nБот так же умеет переводить русские голосовые сообщения в текст. "
-        f"Просто пришлите голосовуху и получите поток сознания в виде текста, но без знаков препинания",
+        f"Бот использует бесплатную модель *{model}* для ответов на вопросы.\nПринимает запросы на разных языках."
+        f"\n\nБот так же умеет переводить русские голосовые сообщения в текст. Просто пришлите или перешлите "
+        f"голосовуху боту и получите поток сознания в виде текста, но без знаков препинания.",
         parse_mode="Markdown",
     )

@@ -63,6 +64,17 @@ async def help_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> No
     )


+async def github(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
+    """Send a message when the command /help is issued."""
+
+    if not update.effective_message:
+        return
+    await update.effective_message.reply_text(
+        "Проект на [GitHub](https://github.com/Balshgit/gpt_chat_bot)",
+        parse_mode="Markdown",
+    )
+
+
 async def ask_question(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
     if not update.message:
         return

@@ -14,6 +14,7 @@ from core.bot.commands import (
     about_bot,
     about_me,
     ask_question,
+    github,
     help_command,
     main_command,
     voice_recognize,
@@ -41,7 +42,7 @@ bot_event_handlers.add_handler(
         BotEntryPoints.start_routes: [
             CallbackQueryHandler(about_me, pattern="^" + BotStagesEnum.about_me + "$"),
             CallbackQueryHandler(website, pattern="^" + BotStagesEnum.website + "$"),
-            CallbackQueryHandler(help_command, pattern="^" + BotStagesEnum.help + "$"),
+            CallbackQueryHandler(github, pattern="^" + BotStagesEnum.github + "$"),
             CallbackQueryHandler(about_bot, pattern="^" + BotStagesEnum.about_bot + "$"),
         ],
     },
@@ -50,5 +51,5 @@ bot_event_handlers.add_handler(
 )
 bot_event_handlers.add_handler(CallbackQueryHandler(about_me, pattern="^" + BotStagesEnum.about_me + "$"))
 bot_event_handlers.add_handler(CallbackQueryHandler(website, pattern="^" + BotStagesEnum.website + "$"))
-bot_event_handlers.add_handler(CallbackQueryHandler(help_command, pattern="^" + BotStagesEnum.help + "$"))
+bot_event_handlers.add_handler(CallbackQueryHandler(github, pattern="^" + BotStagesEnum.github + "$"))
 bot_event_handlers.add_handler(CallbackQueryHandler(about_bot, pattern="^" + BotStagesEnum.about_bot + "$"))

@@ -8,7 +8,7 @@ main_keyboard = (
         InlineKeyboardButton("Веб версия", callback_data=str(BotStagesEnum.website)),
     ),
     (
-        InlineKeyboardButton("Помощь", callback_data=str(BotStagesEnum.help)),
+        InlineKeyboardButton("GitHub", callback_data=str(BotStagesEnum.github)),
         InlineKeyboardButton("О боте", callback_data=str(BotStagesEnum.about_bot)),
     ),
 )

@@ -105,7 +105,7 @@ async def test_help_command(
             InlineKeyboardButton(callback_data="website", text="Веб версия"),
         ),
         (
-            InlineKeyboardButton(callback_data="help", text="Помощь"),
+            InlineKeyboardButton(callback_data="github", text="GitHub"),
             InlineKeyboardButton(callback_data="about_bot", text="О боте"),
         ),
     )
@@ -128,7 +128,7 @@ async def test_start_entry(
         update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
     )

-    assert_that(mocked_send_message.call_args.kwargs).is_equal_to(
+    assert_that(mocked_send_message.call_args_list[0].kwargs).is_equal_to(
         {
             "text": "Выберете команду:",
             "chat_id": bot_update["message"]["chat"]["id"],
@@ -139,7 +139,7 @@ async def test_start_entry(
             InlineKeyboardButton(callback_data="website", text="Веб версия"),
         ),
         (
-            InlineKeyboardButton(callback_data="help", text="Помощь"),
+            InlineKeyboardButton(callback_data="github", text="GitHub"),
             InlineKeyboardButton(callback_data="about_bot", text="О боте"),
         ),
     )
@@ -147,6 +147,14 @@ async def test_start_entry(
         },
         include=["text", "chat_id", "reply_markup"],
     )
+    assert_that(mocked_send_message.call_args_list[1].kwargs).is_equal_to(
+        {
+            "text": "Список этих команд всегда можно получить набрав /help",
+            "chat_id": bot_update["message"]["chat"]["id"],
+            "reply_markup": None,
+        },
+        include=["text", "chat_id", "reply_markup"],
+    )


 async def test_about_me_callback_action(
@@ -167,6 +175,24 @@ async def test_about_me_callback_action(
     assert mocked_reply_text.call_args.kwargs == {"parse_mode": "MarkdownV2"}


+async def test_github_callback_action(
+    main_application: Application,
+    test_settings: AppSettings,
+) -> None:
+    with mock.patch.object(telegram._message.Message, "reply_text") as mocked_reply_text:
+        bot_update = BotCallBackQueryFactory(
+            message=BotMessageFactory.create_instance(text="Список основных команд:"),
+            callback_query=CallBackFactory(data=BotStagesEnum.github),
+        )
+
+        await main_application.bot_app.application.process_update(
+            update=Update.de_json(data=bot_update, bot=main_application.bot_app.bot)
+        )
+
+    assert mocked_reply_text.call_args.args == ("Проект на [GitHub](https://github.com/Balshgit/gpt_chat_bot)",)
+    assert mocked_reply_text.call_args.kwargs == {"parse_mode": "Markdown"}
+
+
 async def test_about_bot_callback_action(
     dbsession: Session,
     main_application: Application,
@@ -185,9 +211,10 @@ async def test_about_bot_callback_action(
     )

     assert mocked_reply_text.call_args.args == (
-        f"Бот использует бесплатную модель {model_with_highest_priority.model} для ответов на вопросы. "
-        f"\nПринимает запросы на разных языках.\n\nБот так же умеет переводить русские голосовые сообщения "
-        f"в текст. Просто пришлите голосовуху и получите поток сознания в виде текста, но без знаков препинания",
+        f"Бот использует бесплатную модель *{model_with_highest_priority.model}* для ответов на вопросы.\n"
+        f"Принимает запросы на разных языках.\n\nБот так же умеет переводить русские голосовые сообщения в текст. "
+        f"Просто пришлите или перешлите голосовуху боту и получите поток сознания в виде текста, "
+        f"но без знаков препинания.",
     )
     assert mocked_reply_text.call_args.kwargs == {"parse_mode": "Markdown"}

@@ -1,7 +1,8 @@
 FROM ubuntu:23.04

 #use --build-arg LIB_DIR=/usr/lib for arm64 cpus
-ARG LIB_DIR=/usr/lib64
+ARG LIB_DIR=/local/lib
+RUN mkdir -p /local/lib

 ENV LD_LIBRARY_PATH=$LIB_DIR:$LD_LIBRARY_PATH
 ENV LIBRARY_PATH=$LIB_DIR:$LIBRARY_PATH
@@ -10,9 +11,9 @@ RUN apt-get update -y
 RUN apt-get install -y libcurl4-openssl-dev wget libnss3 nss-plugin-pem ca-certificates
 # RUN strings /lib/$(arch)-linux-gnu/libstdc++.so.6 | grep GLIBCXX_3.4

-RUN wget https://github.com/lwthiker/curl-impersonate/releases/download/v0.5.4/libcurl-impersonate-v0.5.4.$(arch)-linux-gnu.tar.gz
-RUN mv libcurl-impersonate-v0.5.4.$(arch)-linux-gnu.tar.gz $LIB_DIR
-RUN cd $LIB_DIR && tar -xvf libcurl-impersonate-v0.5.4.$(arch)-linux-gnu.tar.gz && rm -rf libcurl-impersonate-v0.5.4.$(arch)-linux-gnu.tar.gz
+RUN wget https://github.com/lwthiker/curl-impersonate/releases/download/v0.6.0-alpha.1/libcurl-impersonate-v0.6.0-alpha.1.$(arch)-linux-gnu.tar.gz
+RUN mv libcurl-impersonate-v0.6.0-alpha.1.$(arch)-linux-gnu.tar.gz $LIB_DIR
+RUN cd $LIB_DIR && tar -xvf libcurl-impersonate-v0.6.0-alpha.1.$(arch)-linux-gnu.tar.gz && rm -rf libcurl-impersonate-v0.6.0-alpha.1.$(arch)-linux-gnu.tar.gz

 WORKDIR /app

@@ -35,10 +35,10 @@ chmod 777 xmake-v2.8.2.xz.run
 source ~/.xmake/profile

 3. install libcurl-impersonate, ubuntu (apt-get install libcurl4-openssl-dev) centos7 (yum install libcurl-devel.x86_64)
-wget https://github.com/lwthiker/curl-impersonate/releases/download/v0.5.4/libcurl-impersonate-v0.5.4.x86_64-linux-gnu.tar.gz
-sudo mv libcurl-impersonate-v0.5.4.x86_64-linux-gnu.tar.gz /usr/lib64
+wget https://github.com/lwthiker/curl-impersonate/releases/download/v0.6.0-alpha.1/libcurl-impersonate-v0.6.0-alpha.1.x86_64-linux-gnu.tar.gz
+sudo mv libcurl-impersonate-v0.6.0-alpha.1.x86_64-linux-gnu.tar.gz /usr/lib64
 cd /usr/lib64
-sudo tar -xvf libcurl-impersonate-v0.5.4.x86_64-linux-gnu.tar.gz
+sudo tar -xvf libcurl-impersonate-v0.6.0-alpha.1.x86_64-linux-gnu.tar.gz
 export LD_LIBRARY_PATH=/usr/lib64:$LD_LIBRARY_PATH
 export LIBRARY_PATH=/usr/lib64:$LIBRARY_PATH

@@ -37,6 +37,7 @@ public:
     boost::asio::awaitable<void> gptForLove(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> chatGptDemo(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> llama2(std::shared_ptr<Channel>, nlohmann::json);
+    boost::asio::awaitable<void> gptChatly(std::shared_ptr<Channel>, nlohmann::json);

 private:
     boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>
@@ -2892,3 +2892,82 @@ boost::asio::awaitable<void> FreeGpt::llama2(std::shared_ptr<Channel> ch, nlohma
     }
     co_return;
 }
+
+boost::asio::awaitable<void> FreeGpt::gptChatly(std::shared_ptr<Channel> ch, nlohmann::json json) {
+    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
+    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
+    boost::system::error_code err{};
+
+    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
+
+    struct Input {
+        std::shared_ptr<Channel> ch;
+        std::string recv;
+    };
+    Input input;
+
+    CURLcode res;
+    CURL* curl = curl_easy_init();
+    if (!curl) {
+        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, error_info);
+        co_return;
+    }
+    ScopeExit auto_exit{[=] { curl_easy_cleanup(curl); }};
+
+    auto ret = sendHttpRequest(CurlHttpRequest{
+        .curl = curl,
+        .url = "https://gptchatly.com/fetch-response",
+        .http_proxy = m_cfg.http_proxy,
+        .cb = [](void* contents, size_t size, size_t nmemb, void* userp) mutable -> size_t {
+            boost::system::error_code err{};
+            auto input_ptr = static_cast<Input*>(userp);
+            std::string data{(char*)contents, size * nmemb};
+            auto& [ch, recv] = *input_ptr;
+            nlohmann::json line_json = nlohmann::json::parse(data, nullptr, false);
+            if (line_json.is_discarded()) {
+                SPDLOG_ERROR("json parse error: [{}]", data);
+                boost::asio::post(ch->get_executor(),
+                                  [=] { ch->try_send(err, std::format("json parse error: [{}]", data)); });
+                return size * nmemb;
+            }
+            auto str = line_json["chatGPTResponse"].get<std::string>();
+            boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
+            return size * nmemb;
+        },
+        .input = [&] -> void* {
+            input.recv.clear();
+            input.ch = ch;
+            return &input;
+        }(),
+        .headers = [&] -> auto& {
+            static std::unordered_map<std::string, std::string> headers{
+                {"Accept", "*/*"},
+                {"origin", "https://gptchatly.com"},
+                {"referer", "https://gptchatly.com/"},
+                {"Content-Type", "application/json"},
+            };
+            return headers;
+        }(),
+        .body = [&] -> std::string {
+            constexpr std::string_view ask_json_str = R"({
+                "past_conversations": ""
+            })";
+            nlohmann::json ask_request = nlohmann::json::parse(ask_json_str, nullptr, false);
+            ask_request["past_conversations"] = getConversationJson(json);
+            std::string ask_request_str = ask_request.dump();
+            SPDLOG_INFO("ask_request_str: [{}]", ask_request_str);
+            return ask_request_str;
+        }(),
+        .response_header_ptr = nullptr,
+        .expect_response_code = 200,
+        .ssl_verify = false,
+    });
+    if (ret) {
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, ret.value());
+        co_return;
+    }
+    co_return;
+}
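The new provider is driven through curl-impersonate on the C++ side, but the HTTP exchange itself is plain JSON over POST. For reference, a rough Python equivalent of the request that gptChatly sends is sketched below; the endpoint, headers, "past_conversations" field and "chatGPTResponse" response key come from the diff above, while the empty conversation payload is a placeholder and the call may be rejected without a browser-like TLS fingerprint.

# Hypothetical Python sketch of the gptchatly.com exchange performed by the new
# FreeGpt::gptChatly coroutine above (standard library only; the real code goes
# through curl-impersonate, so this plain request may be blocked by the site).
import json
import urllib.request

payload = {"past_conversations": ""}  # filled from getConversationJson(json) in the C++ code
request = urllib.request.Request(
    "https://gptchatly.com/fetch-response",
    data=json.dumps(payload).encode("utf-8"),
    headers={
        "Accept": "*/*",
        "origin": "https://gptchatly.com",
        "referer": "https://gptchatly.com/",
        "Content-Type": "application/json",
    },
    method="POST",
)
with urllib.request.urlopen(request) as response:
    answer = json.loads(response.read())["chatGPTResponse"]
print(answer)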
@@ -352,6 +352,7 @@ int main(int argc, char** argv) {
     ADD_METHOD("gpt-3.5-turbo-stream-gptforlove", FreeGpt::gptForLove);
     ADD_METHOD("gpt-3.5-turbo-stream-ChatgptDemo", FreeGpt::chatGptDemo);
     ADD_METHOD("llama2", FreeGpt::llama2);
+    ADD_METHOD("gpt-3.5-turbo-stream-GptChatly", FreeGpt::gptChatly);

     SPDLOG_INFO("active provider:");
     for (auto& [provider, _] : gpt_function)

@@ -3,7 +3,7 @@ FROM rockylinux:9.2
 RUN dnf upgrade --refresh -y

 RUN dnf install https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm https://dl.fedoraproject.org/pub/epel/epel-next-release-latest-9.noarch.rpm -y
-RUN dnf install chromium -y
+RUN dnf install chromium nodejs -y

 WORKDIR /app