add provider aura (#70)

This commit is contained in:
Dmitry Afanasyev 2023-12-21 23:11:36 +03:00 committed by GitHub
parent 31499a3861
commit 29204f1592
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 68 additions and 4 deletions

View File

@ -60,6 +60,7 @@ class ChatGptModelsEnum(StrEnum):
gpt_3_5_turbo_stream_GeekGpt = "gpt-3.5-turbo-stream-GeekGpt" gpt_3_5_turbo_stream_GeekGpt = "gpt-3.5-turbo-stream-GeekGpt"
gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove" gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove"
gpt_3_5_turbo_stream_fakeGpt = "gpt-3.5-turbo-stream-fakeGpt" gpt_3_5_turbo_stream_fakeGpt = "gpt-3.5-turbo-stream-fakeGpt"
gpt_3_5_turbo_stream_aura = "gpt-3.5-turbo-stream-aura"
@classmethod @classmethod
def values(cls) -> set[str]: def values(cls) -> set[str]:
@ -71,14 +72,14 @@ class ChatGptModelsEnum(StrEnum):
for model in ChatGptModelsEnum.values(): for model in ChatGptModelsEnum.values():
priority = 0 priority = 0
match model: match model:
case "gpt-3-stream-binjie":
priority = 3
case "gpt-3.5-turbo-stream-yqcloud":
priority = 3
case "gpt-3.5-turbo-stream-GeekGpt": case "gpt-3.5-turbo-stream-GeekGpt":
priority = 2 priority = 2
case "gpt-3.5-turbo-stream-fakeGpt": case "gpt-3.5-turbo-stream-fakeGpt":
priority = 2 priority = 2
case "gpt-3-stream-binjie":
priority = 1
case "gpt-3.5-turbo-stream-yqcloud":
priority = 1
fields = {"model": model, "priority": priority} fields = {"model": model, "priority": priority}
models.append(fields) models.append(fields)
return models return models

View File

@ -32,6 +32,7 @@ public:
boost::asio::awaitable<void> gptChatly(std::shared_ptr<Channel>, nlohmann::json); boost::asio::awaitable<void> gptChatly(std::shared_ptr<Channel>, nlohmann::json);
boost::asio::awaitable<void> aiChatOnline(std::shared_ptr<Channel>, nlohmann::json); boost::asio::awaitable<void> aiChatOnline(std::shared_ptr<Channel>, nlohmann::json);
boost::asio::awaitable<void> fakeGpt(std::shared_ptr<Channel>, nlohmann::json); boost::asio::awaitable<void> fakeGpt(std::shared_ptr<Channel>, nlohmann::json);
boost::asio::awaitable<void> aura(std::shared_ptr<Channel>, nlohmann::json);
private: private:
boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>> boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>

View File

@ -2036,3 +2036,64 @@ boost::asio::awaitable<void> FreeGpt::fakeGpt(std::shared_ptr<Channel> ch, nlohm
co_return; co_return;
} }
} }
// Provider "aura": streams a chat completion from openchat.team (OpenChat Aura)
// into the channel. Runs the blocking Curl transfer on the worker thread pool,
// posting each received body chunk back to the channel's executor.
//
// @param ch    channel the streamed response chunks (or an error string) are sent to;
//              always closed on exit via ScopeExit.
// @param json  incoming request; its conversation history is converted with
//              getConversationJson() and sent as the "messages" array.
boost::asio::awaitable<void> FreeGpt::aura(std::shared_ptr<Channel> ch, nlohmann::json json) {
    // Hop off the I/O executor onto the thread pool: Curl().perform() blocks.
    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
    // Guarantee the channel is closed on every exit path so the reader side unblocks.
    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
    boost::system::error_code err{};
    std::unordered_multimap<std::string, std::string> headers{
        {"Accept", "*/*"},
        {"content-type", "application/json"},
        {"Referer", "https://openchat.team/"},
        {"Origin", "https://openchat.team"},
        // NOTE(review): "Alt-Used: aichatonline.org" looks copy-pasted from the
        // aiChatOnline provider — the request goes to openchat.team. Confirm the
        // endpoint actually requires this value before changing it.
        {"Alt-Used", "aichatonline.org"},
        {"Sec-Fetch-Dest", "empty"},
        {"Sec-Fetch-Mode", "cors"},
        {"Sec-Fetch-Site", "same-origin"},
        {"Sec-Ch-Ua-Mobile", "?0"},
    };
    auto ret = Curl()
                   .setUrl("https://openchat.team/api/chat")
                   .setProxy(m_cfg.http_proxy)
                   .setRecvHeadersCallback([](std::string) { return; })
                   // Forward each streamed body chunk to the channel on its own executor.
                   // err/str are captured by copy because the post() runs asynchronously.
                   .setRecvBodyCallback([&](std::string str) mutable {
                       boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
                   })
                   .setBody([&] {
                       // Template request; "messages" is replaced below with the real
                       // conversation, the rest are fixed model parameters.
                       constexpr std::string_view ask_json_str = R"({
                            "model":{
                                "id":"openchat_v3.2_mistral",
                                "name":"OpenChat Aura",
                                "maxLength":24576,
                                "tokenLimit":8192
                            },
                            "messages":[
                                {
                                    "role":"user",
                                    "content":"Hello"
                                }
                            ],
                            "key":"",
                            "prompt":" ",
                            "temperature":0.5
                        })";
                       nlohmann::json ask_request = nlohmann::json::parse(ask_json_str, nullptr, false);
                       ask_request["messages"] = getConversationJson(json);
                       std::string ask_request_str = ask_request.dump();
                       SPDLOG_INFO("request: [{}]", ask_request_str);
                       return ask_request_str;
                   }())
                   .clearHeaders()
                   .setHttpHeaders(headers)
                   .perform();
    // perform() yields a value only on failure: log it and surface it to the caller.
    if (ret.has_value()) {
        SPDLOG_ERROR("{}", ret.value());
        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
        ch->try_send(err, ret.value());
    }
    co_return;
}

View File

@ -349,6 +349,7 @@ int main(int, char** argv) {
ADD_METHOD("gpt-3.5-turbo-gptChatly", FreeGpt::gptChatly); ADD_METHOD("gpt-3.5-turbo-gptChatly", FreeGpt::gptChatly);
ADD_METHOD("gpt-3.5-turbo-stream-AiChatOnline", FreeGpt::aiChatOnline); ADD_METHOD("gpt-3.5-turbo-stream-AiChatOnline", FreeGpt::aiChatOnline);
ADD_METHOD("gpt-3.5-turbo-stream-fakeGpt", FreeGpt::fakeGpt); ADD_METHOD("gpt-3.5-turbo-stream-fakeGpt", FreeGpt::fakeGpt);
ADD_METHOD("gpt-3.5-turbo-stream-aura", FreeGpt::aura);
SPDLOG_INFO("active provider:"); SPDLOG_INFO("active provider:");
for (auto& [provider, _] : gpt_function) for (auto& [provider, _] : gpt_function)