activate FakeGpt (#69)

Dmitry Afanasyev 2023-12-19 22:49:41 +03:00 committed by GitHub
parent a8276167d8
commit 31499a3861
5 changed files with 249 additions and 245 deletions

View File

@@ -59,6 +59,7 @@ class ChatGptModelsEnum(StrEnum):
     gpt_3_5_turbo_stream_chatGptAi = "gpt-3.5-turbo-stream-chatGptAi"
     gpt_3_5_turbo_stream_GeekGpt = "gpt-3.5-turbo-stream-GeekGpt"
     gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove"
+    gpt_3_5_turbo_stream_fakeGpt = "gpt-3.5-turbo-stream-fakeGpt"
 
     @classmethod
     def values(cls) -> set[str]:
@@ -70,14 +71,14 @@ class ChatGptModelsEnum(StrEnum):
         for model in ChatGptModelsEnum.values():
             priority = 0
             match model:
-                case "gpt-3-stream-binjie":
-                    priority = 3
-                case "gpt-3.5-turbo-stream-yqcloud":
-                    priority = 3
                 case "gpt-3.5-turbo-stream-GeekGpt":
                     priority = 2
-                case "llama2":
+                case "gpt-3.5-turbo-stream-fakeGpt":
                     priority = 2
+                case "gpt-3-stream-binjie":
+                    priority = 1
+                case "gpt-3.5-turbo-stream-yqcloud":
+                    priority = 1
             fields = {"model": model, "priority": priority}
             models.append(fields)
         return models

View File

@@ -1815,246 +1815,6 @@ boost::asio::awaitable<void> FreeGpt::deepAi(std::shared_ptr<Channel> ch, nlohmann::json json) {
co_return;
}
boost::asio::awaitable<void> FreeGpt::fakeGpt(std::shared_ptr<Channel> ch, nlohmann::json json) {
co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
boost::system::error_code err{};
auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
std::unordered_multimap<std::string, std::string> headers{
{"Accept", "*/*"},
{"referer", "https://chat-shared2.zhile.io/?v=2"},
};
std::multimap<std::string, std::string> api_load_params{
{"t", std::to_string(getTimestamp<std::chrono::seconds>())},
};
auto api_load_url = std::format("https://chat-shared2.zhile.io/api/loads?{}", paramsToQueryStr(api_load_params));
std::string chunk_body;
Curl curl;
auto ret = curl.setUrl(api_load_url)
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([](std::string) { return; })
.setRecvBodyCallback([&](std::string str) {
chunk_body.append(str);
return;
})
.setHttpHeaders([&] -> auto& { return headers; }())
.perform();
if (ret.has_value()) {
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
nlohmann::json json_result = nlohmann::json::parse(chunk_body, nullptr, false);
if (json_result.is_discarded()) {
SPDLOG_ERROR("json parse error: [{}]", chunk_body);
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, std::format("json parse error: [{}]", chunk_body));
co_return;
}
std::vector<nlohmann::json> random_j;
for (auto& j : json_result["loads"]) {
if (j["count"].get<int32_t>() == 0)
random_j.emplace_back(std::move(j));
}
if (random_j.empty()) {
SPDLOG_ERROR("random_j is empty!!!");
ch->try_send(err, json_result.dump());
co_return;
}
std::mt19937 g{std::random_device{}()};
std::uniform_int_distribution<std::size_t> d{0, random_j.size() - 1};  // inclusive upper bound; size() would index out of range
auto token_id = random_j[d(g)];
std::cout << token_id.dump() << std::endl;
headers.emplace("Content-Type", "application/x-www-form-urlencoded");
// send login
std::multimap<std::string, std::string> login_params{
{"token_key", token_id["token_id"].get<std::string>()},
{"session_password",
[](int len) -> std::string {
static std::string chars{"abcdefghijklmnopqrstuvwxyz"};
static std::string letter{"0123456789"};
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, 1000000);
std::string random_string;
random_string += letter[dis(gen) % letter.length()];
len = len - 1;
for (int i = 0; i < len; i++)
random_string += chars[dis(gen) % chars.length()];
return random_string;
}(10)},
};
chunk_body.clear();
headers.erase("Content-Type");
std::string header_str;
auto body = paramsToQueryStr(login_params);
ret = curl.setUrl("https://chat-shared2.zhile.io/auth/login")
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([&](std::string str) {
header_str.append(str);
return;
})
.setRecvBodyCallback([&](std::string str) {
chunk_body.append(str);
return;
})
.setBody(body)
.clearHeaders()
.setHttpHeaders([&] -> auto& { return headers; }())
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("{}", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
auto response_header = Curl::parseHttpHeaders(header_str);
auto range = response_header.equal_range("set-cookie");
std::string cookie;
for (auto it = range.first; it != range.second; ++it) {
if (!(it->second.contains("credential=")))
continue;
auto view = it->second | std::views::drop_while(isspace) | std::views::reverse |
std::views::drop_while(isspace) | std::views::reverse;
auto fields = splitString(std::string{view.begin(), view.end()}, " ");
if (fields.size() < 1) {
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, "can't get cookie");
co_return;
}
cookie = std::move(fields[0]);
break;
}
SPDLOG_INFO("cookie: [{}]", cookie);
SPDLOG_INFO("rsp: [{}]", chunk_body);
chunk_body.clear();
headers.emplace("cookie", cookie);
// /api/auth/session
ret = curl.setUrl("https://chat-shared2.zhile.io/api/auth/session")
.setProxy(m_cfg.http_proxy)
.setOpt(CURLOPT_HTTPGET, 1L)
.setRecvHeadersCallback([](std::string str) {
std::cout << str << std::endl;
return;
})
.setRecvBodyCallback([&](std::string str) mutable {
chunk_body.append(str);
return;
})
.clearHeaders()
.setHttpHeaders([&] -> auto& { return headers; }())
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("{}", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
json_result.clear();
json_result = nlohmann::json::parse(chunk_body, nullptr, false);
if (json_result.is_discarded()) {
SPDLOG_ERROR("/api/auth/session json parse error: [{}]", chunk_body);
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, std::format("/api/auth/session parse error: [{}]", chunk_body));
co_return;
}
auto cache_token = json_result["accessToken"].get<std::string>();
SPDLOG_INFO("accessToken: [{}]", cache_token);
headers.erase("Accept");
headers.emplace("Content-Type", "application/json");
headers.emplace("Accept", "text/event-stream");
auto auth = std::format("Bearer {}", cache_token);
SPDLOG_INFO("auth: [{}]", auth);
headers.emplace("X-Authorization", auth);
std::string recv;
std::string last_message;
ret = curl.setUrl("https://chat-shared2.zhile.io/api/conversation")
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([](std::string) { return; })
.setRecvBodyCallback([&](std::string str) mutable {
recv.append(str);
while (true) {
auto position = recv.find("\n");
if (position == std::string::npos)
break;
auto msg = recv.substr(0, position + 1);
recv.erase(0, position + 1);
msg.pop_back();
if (msg.empty() || !msg.starts_with("data: ") || !msg.contains("content"))
continue;
msg.erase(0, 6);
if (msg == "[DONE]")
break;
boost::system::error_code err{};
nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false);
if (line_json.is_discarded()) {
SPDLOG_ERROR("json parse error: [{}]", msg);
boost::asio::post(ch->get_executor(),
[=] { ch->try_send(err, std::format("json parse error: [{}]", msg)); });
continue;
}
auto type = line_json["message"]["content"]["content_type"].get<std::string>();
if (type == "text") {
auto new_message = line_json["message"]["content"]["parts"][0].get<std::string>();
if (new_message.empty())
continue;
std::string tmp{new_message};
new_message.erase(0, last_message.size());
last_message = std::move(tmp);
if (!new_message.empty())
boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, new_message); });
}
}
return;
})
.setBody([&] {
constexpr std::string_view json_str = R"({
"action":"next",
"messages":[
{
"id":"a68cd787-c96c-4234-8ec9-00805f73a7b8",
"author":{"role":"user"},
"content":{
"content_type":"text",
"parts":["hello"]
},
"metadata":{}
}
],
"parent_message_id":"fdc171e6-dd0d-4494-93ce-e7d219e6ed05",
"model":"text-davinci-002-render-sha",
"plugin_ids":[],
"timezone_offset_min":-120,
"suggestions":[],
"history_and_training_disabled":true,
"arkose_token":"",
"force_paragen":false
})";
nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);
request["parent_message_id"] = createUuidString();
request["messages"][0]["id"] = createUuidString();
request["messages"][0]["content"]["parts"][0] = prompt;
SPDLOG_INFO("request: [{}]", request.dump(2));
return request.dump();
}())
.clearHeaders()
.setHttpHeaders(headers)
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("{}", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
}
boost::asio::awaitable<void> FreeGpt::vercel(std::shared_ptr<Channel> ch, nlohmann::json json) {
co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};

View File

@@ -31,6 +31,7 @@ public:
     boost::asio::awaitable<void> deepInfra(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> gptChatly(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> aiChatOnline(std::shared_ptr<Channel>, nlohmann::json);
+    boost::asio::awaitable<void> fakeGpt(std::shared_ptr<Channel>, nlohmann::json);
 
 private:
     boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>

View File

@@ -1795,3 +1795,244 @@ boost::asio::awaitable<void> FreeGpt::aiChatOnline(std::shared_ptr<Channel> ch, nlohmann::json json) {
}
co_return;
}
boost::asio::awaitable<void> FreeGpt::fakeGpt(std::shared_ptr<Channel> ch, nlohmann::json json) {
co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
boost::system::error_code err{};
auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
std::unordered_multimap<std::string, std::string> headers{
{"Accept", "*/*"},
{"referer", "https://chat-shared2.zhile.io/?v=2"},
};
std::multimap<std::string, std::string> api_load_params{
{"t", std::to_string(getTimestamp<std::chrono::seconds>())},
};
auto api_load_url = std::format("https://chat-shared2.zhile.io/api/loads?{}", paramsToQueryStr(api_load_params));
std::string chunk_body;
Curl curl;
auto ret = curl.setUrl(api_load_url)
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([](std::string) { return; })
.setRecvBodyCallback([&](std::string str) {
chunk_body.append(str);
return;
})
.setHttpHeaders([&] -> auto& { return headers; }())
.perform();
if (ret.has_value()) {
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
nlohmann::json json_result = nlohmann::json::parse(chunk_body, nullptr, false);
if (json_result.is_discarded()) {
SPDLOG_ERROR("json parse error: [{}]", chunk_body);
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, std::format("json parse error: [{}]", chunk_body));
co_return;
}
std::vector<nlohmann::json> random_j;
for (auto& j : json_result["loads"]) {
if (j["count"].get<int32_t>() == 0)
random_j.emplace_back(std::move(j));
}
if (random_j.empty()) {
SPDLOG_ERROR("random_j is empty!!!");
ch->try_send(err, json_result.dump());
co_return;
}
std::mt19937 g{std::random_device{}()};
std::uniform_int_distribution<std::size_t> d{0, random_j.size() - 1};  // inclusive upper bound; size() would index out of range
auto token_id = random_j[d(g)];
std::cout << token_id.dump() << std::endl;
headers.emplace("Content-Type", "application/x-www-form-urlencoded");
// send login
std::multimap<std::string, std::string> login_params{
{"token_key", token_id["token_id"].get<std::string>()},
{"session_password",
[](int len) -> std::string {
static std::string chars{"abcdefghijklmnopqrstuvwxyz"};
static std::string letter{"0123456789"};
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, 1000000);
std::string random_string;
random_string += letter[dis(gen) % letter.length()];
len = len - 1;
for (int i = 0; i < len; i++)
random_string += chars[dis(gen) % chars.length()];
return random_string;
}(10)},
};
chunk_body.clear();
headers.erase("Content-Type");
std::string header_str;
auto body = paramsToQueryStr(login_params);
ret = curl.setUrl("https://chat-shared2.zhile.io/auth/login")
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([&](std::string str) {
header_str.append(str);
return;
})
.setRecvBodyCallback([&](std::string str) {
chunk_body.append(str);
return;
})
.setBody(body)
.clearHeaders()
.setHttpHeaders([&] -> auto& { return headers; }())
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("{}", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
auto response_header = Curl::parseHttpHeaders(header_str);
auto range = response_header.equal_range("set-cookie");
std::string cookie;
for (auto it = range.first; it != range.second; ++it) {
if (!(it->second.contains("credential=")))
continue;
auto view = it->second | std::views::drop_while(isspace) | std::views::reverse |
std::views::drop_while(isspace) | std::views::reverse;
auto fields = splitString(std::string{view.begin(), view.end()}, " ");
if (fields.size() < 1) {
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, "can't get cookie");
co_return;
}
cookie = std::move(fields[0]);
break;
}
SPDLOG_INFO("cookie: [{}]", cookie);
SPDLOG_INFO("rsp: [{}]", chunk_body);
chunk_body.clear();
headers.emplace("cookie", cookie);
// /api/auth/session
ret = curl.setUrl("https://chat-shared2.zhile.io/api/auth/session")
.setProxy(m_cfg.http_proxy)
.setOpt(CURLOPT_HTTPGET, 1L)
.setRecvHeadersCallback([](std::string str) {
std::cout << str << std::endl;
return;
})
.setRecvBodyCallback([&](std::string str) mutable {
chunk_body.append(str);
return;
})
.clearHeaders()
.setHttpHeaders([&] -> auto& { return headers; }())
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("{}", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
json_result.clear();
json_result = nlohmann::json::parse(chunk_body, nullptr, false);
if (json_result.is_discarded()) {
SPDLOG_ERROR("/api/auth/session json parse error: [{}]", chunk_body);
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, std::format("/api/auth/session parse error: [{}]", chunk_body));
co_return;
}
auto cache_token = json_result["accessToken"].get<std::string>();
SPDLOG_INFO("accessToken: [{}]", cache_token);
headers.erase("Accept");
headers.emplace("Content-Type", "application/json");
headers.emplace("Accept", "text/event-stream");
auto auth = std::format("Bearer {}", cache_token);
SPDLOG_INFO("auth: [{}]", auth);
headers.emplace("X-Authorization", auth);
std::string recv;
std::string last_message;
ret = curl.setUrl("https://chat-shared2.zhile.io/api/conversation")
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([](std::string) { return; })
.setRecvBodyCallback([&](std::string str) mutable {
recv.append(str);
while (true) {
auto position = recv.find("\n");
if (position == std::string::npos)
break;
auto msg = recv.substr(0, position + 1);
recv.erase(0, position + 1);
msg.pop_back();
if (msg.empty() || !msg.starts_with("data: ") || !msg.contains("content"))
continue;
msg.erase(0, 6);
if (msg == "[DONE]")
break;
boost::system::error_code err{};
nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false);
if (line_json.is_discarded()) {
SPDLOG_ERROR("json parse error: [{}]", msg);
boost::asio::post(ch->get_executor(),
[=] { ch->try_send(err, std::format("json parse error: [{}]", msg)); });
continue;
}
auto type = line_json["message"]["content"]["content_type"].get<std::string>();
if (type == "text") {
auto new_message = line_json["message"]["content"]["parts"][0].get<std::string>();
if (new_message.empty())
continue;
std::string tmp{new_message};
new_message.erase(0, last_message.size());
last_message = std::move(tmp);
if (!new_message.empty())
boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, new_message); });
}
}
return;
})
.setBody([&] {
constexpr std::string_view json_str = R"({
"action":"next",
"messages":[
{
"id":"a68cd787-c96c-4234-8ec9-00805f73a7b8",
"author":{"role":"user"},
"content":{
"content_type":"text",
"parts":["hello"]
},
"metadata":{}
}
],
"parent_message_id":"fdc171e6-dd0d-4494-93ce-e7d219e6ed05",
"model":"text-davinci-002-render-sha",
"plugin_ids":[],
"timezone_offset_min":-120,
"suggestions":[],
"history_and_training_disabled":true,
"arkose_token":"",
"force_paragen":false
})";
nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);
request["parent_message_id"] = createUuidString();
request["messages"][0]["id"] = createUuidString();
request["messages"][0]["content"]["parts"][0] = prompt;
SPDLOG_INFO("request: [{}]", request.dump(2));
return request.dump();
}())
.clearHeaders()
.setHttpHeaders(headers)
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("{}", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
}
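The /api/conversation endpoint streams server-sent events whose payloads carry the cumulative message text, so the callback above forwards only the suffix that was not present in the previous event (the last_message / new_message bookkeeping). A minimal standalone sketch of that delta step; the names here are illustrative and not part of the codebase:

#include <iostream>
#include <string>

// Each streamed event contains the full text so far; emit only the new suffix.
std::string extractDelta(std::string& last_message, const std::string& cumulative) {
    std::string delta = cumulative;
    delta.erase(0, last_message.size());  // drop the prefix that was already sent
    last_message = cumulative;            // remember the full text for the next event
    return delta;
}

int main() {
    std::string last;
    for (const auto* part : {"Hel", "Hello, wor", "Hello, world!"})
        std::cout << extractDelta(last, part) << '\n';  // prints "Hel", "lo, wor", "ld!"
}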

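Before that delta step, the callback has to reassemble complete lines from arbitrarily sized chunks: bytes are buffered in recv, split on '\n', and only "data: " lines are processed, with "[DONE]" ending the stream. A self-contained sketch of that framing logic, assuming the same line format; function and variable names are illustrative:

#include <iostream>
#include <string>
#include <vector>

// Buffer partial chunks and return the payloads of complete "data: ..." lines.
std::vector<std::string> drainSseLines(std::string& buffer) {
    std::vector<std::string> payloads;
    while (true) {
        auto pos = buffer.find('\n');
        if (pos == std::string::npos)
            break;                          // no complete line yet, wait for more bytes
        std::string line = buffer.substr(0, pos);
        buffer.erase(0, pos + 1);
        if (!line.starts_with("data: "))
            continue;                       // skip comments and keep-alives
        line.erase(0, 6);
        if (line == "[DONE]")
            break;                          // upstream signals end of stream
        payloads.push_back(std::move(line));
    }
    return payloads;
}

int main() {
    std::string buffer{"data: {\"a\":1}\n: keep-alive\ndata: [DONE]\n"};
    for (const auto& p : drainSseLines(buffer))
        std::cout << p << '\n';  // prints {"a":1}
}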
View File

@@ -348,6 +348,7 @@ int main(int, char** argv) {
     ADD_METHOD("Llama-2-70b-chat-hf-stream-DeepInfra", FreeGpt::deepInfra);
     ADD_METHOD("gpt-3.5-turbo-gptChatly", FreeGpt::gptChatly);
     ADD_METHOD("gpt-3.5-turbo-stream-AiChatOnline", FreeGpt::aiChatOnline);
+    ADD_METHOD("gpt-3.5-turbo-stream-fakeGpt", FreeGpt::fakeGpt);
     SPDLOG_INFO("active provider:");
     for (auto& [provider, _] : gpt_function)
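ADD_METHOD itself is not shown in this diff; it presumably binds the model-name string to the corresponding FreeGpt member coroutine in gpt_function. A hypothetical, simplified illustration of that name-to-handler registry pattern (Registry, add, and dispatch are invented here and are not the project's API):

#include <functional>
#include <iostream>
#include <map>
#include <string>

// Invented stand-in for the provider table: model name -> handler.
struct Registry {
    std::map<std::string, std::function<void(const std::string&)>> handlers;

    void add(std::string name, std::function<void(const std::string&)> fn) {
        handlers.emplace(std::move(name), std::move(fn));
    }

    void dispatch(const std::string& model, const std::string& prompt) const {
        if (auto it = handlers.find(model); it != handlers.end())
            it->second(prompt);
        else
            std::cout << "unknown provider: " << model << '\n';
    }
};

int main() {
    Registry r;
    r.add("gpt-3.5-turbo-stream-fakeGpt",
          [](const std::string& p) { std::cout << "fakeGpt prompt: " << p << '\n'; });
    r.dispatch("gpt-3.5-turbo-stream-fakeGpt", "hello");  // routed to the registered handler
}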