Mirror of https://github.com/Balshgit/gpt_chat_bot.git (synced 2025-09-11 22:30:41 +03:00)

Commit 18ea0a556a (parent 6262059bc7): remove gpt-4-stream-Chatgpt4Online (#51)
@@ -51,7 +51,6 @@ class ChatGptModelsEnum(StrEnum):
     gpt_3_5_turbo_stream_GptGo = "gpt-3.5-turbo-stream-GptGo"
     gpt_3_5_turbo_stream_FreeGpt = "gpt-3.5-turbo-stream-FreeGpt"
     gpt_3_5_turbo_stream_Cromicle = "gpt-3.5-turbo-stream-Cromicle"
-    gpt_4_stream_Chatgpt4Online = "gpt-4-stream-Chatgpt4Online"
     gpt_3_5_turbo_stream_gptalk = "gpt-3.5-turbo-stream-gptalk"
     gpt_3_5_turbo_stream_ChatgptDemo = "gpt-3.5-turbo-stream-ChatgptDemo"
     llama2 = "llama2"
@@ -1398,3 +1398,115 @@ boost::asio::awaitable<void> FreeGpt::aibn(std::shared_ptr<Channel> ch, nlohmann
     }
     co_return;
 }
+
+boost::asio::awaitable<void> FreeGpt::chatGpt4Online(std::shared_ptr<Channel> ch, nlohmann::json json) {
+    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
+
+    boost::system::error_code err{};
+    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
+    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
+
+    CURLcode res;
+    CURL* curl = curl_easy_init();
+    if (!curl) {
+        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, error_info);
+        co_return;
+    }
+    curl_easy_setopt(curl, CURLOPT_URL, "https://chatgpt4online.org/wp-json/mwai-ui/v1/chats/submit");
+    if (!m_cfg.http_proxy.empty())
+        curl_easy_setopt(curl, CURLOPT_PROXY, m_cfg.http_proxy.c_str());
+
+    struct Input {
+        std::shared_ptr<Channel> ch;
+        std::string recv;
+    };
+    Input input{ch};
+    auto action_cb = [](void* contents, size_t size, size_t nmemb, void* userp) -> size_t {
+        auto input_ptr = static_cast<Input*>(userp);
+        std::string data{(char*)contents, size * nmemb};
+        auto& [ch, recv] = *input_ptr;
+        recv.append(data);
+        while (true) {
+            auto position = recv.find("\n");
+            if (position == std::string::npos)
+                break;
+            auto msg = recv.substr(0, position + 1);
+            recv.erase(0, position + 1);
+            msg.pop_back();
+            if (msg.empty())
+                continue;
+            auto fields = splitString(msg, "data: ");
+            boost::system::error_code err{};
+            nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
+            if (line_json.is_discarded()) {
+                SPDLOG_ERROR("json parse error: [{}]", fields.back());
+                boost::asio::post(ch->get_executor(),
+                                  [=] { ch->try_send(err, std::format("json parse error: [{}]", fields.back())); });
+                continue;
+            }
+            auto type = line_json["type"].get<std::string>();
+            if (type == "live") {
+                auto str = line_json["data"].get<std::string>();
+                boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
+            }
+        }
+        return size * nmemb;
+    };
+    size_t (*action_fn)(void* contents, size_t size, size_t nmemb, void* userp) = action_cb;
+    curlEasySetopt(curl);
+    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, action_fn);
+    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &input);
+
+    constexpr std::string_view request_str{R"({
+        "botId":"default",
+        "customId":null,
+        "session":"N/A",
+        "chatId":"",
+        "contextId":58,
+        "messages":[
+            {
+                "role":"user",
+                "content":"hello"
+            }
+        ],
+        "newMessage":"hello",
+        "stream":true
+    })"};
+    nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false);
+
+    request["messages"] = getConversationJson(json);
+    request["newMessage"] = prompt;
+
+    auto str = request.dump();
+    SPDLOG_INFO("request : [{}]", str);
+
+    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, str.c_str());
+
+    struct curl_slist* headers = nullptr;
+    headers = curl_slist_append(headers, "Content-Type: application/json");
+    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
+
+    ScopeExit auto_exit{[=] {
+        curl_slist_free_all(headers);
+        curl_easy_cleanup(curl);
+    }};
+
+    res = curl_easy_perform(curl);
+
+    if (res != CURLE_OK) {
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        auto error_info = std::format("curl_easy_perform() failed:{}", curl_easy_strerror(res));
+        ch->try_send(err, error_info);
+        co_return;
+    }
+    int32_t response_code;
+    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
+    if (response_code != 200) {
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, std::format("you http code:{}", response_code));
+        co_return;
+    }
+    co_return;
+}
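For reference, the write callback in the hunk above buffers raw chunks and only acts on complete newline-terminated lines, then strips the "data: " prefix before JSON parsing. A minimal standalone sketch of that buffering step (hypothetical LineBuffer type, no libcurl or Boost involved) could look like this:

#include <functional>
#include <string>
#include <string_view>

// Hypothetical helper mirroring the callback in the hunk: append each network
// chunk to a buffer, emit every complete line (without the trailing '\n'),
// and keep any partial line for the next call.
class LineBuffer {
public:
    explicit LineBuffer(std::function<void(std::string)> on_line) : m_on_line(std::move(on_line)) {}

    void feed(std::string_view chunk) {
        m_buffer.append(chunk);
        while (true) {
            auto pos = m_buffer.find('\n');
            if (pos == std::string::npos)
                break;
            std::string line = m_buffer.substr(0, pos);
            m_buffer.erase(0, pos + 1);
            if (!line.empty())
                m_on_line(std::move(line));
        }
    }

private:
    std::function<void(std::string)> m_on_line;
    std::string m_buffer;
};

Each emitted line would then be split on "data: " and handed to nlohmann::json::parse, exactly as the callback in the hunk does with splitString.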
@@ -28,7 +28,6 @@ public:
     boost::asio::awaitable<void> gptGo(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> chatForAi(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> freeGpt(std::shared_ptr<Channel>, nlohmann::json);
-    boost::asio::awaitable<void> chatGpt4Online(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> gptalk(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> gptForLove(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> chatGptDemo(std::shared_ptr<Channel>, nlohmann::json);
@@ -445,6 +445,20 @@ public:
         m_http_status_code = code;
         return *this;
     }
+    static auto parseHttpHeaders(const std::string& buffer) {
+        std::regex pattern(R"(([^:\r\n]+):([^\r\n]+))");
+        std::smatch matches;
+        auto start = buffer.cbegin();
+        auto end = buffer.cend();
+        std::multimap<std::string, std::string> response_header;
+        while (std::regex_search(start, end, matches, pattern)) {
+            std::string field_name = matches[1].str();
+            std::string field_value = matches[2].str();
+            response_header.insert(std::pair{field_name, field_value});
+            start = matches[0].second;
+        }
+        return response_header;
+    }
 
 private:
     CURL* m_curl{nullptr};
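As a rough usage sketch of the header parsing added here (a standalone re-implementation with a hypothetical free function, not the project's Curl class), the regex walks the raw header block and collects every field into a multimap, so repeated fields such as set-cookie survive:

#include <iostream>
#include <map>
#include <regex>
#include <string>

// Standalone sketch of the regex-based header parsing added in the hunk above.
// Note: values keep the leading space after the colon, same as the original.
std::multimap<std::string, std::string> parse_http_headers(const std::string& buffer) {
    std::regex pattern(R"(([^:\r\n]+):([^\r\n]+))");
    std::smatch matches;
    auto start = buffer.cbegin();
    auto end = buffer.cend();
    std::multimap<std::string, std::string> headers;
    while (std::regex_search(start, end, matches, pattern)) {
        headers.insert({matches[1].str(), matches[2].str()});
        start = matches[0].second;
    }
    return headers;
}

int main() {
    std::string raw =
        "HTTP/1.1 200 OK\r\n"
        "set-cookie: a=1\r\n"
        "set-cookie: b=2\r\n"
        "content-type: text/html\r\n\r\n";
    auto headers = parse_http_headers(raw);
    // Duplicate fields are preserved; equal_range collects them all.
    auto [first, last] = headers.equal_range("set-cookie");
    for (auto it = first; it != last; ++it)
        std::cout << it->first << ":" << it->second << "\n";
}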
@@ -459,40 +473,23 @@ private:
 };
 
 std::expected<nlohmann::json, std::string> callZeus(const std::string& host, const std::string& request_body) {
-    CURLcode res;
-    CURL* curl = curl_easy_init();
-    if (!curl) {
-        auto error_info = std::format("callZeus curl_easy_init() failed:{}", curl_easy_strerror(res));
-        return std::unexpected(error_info);
-    }
-    ScopeExit auto_exit{[=] { curl_easy_cleanup(curl); }};
-    std::string http_proxy;
+    static std::unordered_multimap<std::string, std::string> headers{
+        {"Accept", "*/*"},
+        {"Content-Type", "application/json"},
+    };
     std::string recv;
-    auto ret = sendHttpRequest(CurlHttpRequest{
-        .curl = curl,
-        .url = host,
-        .http_proxy = http_proxy,
-        .cb = [](void* contents, size_t size, size_t nmemb, void* userp) mutable -> size_t {
-            auto recv_ptr = static_cast<std::string*>(userp);
-            std::string data{(char*)contents, size * nmemb};
-            recv_ptr->append(data);
-            return size * nmemb;
-        },
-        .input = &recv,
-        .headers = [&] -> auto& {
-            static std::unordered_map<std::string, std::string> headers{
-                {"Content-Type", "application/json"},
-            };
-            return headers;
-        }(),
-        .body = request_body,
-        .response_header_ptr = nullptr,
-        .expect_response_code = 200,
-        .ssl_verify = false,
-    });
-    if (ret)
+    auto ret = Curl()
+                   .setUrl(host)
+                   .setRecvHeadersCallback([](std::string) { return; })
+                   .setRecvBodyCallback([&](std::string str) { recv.append(str); })
+                   .setBody(request_body)
+                   .clearHeaders()
+                   .setHttpHeaders(headers)
+                   .perform();
+    if (ret) {
+        SPDLOG_ERROR("call zeus error: {}", ret.value());
         return std::unexpected(ret.value());
+    }
     nlohmann::json rsp = nlohmann::json::parse(recv, nullptr, false);
     if (rsp.is_discarded()) {
         SPDLOG_ERROR("json parse error");
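The new callZeus body relies on a chainable Curl wrapper instead of the aggregate-style sendHttpRequest(CurlHttpRequest{...}) call. A minimal sketch of what such a fluent libcurl wrapper can look like follows; MiniCurl and its members are hypothetical and far simpler than the project's class, but the shape (setters returning *this, perform() reporting an error string on failure) matches how the diff uses it:

#include <curl/curl.h>

#include <functional>
#include <optional>
#include <string>

// Hypothetical fluent wrapper: each setter stores state and returns *this,
// perform() runs the transfer and returns an error string on failure.
class MiniCurl {
public:
    MiniCurl() : m_curl(curl_easy_init()) {}
    ~MiniCurl() {
        if (m_curl) curl_easy_cleanup(m_curl);
    }
    MiniCurl(const MiniCurl&) = delete;  // owning a raw CURL* handle, keep it non-copyable
    MiniCurl& setUrl(std::string url) { m_url = std::move(url); return *this; }
    MiniCurl& setBody(std::string body) { m_body = std::move(body); return *this; }
    MiniCurl& setRecvBodyCallback(std::function<void(std::string)> cb) { m_on_body = std::move(cb); return *this; }

    std::optional<std::string> perform() {
        if (!m_curl) return "curl_easy_init() failed";
        curl_easy_setopt(m_curl, CURLOPT_URL, m_url.c_str());
        if (!m_body.empty()) curl_easy_setopt(m_curl, CURLOPT_POSTFIELDS, m_body.c_str());
        curl_easy_setopt(m_curl, CURLOPT_WRITEFUNCTION, &MiniCurl::onWrite);
        curl_easy_setopt(m_curl, CURLOPT_WRITEDATA, this);
        if (auto res = curl_easy_perform(m_curl); res != CURLE_OK)
            return curl_easy_strerror(res);
        return std::nullopt;  // empty optional == success
    }

private:
    // libcurl write callback: forward each chunk to the user callback.
    static size_t onWrite(char* data, size_t size, size_t nmemb, void* self) {
        static_cast<MiniCurl*>(self)->m_on_body(std::string{data, size * nmemb});
        return size * nmemb;
    }
    CURL* m_curl{nullptr};
    std::string m_url, m_body;
    std::function<void(std::string)> m_on_body = [](std::string) {};
};

Returning an empty optional on success is what makes the "if (ret) { ... }" test in the refactored code read naturally.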
@@ -1048,9 +1045,7 @@ boost::asio::awaitable<void> FreeGpt::you(std::shared_ptr<Channel> ch, nlohmann:
         .curl = curl,
         .url = "https://you.com",
         .http_proxy = m_cfg.http_proxy,
-        .cb = [](void* contents, size_t size, size_t nmemb, void* userp) mutable -> size_t {
-            return size * nmemb;
-        },
+        .cb = [](void*, size_t size, size_t nmemb, void*) mutable -> size_t { return size * nmemb; },
         .headers = headers,
         .response_header_ptr = &response_header,
     });
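A note on the simplified .cb above: libcurl aborts a transfer with CURLE_WRITE_ERROR when a write callback returns anything other than size * nmemb, so even a callback that discards the body must report the full chunk size. A minimal standalone version (hypothetical name, outside the CurlHttpRequest aggregate) is:

#include <curl/curl.h>

// Discard the response body but tell libcurl the whole chunk was consumed;
// returning less would make curl_easy_perform() fail with CURLE_WRITE_ERROR.
static size_t discard_body(void*, size_t size, size_t nmemb, void*) {
    return size * nmemb;
}

// Usage sketch: curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, discard_body);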
@@ -1315,7 +1310,7 @@ boost::asio::awaitable<void> FreeGpt::gptGo(std::shared_ptr<Channel> ch, nlohman
     curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
     if (response_code != 200) {
         co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, std::format("you http code:{}", response_code));
+        ch->try_send(err, std::format("gptGo http code:{}", response_code));
         co_return;
     }
     SPDLOG_INFO("recv_str: [{}]", recv_str);
@@ -1347,7 +1342,6 @@ boost::asio::awaitable<void> FreeGpt::gptGo(std::shared_ptr<Channel> ch, nlohman
     };
     Input input{ch};
     auto action_cb = [](void* contents, size_t size, size_t nmemb, void* userp) -> size_t {
-        boost::system::error_code err{};
         auto input_ptr = static_cast<Input*>(userp);
         std::string data{(char*)contents, size * nmemb};
         auto& [ch, recv] = *input_ptr;
@@ -1405,36 +1399,6 @@ boost::asio::awaitable<void> FreeGpt::chatForAi(std::shared_ptr<Channel> ch, nlo
     ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
     auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
 
-    CURLcode res;
-    CURL* curl = curl_easy_init();
-    if (!curl) {
-        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    curl_easy_setopt(curl, CURLOPT_URL, "https://chatforai.store/api/handle/provider-openai");
-    if (!m_cfg.http_proxy.empty())
-        curl_easy_setopt(curl, CURLOPT_PROXY, m_cfg.http_proxy.c_str());
-
-    struct Input {
-        std::shared_ptr<Channel> ch;
-        std::string recv;
-    };
-    Input input{ch};
-    auto action_cb = [](void* contents, size_t size, size_t nmemb, void* userp) -> size_t {
-        boost::system::error_code err{};
-        auto input_ptr = static_cast<Input*>(userp);
-        std::string data{(char*)contents, size * nmemb};
-        auto& [ch, recv] = *input_ptr;
-        boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, data); });
-        return size * nmemb;
-    };
-    size_t (*action_fn)(void* contents, size_t size, size_t nmemb, void* userp) = action_cb;
-    curlEasySetopt(curl);
-    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, action_fn);
-    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &input);
-
     auto generate_signature = [](uint64_t timestamp, const std::string& message, const std::string& id) {
         std::string s = std::to_string(timestamp) + ":" + id + ":" + message + ":7YN8z6d6";
         unsigned char hash[SHA256_DIGEST_LENGTH];
@@ -1450,65 +1414,57 @@ boost::asio::awaitable<void> FreeGpt::chatForAi(std::shared_ptr<Channel> ch, nlo
             ss << std::hex << std::setw(2) << std::setfill('0') << static_cast<int>(hash[i]);
         return ss.str();
     };
-    uint64_t timestamp = getTimestamp();
-    constexpr std::string_view request_str{R"({
-        "conversationId": "id_1696984301982",
-        "conversationType": "chat_continuous",
-        "botId": "chat_continuous",
-        "globalSettings": {
-            "baseUrl": "https://api.openai.com",
-            "model": "gpt-3.5-turbo",
-            "messageHistorySize": 5,
-            "temperature": 0.7,
-            "top_p": 1
-        },
-        "botSettings": {},
-        "prompt": "hello",
-        "messages": [{
-            "role": "user",
-            "content": "hello"
-        }],
-        "sign": "15d8e701706743ffa74f8b96c97bd1f79354c7da4a97438c81c6bb259004cd77",
-        "timestamp": 1696984302017
-    })"};
-    nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false);
-    auto conversation_id = std::format("id_{}", timestamp - 35);
-    request["conversationId"] = conversation_id;
-    request["timestamp"] = timestamp;
-    request["sign"] = generate_signature(timestamp, prompt, conversation_id);
-    request["messages"] = getConversationJson(json);
-    request["prompt"] = prompt;
-
-    auto str = request.dump();
-    SPDLOG_INFO("request : [{}]", str);
-
-    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, str.c_str());
-
-    struct curl_slist* headers = nullptr;
-    headers = curl_slist_append(headers, "Content-Type: application/json");
-    headers = curl_slist_append(headers, "Origin: https://chatforai.store");
-    headers = curl_slist_append(headers, "Referer: https://chatforai.store/");
-    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
-
-    ScopeExit auto_exit{[=] {
-        curl_slist_free_all(headers);
-        curl_easy_cleanup(curl);
-    }};
-
-    res = curl_easy_perform(curl);
-
-    if (res != CURLE_OK) {
+    static std::unordered_multimap<std::string, std::string> headers{
+        {"Content-Type", "application/json"},
+        {"Origin", "https://chatforai.store"},
+        {"Referer", "https://chatforai.store/"},
+    };
+    auto ret = Curl()
+                   .setUrl("https://chatforai.store/api/handle/provider-openai")
+                   .setProxy(m_cfg.http_proxy)
+                   .setRecvHeadersCallback([](std::string) { return; })
+                   .setRecvBodyCallback([&](std::string str) {
+                       boost::asio::post(ch->get_executor(), [=, str = std::move(str)] { ch->try_send(err, str); });
+                       return;
+                   })
+                   .setBody([&] {
+                       uint64_t timestamp = getTimestamp();
+                       constexpr std::string_view request_str{R"({
+                           "conversationId": "id_1696984301982",
+                           "conversationType": "chat_continuous",
+                           "botId": "chat_continuous",
+                           "globalSettings": {
+                               "baseUrl": "https://api.openai.com",
+                               "model": "gpt-3.5-turbo",
+                               "messageHistorySize": 5,
+                               "temperature": 0.7,
+                               "top_p": 1
+                           },
+                           "botSettings": {},
+                           "prompt": "hello",
+                           "messages": [{
+                               "role": "user",
+                               "content": "hello"
+                           }],
+                           "sign": "15d8e701706743ffa74f8b96c97bd1f79354c7da4a97438c81c6bb259004cd77",
+                           "timestamp": 1696984302017
+                       })"};
+                       nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false);
+                       auto conversation_id = std::format("id_{}", timestamp - 35);
+                       request["conversationId"] = conversation_id;
+                       request["timestamp"] = timestamp;
+                       request["sign"] = generate_signature(timestamp, prompt, conversation_id);
+                       request["messages"] = getConversationJson(json);
+                       request["prompt"] = prompt;
+                       auto str = request.dump();
+                       SPDLOG_INFO("request : [{}]", str);
+                       return str;
+                   }())
+                   .setHttpHeaders(headers)
+                   .perform();
+    if (ret.has_value()) {
         co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        auto error_info = std::format("curl_easy_perform() failed:{}", curl_easy_strerror(res));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    int32_t response_code;
-    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
-    if (response_code != 200) {
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, std::format("you http code:{}", response_code));
-        co_return;
+        ch->try_send(err, ret.value());
     }
     co_return;
 }
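The request body above keeps using the generate_signature lambda retained at the top of this hunk: the sign field is a SHA-256 hex digest of "timestamp:id:message:7YN8z6d6". A standalone sketch of that hashing step, assuming the usual one-shot SHA256() from <openssl/sha.h>, is:

#include <openssl/sha.h>

#include <cstdint>
#include <iomanip>
#include <sstream>
#include <string>

// Standalone sketch of the signature scheme used by chatForAi:
// hash "timestamp:id:message:7YN8z6d6" with SHA-256 and hex-encode the digest.
std::string generate_signature(uint64_t timestamp, const std::string& message, const std::string& id) {
    std::string s = std::to_string(timestamp) + ":" + id + ":" + message + ":7YN8z6d6";
    unsigned char hash[SHA256_DIGEST_LENGTH];
    SHA256(reinterpret_cast<const unsigned char*>(s.data()), s.size(), hash);
    std::stringstream ss;
    for (unsigned char byte : hash)
        ss << std::hex << std::setw(2) << std::setfill('0') << static_cast<int>(byte);
    return ss.str();
}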
@@ -1578,119 +1534,6 @@ boost::asio::awaitable<void> FreeGpt::freeGpt(std::shared_ptr<Channel> ch, nlohm
     co_return;
 }
 
-boost::asio::awaitable<void> FreeGpt::chatGpt4Online(std::shared_ptr<Channel> ch, nlohmann::json json) {
-    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
-
-    boost::system::error_code err{};
-    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
-    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
-
-    CURLcode res;
-    CURL* curl = curl_easy_init();
-    if (!curl) {
-        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    curl_easy_setopt(curl, CURLOPT_URL, "https://chatgpt4online.org/wp-json/mwai-ui/v1/chats/submit");
-    if (!m_cfg.http_proxy.empty())
-        curl_easy_setopt(curl, CURLOPT_PROXY, m_cfg.http_proxy.c_str());
-
-    struct Input {
-        std::shared_ptr<Channel> ch;
-        std::string recv;
-    };
-    Input input{ch};
-    auto action_cb = [](void* contents, size_t size, size_t nmemb, void* userp) -> size_t {
-        boost::system::error_code err{};
-        auto input_ptr = static_cast<Input*>(userp);
-        std::string data{(char*)contents, size * nmemb};
-        auto& [ch, recv] = *input_ptr;
-        recv.append(data);
-        while (true) {
-            auto position = recv.find("\n");
-            if (position == std::string::npos)
-                break;
-            auto msg = recv.substr(0, position + 1);
-            recv.erase(0, position + 1);
-            msg.pop_back();
-            if (msg.empty())
-                continue;
-            auto fields = splitString(msg, "data: ");
-            boost::system::error_code err{};
-            nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
-            if (line_json.is_discarded()) {
-                SPDLOG_ERROR("json parse error: [{}]", fields.back());
-                boost::asio::post(ch->get_executor(),
-                                  [=] { ch->try_send(err, std::format("json parse error: [{}]", fields.back())); });
-                continue;
-            }
-            auto type = line_json["type"].get<std::string>();
-            if (type == "live") {
-                auto str = line_json["data"].get<std::string>();
-                boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
-            }
-        }
-        return size * nmemb;
-    };
-    size_t (*action_fn)(void* contents, size_t size, size_t nmemb, void* userp) = action_cb;
-    curlEasySetopt(curl);
-    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, action_fn);
-    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &input);
-
-    constexpr std::string_view request_str{R"({
-        "botId":"default",
-        "customId":null,
-        "session":"N/A",
-        "chatId":"",
-        "contextId":58,
-        "messages":[
-            {
-                "role":"user",
-                "content":"hello"
-            }
-        ],
-        "newMessage":"hello",
-        "stream":true
-    })"};
-    nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false);
-
-    request["messages"] = getConversationJson(json);
-    request["newMessage"] = prompt;
-
-    auto str = request.dump();
-    SPDLOG_INFO("request : [{}]", str);
-
-    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, str.c_str());
-
-    struct curl_slist* headers = nullptr;
-    headers = curl_slist_append(headers, "Content-Type: application/json");
-    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
-
-    ScopeExit auto_exit{[=] {
-        curl_slist_free_all(headers);
-        curl_easy_cleanup(curl);
-    }};
-
-    res = curl_easy_perform(curl);
-
-    if (res != CURLE_OK) {
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        auto error_info = std::format("curl_easy_perform() failed:{}", curl_easy_strerror(res));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    int32_t response_code;
-    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
-    if (response_code != 200) {
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, std::format("you http code:{}", response_code));
-        co_return;
-    }
-    co_return;
-}
-
 boost::asio::awaitable<void> FreeGpt::gptalk(std::shared_ptr<Channel> ch, nlohmann::json json) {
     co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
     ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
@@ -1923,77 +1766,10 @@ boost::asio::awaitable<void> FreeGpt::gptalk(std::shared_ptr<Channel> ch, nlohma
 
 boost::asio::awaitable<void> FreeGpt::gptForLove(std::shared_ptr<Channel> ch, nlohmann::json json) {
     co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
 
     boost::system::error_code err{};
     ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
     auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
-    CURLcode res;
-    CURL* curl = curl_easy_init();
-    if (!curl) {
-        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    curl_easy_setopt(curl, CURLOPT_URL, "https://api.gptplus.one/chat-process");
-    if (!m_cfg.http_proxy.empty())
-        curl_easy_setopt(curl, CURLOPT_PROXY, m_cfg.http_proxy.c_str());
-    struct Input {
-        std::shared_ptr<Channel> ch;
-        std::string recv;
-    };
-    Input input{ch};
-    auto action_cb = [](void* contents, size_t size, size_t nmemb, void* userp) -> size_t {
-        boost::system::error_code err{};
-        auto input_ptr = static_cast<Input*>(userp);
-        std::string data{(char*)contents, size * nmemb};
-        auto& [ch, recv] = *input_ptr;
-        recv.append(data);
-        while (true) {
-            auto position = recv.find("\n");
-            if (position == std::string::npos)
-                break;
-            auto msg = recv.substr(0, position + 1);
-            recv.erase(0, position + 1);
-            msg.pop_back();
-            if (msg.contains("10分钟内提问超过了5次")) {
-                boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, msg); });
-                return size * nmemb;
-            }
-            if (msg.empty() || !msg.contains("content"))
-                continue;
-            boost::system::error_code err{};
-            nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false);
-            if (line_json.is_discarded()) {
-                SPDLOG_ERROR("json parse error: [{}]", msg);
-                boost::asio::post(ch->get_executor(),
-                                  [=] { ch->try_send(err, std::format("json parse error: [{}]", msg)); });
-                continue;
-            }
-            auto str = line_json["detail"]["choices"][0]["delta"]["content"].get<std::string>();
-            if (!str.empty())
-                boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
-        }
-        return size * nmemb;
-    };
-    size_t (*action_fn)(void* contents, size_t size, size_t nmemb, void* userp) = action_cb;
-    curlEasySetopt(curl);
-    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, action_fn);
-    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &input);
-
-    constexpr std::string_view request_str{R"({
-        "prompt": "hello",
-        "options": {},
-        "systemMessage": "You are ChatGPT, the version is GPT3.5, a large language model trained by OpenAI. Follow the user's instructions carefully.",
-        "temperature": 0.8,
-        "top_p": 1,
-        "secret": "U2FsdGVkX18vdtlMj0nP1LoUzEqJTP0is+Q2+bQJNMk=",
-        "stream": false
-    })"};
-    nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false);
-
-    auto secret_rsp = callZeus("http://127.0.0.1:8860/gptforlove", "{}");
+    auto secret_rsp = callZeus(std::format("{}/gptforlove", m_cfg.zeus), "{}");
     if (!secret_rsp.has_value()) {
         SPDLOG_ERROR("callZeus error: {}", secret_rsp.error());
         co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
@@ -2001,40 +1777,68 @@ boost::asio::awaitable<void> FreeGpt::gptForLove(std::shared_ptr<Channel> ch, nl
         co_return;
     }
     SPDLOG_INFO("zeus: [{}]", secret_rsp.value().dump());
-    request["secret"] = secret_rsp.value()["secret"];
-    request["prompt"] = prompt;
-
-    auto str = request.dump();
-    SPDLOG_INFO("request : [{}]", str);
-
-    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, str.c_str());
-
-    struct curl_slist* headers = nullptr;
-    headers = curl_slist_append(headers, "Content-Type: application/json");
-    headers = curl_slist_append(headers, "referer: https://ai18.gptforlove.com/");
-    headers = curl_slist_append(headers, "origin: https://ai18.gptforlove.com");
-    headers = curl_slist_append(headers, "authority: api.gptplus.one");
-    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
-
-    ScopeExit auto_exit{[=] {
-        curl_slist_free_all(headers);
-        curl_easy_cleanup(curl);
-    }};
-
-    res = curl_easy_perform(curl);
-
-    if (res != CURLE_OK) {
+    static std::unordered_multimap<std::string, std::string> headers{
+        {"Content-Type", "application/json"},
+        {"referer", "https://ai18.gptforlove.com/"},
+        {"origin", "https://ai18.gptforlove.com"},
+        {"authority", "api.gptplus.one"},
+    };
+    std::string recv;
+    auto ret = Curl()
+                   .setUrl("https://api.gptplus.one/chat-process")
+                   .setProxy(m_cfg.http_proxy)
+                   .setRecvBodyCallback([&](std::string str) {
+                       recv.append(str);
+                       while (true) {
+                           auto position = recv.find("\n");
+                           if (position == std::string::npos)
+                               break;
+                           auto msg = recv.substr(0, position + 1);
+                           recv.erase(0, position + 1);
+                           msg.pop_back();
+                           if (msg.contains("10分钟内提问超过了5次")) {
+                               boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, msg); });
+                               return;
+                           }
+                           if (msg.empty() || !msg.contains("content"))
+                               continue;
+                           boost::system::error_code err{};
+                           nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false);
+                           if (line_json.is_discarded()) {
+                               SPDLOG_ERROR("json parse error: [{}]", msg);
+                               boost::asio::post(ch->get_executor(), [=] {
+                                   ch->try_send(err, std::format("json parse error: [{}]", msg));
+                               });
+                               continue;
+                           }
+                           auto str = line_json["detail"]["choices"][0]["delta"]["content"].get<std::string>();
+                           if (!str.empty())
+                               boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
+                       }
+                   })
+                   .setBody([&] {
+                       constexpr std::string_view request_str{R"({
+                           "prompt": "hello",
+                           "options": {},
+                           "systemMessage": "You are ChatGPT, the version is GPT3.5, a large language model trained by OpenAI. Follow the user's instructions carefully.",
+                           "temperature": 0.8,
+                           "top_p": 1,
+                           "secret": "U2FsdGVkX18vdtlMj0nP1LoUzEqJTP0is+Q2+bQJNMk=",
+                           "stream": false
+                       })"};
+                       nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false);
+                       request["secret"] = secret_rsp.value()["secret"];
+                       request["prompt"] = prompt;
+                       auto str = request.dump();
+                       SPDLOG_INFO("request : [{}]", str);
+                       return str;
+                   }())
+                   .setHttpHeaders(headers)
+                   .perform();
+    if (ret.has_value()) {
+        SPDLOG_ERROR("call api.gptplus.one error: [{}]", ret.value());
         co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        auto error_info = std::format("curl_easy_perform() failed:{}", curl_easy_strerror(res));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    int32_t response_code;
-    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
-    if (response_code != 200) {
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, std::format("you http code:{}", response_code));
-        co_return;
+        ch->try_send(err, ret.value());
     }
     co_return;
 }
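The setRecvBodyCallback above splits the stream into lines and digs the incremental text out of detail.choices[0].delta.content. A standalone sketch of that per-line extraction (the payload shape here is inferred from the access path in the diff) is:

#include <nlohmann/json.hpp>

#include <iostream>
#include <string>

// Sketch of the per-line extraction done in the callback above: parse one
// streamed JSON line and pull out the incremental delta text.
int main() {
    std::string line = R"({"detail":{"choices":[{"delta":{"content":"Hi"}}]}})";  // example payload shape
    nlohmann::json j = nlohmann::json::parse(line, nullptr, false);
    if (j.is_discarded()) {
        std::cerr << "json parse error\n";
        return 1;
    }
    auto content = j["detail"]["choices"][0]["delta"]["content"].get<std::string>();
    std::cout << content << "\n";  // prints "Hi"
}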
@@ -2712,21 +2516,7 @@ boost::asio::awaitable<void> FreeGpt::fakeGpt(std::shared_ptr<Channel> ch, nlohm
         ch->try_send(err, ret.value());
         co_return;
     }
-    auto parse = [](auto& buffer) {
-        std::regex pattern(R"(([^:\r\n]+):([^\r\n]+))");
-        std::smatch matches;
-        auto start = buffer.cbegin();
-        auto end = buffer.cend();
-        std::multimap<std::string, std::string> response_header;
-        while (std::regex_search(start, end, matches, pattern)) {
-            std::string field_name = matches[1].str();
-            std::string field_value = matches[2].str();
-            response_header.insert(std::pair{field_name, field_value});
-            start = matches[0].second;
-        }
-        return response_header;
-    };
-    auto response_header = parse(header_str);
+    auto response_header = Curl::parseHttpHeaders(header_str);
     auto range = response_header.equal_range("set-cookie");
     std::string cookie;
     for (auto it = range.first; it != range.second; ++it) {
@@ -344,7 +344,6 @@ int main(int, char** argv) {
     ADD_METHOD("gpt-4-stream-ChatBase", FreeGpt::chatBase);
    ADD_METHOD("gpt-3.5-turbo-stream-GptGo", FreeGpt::gptGo);
    ADD_METHOD("gpt-3.5-turbo-stream-FreeGpt", FreeGpt::freeGpt);
-    ADD_METHOD("gpt-4-stream-Chatgpt4Online", FreeGpt::chatGpt4Online);
    ADD_METHOD("gpt-3.5-turbo-stream-gptalk", FreeGpt::gptalk);
    ADD_METHOD("gpt-3.5-turbo-stream-ChatForAi", FreeGpt::chatForAi);
    ADD_METHOD("gpt-3.5-turbo-stream-gptforlove", FreeGpt::gptForLove);