#include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include "free_gpt.h" #include "helper.hpp" namespace { std::string md5(const std::string& str, bool reverse = true) { unsigned char hash[MD5_DIGEST_LENGTH]; MD5_CTX md5; MD5_Init(&md5); MD5_Update(&md5, str.c_str(), str.size()); MD5_Final(hash, &md5); std::stringstream ss; for (int i = 0; i < MD5_DIGEST_LENGTH; i++) ss << std::hex << std::setw(2) << std::setfill('0') << static_cast(hash[i]); auto md5_str = ss.str(); if (reverse) std::ranges::reverse(md5_str); return md5_str; } auto splitString(const std::string& input, const std::string& delimiter) { std::vector fields; std::string::size_type start = 0; std::string::size_type end = input.find(delimiter); while (end != std::string::npos) { fields.emplace_back(input.substr(start, end - start)); start = end + delimiter.size(); end = input.find(delimiter, start); } fields.emplace_back(input.substr(start)); return fields; } std::vector findAll(const std::string& pattern, const std::string& text) { std::regex re(pattern); std::sregex_iterator it(text.begin(), text.end(), re); std::sregex_iterator end; std::vector matches; while (it != end) { matches.push_back(it->str()); ++it; } return matches; } std::string paramsToQueryStr(const std::multimap& params) { auto encode_query_param = [](const std::string& value) { std::ostringstream escaped; escaped.fill('0'); escaped << std::hex; for (auto c : value) { if (std::isalnum(static_cast(c)) || c == '-' || c == '_' || c == '.' || c == '!' || c == '~' || c == '*' || c == '\'' || c == '(' || c == ')') { escaped << c; } else { escaped << std::uppercase; escaped << '%' << std::setw(2) << static_cast(static_cast(c)); escaped << std::nouppercase; } } return escaped.str(); }; std::string query; for (auto it = params.begin(); it != params.end(); ++it) { if (it != params.begin()) query += "&"; query += it->first; query += "="; query += encode_query_param(it->second); } return query; } enum class Status : uint8_t { Ok, Close, HasError, UnexpectedHttpCode, }; void printHttpHeader(auto& http_packet) { std::stringstream ss; ss << http_packet.base(); SPDLOG_INFO("\n{}", ss.str()); } std::optional parse(const std::string& url) { static const auto url_regex = std::regex(R"regex((http|https)://([^/ :]+):?([^/ ]*)((/?[^ #?]*)\x3f?([^ #]*)#?([^ ]*)))regex", std::regex_constants::icase | std::regex_constants::optimize); auto match = std::smatch(); if (!std::regex_match(url, match, url_regex)) { SPDLOG_ERROR("invalid http_proxy: {}", url); return std::nullopt; } return match; } boost::asio::awaitable sendRequestRecvChunk( std::string& error_info, auto& stream_, auto& req, std::size_t http_code, std::function cb, std::function&)> h_cb = nullptr) { boost::system::error_code err{}; auto [ec, count] = co_await boost::beast::http::async_write(stream_, req, use_nothrow_awaitable); if (ec) { SPDLOG_ERROR("{}", ec.message()); error_info = ec.message(); co_return Status::HasError; } boost::beast::flat_buffer buffer; boost::beast::http::parser p; std::tie(ec, count) = co_await boost::beast::http::async_read_header(stream_, buffer, p, use_nothrow_awaitable); if (ec == boost::beast::http::error::end_of_stream) { SPDLOG_INFO("server close!!!"); co_return Status::Close; } if (ec) { SPDLOG_ERROR("{}", ec.message()); error_info = ec.message(); co_return Status::HasError; } if (h_cb) h_cb(p); auto& headers = p.get(); printHttpHeader(headers); auto result_int = headers.result_int(); if (result_int != http_code) { std::string reason{headers.reason()}; SPDLOG_ERROR("http response code: {}, reason: {}", headers.result_int(), reason); error_info = std::format("return unexpected http status code: {}({})", result_int, reason); co_return Status::UnexpectedHttpCode; } boost::beast::http::chunk_extensions ce; std::string chunk; auto header_cb = [&](std::uint64_t size, std::string_view extensions, boost::beast::error_code& ev) { ce.parse(extensions, ev); if (ev) return; if (size > (std::numeric_limits::max)()) { ev = boost::beast::http::error::body_limit; return; } chunk.reserve(static_cast(size)); chunk.clear(); }; p.on_chunk_header(header_cb); auto body_cb = [&](std::uint64_t remain, std::string_view body, boost::beast::error_code& ec) { if (remain == body.size()) ec = boost::beast::http::error::end_of_chunk; chunk.append(body.data(), body.size()); std::string chunk_str{body}; cb(std::move(chunk_str)); return body.size(); }; p.on_chunk_body(body_cb); while (!p.is_done()) { std::tie(ec, count) = co_await boost::beast::http::async_read(stream_, buffer, p, use_nothrow_awaitable); if (!ec) continue; else if (ec != boost::beast::http::error::end_of_chunk) { co_return Status::HasError; } else ec = {}; } co_return Status::Ok; } boost::asio::awaitable sendRequestRecvChunk( auto& ch, auto& stream_, auto& req, std::size_t http_code, std::function cb, std::function&)> header_cb = nullptr) { std::string error_info; auto ret = co_await sendRequestRecvChunk(error_info, stream_, req, http_code, std::move(cb), header_cb); if (!error_info.empty()) { boost::system::error_code err{}; co_await ch->async_send(err, std::move(error_info), use_nothrow_awaitable); } co_return ret; } boost::asio::awaitable< std::expected, boost::asio::ssl::context, boost::beast::ssl_stream>, std::string>> sendRequestRecvResponse(auto& req, std::string_view host, std::string_view port, auto create_http_client) { int recreate_num{0}; create_client: boost::asio::ssl::context ctx(boost::asio::ssl::context::tls); ctx.set_verify_mode(boost::asio::ssl::verify_none); auto client = co_await create_http_client(ctx, host, port); if (!client.has_value()) { SPDLOG_ERROR("createHttpClient: {}", client.error()); co_return std::unexpected(client.error()); } auto& stream_ = client.value(); auto [ec, count] = co_await boost::beast::http::async_write(stream_, req, use_nothrow_awaitable); if (ec) { SPDLOG_ERROR("{}", ec.message()); co_return std::unexpected(ec.message()); } boost::beast::flat_buffer b; boost::beast::http::response res; std::tie(ec, count) = co_await boost::beast::http::async_read(stream_, b, res, use_nothrow_awaitable); if (ec == boost::beast::http::error::end_of_stream) { if (recreate_num == 0) { recreate_num++; goto create_client; } } if (ec) { SPDLOG_ERROR("{}", ec.message()); co_return std::unexpected(ec.message()); } co_return std::make_tuple(res, std::move(ctx), std::move(stream_)); } auto getConversationJson(const nlohmann::json& json) { auto conversation = json.at("meta").at("content").at("conversation"); conversation.push_back(json.at("meta").at("content").at("parts").at(0)); return conversation; } template uint64_t getTimestamp(std::chrono::time_point now = std::chrono::system_clock::now()) { uint64_t timestamp = std::chrono::duration_cast(now.time_since_epoch()).count(); return timestamp; } class Curl final { public: Curl() { m_curl = curl_easy_init(); if (!m_curl) throw std::runtime_error("curl_easy_init() failed"); curl_easy_setopt(m_curl, CURLOPT_MAXREDIRS, 20L); curl_easy_setopt(m_curl, CURLOPT_FOLLOWLOCATION, 1L); curl_easy_setopt(m_curl, CURLOPT_TIMEOUT, 120L); curl_easy_setopt(m_curl, CURLOPT_CONNECTTIMEOUT, 30L); curl_easy_setopt(m_curl, CURLOPT_CAINFO, nullptr); curl_easy_setopt(m_curl, CURLOPT_SSL_VERIFYPEER, 0L); curl_easy_setopt(m_curl, CURLOPT_SSL_VERIFYHOST, 0L); } ~Curl() { if (m_headers) curl_slist_free_all(m_headers); if (m_curl) curl_easy_cleanup(m_curl); } Curl(const Curl&) = delete; Curl& operator=(const Curl&) = delete; auto& setUrl(std::string_view url) { m_url = url; curl_easy_setopt(m_curl, CURLOPT_URL, m_url.data()); return *this; } auto& setBody(std::string body) { if (!body.empty()) { m_body = std::move(body); curl_easy_setopt(m_curl, CURLOPT_POSTFIELDS, m_body.c_str()); } return *this; } auto& setProxy(std::string_view http_proxy) { if (!http_proxy.empty()) { m_http_proxy = http_proxy; curl_easy_setopt(m_curl, CURLOPT_PROXY, m_http_proxy.data()); } return *this; } auto& setHttpHeaders(const std::unordered_multimap& http_headers) { for (auto& [k, v] : http_headers) m_headers_list.emplace_back(std::format("{}: {}", k, v)); for (auto& header : m_headers_list) m_headers = curl_slist_append(m_headers, header.c_str()); curl_easy_setopt(m_curl, CURLOPT_HTTPHEADER, m_headers); return *this; } static size_t recvCallback(void* contents, size_t size, size_t nmemb, void* userp) { auto cb = static_cast*>(userp); std::string data{(char*)contents, size * nmemb}; (*cb)(std::move(data)); return size * nmemb; } auto& setRecvBodyCallback(std::function cb) { m_recv_body_cb = std::move(cb); curl_easy_setopt(m_curl, CURLOPT_WRITEFUNCTION, &recvCallback); curl_easy_setopt(m_curl, CURLOPT_WRITEDATA, &m_recv_body_cb); return *this; } auto& setRecvHeadersCallback(std::function cb) { m_recv_headers_cb = std::move(cb); curl_easy_setopt(m_curl, CURLOPT_HEADERFUNCTION, &recvCallback); curl_easy_setopt(m_curl, CURLOPT_HEADERDATA, &m_recv_headers_cb); return *this; } auto& setOpt(auto option, auto value) { curl_easy_setopt(m_curl, option, value); return *this; } std::optional perform() { auto res = curl_easy_perform(m_curl); if (res != CURLE_OK) { auto error_info = std::format("[{}] -> [{}]", m_url, curl_easy_strerror(res)); return error_info; } int32_t response_code; curl_easy_getinfo(m_curl, CURLINFO_RESPONSE_CODE, &response_code); if (m_http_status_code != response_code) return std::format("[{}] -> [Http Status Code:{}]", m_url, response_code); return std::nullopt; } auto& clearHeaders() { if (m_headers) curl_slist_free_all(m_headers); m_headers_list.clear(); m_headers = nullptr; return *this; } auto& setHttpStatusCode(int32_t code) { m_http_status_code = code; return *this; } static auto parseHttpHeaders(const std::string& buffer) { std::regex pattern(R"(([^:\r\n]+):([^\r\n]+))"); std::smatch matches; auto start = buffer.cbegin(); auto end = buffer.cend(); std::multimap response_header; while (std::regex_search(start, end, matches, pattern)) { std::string field_name = matches[1].str(); std::string field_value = matches[2].str(); response_header.insert(std::pair{field_name, field_value}); start = matches[0].second; } return response_header; } private: CURL* m_curl{nullptr}; std::string_view m_url; std::string m_body; std::string_view m_http_proxy; struct curl_slist* m_headers{nullptr}; std::vector m_headers_list; std::function m_recv_body_cb{[](std::string) {}}; std::function m_recv_headers_cb{[](std::string) {}}; int32_t m_http_status_code{200}; }; std::expected callZeus(const std::string& host, const std::string& request_body) { static std::unordered_multimap headers{ {"Accept", "*/*"}, {"Content-Type", "application/json"}, }; std::string recv; auto ret = Curl() .setUrl(host) .setRecvHeadersCallback([](std::string) { return; }) .setRecvBodyCallback([&](std::string str) { recv.append(str); }) .setBody(request_body) .clearHeaders() .setHttpHeaders(headers) .perform(); if (ret) { SPDLOG_ERROR("{}", ret.value()); return std::unexpected(ret.value()); } nlohmann::json rsp = nlohmann::json::parse(recv, nullptr, false); if (rsp.is_discarded()) { SPDLOG_ERROR("json parse error: {}", recv); return std::unexpected("parse callZeus error"); } return rsp; } } // namespace FreeGpt::FreeGpt(Config& cfg) : m_cfg(cfg), m_thread_pool_ptr(std::make_shared(m_cfg.work_thread_num * 2)) {} boost::asio::awaitable, std::string>> FreeGpt::createHttpClient(boost::asio::ssl::context& ctx, std::string_view host, std::string_view port) { if (m_cfg.http_proxy.empty()) { boost::beast::ssl_stream stream_{co_await boost::asio::this_coro::executor, ctx}; boost::system::error_code err{}; if (!SSL_set_tlsext_host_name(stream_.native_handle(), host.data())) { SPDLOG_ERROR("SSL_set_tlsext_host_name"); co_return std::unexpected(std::string("SSL_set_tlsext_host_name")); } auto resolver = boost::asio::ip::tcp::resolver(co_await boost::asio::this_coro::executor); auto [ec, results] = co_await resolver.async_resolve(host.data(), port.data(), use_nothrow_awaitable); if (ec) { SPDLOG_INFO("async_resolve: {}", ec.message()); co_return std::unexpected(ec.message()); } for (auto& endpoint : results) { std::stringstream ss; ss << endpoint.endpoint(); SPDLOG_INFO("resolver_results: [{}]", ss.str()); } boost::beast::get_lowest_layer(stream_).expires_after(std::chrono::seconds(30)); if (auto [ec, _] = co_await boost::beast::get_lowest_layer(stream_).async_connect(results, use_nothrow_awaitable); ec) { co_return std::unexpected(ec.message()); } boost::beast::get_lowest_layer(stream_).expires_never(); std::tie(ec) = co_await stream_.async_handshake(boost::asio::ssl::stream_base::client, use_nothrow_awaitable); if (ec) { SPDLOG_INFO("async_handshake: {}", ec.message()); co_return std::unexpected(ec.message()); } co_return stream_; } std::string userinfo, proxy_host, proxy_port; auto is_auth_proxy = [](const std::string& str) -> bool { std::regex pattern("^http://[^:]+:[^@]+@[^:]+:[0-9]+$"); return std::regex_match(str, pattern); }; // http://username:password@proxy.example.com:8080 if (is_auth_proxy(m_cfg.http_proxy)) { static std::regex pattern("(http|https)://([^:]+):([^@]+)@([^:]+):([0-9]+)"); std::smatch matches; if (!std::regex_match(m_cfg.http_proxy, matches, pattern)) co_return std::unexpected(std::format("invalid http_proxy: {}", m_cfg.http_proxy)); // std::string protocol = matches[1]; std::string username = matches[2]; std::string password = matches[3]; proxy_host = matches[4]; proxy_port = matches[5]; userinfo = std::format("{}:{}", username, password); } else { auto match_opt = parse(m_cfg.http_proxy); if (!match_opt.has_value()) co_return std::unexpected(std::format("invalid http_proxy: {}", m_cfg.http_proxy)); auto& match = match_opt.value(); // auto& protocol = match[1]; // auto& target = match[4]; proxy_host = match[2]; proxy_port = match[3]; } SPDLOG_INFO("CONNECT TO HTTP_PROXY [{}:{}]", proxy_host, proxy_port); auto resolver = boost::asio::ip::tcp::resolver(co_await boost::asio::this_coro::executor); auto [ec, results] = co_await resolver.async_resolve(proxy_host, proxy_port, use_nothrow_awaitable); if (ec) { SPDLOG_INFO("async_resolve: {}", ec.message()); co_return std::unexpected(ec.message()); } boost::asio::ip::tcp::socket socket_{co_await boost::asio::this_coro::executor}; if (auto [ec, count] = co_await boost::asio::async_connect(socket_, results, use_nothrow_awaitable); ec) { SPDLOG_INFO("async_connect: {}", ec.message()); co_return std::unexpected(ec.message()); } boost::beast::ssl_stream stream_{std::move(socket_), ctx}; int http_version = 11; boost::beast::http::request connect_req{ boost::beast::http::verb::connect, std::format("{}:{}", host, port), http_version}; connect_req.set(boost::beast::http::field::host, host); if (!userinfo.empty()) { std::string result(boost::beast::detail::base64::encoded_size(userinfo.size()), 0); auto len = boost::beast::detail::base64::encode(static_cast(result.data()), userinfo.c_str(), userinfo.size()); result.resize(len); result = "Basic " + result; connect_req.set(boost::beast::http::field::proxy_authorization, result); } std::size_t count; std::tie(ec, count) = co_await boost::beast::http::async_write(boost::beast::get_lowest_layer(stream_), connect_req, use_nothrow_awaitable); if (ec) { SPDLOG_ERROR("{}", ec.message()); co_return std::unexpected(ec.message()); } boost::beast::http::response res; boost::beast::http::parser http_parser(res); http_parser.skip(true); boost::beast::flat_buffer buffer; std::tie(ec, count) = co_await boost::beast::http::async_read(boost::beast::get_lowest_layer(stream_), buffer, http_parser, use_nothrow_awaitable); if (boost::beast::http::status::ok != res.result()) { SPDLOG_ERROR("Proxy response failed : {}", res.result_int()); co_return std::unexpected(ec.message()); } if (!SSL_set_tlsext_host_name(stream_.native_handle(), host.data())) { SPDLOG_ERROR("SSL_set_tlsext_host_name"); co_return std::unexpected(std::string("SSL_set_tlsext_host_name")); } std::tie(ec) = co_await stream_.async_handshake(boost::asio::ssl::stream_base::client, use_nothrow_awaitable); if (ec) { SPDLOG_INFO("async_handshake: {}", ec.message()); co_return std::unexpected(ec.message()); } co_return stream_; } boost::asio::awaitable FreeGpt::deepAi(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; boost::system::error_code err{}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); std::string user_agent{ R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36)"}; std::random_device rd; std::mt19937 mt(rd()); std::uniform_int_distribution dist(0, 100000000); uint64_t part1{dist(mt)}; auto part2 = md5(user_agent + md5(user_agent + md5(std::format("{}{}x", user_agent, part1)))); auto api_key = std::format("tryit-{}-{}", part1, part2); constexpr char CRLF[] = "\r\n"; static std::string MULTI_PART_BOUNDARY = "9bc627aea4f77e150e6057f78036e73f"; auto content_type_str = std::format("multipart/form-data; boundary={}", MULTI_PART_BOUNDARY); SPDLOG_INFO("content_type_str: {}", content_type_str); auto api_key_str = std::format("api-key: {}", api_key); std::unordered_multimap headers{ {"Content-Type", content_type_str}, {"api-key", api_key}, }; auto ret = Curl() .setUrl("https://api.deepai.org/hacking_is_a_crime") .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([&](std::string) {}) .setRecvBodyCallback([&](std::string str) { boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); }); return; }) .setBody([&] { nlohmann::json request_json{{{"role", "user"}, {"content", std::move(prompt)}}}; std::ostringstream payload; payload << "--" << MULTI_PART_BOUNDARY << CRLF << R"(Content-Disposition: form-data; name="chat_style")" << CRLF << CRLF << "chat" << CRLF << "--" << MULTI_PART_BOUNDARY << CRLF << R"(Content-Disposition: form-data; name="chatHistory")" << CRLF << CRLF << request_json.dump() << CRLF << "--" << MULTI_PART_BOUNDARY << "--" << CRLF; SPDLOG_INFO("{}", payload.str()); auto str = payload.str(); return str; }()) .clearHeaders() .setHttpHeaders(headers) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); } co_return; } boost::asio::awaitable FreeGpt::openAi(std::shared_ptr ch, nlohmann::json json) { boost::system::error_code err{}; ScopeExit auto_exit{[&] { ch->close(); }}; constexpr std::string_view host = "api.openai.com"; constexpr std::string_view port = "443"; constexpr std::string_view user_agent{ R"(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36)"}; boost::asio::ssl::context ctx(boost::asio::ssl::context::tls); ctx.set_verify_mode(boost::asio::ssl::verify_none); auto client = co_await createHttpClient(ctx, host, port); if (!client.has_value()) { SPDLOG_ERROR("createHttpClient: {}", client.error()); co_await ch->async_send(err, client.error(), use_nothrow_awaitable); co_return; } auto& stream_ = client.value(); boost::beast::http::request req{boost::beast::http::verb::post, "/v1/chat/completions", 11}; req.set(boost::beast::http::field::host, host); req.set(boost::beast::http::field::user_agent, user_agent); req.set(boost::beast::http::field::content_type, "application/json"); req.set(boost::beast::http::field::authorization, std::format("Bearer {}", m_cfg.api_key)); constexpr std::string_view json_str = R"({ "messages": [ { "role": "user", "content": "hello" } ], "stream": true, "model": "gpt-3.5-turbo" })"; nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false); request["messages"] = getConversationJson(json); SPDLOG_INFO("{}", request.dump(2)); req.body() = request.dump(); req.prepare_payload(); std::string recv; co_await sendRequestRecvChunk(ch, stream_, req, 200, [&ch, &recv](std::string chunk_str) { recv.append(chunk_str); while (true) { auto position = recv.find("\n"); if (position == std::string::npos) break; auto msg = recv.substr(0, position + 1); recv.erase(0, position + 1); msg.pop_back(); if (msg.empty() || !msg.contains("content")) continue; auto fields = splitString(msg, "data: "); boost::system::error_code err{}; nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", fields.back()); ch->try_send(err, std::format("json parse error: [{}]", fields.back())); continue; } auto str = line_json["choices"][0]["delta"]["content"].get(); if (!str.empty()) ch->try_send(err, str); } }); co_return; } boost::asio::awaitable FreeGpt::yqcloud(std::shared_ptr ch, nlohmann::json json) { boost::system::error_code err{}; ScopeExit auto_exit{[&] { ch->close(); }}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); constexpr std::string_view host = "api.aichatos.cloud"; constexpr std::string_view port = "443"; constexpr std::string_view user_agent{ R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0)"}; boost::beast::http::request req{boost::beast::http::verb::post, "/api/generateStream", 11}; req.set("authority", "p5.v50.ltd"); req.set(boost::beast::http::field::host, host); req.set(boost::beast::http::field::user_agent, user_agent); req.set("accept", "application/json, text/plain, */*"); req.set("Content-Type", "application/json"); req.set("origin", "https://chat9.yqcloud.top"); constexpr std::string_view json_str = R"({ "prompt":"hello", "network":true, "system":"", "withoutContext":false, "stream":false })"; nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false); request["prompt"] = std::move(prompt); req.body() = request.dump(); req.prepare_payload(); int recreate_num{0}; create_client: boost::asio::ssl::context ctx(boost::asio::ssl::context::tls); ctx.set_verify_mode(boost::asio::ssl::verify_none); auto client = co_await createHttpClient(ctx, host, port); if (!client.has_value()) { SPDLOG_ERROR("createHttpClient: {}", client.error()); co_await ch->async_send(err, client.error(), use_nothrow_awaitable); co_return; } auto& stream_ = client.value(); auto ret = co_await sendRequestRecvChunk(ch, stream_, req, 200, [&ch](std::string str) { boost::system::error_code err{}; ch->try_send(err, std::move(str)); }); if (ret == Status::Close && recreate_num == 0) { recreate_num++; goto create_client; } co_return; } boost::asio::awaitable FreeGpt::huggingChat(std::shared_ptr ch, nlohmann::json json) { boost::system::error_code err{}; ScopeExit auto_exit{[&] { ch->close(); }}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); constexpr std::string_view host = "huggingface.co"; constexpr std::string_view port = "443"; constexpr std::string_view user_agent{ R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0)"}; boost::beast::http::request req_init_cookie{boost::beast::http::verb::get, "/chat/", 11}; req_init_cookie.set(boost::beast::http::field::host, host); req_init_cookie.set(boost::beast::http::field::user_agent, user_agent); auto ret = co_await sendRequestRecvResponse(req_init_cookie, host, port, std::bind_front(&FreeGpt::createHttpClient, *this)); if (!ret.has_value()) { co_await ch->async_send(err, ret.error(), use_nothrow_awaitable); co_return; } auto& [response, ctx, stream_] = ret.value(); if (boost::beast::http::status::ok != response.result()) { SPDLOG_ERROR("http status code: {}", response.result_int()); co_await ch->async_send(err, response.reason(), use_nothrow_awaitable); co_return; } auto fields = splitString(response["Set-Cookie"], " "); if (fields.empty()) { std::stringstream ss; ss << response.base(); SPDLOG_ERROR("get cookie error: {}", ss.str()); co_await ch->async_send(err, "can't get cookie", use_nothrow_awaitable); co_return; } fields[0].pop_back(); std::string cookie{std::move(fields[0])}; SPDLOG_INFO("cookie: {}", cookie); boost::beast::http::request req_init_conversation{boost::beast::http::verb::post, "/chat/conversation", 11}; req_init_conversation.set("Cookie", cookie); req_init_conversation.set(boost::beast::http::field::host, host); req_init_conversation.set(boost::beast::http::field::user_agent, user_agent); req_init_conversation.set("Accept", "*/*"); req_init_conversation.set("Content-Type", "application/json"); req_init_conversation.body() = R"({"model": "meta-llama/Llama-2-70b-chat-hf"})"; req_init_conversation.prepare_payload(); auto [ec, count] = co_await boost::beast::http::async_write(stream_, req_init_conversation, use_nothrow_awaitable); if (ec) { SPDLOG_ERROR("{}", ec.message()); co_await ch->async_send(err, ec.message(), use_nothrow_awaitable); co_return; } boost::beast::flat_buffer b; boost::beast::http::response res; std::tie(ec, count) = co_await boost::beast::http::async_read(stream_, b, res, use_nothrow_awaitable); if (ec) { SPDLOG_ERROR("{}", ec.message()); co_await ch->async_send(err, ec.message(), use_nothrow_awaitable); co_return; } if (res.result_int() != 200) { std::string reason{res.reason()}; SPDLOG_ERROR("reason: {}", reason); co_await ch->async_send(err, std::format("return unexpected http status code: {}({})", res.result_int(), reason), use_nothrow_awaitable); co_return; } nlohmann::json rsp_json = nlohmann::json::parse(res.body(), nullptr, false); if (rsp_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", fields.back()); ch->try_send(err, std::format("json parse error: [{}]", fields.back())); co_return; } if (!rsp_json.contains("conversationId")) { SPDLOG_ERROR("not contains conversationId: {}", res.body()); co_await ch->async_send(err, res.body(), use_nothrow_awaitable); co_return; } auto conversation_id = rsp_json["conversationId"].get(); SPDLOG_INFO("conversation_id: [{}]", conversation_id); constexpr std::string_view json_str = R"({ "inputs":"hello", "parameters":{ "temperature":0.2, "truncate":1000, "max_new_tokens":1024, "stop":[ "" ], "top_p":0.95, "repetition_penalty":1.2, "top_k":50, "return_full_text":false }, "stream":true, "options":{ "id":"9e9b8bc4-6604-40c6-994e-8eb78fa32e37", "response_id":"04ce2602-3bea-45e8-8efc-cef00680376a", "is_retry":false, "use_cache":false, "web_search_id":"" } })"; nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false); request["inputs"] = prompt; request["options"]["response_id"] = createUuidString(); request["options"]["id"] = createUuidString(); boost::beast::http::request req{ boost::beast::http::verb::post, std::format("/chat/conversation/{}", conversation_id), 11}; req.set("Cookie", cookie); req.set(boost::beast::http::field::host, host); req.set(boost::beast::http::field::user_agent, user_agent); req.set("Accept", "*/*"); req.set("Content-Type", "application/json"); req.body() = request.dump(); req.prepare_payload(); std::string recv; co_await sendRequestRecvChunk(ch, stream_, req, 200, [&ch, &recv](std::string chunk_str) { recv.append(chunk_str); while (true) { auto position = recv.find("\n"); if (position == std::string::npos) break; auto msg = recv.substr(0, position + 1); recv.erase(0, position + 1); msg.pop_back(); if (msg.empty()) continue; boost::system::error_code err{}; nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", msg); ch->try_send(err, std::format("json parse error: [{}]", msg)); continue; } if (!line_json.contains("type")) { SPDLOG_ERROR("invalid json format: [{}]", line_json.dump()); continue; } auto type = line_json["type"].get(); if (type == "stream") { if (auto str = line_json["token"].get(); !str.empty()) ch->try_send(err, str); } else if (type == "finalAnswer") { ch->close(); } } return; }); co_return; } boost::asio::awaitable FreeGpt::you(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); boost::system::error_code err{}; ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); using Tuple = std::tuple, std::string>; static moodycamel::ConcurrentQueue cookie_queue; Tuple item; bool found{false}; if (cookie_queue.try_dequeue(item)) { auto& [time_point, cookie] = item; if (std::chrono::system_clock::now() - time_point < std::chrono::minutes(120)) found = true; } if (!found) { std::string header_str; auto ret = Curl() .setUrl("https://you.com") .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([&](std::string str) { header_str.append(str); }) .setRecvBodyCallback([&](std::string) { return; }) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); co_return; } auto response_header = Curl::parseHttpHeaders(header_str); std::string cookie; auto range = response_header.equal_range("set-cookie"); for (auto it = range.first; it != range.second; ++it) { if (!(it->second.contains("__cf_bm="))) continue; auto view = it->second | std::views::drop_while(isspace) | std::views::reverse | std::views::drop_while(isspace) | std::views::reverse; auto fields = splitString(std::string{view.begin(), view.end()}, " "); if (fields.size() < 1) { co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, "can't get cookie"); co_return; } cookie = std::move(fields[0]); break; } if (cookie.empty()) { co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, "cookie is empty"); co_return; } item = std::make_tuple(std::chrono::system_clock::now(), std::move(cookie)); } SPDLOG_INFO("cookie: {}", std::get<1>(item)); ScopeExit auto_free([&] mutable { auto& [time_point, cookie] = item; if (std::chrono::system_clock::now() - time_point < std::chrono::minutes(120)) cookie_queue.enqueue(std::move(item)); }); auto cookie_str = std::format("uuid_guest={}; safesearch_guest=Off; {}", createUuidString(), std::get<1>(item)); std::multimap params{ {"q", prompt}, {"page", "1"}, {"count", "10"}, {"safeSearch", "Off"}, {"onShoppingPage", "False"}, {"mkt", ""}, {"responseFilter", "WebPages,Translations,TimeZone,Computation,RelatedSearches"}, {"domain", "youchat"}, {"queryTraceId", createUuidString()}, }; auto request_url = std::format("https://you.com/api/streamingSearch?{}", paramsToQueryStr(params)); auto ret = Curl() .setUrl(request_url) .setProxy(m_cfg.http_proxy) .setOpt(CURLOPT_COOKIE, std::get<1>(item).c_str()) .setRecvHeadersCallback([&](std::string) { return; }) .setRecvBodyCallback([&](std::string data) { boost::system::error_code err{}; if (data.starts_with(R"(event: youChatToken)")) { static std::string to_erase{"event: youChatToken\ndata: "}; size_t pos = data.find(to_erase); if (pos != std::string::npos) data.erase(pos, to_erase.length()); nlohmann::json line_json = nlohmann::json::parse(data, nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", data); boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, std::format("json parse error: [{}]", data)); }); return; } auto str = line_json["youChatToken"].get(); boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); }); } return; }) .clearHeaders() .setHttpHeaders([&] -> auto { std::unordered_multimap headers{ {"referer", "https://you.com/search?q=gpt4&tbm=youchat"}, {"Accept", "text/event-stream"}, {"cookie", cookie_str}, }; return headers; }()) .perform(); if (ret) { co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); } co_return; } boost::asio::awaitable FreeGpt::binjie(std::shared_ptr ch, nlohmann::json json) { boost::system::error_code err{}; ScopeExit auto_exit{[&] { ch->close(); }}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); constexpr std::string_view host = "api.binjie.fun"; constexpr std::string_view port = "443"; boost::beast::http::request req{boost::beast::http::verb::post, "/api/generateStream", 11}; req.set(boost::beast::http::field::host, host); req.set( boost::beast::http::field::user_agent, R"(Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36)"); req.set("Accept", "application/json, text/plain, */*"); req.set("accept-language", "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"); req.set(boost::beast::http::field::content_type, "application/json"); req.set("origin", "https://chat.jinshutuan.com"); constexpr std::string_view json_str = R"({ "prompt":"user: hello\nassistant:", "system": "Always talk in English.", "withoutContext":true, "stream":true })"; nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false); request["prompt"] = prompt; SPDLOG_INFO("{}", request.dump(2)); req.body() = request.dump(); req.prepare_payload(); boost::asio::ssl::context ctx(boost::asio::ssl::context::tls); ctx.set_verify_mode(boost::asio::ssl::verify_none); auto client = co_await createHttpClient(ctx, host, port); if (!client.has_value()) { SPDLOG_ERROR("createHttpClient: {}", client.error()); co_await ch->async_send(err, client.error(), use_nothrow_awaitable); co_return; } co_await sendRequestRecvChunk(ch, client.value(), req, 200, [&ch](std::string str) { boost::system::error_code err{}; ch->try_send(err, std::move(str)); }); co_return; } boost::asio::awaitable FreeGpt::gptGo(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; boost::system::error_code err{}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); std::multimap params{ {"q", prompt}, {"hlgpt", "default"}, {"hl", "en"}, }; auto get_token_url = std::format("https://gptgo.ai/action_get_token.php?{}", paramsToQueryStr(params)); std::string recv; Curl curl; auto ret = curl.setUrl(get_token_url) .setProxy(m_cfg.http_proxy) .setRecvBodyCallback([&](std::string str) { recv.append(str); return; }) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); co_return; } SPDLOG_INFO("recv: [{}]", recv); nlohmann::json line_json = nlohmann::json::parse(recv, nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", recv); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, std::format("json parse error:{}", recv)); co_return; } auto status = line_json["status"].get(); if (!status) { SPDLOG_ERROR("status is false: [{}]", recv); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, recv); co_return; } auto token = line_json["token"].get(); SPDLOG_INFO("token: [{}]", token); recv.clear(); auto url = std::format("https://gptgo.ai/action_ai_gpt.php?token={}", token); ret = curl.setUrl(url) .setProxy(m_cfg.http_proxy) .setRecvBodyCallback([&](std::string str) { recv.append(str); while (true) { auto position = recv.find("\n"); if (position == std::string::npos) break; auto msg = recv.substr(0, position + 1); recv.erase(0, position + 1); msg.pop_back(); if (msg.empty() || !msg.contains("content")) continue; auto fields = splitString(msg, "data: "); boost::system::error_code err{}; nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", fields.back()); boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, std::format("json parse error: [{}]", fields.back())); }); continue; } auto str = line_json["choices"][0]["delta"]["content"].get(); if (!str.empty() && str != "[DONE]") boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); }); } return; }) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); } co_return; } boost::asio::awaitable FreeGpt::chatForAi(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); boost::system::error_code err{}; ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); auto generate_signature = [](uint64_t timestamp, const std::string& message, const std::string& id) { std::string s = std::to_string(timestamp) + ":" + id + ":" + message + ":7YN8z6d6"; unsigned char hash[SHA256_DIGEST_LENGTH]; SHA256_CTX sha256; if (!SHA256_Init(&sha256)) throw std::runtime_error("SHA-256 initialization failed"); if (!SHA256_Update(&sha256, s.c_str(), s.length())) throw std::runtime_error("SHA-256 update failed"); if (!SHA256_Final(hash, &sha256)) throw std::runtime_error("SHA-256 finalization failed"); std::stringstream ss; for (int i = 0; i < SHA256_DIGEST_LENGTH; i++) ss << std::hex << std::setw(2) << std::setfill('0') << static_cast(hash[i]); return ss.str(); }; static std::unordered_multimap headers{ {"Content-Type", "application/json"}, {"Origin", "https://chatforai.store"}, {"Referer", "https://chatforai.store/"}, }; auto ret = Curl() .setUrl("https://chatforai.store/api/handle/provider-openai") .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([](std::string) { return; }) .setRecvBodyCallback([&](std::string str) { boost::asio::post(ch->get_executor(), [=, str = std::move(str)] { ch->try_send(err, str); }); return; }) .setBody([&] { uint64_t timestamp = getTimestamp(); constexpr std::string_view request_str{R"({ "conversationId": "id_1696984301982", "conversationType": "chat_continuous", "botId": "chat_continuous", "globalSettings": { "baseUrl": "https://api.openai.com", "model": "gpt-3.5-turbo", "messageHistorySize": 5, "temperature": 0.7, "top_p": 1 }, "botSettings": {}, "prompt": "hello", "messages": [{ "role": "user", "content": "hello" }], "sign": "15d8e701706743ffa74f8b96c97bd1f79354c7da4a97438c81c6bb259004cd77", "timestamp": 1696984302017 })"}; nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false); auto conversation_id = std::format("id_{}", timestamp - 35); request["conversationId"] = conversation_id; request["timestamp"] = timestamp; request["sign"] = generate_signature(timestamp, prompt, conversation_id); request["messages"] = getConversationJson(json); request["prompt"] = prompt; auto str = request.dump(); SPDLOG_INFO("request : [{}]", str); return str; }()) .setHttpHeaders(headers) .perform(); if (ret.has_value()) { co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); } co_return; } boost::asio::awaitable FreeGpt::gptalk(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; boost::system::error_code err{}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); auto generate_token_hex = [](int32_t length) { std::random_device rd; std::stringstream ss; std::mt19937 gen(rd()); std::uniform_int_distribution<> dis(0, 15); for (int i = 0; i < length; ++i) ss << std::hex << dis(gen); std::string token = ss.str(); token = std::string(length * 2 - token.length(), '0') + token; return token; }; uint64_t timestamp = getTimestamp(); std::string recv; Curl curl; std::unordered_multimap headers{ {"Accept", "*/*"}, {"Content-Type", "application/json"}, {"authority", "gptalk.net"}, {"origin", "https://gptalk.net"}, {"x-auth-appid", "2229"}, {"x-auth-openid", ""}, {"x-auth-platform", ""}, {"x-auth-timestamp", std::to_string(timestamp)}, }; auto ret = curl.setUrl("https://gptalk.net/api/chatgpt/user/login") .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([&](std::string) {}) .setRecvBodyCallback([&](std::string str) { recv.append(str); return; }) .setBody([&] { nlohmann::json login_json; login_json["fingerprint"] = generate_token_hex(16); login_json["platform"] = "fingerprint"; std::string request_str = login_json.dump(); return request_str; }()) .clearHeaders() .setHttpHeaders(headers) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); co_return; } SPDLOG_INFO("login rsp: [{}]", recv); nlohmann::json auth_rsp = nlohmann::json::parse(recv, nullptr, false); auto auth_token = auth_rsp["data"]["token"].get(); SPDLOG_INFO("token: [{}]", auth_token); auto auth_str = std::format("Bearer {}", auth_token); headers.emplace("authorization", auth_str); recv.clear(); ret = curl.setUrl("https://gptalk.net/api/chatgpt/chatapi/text") .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([&](std::string) {}) .setRecvBodyCallback([&](std::string str) { recv.append(str); return; }) .setBody([&] { constexpr std::string_view json_str = R"({ "content":"hello", "accept":"stream", "from":1, "model":"gpt-3.5-turbo", "is_mobile":0, "user_agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36", "is_open_ctx":0, "prompt":"", "roid":111, "temperature":0, "ctx_msg_count":3, "created_at":1696655321 })"; nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false); request["created_at"] = timestamp; request["content"] = prompt; auto request_str = request.dump(); return request_str; }()) .clearHeaders() .setHttpHeaders(headers) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); co_return; } SPDLOG_INFO("input.recv: [{}]", recv); nlohmann::json get_text_rsp = nlohmann::json::parse(recv, nullptr, false); auto token = get_text_rsp["data"]["token"].get(); SPDLOG_INFO("token: [{}]", token); recv.clear(); std::string last_message; auto url = std::format("https://gptalk.net/api/chatgpt/chatapi/stream?token={}", token); ret = curl.setUrl(url) .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([&](std::string) {}) .setOpt(CURLOPT_HTTPGET, 1L) .setRecvBodyCallback([&](std::string str) { recv.append(str); while (true) { auto position = recv.find("\n"); if (position == std::string::npos) break; auto msg = recv.substr(0, position + 1); recv.erase(0, position + 1); msg.pop_back(); if (msg.empty() || !msg.contains("content") || !msg.starts_with("data: ")) continue; msg.erase(0, 6); boost::system::error_code err{}; nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", msg); boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, std::format("json parse error: [{}]", msg)); }); continue; } auto content = line_json["content"].get(); if (last_message.empty()) last_message = content; else { auto count = last_message.size(); last_message = content; content.erase(0, count); } if (content.empty()) continue; boost::asio::post(ch->get_executor(), [=, content = std::move(content)] { ch->try_send(err, content); }); } }) .clearHeaders() .setHttpHeaders([] -> auto& { static std::unordered_multimap headers{ {"Accept", "*/*"}, {"Content-Type", "application/json"}, {"authority", "gptalk.net"}, {"origin", "https://gptalk.net"}, {"x-auth-appid", "2229"}, {"x-auth-openid", ""}, {"x-auth-platform", ""}, }; return headers; }()) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); } co_return; } boost::asio::awaitable FreeGpt::gptForLove(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; boost::system::error_code err{}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); auto secret_rsp = callZeus(std::format("{}/gptforlove", m_cfg.zeus), "{}"); if (!secret_rsp.has_value()) { SPDLOG_ERROR("callZeus error: {}", secret_rsp.error()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, secret_rsp.error()); co_return; } SPDLOG_INFO("zeus: [{}]", secret_rsp.value().dump()); static std::unordered_multimap headers{ {"Content-Type", "application/json"}, {"referer", "https://ai18.gptforlove.com/"}, {"origin", "https://ai18.gptforlove.com"}, {"authority", "api.gptplus.one"}, }; std::string recv; auto ret = Curl() .setUrl("https://api.gptplus.one/chat-process") .setProxy(m_cfg.http_proxy) .setRecvBodyCallback([&](std::string str) { recv.append(str); while (true) { auto position = recv.find("\n"); if (position == std::string::npos) break; auto msg = recv.substr(0, position + 1); recv.erase(0, position + 1); msg.pop_back(); if (msg.contains("10分钟内提问超过了5次")) { boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, msg); }); return; } if (msg.empty() || !msg.contains("content")) continue; boost::system::error_code err{}; nlohmann::json line_json = nlohmann::json::parse(msg, nullptr, false); if (line_json.is_discarded()) { SPDLOG_ERROR("json parse error: [{}]", msg); boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, std::format("json parse error: [{}]", msg)); }); continue; } auto str = line_json["detail"]["choices"][0]["delta"]["content"].get(); if (!str.empty()) boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); }); } }) .setBody([&] { constexpr std::string_view request_str{R"({ "prompt": "hello", "options": {}, "systemMessage": "You are ChatGPT, the version is GPT3.5, a large language model trained by OpenAI. Follow the user's instructions carefully.", "temperature": 0.8, "top_p": 1, "secret": "U2FsdGVkX18vdtlMj0nP1LoUzEqJTP0is+Q2+bQJNMk=", "stream": false })"}; nlohmann::json request = nlohmann::json::parse(request_str, nullptr, false); request["secret"] = secret_rsp.value()["secret"]; request["prompt"] = prompt; auto str = request.dump(); SPDLOG_INFO("request : [{}]", str); return str; }()) .setHttpHeaders(headers) .perform(); if (ret.has_value()) { SPDLOG_ERROR("{}", ret.value()); co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); } co_return; } boost::asio::awaitable FreeGpt::chatGptDemo(std::shared_ptr ch, nlohmann::json json) { co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; boost::system::error_code err{}; auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); std::unordered_multimap http_headers{ {"authority", "chat.chatgptdemo.net"}, {"origin", "https://chat.chatgptdemo.net"}, {"referer", "https://chat.chatgptdemo.net/"}, }; std::string recv, header_str; Curl curl; auto ret = curl.setUrl("https://chat.chatgptdemo.net/") .setProxy(m_cfg.http_proxy) .setRecvHeadersCallback([&](std::string str) { header_str.append(str); }) .setRecvBodyCallback([&](std::string str) { recv.append(str); }) .setHttpHeaders(http_headers) .perform(); if (ret) { co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); ch->try_send(err, ret.value()); co_return; } // auto response_header = Curl::parseHttpHeaders(header_str); // for (auto& [k, v] : response_header) // SPDLOG_INFO("{}: {}", k, v); auto match_ret = findAll(R"(