diff --git a/bot_microservice/constants.py b/bot_microservice/constants.py
index 35f379d..368a3bd 100644
--- a/bot_microservice/constants.py
+++ b/bot_microservice/constants.py
@@ -60,6 +60,7 @@ class ChatGptModelsEnum(StrEnum):
     Llama_2_70b_chat_hf_stream_DeepInfra = "Llama-2-70b-chat-hf-stream-DeepInfra"
     gpt_4_stream_aivvm = "gpt-4-stream-aivvm"
     gpt_3_5_turbo_stream_AiChatOnline = "gpt-3.5-turbo-stream-AiChatOnline"
+    gpt_3_5_turbo_stream_FreeChatgpt = "gpt-3.5-turbo-stream-FreeChatgpt"
     llama2_70B = "llama2-70B"
     gpt6 = "gpt6"
     gpt_3_5_turbo_stream_chatxyz = "gpt-3.5-turbo-stream-chatxyz"
diff --git a/chatgpt_microservice/include/free_gpt.h b/chatgpt_microservice/include/free_gpt.h
index a28dcf9..4fb1169 100644
--- a/chatgpt_microservice/include/free_gpt.h
+++ b/chatgpt_microservice/include/free_gpt.h
@@ -36,6 +36,7 @@ public:
     boost::asio::awaitable<void> gpt6(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> chatxyz(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> geminiProChat(std::shared_ptr<Channel>, nlohmann::json);
+    boost::asio::awaitable<void> freeChatGpt(std::shared_ptr<Channel>, nlohmann::json);
 
 private:
     boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>
diff --git a/chatgpt_microservice/src/free_gpt.cpp b/chatgpt_microservice/src/free_gpt.cpp
index ad4eaef..5eb650f 100644
--- a/chatgpt_microservice/src/free_gpt.cpp
+++ b/chatgpt_microservice/src/free_gpt.cpp
@@ -2351,3 +2351,79 @@ boost::asio::awaitable<void> FreeGpt::geminiProChat(std::shared_ptr<Channel> ch,
     }
     co_return;
 }
+
+boost::asio::awaitable<void> FreeGpt::freeChatGpt(std::shared_ptr<Channel> ch, nlohmann::json json) {
+    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
+    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
+
+    boost::system::error_code err{};
+    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
+    std::string recv;
+    auto ret = Curl()
+                   .setUrl("https://free.chatgpt.org.uk/api/openai/v1/chat/completions")
+                   .setProxy(m_cfg.http_proxy)
+                   .setBody([&] {
+                       constexpr std::string_view ask_json_str = R"({
+                           "messages":[
+                               {
+                                   "role":"user",
+                                   "content":"Hello"
+                               }
+                           ],
+                           "stream":true,
+                           "model":"gpt-3.5-turbo",
+                           "temperature":0.5,
+                           "presence_penalty":0,
+                           "frequency_penalty":0,
+                           "top_p":1
+                       })";
+                       nlohmann::json ask_request = nlohmann::json::parse(ask_json_str, nullptr, false);
+                       ask_request["messages"] = getConversationJson(json);
+                       std::string ask_request_str = ask_request.dump();
+                       SPDLOG_INFO("request: [{}]", ask_request_str);
+                       return ask_request_str;
+                   }())
+                   .setRecvBodyCallback([&](std::string str) {
+                       recv.append(str);
+                       while (true) {
+                           auto position = recv.find("\n");
+                           if (position == std::string::npos)
+                               break;
+                           auto msg = recv.substr(0, position + 1);
+                           recv.erase(0, position + 1);
+                           msg.pop_back();
+                           if (msg.empty() || !msg.contains("content"))
+                               continue;
+                           auto fields = splitString(msg, "data: ");
+                           boost::system::error_code err{};
+                           nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
+                           if (line_json.is_discarded()) {
+                               SPDLOG_ERROR("json parse error: [{}]", fields.back());
+                               boost::asio::post(ch->get_executor(), [=] {
+                                   ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
+                               });
+                               continue;
+                           }
+                           auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
+                           if (!str.empty() && str != "[DONE]")
+                               boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
+                       }
+                       return;
+                   })
+                   .setHttpHeaders([&] -> auto {
+                       std::unordered_multimap<std::string, std::string> headers{
+                           {"Accept", "application/json, text/event-stream"},
+                           {"Origin", "https://free.chatgpt.org.uk"},
+                           {"Referer", "https://free.chatgpt.org.uk/"},
+                           {"Host", "free.chatgpt.org.uk"},
+                       };
+                       return headers;
+                   }())
+                   .perform();
+    if (ret.has_value()) {
+        SPDLOG_ERROR("{}", ret.value());
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, ret.value());
+    }
+    co_return;
+}
diff --git a/chatgpt_microservice/src/main.cpp b/chatgpt_microservice/src/main.cpp
index b44abd6..42eba56 100644
--- a/chatgpt_microservice/src/main.cpp
+++ b/chatgpt_microservice/src/main.cpp
@@ -325,6 +325,7 @@ int main(int, char** argv) {
     ADD_METHOD("gpt6", FreeGpt::gpt6);
     ADD_METHOD("gpt-3.5-turbo-stream-chatxyz", FreeGpt::chatxyz);
     ADD_METHOD("gpt-3.5-turbo-stream-geminiProChat", FreeGpt::geminiProChat);
+    ADD_METHOD("gpt-3.5-turbo-stream-FreeChatgpt", FreeGpt::freeChatGpt);
 
     SPDLOG_INFO("active provider:");
     for (auto& [provider, _] : gpt_function