Mirror of https://github.com/Balshgit/gpt_chat_bot.git
Commit f17a0a72e4 (parent 460123ef28)

auto load environment variables (#72)

* auto load environment variables
* add providers gpt6 & chatxyz
@@ -60,6 +60,8 @@ class ChatGptModelsEnum(StrEnum):
     gpt_4_stream_aivvm = "gpt-4-stream-aivvm"
     gpt_3_5_turbo_stream_AiChatOnline = "gpt-3.5-turbo-stream-AiChatOnline"
     llama2_70B = "llama2-70B"
+    gpt6 = "gpt6"
+    gpt_3_5_turbo_stream_chatxyz = "gpt-3.5-turbo-stream-chatxyz"
     gpt_3_5_turbo_gptChatly = "gpt-3.5-turbo-gptChatly"
     gpt_3_5_turbo_stream_Berlin = "gpt-3.5-turbo-stream-Berlin"
     gpt_3_5_turbo_stream_chatGptAi = "gpt-3.5-turbo-stream-chatGptAi"
@@ -1,5 +1,13 @@
 ---
 client_root_path: "../client"
-enable_proxy: true
+interval: 300
+work_thread_num: 8
+host: "0.0.0.0"
+port: 8858
+chat_path: "/chat"
 providers: []
+enable_proxy: true
+api_key: ""
 ip_white_list: []
+zeus: "http://127.0.0.1:8860"
+flaresolverr: "http://127.0.0.1:8191/v1"
@@ -1,24 +1,21 @@
 #pragma once
 
-#include <thread>
-
 #include <yaml_cpp_struct.hpp>
 
 struct Config {
     std::string client_root_path;
-    std::size_t interval{300};
-    std::size_t work_thread_num{std::thread::hardware_concurrency() == 1 ? 2
-                                : std::thread::hardware_concurrency() * 2};
-    std::string host{"0.0.0.0"};
-    std::string port{"8858"};
-    std::string chat_path{"/chat"};
+    std::size_t interval;
+    std::size_t work_thread_num;
+    std::string host;
+    std::string port;
+    std::string chat_path;
     std::vector<std::string> providers;
     bool enable_proxy;
     std::string http_proxy;
     std::string api_key;
     std::vector<std::string> ip_white_list;
-    std::string zeus{"http://127.0.0.1:8860"};
-    std::string flaresolverr{"http://127.0.0.1:8191/v1"};
+    std::string zeus;
+    std::string flaresolverr;
 };
 YCS_ADD_STRUCT(Config, client_root_path, interval, work_thread_num, host, port, chat_path, providers, enable_proxy,
                http_proxy, api_key, ip_white_list, zeus, flaresolverr)
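Editor's note: the in-struct defaults above move into cfg.yaml (previous hunk), so the YAML now has to supply every field. For orientation, a minimal sketch (not part of the commit) of how a yaml_cpp_struct-described Config is loaded; the {optional, error-string} result shape matches how main() consumes it further down. The header path is hypothetical.

// Minimal sketch, assuming yaml_cpp_struct and the Config struct above.
#include <cstdlib>
#include <iostream>

#include <yaml_cpp_struct.hpp>

#include "cfg.h"  // hypothetical path for the header shown above

int main(int, char** argv) {
    // from_yaml yields a std::optional<Config> plus an error string.
    auto [config, error] = yaml_cpp_struct::from_yaml<Config>(argv[1]);
    if (!config) {
        std::cerr << "config error: " << error << '\n';
        return EXIT_FAILURE;
    }
    std::cout << "listening on " << config->host << ':' << config->port << '\n';
    return EXIT_SUCCESS;
}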
@@ -33,6 +33,8 @@ public:
     boost::asio::awaitable<void> aiChatOnline(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> fakeGpt(std::shared_ptr<Channel>, nlohmann::json);
     boost::asio::awaitable<void> aura(std::shared_ptr<Channel>, nlohmann::json);
+    boost::asio::awaitable<void> gpt6(std::shared_ptr<Channel>, nlohmann::json);
+    boost::asio::awaitable<void> chatxyz(std::shared_ptr<Channel>, nlohmann::json);
 
 private:
     boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>
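Editor's note: every provider shares this signature: a Boost.Asio coroutine that takes the request JSON and streams response chunks into a Channel. A hedged sketch of that shape, using a hypothetical echo provider (Channel, FreeGpt, and the request layout are the project's types, so this is not self-contained):

// Hypothetical provider illustrating the handler shape only: pull the user
// message out of the request JSON and stream it straight back.
boost::asio::awaitable<void> FreeGpt::echo(std::shared_ptr<Channel> ch, nlohmann::json json) {
    boost::system::error_code err{};
    // Request layout as used by the real providers in the next hunk.
    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
    ch->try_send(err, prompt);  // each try_send() delivers one chunk to the client
    co_return;
}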
@@ -2097,3 +2097,178 @@ boost::asio::awaitable<void> FreeGpt::aura(std::shared_ptr<Channel> ch, nlohmann
     }
     co_return;
 }
 
+boost::asio::awaitable<void> FreeGpt::gpt6(std::shared_ptr<Channel> ch, nlohmann::json json) {
+    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
+    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
+
+    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
+
+    boost::system::error_code err{};
+    std::unordered_multimap<std::string, std::string> headers{
+        {"Accept", "*/*"},
+        {"content-type", "application/json"},
+        {"Referer", "https://gpt6.ai/"},
+        {"Origin", "https://gpt6.ai"},
+        {"Sec-Fetch-Dest", "empty"},
+        {"Sec-Fetch-Mode", "cors"},
+        {"Sec-Fetch-Site", "cross-site"},
+        {"TE", "trailers"},
+    };
+    std::string recv;
+    auto ret = Curl()
+                   .setUrl("https://seahorse-app-d29hu.ondigitalocean.app/api/v1/query")
+                   .setProxy(m_cfg.http_proxy)
+                   .setRecvHeadersCallback([](std::string) { return; })
+                   .setRecvBodyCallback([&](std::string chunk_str) mutable {
+                       recv.append(chunk_str);
+                       while (true) {
+                           auto position = recv.find("\n");
+                           if (position == std::string::npos)
+                               break;
+                           auto msg = recv.substr(0, position + 1);
+                           recv.erase(0, position + 1);
+                           msg.pop_back();
+                           if (msg.empty() || !msg.contains("content"))
+                               continue;
+                           auto fields = splitString(msg, "data: ");
+                           boost::system::error_code err{};
+                           nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
+                           if (line_json.is_discarded()) {
+                               SPDLOG_ERROR("json parse error: [{}]", fields.back());
+                               ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
+                               continue;
+                           }
+                           auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
+                           if (!str.empty())
+                               ch->try_send(err, str);
+                       }
+                   })
+                   .setBody([&] {
+                       constexpr std::string_view ask_json_str = R"({
+                           "prompts":[
+                               {
+                                   "role":"user",
+                                   "content":"Hello"
+                               }
+                           ],
+                           "geoInfo":{
+                               "ip":"100.90.100.222",
+                               "hostname":"ip-100-090-100-222.um36.pools.vodafone-ip.de",
+                               "city":"Muenchen",
+                               "region":"North Rhine-Westphalia",
+                               "country":"DE",
+                               "loc":"44.0910,5.5827",
+                               "org":"AS3209 Vodafone GmbH",
+                               "postal":"41507",
+                               "timezone":"Europe/Berlin"
+                           },
+                           "paid":false,
+                           "character":{
+                               "textContent":"",
+                               "id":"52690ad6-22e4-4674-93d4-1784721e9944",
+                               "name":"GPT6",
+                               "htmlContent":""
+                           }
+                       })";
+                       nlohmann::json ask_request = nlohmann::json::parse(ask_json_str, nullptr, false);
+                       ask_request["prompts"] = getConversationJson(json);
+                       std::string ask_request_str = ask_request.dump();
+                       SPDLOG_INFO("request: [{}]", ask_request_str);
+                       return ask_request_str;
+                   }())
+                   .clearHeaders()
+                   .setHttpHeaders(headers)
+                   .perform();
+    if (ret.has_value()) {
+        SPDLOG_ERROR("{}", ret.value());
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, ret.value());
+    }
+    co_return;
+}
+
+boost::asio::awaitable<void> FreeGpt::chatxyz(std::shared_ptr<Channel> ch, nlohmann::json json) {
+    co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
+    ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
+
+    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
+
+    boost::system::error_code err{};
+    std::unordered_multimap<std::string, std::string> headers{
+        {"Accept", "text/event-stream"},
+        {"content-type", "application/json"},
+        {"Referer", "https://chat.3211000.xyz/"},
+        {"Origin", "https://chat.3211000.xyz"},
+        {"Sec-Fetch-Dest", "empty"},
+        {"Sec-Fetch-Mode", "cors"},
+        {"Sec-Fetch-Site", "same-origin"},
+        {"TE", "trailers"},
+        {"x-requested-with", "XMLHttpRequest"},
+    };
+    std::string recv;
+    auto ret = Curl()
+                   .setUrl("https://chat.3211000.xyz/api/openai/v1/chat/completions")
+                   .setProxy(m_cfg.http_proxy)
+                   .setRecvHeadersCallback([](std::string) { return; })
+                   .setRecvBodyCallback([&](std::string chunk_str) mutable {
+                       recv.append(chunk_str);
+                       while (true) {
+                           auto position = recv.find("\n");
+                           if (position == std::string::npos)
+                               break;
+                           auto msg = recv.substr(0, position + 1);
+                           recv.erase(0, position + 1);
+                           msg.pop_back();
+                           if (msg.empty() || !msg.contains("content"))
+                               continue;
+                           auto fields = splitString(msg, "data: ");
+                           boost::system::error_code err{};
+                           nlohmann::json line_json = nlohmann::json::parse(fields.back(), nullptr, false);
+                           if (line_json.is_discarded()) {
+                               SPDLOG_ERROR("json parse error: [{}]", fields.back());
+                               ch->try_send(err, std::format("json parse error: [{}]", fields.back()));
+                               continue;
+                           }
+                           if (line_json["choices"][0]["delta"]["content"].is_null())
+                               continue;
+                           auto str = line_json["choices"][0]["delta"]["content"].get<std::string>();
+                           if (!str.empty())
+                               ch->try_send(err, str);
+                       }
+                   })
+                   .setBody([&] {
+                       constexpr std::string_view ask_json_str = R"({
+                           "messages":[
+                               {
+                                   "role":"system",
+                                   "content":"\nYou are ChatGPT, a large language model trained by OpenAI.\nCarefully heed the user's instructions.\nRespond using Markdown.\nKnowledge cutoff: 2021-09\nCurrent model: gpt-3.5-turbo\nCurrent time: 2023/12/26 14:12:34\nLatex inline: $x^2$ \nLatex block: $$e=mc^2$$\n\n"
+                               },
+                               {
+                                   "role":"user",
+                                   "content":"hello"
+                               }
+                           ],
+                           "stream":true,
+                           "model":"gpt-3.5-turbo",
+                           "temperature":0.5,
+                           "presence_penalty":0,
+                           "frequency_penalty":0,
+                           "top_p":1
+                       })";
+                       nlohmann::json ask_request = nlohmann::json::parse(ask_json_str, nullptr, false);
+                       ask_request["messages"][1]["content"] = prompt;
+                       std::string ask_request_str = ask_request.dump();
+                       SPDLOG_INFO("request: [{}]", ask_request_str);
+                       return ask_request_str;
+                   }())
+                   .clearHeaders()
+                   .setHttpHeaders(headers)
+                   .perform();
+    if (ret.has_value()) {
+        SPDLOG_ERROR("{}", ret.value());
+        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
+        ch->try_send(err, ret.value());
+    }
+    co_return;
+}
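Editor's note: both new providers use the same receive loop. The upstream answers as a server-sent-event stream, so the body callback buffers partial network chunks, cuts complete lines on "\n", strips the "data: " prefix, and JSON-parses each payload. A self-contained sketch of that loop (splitString is the project's helper; a plain find-based stand-in is used here):

// Standalone sketch of the chunk-reassembly loop shared by gpt6/chatxyz.
#include <iostream>
#include <string>

#include <nlohmann/json.hpp>

int main() {
    std::string recv;
    auto on_chunk = [&](const std::string& chunk) {
        recv.append(chunk);
        while (true) {
            auto pos = recv.find('\n');
            if (pos == std::string::npos)
                break;                        // keep the partial line buffered
            auto line = recv.substr(0, pos);  // one complete line
            recv.erase(0, pos + 1);
            if (auto p = line.find("data: "); p != std::string::npos)
                line = line.substr(p + 6);    // drop the SSE prefix
            auto j = nlohmann::json::parse(line, nullptr, false);
            if (j.is_discarded())
                continue;                     // skip keep-alives / non-JSON lines
            std::cout << j["choices"][0]["delta"]["content"].get<std::string>();
        }
    };
    // A delta split across two network chunks still yields "hello".
    on_chunk("data: {\"choices\":[{\"delta\":{\"content\":\"hel\"}}]}\ndata: ");
    on_chunk("{\"choices\":[{\"delta\":{\"content\":\"lo\"}}]}\n");
    std::cout << '\n';
}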
@@ -39,36 +39,8 @@ void setEnvironment(auto& cfg) {
         if (!upper_http_proxy.empty())
             cfg.http_proxy = std::move(upper_http_proxy);
     }
-    if (auto [chat_path] = getEnv("CHAT_PATH"); !chat_path.empty()) {
-        cfg.chat_path = std::move(chat_path);
-    }
     if (cfg.chat_path.back() == '/')
         cfg.chat_path.pop_back();
-    if (auto [port] = getEnv("PORT"); !port.empty())
-        cfg.port = std::move(port);
-    if (auto [host] = getEnv("HOST"); !host.empty())
-        cfg.host = std::move(host);
-    if (auto [work_thread_num] = getEnv("WORK_THREAD_NUM"); !work_thread_num.empty())
-        cfg.work_thread_num = std::atol(work_thread_num.c_str());
-    if (auto [providers] = getEnv("PROVIDERS"); !providers.empty()) {
-        nlohmann::json providers_list = nlohmann::json::parse(providers, nullptr, false);
-        if (!providers_list.is_discarded())
-            cfg.providers = providers_list.get<std::vector<std::string>>();
-    }
-    if (auto [api_key] = getEnv("API_KEY"); !api_key.empty())
-        cfg.api_key = std::move(api_key);
-    if (auto [interval] = getEnv("INTERVAL"); !interval.empty())
-        cfg.interval = std::atol(interval.c_str());
-    // export IP_WHITE_LIST="[\"127.0.0.1\",\"192.168.1.1\"]"
-    if (auto [ip_white_list_str] = getEnv("IP_WHITE_LIST"); !ip_white_list_str.empty()) {
-        nlohmann::json ip_white_list = nlohmann::json::parse(ip_white_list_str, nullptr, false);
-        if (!ip_white_list.is_discarded())
-            cfg.ip_white_list = ip_white_list.get<std::vector<std::string>>();
-    }
-    if (auto [zeus] = getEnv("ZEUS"); !zeus.empty())
-        cfg.zeus = std::move(zeus);
-    if (auto [flaresolverr] = getEnv("FLARESOLVERR"); !flaresolverr.empty())
-        cfg.flaresolverr = std::move(flaresolverr);
 }
 
 std::string createIndexHtml(const std::string& file, const Config& cfg) {
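Editor's note: the block deleted above is what "auto load environment variables" replaces: one hand-written getEnv check per field. For reference, a condensed, self-contained sketch of that behavior using std::getenv in place of the project's getEnv helper; scalar variables are copied verbatim, list-valued ones are JSON-decoded (see the IP_WHITE_LIST comment in the removed code):

// Sketch of the deleted behavior, not of from_yaml_env's internals.
#include <cstdlib>
#include <string>
#include <vector>

#include <nlohmann/json.hpp>

struct Config {  // minimal stand-in for the project's Config
    std::string port;
    std::vector<std::string> ip_white_list;
};

void applyEnvOverrides(Config& cfg) {
    if (const char* port = std::getenv("PORT"); port && *port)
        cfg.port = port;  // scalar: copy the string as-is
    if (const char* wl = std::getenv("IP_WHITE_LIST"); wl && *wl) {
        // list: e.g. export IP_WHITE_LIST="[\"127.0.0.1\",\"192.168.1.1\"]"
        auto list = nlohmann::json::parse(wl, nullptr, false);
        if (!list.is_discarded())
            cfg.ip_white_list = list.get<std::vector<std::string>>();
    }
}

With yaml_cpp_struct's from_yaml_env (adopted in the next hunk, and presumably the reason for the v1.0.4 pin in xmake.lua below), the same overrides are applied generically for every field declared in YCS_ADD_STRUCT, which is why this whole block could be dropped.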
@@ -321,7 +293,7 @@ int main(int, char** argv) {
     ScopeExit cleanup{[=] { curl_global_cleanup(); }};
 
     spdlog::set_pattern("[%Y-%m-%d %H:%M:%S.%e][thread %t][%!][%s:%#][%l] %v");
-    auto [config, error] = yaml_cpp_struct::from_yaml<Config>(argv[1]);
+    auto [config, error] = yaml_cpp_struct::from_yaml_env<Config>(argv[1], "");
     if (!config) {
         SPDLOG_ERROR("{}", error);
         return EXIT_FAILURE;
@@ -350,6 +322,8 @@ int main(int, char** argv) {
     ADD_METHOD("gpt-3.5-turbo-stream-AiChatOnline", FreeGpt::aiChatOnline);
     ADD_METHOD("gpt-3.5-turbo-stream-fakeGpt", FreeGpt::fakeGpt);
     ADD_METHOD("gpt-3.5-turbo-stream-aura", FreeGpt::aura);
+    ADD_METHOD("gpt6", FreeGpt::gpt6);
+    ADD_METHOD("gpt-3.5-turbo-stream-chatxyz", FreeGpt::chatxyz);
 
     SPDLOG_INFO("active provider:");
     for (auto& [provider, _] : gpt_function)
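Editor's note: ADD_METHOD is defined elsewhere in main.cpp, so the following is only a hedged guess at what registration amounts to: gpt_function (the map iterated in the loop above) binds each requestable model name to a member coroutine. Channel and FreeGpt are the project's types, so this is a shape sketch rather than a drop-in:

// Hedged sketch; the real ADD_METHOD macro and map type may differ.
using GptHandler =
    std::function<boost::asio::awaitable<void>(std::shared_ptr<Channel>, nlohmann::json)>;
std::unordered_map<std::string, GptHandler> gpt_function;

void registerNewProviders(FreeGpt& app) {
    gpt_function.emplace("gpt6", std::bind_front(&FreeGpt::gpt6, &app));
    gpt_function.emplace("gpt-3.5-turbo-stream-chatxyz", std::bind_front(&FreeGpt::chatxyz, &app));
}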
@@ -6,7 +6,7 @@ add_repositories("my_private_repo https://github.com/fantasy-peak/xmake-repo.git
 
 add_requires("openssl", {system = false})
 add_requires("zlib", {system = false})
-add_requires("yaml_cpp_struct", "nlohmann_json", "spdlog", "inja", "plusaes", "concurrentqueue")
+add_requires("yaml_cpp_struct v1.0.4", "nlohmann_json", "spdlog", "inja", "plusaes", "concurrentqueue")
 add_requires("boost", {configs = {iostreams = true}})
 
 set_languages("c++23")