update provider geekgpt && llama2 (#50)

This commit is contained in:
Dmitry Afanasyev 2023-11-02 20:22:56 +03:00 committed by GitHub
parent 2789b33677
commit 6262059bc7
5 changed files with 178 additions and 198 deletions

View File

@@ -1,5 +1,7 @@
 # Cpp FreeGPT WebUI
 [![](https://github.com/fantasy-peak/cpp-freegpt-webui/workflows/ubuntu-gcc13/badge.svg)](https://github.com/fantasy-peak/cpp-freegpt-webui/actions)
+[![](https://github.com/fantasy-peak/cpp-freegpt-webui/workflows/ubuntu-clang18/badge.svg)](https://github.com/fantasy-peak/cpp-freegpt-webui/actions)
 ## GPT 3.5/4
 <strong>NO API KEY REQUIRED</strong> ❌🔑
@@ -29,9 +31,7 @@ To run the application, run the following command:
 1. Check the local g++ version; g++ >= 13.1.0 (GCC) is required
 2. Install xmake
-wget https://github.com/xmake-io/xmake/releases/download/v2.8.2/xmake-v2.8.2.xz.run
-chmod 777 xmake-v2.8.2.xz.run
-./xmake-v2.8.2.xz.run
+curl -kfsSL https://xmake.io/shget.text | bash -s v2.8.3
 source ~/.xmake/profile
 3. Install libcurl-impersonate: Ubuntu (apt-get install libcurl4-openssl-dev), CentOS 7 (yum install libcurl-devel.x86_64)
@@ -64,25 +64,52 @@ docker pull fantasypeak/freegpt:latest
 Run the application using Docker:
 ```
-docker run --rm -p 8858:8858 -it --name freegpt fantasypeak/freegpt:latest
-// OR
-docker run --rm --net=host -it --name freegpt fantasypeak/freegpt:latest
-// use http_proxy
-docker run --rm -p 8858:8858 -it --name freegpt -e HTTP_PROXY=http://127.0.0.1:8080 -e CHAT_PATH=/chat fantasypeak/freegpt:latest
-// set active providers
-docker run --rm -p 8858:8858 -it --name freegpt -e CHAT_PATH=/chat -e PROVIDERS="[\"gpt-4-ChatgptAi\",\"gpt-3.5-turbo-stream-DeepAi\"]" fantasypeak/freegpt:latest
-// enable ip white list function
-docker run --rm -p 8858:8858 -it --name freegpt -e IP_WHITE_LIST="[\"127.0.0.1\",\"192.168.1.1\"]" fantasypeak/freegpt:latest
+docker run -it --rm \
+    -p 8858:8858 \
+    --name freegpt \
+    fantasypeak/freegpt:latest
+```
+Run the application (with an HTTP proxy) using Docker:
+```
+docker run --rm -it \
+    -p 8858:8858 \
+    --name freegpt \
+    -e HTTP_PROXY=http://127.0.0.1:8080 \
+    fantasypeak/freegpt:latest
+```
+Configurable environment variables:
+```
+01. CHAT_PATH=/chat
+02. HTTP_PROXY=http://127.0.0.1:8080
+03. PROVIDERS="[\"gpt-4-ChatgptAi\",\"gpt-3.5-turbo-stream-DeepAi\"]"
+04. IP_WHITE_LIST="[\"127.0.0.1\",\"192.168.1.1\"]"
+05. PORT=8858
+06. HOST=0.0.0.0
+07. WORK_THREAD_NUM=8
+08. INTERVAL=300
+09. ZEUS=http://127.0.0.1:8860
+10. FLARESOLVERR=http://127.0.0.1:8191/v1
 ```
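Each of these variables maps onto a field of the Config struct shown later in this diff (host, port, chat_path, work_thread_num, and so on). The merge logic itself is not part of this commit; a typical pattern, sketched here with a hypothetical getEnvOr helper, would be:
```cpp
#include <cstdlib>
#include <string>

// Hypothetical helper (not from this repo): prefer the environment
// variable when it is set, otherwise keep the configured default.
inline std::string getEnvOr(const char* name, std::string fallback) {
    const char* value = std::getenv(name);
    return value != nullptr ? std::string{value} : std::move(fallback);
}

// Example: overriding Config fields before starting the server.
// cfg.host      = getEnvOr("HOST", cfg.host);
// cfg.port      = getEnvOr("PORT", cfg.port);
// cfg.chat_path = getEnvOr("CHAT_PATH", cfg.chat_path);
```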
 ### Start the Zeus Service [optional]
 This is optional: Zeus is an auxiliary service for cpp-freegpt-webui, needed because some providers have to perform specific operations such as getting cookies and refreshing web pages.
 If you need to use those providers, you must start it (the Zeus Docker container) first.
+Start the zeus service
 ```
-docker pull fantasypeak/freegpt-zeus:latest
-docker run --rm --net=host -it --name zeus fantasypeak/freegpt-zeus:latest
-docker pull fantasypeak/freegpt:latest
-docker run --rm --net=host -it --name freegpt fantasypeak/freegpt:latest
+docker run -d \
+    --name=zeus \
+    -p 8860:8860 \
+    --rm \
+    fantasypeak/freegpt-zeus:latest
+```
+Start the application
+```
+docker run -it --rm \
+    --net=host \
+    --name freegpt \
+    fantasypeak/freegpt:latest
 ```
 ### Start the flaresolverr docker [optional]
@@ -97,9 +124,13 @@ docker run -d \
 ```
 ### Call OpenAi Api
+It supports calling OpenAI's API, but API_KEY must be set
 ```
-// It supports calling OpenAI's API, but need set API_KEY
-docker run --rm -p 8858:8858 -it --name freegpt -e CHAT_PATH=/chat -e API_KEY=a40f22f2-c1a2-4b1d-a47f-55ae1a7ddbed fantasypeak/freegpt:latest
+docker run --rm -it \
+    -p 8858:8858 \
+    --name freegpt \
+    -e API_KEY=a40f22f2-c1a2-4b1d-a47f-55ae1a7ddbed \
+    fantasypeak/freegpt:latest
 ```
 ### WebUI
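For illustration, the OpenAI-compatible endpoint can then be called with the API_KEY as a bearer token. This sketch reuses the Curl wrapper that appears later in this diff; the /v1/chat/completions route and the exact auth scheme are assumptions based on OpenAI's API shape, not something this commit shows:
```cpp
// Assumed route and auth scheme; verify against the server's handler code.
static std::unordered_multimap<std::string, std::string> headers{
    {"Content-Type", "application/json"},
    {"Authorization", "Bearer a40f22f2-c1a2-4b1d-a47f-55ae1a7ddbed"},
};
std::string recv;
auto ret = Curl()
               .setUrl("http://127.0.0.1:8858/v1/chat/completions")
               .setRecvHeadersCallback([](std::string) { return; })
               .setRecvBodyCallback([&](std::string str) {
                   recv.append(str);  // accumulate the (possibly streamed) reply
                   return;
               })
               .setBody(R"({"messages":[{"role":"user","content":"hello"}],"stream":true})")
               .clearHeaders()
               .setHttpHeaders(headers)
               .perform();
```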

View File

@@ -1,11 +1,13 @@
 #pragma once
+#include <thread>
 #include <yaml_cpp_struct.hpp>
 struct Config {
     std::string client_root_path;
     std::size_t interval{300};
-    std::size_t work_thread_num{8};
+    std::size_t work_thread_num{std::thread::hardware_concurrency() * 2};
     std::string host{"0.0.0.0"};
     std::string port{"8858"};
     std::string chat_path{"/chat"};
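One caveat with the new default: std::thread::hardware_concurrency() is allowed to return 0 when the core count cannot be determined, which would leave work_thread_num at 0. A guarded variant (a sketch, not part of this commit) avoids that:
```cpp
#include <algorithm>
#include <cstddef>
#include <thread>

// Sketch: clamp the reported concurrency to at least 1 before scaling,
// since hardware_concurrency() returns 0 when the value is unknown.
inline std::size_t defaultWorkThreads() {
    return std::max<std::size_t>(std::thread::hardware_concurrency(), 1) * 2;
}
```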

View File

@@ -101,3 +101,24 @@ inline std::string createUuidString() {
     static thread_local boost::uuids::random_generator gen;
     return boost::uuids::to_string(gen());
 }
+
+// clang-format off
+namespace detail {
+
+template <typename C>
+struct to_helper {};
+
+template <typename Container, std::ranges::range R>
+    requires std::convertible_to<std::ranges::range_value_t<R>, typename Container::value_type>
+Container operator|(R&& r, to_helper<Container>) {
+    return Container{r.begin(), r.end()};
+}
+
+} // namespace detail
+
+template <std::ranges::range Container>
+    requires(!std::ranges::view<Container>)
+inline auto to() {
+    return detail::to_helper<Container>{};
+}
+// clang-format on
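The to() helper moved here is a pre-C++23 stand-in for std::ranges::to, letting a lazy ranges pipeline be collected into a concrete container. A quick usage sketch (my example, not from this commit):
```cpp
#include <ranges>
#include <string>
#include <vector>

// Materialize a lazy views pipeline into a std::vector via operator|.
std::vector<std::string> words =
    std::views::iota(1, 4)
    | std::views::transform([](int i) { return std::to_string(i); })
    | to<std::vector<std::string>>();
// words == {"1", "2", "3"}
```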

View File

@@ -21,27 +21,6 @@
 namespace {
-
-// clang-format off
-namespace detail {
-
-template <typename C>
-struct to_helper {};
-
-template <typename Container, std::ranges::range R>
-    requires std::convertible_to<std::ranges::range_value_t<R>, typename Container::value_type>
-Container operator|(R&& r, to_helper<Container>) {
-    return Container{r.begin(), r.end()};
-}
-
-} // namespace detail
-
-template <std::ranges::range Container>
-    requires(!std::ranges::view<Container>)
-inline auto to() {
-    return detail::to_helper<Container>{};
-}
-// clang-format on
 
 std::string md5(const std::string& str, bool reverse = true) {
     unsigned char hash[MD5_DIGEST_LENGTH];
@@ -391,6 +370,8 @@ public:
         if (m_curl)
            curl_easy_cleanup(m_curl);
     }
+    Curl(const Curl&) = delete;
+    Curl& operator=(const Curl&) = delete;
 
     auto& setUrl(std::string_view url) {
         m_url = url;
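Deleting the copy operations is the right move for a handle-owning RAII type: a copied Curl would call curl_easy_cleanup twice on the same CURL*. A minimal sketch of the pattern (mine; the real class is only partially visible in this hunk):
```cpp
#include <curl/curl.h>
#include <utility>

// Owning, non-copyable, movable wrapper around a libcurl easy handle.
class CurlHandle {
public:
    CurlHandle() : m_curl(curl_easy_init()) {}
    ~CurlHandle() {
        if (m_curl)
            curl_easy_cleanup(m_curl);
    }
    CurlHandle(const CurlHandle&) = delete;  // prevent double cleanup
    CurlHandle& operator=(const CurlHandle&) = delete;
    CurlHandle(CurlHandle&& other) noexcept : m_curl(std::exchange(other.m_curl, nullptr)) {}
    CurlHandle& operator=(CurlHandle&& other) noexcept {
        std::swap(m_curl, other.m_curl);
        return *this;
    }
    CURL* get() const { return m_curl; }

private:
    CURL* m_curl{nullptr};
};
```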
@@ -2245,51 +2226,21 @@ boost::asio::awaitable<void> FreeGpt::llama2(std::shared_ptr<Channel> ch, nlohma
     auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
 
-    struct Input {
-        std::shared_ptr<Channel> ch;
-        std::string recv;
-    };
-    Input input;
-    CURLcode res;
-    CURL* curl = curl_easy_init();
-    if (!curl) {
-        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    ScopeExit auto_exit{[=] { curl_easy_cleanup(curl); }};
-    auto ret = sendHttpRequest(CurlHttpRequest{
-        .curl = curl,
-        .url = "https://www.llama2.ai/api",
-        .http_proxy = m_cfg.http_proxy,
-        .cb = [](void* contents, size_t size, size_t nmemb, void* userp) mutable -> size_t {
-            auto input_ptr = static_cast<Input*>(userp);
-            std::string data{(char*)contents, size * nmemb};
-            auto& [ch, recv] = *input_ptr;
-            boost::asio::post(ch->get_executor(), [=] {
-                boost::system::error_code err{};
-                ch->try_send(err, data);
-            });
-            return size * nmemb;
-        },
-        .input = [&] -> void* {
-            input.recv.clear();
-            input.ch = ch;
-            return &input;
-        }(),
-        .headers = [&] -> auto& {
-            static std::unordered_map<std::string, std::string> headers{
-                {"Accept", "*/*"},
-                {"origin", "https://www.llama2.ai"},
-                {"referer", "https://www.llama2.ai/"},
-                {"Content-Type", "text/plain;charset=UTF-8"},
-            };
-            return headers;
-        }(),
-        .body = [&] -> std::string {
+    static std::unordered_multimap<std::string, std::string> headers{
+        {"Accept", "*/*"},
+        {"origin", "https://www.llama2.ai"},
+        {"referer", "https://www.llama2.ai/"},
+        {"Content-Type", "text/plain;charset=UTF-8"},
+    };
+    auto ret = Curl()
+                   .setUrl("https://www.llama2.ai/api")
+                   .setProxy(m_cfg.http_proxy)
+                   .setRecvHeadersCallback([](std::string) { return; })
+                   .setRecvBodyCallback([&](std::string str) mutable {
+                       boost::asio::post(ch->get_executor(), [=, str = std::move(str)] { ch->try_send(err, str); });
+                       return;
+                   })
+                   .setBody([&] {
             constexpr std::string_view ask_json_str = R"({
                 "prompt":"[INST] hello [/INST]\n[INST] hello [/INST]\n",
                 "version":"d24902e3fa9b698cc208b5e63136c4e26e828659a9f09827ca6ec5bb83014381",
@@ -2305,15 +2256,15 @@ boost::asio::awaitable<void> FreeGpt::llama2(std::shared_ptr<Channel> ch, nlohma
             std::string ask_request_str = ask_request.dump();
             SPDLOG_INFO("ask_request_str: [{}]", ask_request_str);
             return ask_request_str;
-        }(),
-        .response_header_ptr = nullptr,
-        .expect_response_code = 200,
-        .ssl_verify = false,
-    });
+                   }())
+                   .clearHeaders()
+                   .setHttpHeaders(headers)
+                   .perform();
     if (ret) {
+        SPDLOG_ERROR("https://www.llama2.ai/api: [{}]", ret.value());
         co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
         ch->try_send(err, ret.value());
-        co_return;
     }
     co_return;
 }
@@ -2418,34 +2369,26 @@ boost::asio::awaitable<void> FreeGpt::geekGpt(std::shared_ptr<Channel> ch, nlohm
     co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
     ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
     boost::system::error_code err{};
-
-    auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
-
-    struct Input {
-        std::shared_ptr<Channel> ch;
-        std::string recv;
-    };
-    Input input;
-
-    CURLcode res;
-    CURL* curl = curl_easy_init();
-    if (!curl) {
-        auto error_info = std::format("curl_easy_init() failed:{}", curl_easy_strerror(res));
-        co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
-        ch->try_send(err, error_info);
-        co_return;
-    }
-    ScopeExit auto_exit{[=] { curl_easy_cleanup(curl); }};
-    auto ret = sendHttpRequest(CurlHttpRequest{
-        .curl = curl,
-        .url = "https://ai.fakeopen.com/v1/chat/completions",
-        .http_proxy = m_cfg.http_proxy,
-        .cb = [](void* contents, size_t size, size_t nmemb, void* userp) mutable -> size_t {
-            auto input_ptr = static_cast<Input*>(userp);
-            std::string data{(char*)contents, size * nmemb};
-            auto& [ch, recv] = *input_ptr;
-            recv.append(data);
+    static std::unordered_multimap<std::string, std::string> headers{
+        {"Accept", "*/*"},
+        {"authority", "ai.fakeopen.com"},
+        {"content-type", "application/json"},
+        {"referer", "https://chat.geekgpt.org/"},
+        {"origin", "https://chat.geekgpt.org"},
+        {"sec-ch-ua", R"("Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117")"},
+        {"sec-ch-ua-mobile", R"(?0)"},
+        {"sec-ch-ua-platform", R"("macOS")"},
+        {"cache-control", "no-cache"},
+        {"pragma", "no-cache"},
+        {"authorization", "Bearer pk-this-is-a-real-free-pool-token-for-everyone"},
+    };
+    std::string recv;
+    auto ret = Curl()
+                   .setUrl("https://ai.fakeopen.com/v1/chat/completions")
+                   .setProxy(m_cfg.http_proxy)
+                   .setRecvHeadersCallback([](std::string) { return; })
+                   .setRecvBodyCallback([&](std::string str) mutable {
+                       recv.append(str);
             while (true) {
                 auto position = recv.find("\n");
                 if (position == std::string::npos)
@@ -2469,25 +2412,9 @@ boost::asio::awaitable<void> FreeGpt::geekGpt(std::shared_ptr<Channel> ch, nlohm
                 if (!str.empty() && str != "[DONE]")
                     boost::asio::post(ch->get_executor(), [=] { ch->try_send(err, str); });
             }
-            return size * nmemb;
-        },
-        .input = [&] -> void* {
-            input.recv.clear();
-            input.ch = ch;
-            return &input;
-        }(),
-        .headers = [&] -> auto& {
-            static std::unordered_map<std::string, std::string> headers{
-                {"Accept", "*/*"},
-                {"origin", "https://chat.geekgpt.org"},
-                {"referer", "https://chat.geekgpt.org/"},
-                {"Content-Type", "application/json"},
-                {"authority", "ai.fakeopen.com"},
-                {"authorization", "Bearer pk-this-is-a-real-free-pool-token-for-everyone"},
-            };
-            return headers;
-        }(),
-        .body = [&] -> std::string {
+                       return;
+                   })
+                   .setBody([&] {
             constexpr std::string_view ask_json_str = R"({
                 "messages": [{
                     "role": "user",
@@ -2503,17 +2430,16 @@ boost::asio::awaitable<void> FreeGpt::geekGpt(std::shared_ptr<Channel> ch, nlohm
             nlohmann::json ask_request = nlohmann::json::parse(ask_json_str, nullptr, false);
             ask_request["messages"] = getConversationJson(json);
             std::string ask_request_str = ask_request.dump();
-            SPDLOG_INFO("ask_request_str: [{}]", ask_request_str);
+            SPDLOG_INFO("request: [{}]", ask_request_str);
             return ask_request_str;
-        }(),
-        .response_header_ptr = nullptr,
-        .expect_response_code = 200,
-        .ssl_verify = false,
-    });
-    if (ret) {
+                   }())
+                   .clearHeaders()
+                   .setHttpHeaders(headers)
+                   .perform();
+    if (ret.has_value()) {
+        SPDLOG_ERROR("https://ai.fakeopen.com/v1/chat/completions: [{}]", ret.value());
         co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
         ch->try_send(err, ret.value());
-        co_return;
     }
     co_return;
 }
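The recv-body callback above is reassembling a server-sent-event stream: chunks arrive at arbitrary byte boundaries, so it buffers into recv, splits on newlines, and forwards each data: payload that is not [DONE]. A standalone sketch of that step (my illustration; the middle of the loop is elided in this diff, so the prefix handling is an assumption):
```cpp
#include <string>
#include <string_view>
#include <vector>

// Sketch: feed raw chunks in, get complete "data: ..." payloads out.
// Incomplete trailing lines stay buffered in `recv` for the next chunk.
std::vector<std::string> feedSseChunk(std::string& recv, std::string_view chunk) {
    std::vector<std::string> payloads;
    recv.append(chunk);
    while (true) {
        auto position = recv.find('\n');
        if (position == std::string::npos)
            break;
        std::string line = recv.substr(0, position);
        recv.erase(0, position + 1);
        constexpr std::string_view prefix = "data: ";
        if (line.starts_with(prefix)) {
            std::string payload = line.substr(prefix.size());
            if (!payload.empty() && payload != "[DONE]")
                payloads.push_back(std::move(payload));
        }
    }
    return payloads;
}
```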

View File

@@ -316,7 +316,7 @@ boost::asio::awaitable<void> doSession(boost::asio::ip::tcp::acceptor& acceptor,
     co_return;
 }
 
-int main(int argc, char** argv) {
+int main(int, char** argv) {
     curl_global_init(CURL_GLOBAL_ALL);
     ScopeExit cleanup{[=] { curl_global_cleanup(); }};
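ScopeExit here guarantees curl_global_cleanup() runs on every exit path from main(). The project ships its own helper; a minimal equivalent consistent with the usage above (a sketch, assuming nothing about the real implementation) is:
```cpp
#include <utility>

// Minimal scope guard: runs the stored callable on scope exit (CTAD
// deduces F from the lambda, matching `ScopeExit cleanup{[]{ ... }};`).
template <typename F>
struct ScopeExit {
    explicit ScopeExit(F f) : func(std::move(f)) {}
    ~ScopeExit() { func(); }
    ScopeExit(const ScopeExit&) = delete;
    ScopeExit& operator=(const ScopeExit&) = delete;
    F func;
};
```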