add zeus-Vercel and update html (#46)

Dmitry Afanasyev 2023-10-27 18:37:25 +03:00 committed by GitHub
parent 2e7d4880e0
commit 11cfccbb01
10 changed files with 594 additions and 441 deletions

View File

@@ -59,6 +59,7 @@ class ChatGptModelsEnum(StrEnum):
gpt_3_5_turbo_stream_FakeGpt = "gpt-3.5-turbo-stream-FakeGpt"
gpt_3_5_turbo_stream_GeekGpt = "gpt-3.5-turbo-stream-GeekGpt"
gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove"
gpt_3_5_turbo_stream_Vercel = "gpt-3.5-turbo-stream-Vercel"
@classmethod
def values(cls) -> set[str]:
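The new enum value only takes effect if it exactly matches the provider key registered on the C++ side (see the ADD_METHOD("gpt-3.5-turbo-stream-Vercel", FreeGpt::vercel) hunk further down). A minimal sanity check in Python, assuming ChatGptModelsEnum is importable from the bot's constants module (the import path here is a guess):

# Hypothetical import path -- adjust to wherever ChatGptModelsEnum is defined.
from constants import ChatGptModelsEnum

# Must equal the key that main.cpp registers via ADD_METHOD, or requests
# for the new model never reach FreeGpt::vercel.
assert "gpt-3.5-turbo-stream-Vercel" in ChatGptModelsEnum.values()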

View File

@@ -22,7 +22,7 @@ class ChatGptAdmin(ModelView, model=ChatGpt):
def create_admin(application: "Application") -> Admin:
admin = Admin(
title='Chat GPT admin',
title="Chat GPT admin",
app=application.fastapi_app,
engine=application.db.async_engine,
base_url=os.path.join(settings.URL_PREFIX, "admin"),

View File

@@ -627,13 +627,12 @@ ul {
}
}
@media screen and (max-height: 640px) {
@media screen and (max-height: 640px) and (min-width: 990px) {
body {
height: 87vh
}
}
.shown {
display: flex;
}

View File

@@ -57,7 +57,7 @@
</style>
<script src="{{chat_path}}/assets/js/highlight.min.js"></script>
<script src="{{chat_path}}/assets/js/highlightjs-copy.min.js"></script>
<script>window.conversation_id = {{chat_id}} </script>
<script>window.conversation_id = "{{chat_id}}" </script>
<title>ChatGPT</title>
</head>
<body>

View File

@@ -1,7 +1,3 @@
const query = (obj) =>
Object.keys(obj)
.map((k) => encodeURIComponent(k) + "=" + encodeURIComponent(obj[k]))
.join("&");
const colorThemes = document.querySelectorAll('[name="theme"]');
const markdown = window.markdownit();
const message_box = document.getElementById(`messages`);
@@ -14,11 +10,6 @@ let prompt_lock = false;
hljs.addPlugin(new CopyButtonPlugin());
function resizeTextarea(textarea) {
textarea.style.height = '80px';
textarea.style.height = Math.min(textarea.scrollHeight, 200) + 'px';
}
const format = (text) => {
return text.replace(/(?:\r\n|\r|\n)/g, "<br>");
};
@@ -64,7 +55,7 @@ const ask_gpt = async (message) => {
message_input.innerHTML = ``;
message_input.innerText = ``;
add_conversation(window.conversation_id, message.substr(0, 20));
add_conversation(window.conversation_id, message);
window.scrollTo(0, 0);
window.controller = new AbortController();
@@ -88,9 +79,7 @@ const ask_gpt = async (message) => {
<i class="fa fa-trash trash-icon" onclick="deleteMessage('${token}')"></i>
</div>
`;
/* .replace(/(?:\r\n|\r|\n)/g, '<br>') */
console.log(message_box.innerHTML)
message_box.scrollTop = message_box.scrollHeight;
window.scrollTo(0, 0);
await new Promise((r) => setTimeout(r, 500));
@@ -116,7 +105,7 @@ const ask_gpt = async (message) => {
method: `POST`,
signal: window.controller.signal,
headers: {
"content-type": `application/json`,
'content-type': `application/json`,
accept: `text/event-stream`,
},
body: JSON.stringify({
@@ -128,12 +117,12 @@ const ask_gpt = async (message) => {
id: window.token,
content: {
conversation: await get_conversation(window.conversation_id),
internet_access: document.getElementById("switch").checked,
content_type: "text",
internet_access: document.getElementById(`switch`).checked,
content_type: `text`,
parts: [
{
content: message,
role: "user",
role: `user`,
},
],
},
@@ -149,27 +138,9 @@ const ask_gpt = async (message) => {
chunk = new TextDecoder().decode(value);
if (
chunk.includes(
`<form id="challenge-form" action="{{api_path}}/backend-api/v2/conversation?`
)
) {
chunk = `cloudflare token expired, please refresh the page.`;
}
text += chunk;
// const objects = chunk.match(/({.+?})/g);
// try { if (JSON.parse(objects[0]).success === false) throw new Error(JSON.parse(objects[0]).error) } catch (e) {}
// objects.forEach((object) => {
// console.log(object)
// try { text += h2a(JSON.parse(object).content) } catch(t) { console.log(t); throw new Error(t)}
// });
document.getElementById(`gpt_${window.token}`).innerHTML =
markdown.render(text);
document.getElementById(`gpt_${window.token}`).innerHTML = markdown.render(text);
document.querySelectorAll(`code`).forEach((el) => {
hljs.highlightElement(el);
});
@@ -178,14 +149,8 @@ const ask_gpt = async (message) => {
message_box.scrollTo({ top: message_box.scrollHeight, behavior: "auto" });
}
// if text contains :
if (
text.includes(
`instead. Maintaining this website and API costs a lot of money`
)
) {
document.getElementById(`gpt_${window.token}`).innerHTML =
"An error occured, please reload / refresh cache and try again.";
if (text.includes(`G4F_ERROR`)) {
document.getElementById(`gpt_${window.token}`).innerHTML = "An error occurred, please try again. If the problem persists, please reload / refresh the cache or use a different browser";
}
add_message(window.conversation_id, "user", message, token);
@@ -197,6 +162,7 @@ const ask_gpt = async (message) => {
await load_conversations(20, 0);
window.scrollTo(0, 0);
} catch (e) {
add_message(window.conversation_id, "user", message, token);
@@ -255,20 +221,20 @@ const show_option = async (conversation_id) => {
const yes = document.getElementById(`yes-${conversation_id}`);
const not = document.getElementById(`not-${conversation_id}`);
conv.style.display = "none";
yes.style.display = "block";
not.style.display = "block";
}
conv.style.display = `none`;
yes.style.display = `block`;
not.style.display = `block`;
};
const hide_option = async (conversation_id) => {
const conv = document.getElementById(`conv-${conversation_id}`);
const yes = document.getElementById(`yes-${conversation_id}`);
const not = document.getElementById(`not-${conversation_id}`);
conv.style.display = "block";
yes.style.display = "none";
not.style.display = "none";
}
conv.style.display = `block`;
yes.style.display = `none`;
not.style.display = `none`;
};
const delete_conversation = async (conversation_id) => {
localStorage.removeItem(`conversation:${conversation_id}`);
@@ -293,11 +259,14 @@ const set_conversation = async (conversation_id) => {
};
const new_conversation = async () => {
history.pushState({}, null, `{{chat_path}}/`);
window.conversation_id = uuid();
await clear_conversation();
await load_conversations(20, 0, true);
await say_hello()
};
const load_conversation = async (conversation_id) => {
@@ -311,30 +280,27 @@ const load_conversation = async (conversation_id) => {
<div class="message">
<div class="user">
${item.role == "assistant" ? gpt_image : user_image}
${
item.role == "assistant"
${item.role == "assistant"
? `<i class="fa-regular fa-phone-arrow-down-left"></i>`
: `<i class="fa-regular fa-phone-arrow-up-right"></i>`
}
</div>
${
item.role == "user"
${item.role == "user"
? `<div class="content" id="user_${item.token}">`
: `<div class="content" id="gpt_${item.token}">`
}
${
item.role == "assistant"
${item.role == "assistant"
? markdown.render(item.content)
: item.content
}
</div>
${
item.role == "user"
${item.role == "user"
? `<i class="fa fa-trash trash-icon" onclick="deleteMessage('${item.token}')"></i>`
: ''
}
</div>
`;
// console.log(message_box.innerHTML)
}
document.querySelectorAll(`code`).forEach((el) => {
@@ -361,7 +327,13 @@ const get_conversation = async (conversation_id) => {
return result;
};
const add_conversation = async (conversation_id, title) => {
const add_conversation = async (conversation_id, content) => {
if (content.length > 17) {
title = content.substring(0, 17) + '..'
} else {
title = content + '&nbsp;'.repeat(19 - content.length)
}
if (localStorage.getItem(`conversation:${conversation_id}`) == null) {
localStorage.setItem(
`conversation:${conversation_id}`,
@@ -392,9 +364,6 @@ const add_message = async (conversation_id, role, content, token) => {
};
const load_conversations = async (limit, offset, loader) => {
//console.log(loader);
//if (loader === undefined) box_conversations.appendChild(spinner);
let conversations = [];
for (let i = 0; i < localStorage.length; i++) {
if (localStorage.key(i).startsWith("conversation:")) {
@@ -403,7 +372,6 @@ const load_conversations = async (limit, offset, loader) => {
}
}
//if (loader === undefined) spinner.parentNode.removeChild(spinner)
await clear_conversations();
for (conversation of conversations) {
@@ -430,17 +398,6 @@ document.getElementById(`cancelButton`).addEventListener(`click`, async () => {
console.log(`aborted ${window.conversation_id}`);
});
function h2a(str1) {
var hex = str1.toString();
var str = "";
for (var n = 0; n < hex.length; n += 2) {
str += String.fromCharCode(parseInt(hex.substr(n, 2), 16));
}
return str;
}
const uuid = () => {
return `xxxxxxxx-xxxx-4xxx-yxxx-${Date.now().toString(16)}`.replace(
/[xy]/g,
@@ -461,49 +418,6 @@ const message_id = () => {
return BigInt(`0b${unix}${random_bytes}`).toString();
};
window.onload = async () => {
load_settings_localstorage();
conversations = 0;
for (let i = 0; i < localStorage.length; i++) {
if (localStorage.key(i).startsWith("conversation:")) {
conversations += 1;
}
}
if (conversations == 0) localStorage.clear();
await setTimeout(() => {
load_conversations(20, 0);
}, 1);
if (!window.location.href.endsWith(`#`)) {
if (/{{chat_path}}\/.+/.test(window.location.href)) {
await load_conversation(window.conversation_id);
}
}
message_input.addEventListener(`keydown`, async (evt) => {
if (prompt_lock) return;
if (evt.keyCode === 13 && !evt.shiftKey) {
evt.preventDefault();
console.log('pressed enter');
await handle_ask();
} else {
message_input.style.removeProperty("height");
message_input.style.height = message_input.scrollHeight + 4 + "px";
}
});
send_button.addEventListener(`click`, async () => {
console.log("clicked send");
if (prompt_lock) return;
await handle_ask();
});
register_settings_localstorage();
};
document.querySelector(".mobile-sidebar").addEventListener("click", (event) => {
const sidebar = document.querySelector(".conversations");
@@ -556,6 +470,29 @@ const load_settings_localstorage = async () => {
});
};
const say_hello = async () => {
tokens = [`Hello`, `!`, ` How`, ` can`, ` I`, ` assist`, ` you`, ` today`, `?`]
message_box.innerHTML += `
<div class="message">
<div class="user">
${gpt_image}
<i class="fa-regular fa-phone-arrow-down-left"></i>
</div>
<div class="content welcome-message">
</div>
</div>
`;
content = ``
to_modify = document.querySelector(`.welcome-message`);
for (token of tokens) {
await new Promise(resolve => setTimeout(resolve, (Math.random() * (100 - 200) + 100)))
content += token;
to_modify.innerHTML = markdown.render(content);
}
}
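Note that Math.random() * (100 - 200) + 100 produces a delay in the 0-100 ms range (the (100 - 200) factor is negative), not the 100-200 ms the literals suggest. A rough Python analogue of the simulated streaming, using the delay the expression actually yields:

# Rough Python analogue of say_hello's simulated token streaming (illustrative).
import asyncio
import random

async def say_hello() -> None:
    tokens = ["Hello", "!", " How", " can", " I", " assist", " you", " today", "?"]
    content = ""
    for token in tokens:
        await asyncio.sleep(random.uniform(0, 0.1))  # the JS delay is 0-100 ms
        content += token
        print(content)  # the JS renders content through markdown-it instead

asyncio.run(say_hello())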
// Theme storage for recurring viewers
const storeTheme = function (theme) {
localStorage.setItem("theme", theme);
@@ -581,13 +518,89 @@ colorThemes.forEach((themeOption) => {
});
});
function resizeTextarea(textarea) {
textarea.style.height = '80px';
textarea.style.height = Math.min(textarea.scrollHeight, 200) + 'px';
}
window.onload = async () => {
load_settings_localstorage();
setTheme();
conversations = 0;
for (let i = 0; i < localStorage.length; i++) {
if (localStorage.key(i).startsWith("conversation:")) {
conversations += 1;
}
}
if (conversations == 0) localStorage.clear();
await setTimeout(() => {
load_conversations(20, 0);
}, 1);
if (!window.location.href.endsWith(`#`)) {
if (/{{chat_path}}\/.+/.test(window.location.href)) {
await load_conversation(window.conversation_id);
}
}
// await load_models();
await say_hello()
message_input.addEventListener(`keydown`, async (evt) => {
if (prompt_lock) return;
if (evt.keyCode === 13 && !evt.shiftKey) {
evt.preventDefault();
console.log("pressed enter");
await handle_ask();
} else {
message_input.style.removeProperty("height");
message_input.style.height = message_input.scrollHeight + "px";
}
});
send_button.addEventListener(`click`, async () => {
console.log("clicked send");
if (prompt_lock) return;
await handle_ask();
});
register_settings_localstorage();
};
const observer = new MutationObserver((mutationsList) => {
for (const mutation of mutationsList) {
if (mutation.type === 'attributes' && mutation.attributeName === 'style') {
const height = message_input.offsetHeight;
let heightValues = {
81: "20px",
82: "20px",
100: "30px",
119: "39px",
138: "49px",
150: "55px"
}
send_button.style.top = heightValues[height] || '';
}
}
});
observer.observe(message_input, { attributes: true });
function deleteMessage(token) {
console.log(token)
const messageDivUser = document.getElementById(`user_${token}`)
const messageDivGpt = document.getElementById(`gpt_${token}`)
if (messageDivUser) messageDivUser.parentNode.remove();
if (messageDivGpt) messageDivGpt.parentNode.remove();
const conversation = JSON.parse(localStorage.getItem(`conversation:${window.conversation_id}`));
console.log(conversation)
conversation.items = conversation.items.filter(item => item.token !== token);
console.log(conversation)
localStorage.setItem(`conversation:${window.conversation_id}`, JSON.stringify(conversation));
const messages = document.getElementsByClassName("message");
@@ -595,5 +608,3 @@ function deleteMessage(token) {
delete_conversation(window.conversation_id);
};
}
document.onload = setTheme();

View File

@@ -38,6 +38,7 @@ public:
boost::asio::awaitable<void> geekGpt(std::shared_ptr<Channel>, nlohmann::json);
boost::asio::awaitable<void> chatGptAi(std::shared_ptr<Channel>, nlohmann::json);
boost::asio::awaitable<void> fakeGpt(std::shared_ptr<Channel>, nlohmann::json);
boost::asio::awaitable<void> vercel(std::shared_ptr<Channel>, nlohmann::json);
private:
boost::asio::awaitable<std::expected<boost::beast::ssl_stream<boost::beast::tcp_stream>, std::string>>

View File

@@ -3042,3 +3042,121 @@ boost::asio::awaitable<void> FreeGpt::fakeGpt(std::shared_ptr<Channel> ch, nlohm
co_return;
}
}
boost::asio::awaitable<void> FreeGpt::vercel(std::shared_ptr<Channel> ch, nlohmann::json json) {
co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable));
ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }};
boost::system::error_code err{};
auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get<std::string>();
auto create_random_number = [] {
std::random_device rd;
std::mt19937 mt(rd());
std::uniform_int_distribution<int> distribution(99, 999);
int random_number = distribution(mt);
return random_number;
};
constexpr std::string_view user_agent_str{
R"(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{}.{} Safari/537.36)"};
std::unordered_multimap<std::string, std::string> headers{
{"Accept", "*/*"},
{"authority", "sdk.vercel.ai"},
{"content-type", "application/json"},
{"referer", "https://sdk.vercel.ai/"},
{"origin", "https://sdk.vercel.ai"},
{"sec-ch-ua", R"("Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117")"},
{"sec-ch-ua-mobile", R"(?0)"},
{"sec-ch-ua-platform", R"("macOS")"},
{"cache-control", "no-cache"},
{"pragma", "no-cache"},
};
headers.emplace("user-agent", std::format(user_agent_str, create_random_number(), create_random_number()));
std::string recv;
Curl curl;
auto ret = curl.setUrl("https://sdk.vercel.ai/openai.jpeg")
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([](std::string) { return; })
.setRecvBodyCallback([&](std::string str) mutable {
recv.append(str);
return;
})
.clearHeaders()
.setHttpHeaders(headers)
.perform();
if (ret.has_value()) {
SPDLOG_ERROR("https://sdk.vercel.ai/openai.jpeg: [{}]", ret.value());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, ret.value());
co_return;
}
nlohmann::json request;
request["data"] = std::move(recv);
recv.clear();
auto vercel_rsp = callZeus(std::format("{}/vercel", m_cfg.zeus), request.dump());
if (!vercel_rsp.has_value()) {
SPDLOG_ERROR("callZeus vercel error: {}", vercel_rsp.error());
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, vercel_rsp.error());
co_return;
}
headers.erase("custom-encoding");
headers.erase("user-agent");
headers.emplace("custom-encoding", vercel_rsp.value()["data"]);
headers.emplace("user-agent", std::format(user_agent_str, create_random_number(), create_random_number()));
for (int i = 0; i < 20; i++) {
auto ret =
curl.setUrl("https://sdk.vercel.ai/api/generate")
.setProxy(m_cfg.http_proxy)
.setRecvHeadersCallback([](std::string) { return; })
.setRecvBodyCallback([&](std::string str) mutable {
if (str == "Internal Server Error" || str == "Rate limit exceeded") {
SPDLOG_WARN("vercel: [{}]", str);
return;
}
boost::asio::post(ch->get_executor(), [=, str = std::move(str)] { ch->try_send(err, str); });
return;
})
.setBody([&] {
constexpr std::string_view json_str = R"({
"model":"openai:gpt-3.5-turbo",
"messages":[
{
"role":"user",
"content":"hello"
}
],
"playgroundId":"403bce4c-7eb6-47b0-b1b5-0cb6b2469f70",
"chatIndex":0,
"temperature":0.7,
"maximumLength":4096,
"topP":1,
"topK":1,
"presencePenalty":1,
"frequencyPenalty":1,
"stopSequences":[]
})";
nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false);
request["messages"] = getConversationJson(json);
request["playgroundId"] = createUuidString();
SPDLOG_INFO("request: [{}]", request.dump(2));
return request.dump();
}())
.clearHeaders()
.setHttpHeaders(headers)
.perform();
if (ret.has_value()) {
SPDLOG_WARN("https://sdk.vercel.ai/api/generate: [{}]", ret.value());
co_await timeout(std::chrono::seconds(2));
continue;
}
co_return;
}
co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable));
ch->try_send(err, "call sdk.vercel.ai error");
co_return;
}
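For orientation: the function fetches the base64 challenge from https://sdk.vercel.ai/openai.jpeg, hands it to the zeus sidecar (the /vercel endpoint added in api_server.py below) to execute the challenge JavaScript, then replays the solved token against /api/generate as a custom-encoding header, retrying up to 20 times on transient errors. A Python sketch of the same flow, not part of the commit; the zeus address comes from the curl example in api_server.py, and the Vercel endpoints are assumed unchanged:

# Illustrative Python translation of the FreeGpt::vercel flow (a sketch,
# not the implementation shipped in this commit).
import time
import uuid

import requests

ZEUS = "http://127.0.0.1:8860"  # zeus sidecar, see api_server.py below

def vercel_chat(messages: list[dict], proxy: str | None = None) -> str:
    session = requests.Session()
    if proxy:
        session.proxies = {"http": proxy, "https": proxy}
    session.headers.update({
        "accept": "*/*",
        "content-type": "application/json",
        "origin": "https://sdk.vercel.ai",
        "referer": "https://sdk.vercel.ai/",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/117.0.0.0 Safari/537.36",
    })
    # 1. Fetch the base64-encoded anti-bot challenge.
    challenge = session.get("https://sdk.vercel.ai/openai.jpeg").text
    # 2. Let zeus execute the challenge JS and build the token.
    token = requests.post(f"{ZEUS}/vercel", json={"data": challenge}).json()["data"]
    body = {
        "model": "openai:gpt-3.5-turbo",
        "messages": messages,
        "playgroundId": str(uuid.uuid4()),  # the C++ code uses createUuidString()
        "chatIndex": 0,
        "temperature": 0.7,
        "maximumLength": 4096,
        "topP": 1,
        "topK": 1,
        "presencePenalty": 1,
        "frequencyPenalty": 1,
        "stopSequences": [],
    }
    # 3. Replay the solved token; retry on the two transient errors the
    #    C++ recv-body callback also special-cases.
    for _ in range(20):
        rsp = session.post("https://sdk.vercel.ai/api/generate",
                           json=body, headers={"custom-encoding": token})
        if rsp.text not in ("Internal Server Error", "Rate limit exceeded"):
            return rsp.text
        time.sleep(2)
    raise RuntimeError("call sdk.vercel.ai error")

Something like vercel_chat([{"role": "user", "content": "hello"}]) mirrors the request body the C++ code builds via getConversationJson(json).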

View File

@@ -353,6 +353,7 @@ int main(int argc, char** argv) {
ADD_METHOD("llama2", FreeGpt::llama2);
ADD_METHOD("gpt-3.5-turbo-stream-chatGptAi", FreeGpt::chatGptAi);
ADD_METHOD("gpt-3.5-turbo-stream-FakeGpt", FreeGpt::fakeGpt);
ADD_METHOD("gpt-3.5-turbo-stream-Vercel", FreeGpt::vercel);
SPDLOG_INFO("active provider:");
for (auto& [provider, _] : gpt_function)

View File

@@ -2,3 +2,4 @@ Flask==3.0.0
PyExecJS==1.5.1
selenium==4.14.0
Werkzeug==3.0.0
requests==2.31.0

View File

@@ -1,3 +1,4 @@
import base64
import json
import os
import threading
@@ -5,7 +6,7 @@ import time
import traceback
import execjs
from flask import Flask
from flask import Flask, request
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from werkzeug.serving import ThreadedWSGIServer
@@ -55,6 +56,26 @@ return o.toString()
return json.dumps(dict)
# curl -X POST -d '{"data": "<base64-encoded openai.jpeg payload>"}' -H "Content-Type: application/json" http://127.0.0.1:8860/vercel
@app.route("/vercel", methods=["POST"])
def get_anti_bot_token():
request_body = json.loads(request.data)
raw_data = json.loads(base64.b64decode(request_body["data"], validate=True))
js_script = """const globalThis={marker:"mark"};String.prototype.fontcolor=function(){return `<font>${this}</font>`};
return (%s)(%s)""" % (
raw_data["c"],
raw_data["a"],
)
raw_token = json.dumps(
{"r": execjs.compile(js_script).call(""), "t": raw_data["t"]},
separators=(",", ":"),
)
dict = {"data": base64.b64encode(raw_token.encode("utf-16le")).decode()}
return json.dumps(dict)
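The decoded payload has the shape {"c": <JavaScript function source>, "a": <its argument>, "t": <opaque token>}; the endpoint evaluates (c)(a) under a stubbed globalThis and returns {"r": <result>, "t": ...} re-encoded as UTF-16LE base64, which the C++ side forwards as the custom-encoding header. A toy round-trip against a locally running server; the payload below is made up for illustration, real challenges come from https://sdk.vercel.ai/openai.jpeg:

# Toy request against the new /vercel endpoint (illustrative payload).
import base64
import json

import requests

payload = {"c": "function(x){ return x + 1 }", "a": 41, "t": "opaque-token"}
data = base64.b64encode(json.dumps(payload).encode()).decode()
rsp = requests.post("http://127.0.0.1:8860/vercel", json={"data": data})
print(base64.b64decode(rsp.json()["data"]).decode("utf-16le"))
# -> {"r":42,"t":"opaque-token"}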
if __name__ == "__main__":
thread = threading.Thread(target=deepai_refresh)
thread.start()