diff --git a/bot_microservice/constants.py b/bot_microservice/constants.py index 273da09..cad1c4c 100644 --- a/bot_microservice/constants.py +++ b/bot_microservice/constants.py @@ -59,6 +59,7 @@ class ChatGptModelsEnum(StrEnum): gpt_3_5_turbo_stream_FakeGpt = "gpt-3.5-turbo-stream-FakeGpt" gpt_3_5_turbo_stream_GeekGpt = "gpt-3.5-turbo-stream-GeekGpt" gpt_3_5_turbo_stream_gptforlove = "gpt-3.5-turbo-stream-gptforlove" + gpt_3_5_turbo_stream_Vercel = "gpt-3.5-turbo-stream-Vercel" @classmethod def values(cls) -> set[str]: diff --git a/bot_microservice/infra/admin.py b/bot_microservice/infra/admin.py index 1910a3b..721426e 100644 --- a/bot_microservice/infra/admin.py +++ b/bot_microservice/infra/admin.py @@ -22,7 +22,7 @@ class ChatGptAdmin(ModelView, model=ChatGpt): def create_admin(application: "Application") -> Admin: admin = Admin( - title='Chat GPT admin', + title="Chat GPT admin", app=application.fastapi_app, engine=application.db.async_engine, base_url=os.path.join(settings.URL_PREFIX, "admin"), diff --git a/chatgpt_microservice/client/css/style.css b/chatgpt_microservice/client/css/style.css index e68b909..59efeda 100644 --- a/chatgpt_microservice/client/css/style.css +++ b/chatgpt_microservice/client/css/style.css @@ -627,13 +627,12 @@ ul { } } -@media screen and (max-height: 640px) { +@media screen and (max-height: 640px) and (min-width: 990px) { body { height: 87vh } } - .shown { display: flex; } diff --git a/chatgpt_microservice/client/html/index.html b/chatgpt_microservice/client/html/index.html index 7e1641c..54b04aa 100644 --- a/chatgpt_microservice/client/html/index.html +++ b/chatgpt_microservice/client/html/index.html @@ -57,7 +57,7 @@ - + ChatGPT diff --git a/chatgpt_microservice/client/js/chat.js b/chatgpt_microservice/client/js/chat.js index 58585b7..c832a00 100644 --- a/chatgpt_microservice/client/js/chat.js +++ b/chatgpt_microservice/client/js/chat.js @@ -1,7 +1,3 @@ -const query = (obj) => - Object.keys(obj) - .map((k) => encodeURIComponent(k) + "=" + encodeURIComponent(obj[k])) - .join("&"); const colorThemes = document.querySelectorAll('[name="theme"]'); const markdown = window.markdownit(); const message_box = document.getElementById(`messages`); @@ -14,69 +10,64 @@ let prompt_lock = false; hljs.addPlugin(new CopyButtonPlugin()); -function resizeTextarea(textarea) { - textarea.style.height = '80px'; - textarea.style.height = Math.min(textarea.scrollHeight, 200) + 'px'; -} - const format = (text) => { - return text.replace(/(?:\r\n|\r|\n)/g, "
"); + return text.replace(/(?:\r\n|\r|\n)/g, "
"); }; message_input.addEventListener("blur", () => { - window.scrollTo(0, 0); + window.scrollTo(0, 0); }); message_input.addEventListener("focus", () => { - document.documentElement.scrollTop = document.documentElement.scrollHeight; + document.documentElement.scrollTop = document.documentElement.scrollHeight; }); const delete_conversations = async () => { - localStorage.clear(); - await new_conversation(); + localStorage.clear(); + await new_conversation(); }; const handle_ask = async () => { - message_input.style.height = `80px`; - message_input.focus(); + message_input.style.height = `80px`; + message_input.focus(); - window.scrollTo(0, 0); - let message = message_input.value; + window.scrollTo(0, 0); + let message = message_input.value; - if (message.length > 0) { - message_input.value = ``; - await ask_gpt(message); - } + if (message.length > 0) { + message_input.value = ``; + await ask_gpt(message); + } }; const remove_cancel_button = async () => { - stop_generating.classList.add(`stop_generating-hiding`); + stop_generating.classList.add(`stop_generating-hiding`); - setTimeout(() => { - stop_generating.classList.remove(`stop_generating-hiding`); - stop_generating.classList.add(`stop_generating-hidden`); - }, 300); + setTimeout(() => { + stop_generating.classList.remove(`stop_generating-hiding`); + stop_generating.classList.add(`stop_generating-hidden`); + }, 300); }; const ask_gpt = async (message) => { - try { - message_input.value = ``; - message_input.innerHTML = ``; - message_input.innerText = ``; + try { + message_input.value = ``; + message_input.innerHTML = ``; + message_input.innerText = ``; - add_conversation(window.conversation_id, message.substr(0, 20)); - window.scrollTo(0, 0); - window.controller = new AbortController(); + add_conversation(window.conversation_id, message); + window.scrollTo(0, 0); + window.controller = new AbortController(); - jailbreak = document.getElementById("jailbreak"); - model = document.getElementById("model"); - prompt_lock = true; - window.text = ``; - window.token = message_id(); + jailbreak = document.getElementById("jailbreak"); + model = document.getElementById("model"); + prompt_lock = true; + window.text = ``; + window.token = message_id(); - stop_generating.classList.remove(`stop_generating-hidden`); + stop_generating.classList.remove(`stop_generating-hidden`); - message_box.innerHTML += ` + message_box.innerHTML += `
${user_image} @@ -88,15 +79,13 @@ const ask_gpt = async (message) => {
`; + console.log(message_box.innerHTML) + message_box.scrollTop = message_box.scrollHeight; + window.scrollTo(0, 0); + await new Promise((r) => setTimeout(r, 500)); + window.scrollTo(0, 0); - /* .replace(/(?:\r\n|\r|\n)/g, '
') */ - - message_box.scrollTop = message_box.scrollHeight; - window.scrollTo(0, 0); - await new Promise((r) => setTimeout(r, 500)); - window.scrollTo(0, 0); - - message_box.innerHTML += ` + message_box.innerHTML += `
${gpt_image} @@ -107,493 +96,515 @@ const ask_gpt = async (message) => {
`; - message_box.scrollTop = message_box.scrollHeight; - window.scrollTo(0, 0); - await new Promise((r) => setTimeout(r, 1000)); - window.scrollTo(0, 0); + message_box.scrollTop = message_box.scrollHeight; + window.scrollTo(0, 0); + await new Promise((r) => setTimeout(r, 1000)); + window.scrollTo(0, 0); - const response = await fetch(`{{api_path}}/backend-api/v2/conversation`, { - method: `POST`, - signal: window.controller.signal, - headers: { - "content-type": `application/json`, - accept: `text/event-stream`, - }, - body: JSON.stringify({ - conversation_id: window.conversation_id, - action: `_ask`, - model: model.options[model.selectedIndex].value, - jailbreak: jailbreak.options[jailbreak.selectedIndex].value, - meta: { - id: window.token, - content: { - conversation: await get_conversation(window.conversation_id), - internet_access: document.getElementById("switch").checked, - content_type: "text", - parts: [ - { - content: message, - role: "user", - }, - ], - }, - }, - }), - }); + const response = await fetch(`{{api_path}}/backend-api/v2/conversation`, { + method: `POST`, + signal: window.controller.signal, + headers: { + 'content-type': `application/json`, + accept: `text/event-stream`, + }, + body: JSON.stringify({ + conversation_id: window.conversation_id, + action: `_ask`, + model: model.options[model.selectedIndex].value, + jailbreak: jailbreak.options[jailbreak.selectedIndex].value, + meta: { + id: window.token, + content: { + conversation: await get_conversation(window.conversation_id), + internet_access: document.getElementById(`switch`).checked, + content_type: `text`, + parts: [ + { + content: message, + role: `user`, + }, + ], + }, + }, + }), + }); - const reader = response.body.getReader(); + const reader = response.body.getReader(); - while (true) { - const { value, done } = await reader.read(); - if (done) break; + while (true) { + const { value, done } = await reader.read(); + if (done) break; - chunk = new TextDecoder().decode(value); + chunk = new TextDecoder().decode(value); - if ( - chunk.includes( - `
{ - // console.log(object) - // try { text += h2a(JSON.parse(object).content) } catch(t) { console.log(t); throw new Error(t)} - // }); + add_message(window.conversation_id, "user", message, token); + add_message(window.conversation_id, "assistant", text, token); - document.getElementById(`gpt_${window.token}`).innerHTML = - markdown.render(text); - document.querySelectorAll(`code`).forEach((el) => { - hljs.highlightElement(el); - }); + message_box.scrollTop = message_box.scrollHeight; + await remove_cancel_button(); + prompt_lock = false; - window.scrollTo(0, 0); - message_box.scrollTo({ top: message_box.scrollHeight, behavior: "auto" }); + await load_conversations(20, 0); + window.scrollTo(0, 0); + + } catch (e) { + add_message(window.conversation_id, "user", message, token); + + message_box.scrollTop = message_box.scrollHeight; + await remove_cancel_button(); + prompt_lock = false; + + await load_conversations(20, 0); + + console.log(e); + + let cursorDiv = document.getElementById(`cursor`); + if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv); + + if (e.name != `AbortError`) { + let error_message = `oops ! something went wrong, please try again / reload. [stacktrace in console]`; + + document.getElementById(`gpt_${window.token}`).innerHTML = error_message; + add_message(window.conversation_id, "assistant", error_message, token); + } else { + document.getElementById(`gpt_${window.token}`).innerHTML += ` [aborted]`; + add_message(window.conversation_id, "assistant", text + ` [aborted]`, token); + } + + window.scrollTo(0, 0); } - - // if text contains : - if ( - text.includes( - `instead. Maintaining this website and API costs a lot of money` - ) - ) { - document.getElementById(`gpt_${window.token}`).innerHTML = - "An error occured, please reload / refresh cache and try again."; - } - - add_message(window.conversation_id, "user", message, token); - add_message(window.conversation_id, "assistant", text, token); - - message_box.scrollTop = message_box.scrollHeight; - await remove_cancel_button(); - prompt_lock = false; - - await load_conversations(20, 0); - window.scrollTo(0, 0); - } catch (e) { - add_message(window.conversation_id, "user", message, token); - - message_box.scrollTop = message_box.scrollHeight; - await remove_cancel_button(); - prompt_lock = false; - - await load_conversations(20, 0); - - console.log(e); - - let cursorDiv = document.getElementById(`cursor`); - if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv); - - if (e.name != `AbortError`) { - let error_message = `oops ! something went wrong, please try again / reload. 
[stacktrace in console]`; - - document.getElementById(`gpt_${window.token}`).innerHTML = error_message; - add_message(window.conversation_id, "assistant", error_message, token); - } else { - document.getElementById(`gpt_${window.token}`).innerHTML += ` [aborted]`; - add_message(window.conversation_id, "assistant", text + ` [aborted]`, token); - } - - window.scrollTo(0, 0); - } }; const clear_conversations = async () => { - const elements = box_conversations.childNodes; - let index = elements.length; + const elements = box_conversations.childNodes; + let index = elements.length; - if (index > 0) { - while (index--) { - const element = elements[index]; - if ( - element.nodeType === Node.ELEMENT_NODE && - element.tagName.toLowerCase() !== `button` - ) { - box_conversations.removeChild(element); - } + if (index > 0) { + while (index--) { + const element = elements[index]; + if ( + element.nodeType === Node.ELEMENT_NODE && + element.tagName.toLowerCase() !== `button` + ) { + box_conversations.removeChild(element); + } + } } - } }; const clear_conversation = async () => { - let messages = message_box.getElementsByTagName(`div`); + let messages = message_box.getElementsByTagName(`div`); - while (messages.length > 0) { - message_box.removeChild(messages[0]); - } + while (messages.length > 0) { + message_box.removeChild(messages[0]); + } }; const show_option = async (conversation_id) => { - const conv = document.getElementById(`conv-${conversation_id}`); - const yes = document.getElementById(`yes-${conversation_id}`); - const not = document.getElementById(`not-${conversation_id}`); + const conv = document.getElementById(`conv-${conversation_id}`); + const yes = document.getElementById(`yes-${conversation_id}`); + const not = document.getElementById(`not-${conversation_id}`); - conv.style.display = "none"; - yes.style.display = "block"; - not.style.display = "block"; -} + conv.style.display = `none`; + yes.style.display = `block`; + not.style.display = `block`; +}; const hide_option = async (conversation_id) => { - const conv = document.getElementById(`conv-${conversation_id}`); - const yes = document.getElementById(`yes-${conversation_id}`); - const not = document.getElementById(`not-${conversation_id}`); + const conv = document.getElementById(`conv-${conversation_id}`); + const yes = document.getElementById(`yes-${conversation_id}`); + const not = document.getElementById(`not-${conversation_id}`); - conv.style.display = "block"; - yes.style.display = "none"; - not.style.display = "none"; -} + conv.style.display = `block`; + yes.style.display = `none`; + not.style.display = `none`; +}; const delete_conversation = async (conversation_id) => { - localStorage.removeItem(`conversation:${conversation_id}`); + localStorage.removeItem(`conversation:${conversation_id}`); - const conversation = document.getElementById(`convo-${conversation_id}`); + const conversation = document.getElementById(`convo-${conversation_id}`); conversation.remove(); - if (window.conversation_id == conversation_id) { - await new_conversation(); - } + if (window.conversation_id == conversation_id) { + await new_conversation(); + } - await load_conversations(20, 0, true); + await load_conversations(20, 0, true); }; const set_conversation = async (conversation_id) => { - history.pushState({}, null, `{{chat_path}}/${conversation_id}`); - window.conversation_id = conversation_id; + history.pushState({}, null, `{{chat_path}}/${conversation_id}`); + window.conversation_id = conversation_id; - await clear_conversation(); - await 
load_conversation(conversation_id); - await load_conversations(20, 0, true); + await clear_conversation(); + await load_conversation(conversation_id); + await load_conversations(20, 0, true); }; const new_conversation = async () => { - history.pushState({}, null, `{{chat_path}}/`); - window.conversation_id = uuid(); - await clear_conversation(); - await load_conversations(20, 0, true); + history.pushState({}, null, `{{chat_path}}/`); + window.conversation_id = uuid(); + + await clear_conversation(); + await load_conversations(20, 0, true); + + await say_hello() }; const load_conversation = async (conversation_id) => { - let conversation = await JSON.parse( - localStorage.getItem(`conversation:${conversation_id}`) - ); - console.log(conversation, conversation_id); + let conversation = await JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); + console.log(conversation, conversation_id); - for (item of conversation.items) { - message_box.innerHTML += ` -
-
- ${item.role == "assistant" ? gpt_image : user_image} - ${ - item.role == "assistant" - ? `` - : `` - } -
- ${ - item.role == "user" - ? `
` - : `
` - } - ${ - item.role == "assistant" - ? markdown.render(item.content) - : item.content - } -
- ${ - item.role == "user" - ? `` - : '' - } + for (item of conversation.items) { + message_box.innerHTML += ` +
+
+ ${item.role == "assistant" ? gpt_image : user_image} + ${item.role == "assistant" + ? `` + : `` + }
- `; - } + ${item.role == "user" + ? `
` + : `
` + } + ${item.role == "assistant" + ? markdown.render(item.content) + : item.content + } +
+ ${item.role == "user" + ? `` + : '' + } +
+ `; + // console.log(message_box.innerHTML) + } - document.querySelectorAll(`code`).forEach((el) => { - hljs.highlightElement(el); - }); + document.querySelectorAll(`code`).forEach((el) => { + hljs.highlightElement(el); + }); - message_box.scrollTo({ top: message_box.scrollHeight, behavior: "smooth" }); + message_box.scrollTo({ top: message_box.scrollHeight, behavior: "smooth" }); - setTimeout(() => { - message_box.scrollTop = message_box.scrollHeight; - }, 500); + setTimeout(() => { + message_box.scrollTop = message_box.scrollHeight; + }, 500); }; const get_conversation = async (conversation_id) => { - let conversation = await JSON.parse( - localStorage.getItem(`conversation:${conversation_id}`) - ); - let result = conversation.items.slice(-4) - for (var i = 0; i < result.length; i++) { - delete result[i].token; - console.log(result[i]); - console.log(result[i]); - } - return result; + let conversation = await JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); + let result = conversation.items.slice(-4) + for (var i = 0; i < result.length; i++) { + delete result[i].token; + console.log(result[i]); + console.log(result[i]); + } + return result; }; -const add_conversation = async (conversation_id, title) => { - if (localStorage.getItem(`conversation:${conversation_id}`) == null) { - localStorage.setItem( - `conversation:${conversation_id}`, - JSON.stringify({ - id: conversation_id, - title: title, - items: [], - }) - ); - } +const add_conversation = async (conversation_id, content) => { + if (content.length > 17) { + title = content.substring(0, 17) + '..' + } else { + title = content + ' '.repeat(19 - content.length) + } + + if (localStorage.getItem(`conversation:${conversation_id}`) == null) { + localStorage.setItem( + `conversation:${conversation_id}`, + JSON.stringify({ + id: conversation_id, + title: title, + items: [], + }) + ); + } }; const add_message = async (conversation_id, role, content, token) => { - before_adding = JSON.parse( - localStorage.getItem(`conversation:${conversation_id}`) - ); + before_adding = JSON.parse( + localStorage.getItem(`conversation:${conversation_id}`) + ); - before_adding.items.push({ - role: role, - content: content, - token: token, - }); + before_adding.items.push({ + role: role, + content: content, + token: token, + }); - localStorage.setItem( - `conversation:${conversation_id}`, - JSON.stringify(before_adding) - ); // update conversation + localStorage.setItem( + `conversation:${conversation_id}`, + JSON.stringify(before_adding) + ); // update conversation }; const load_conversations = async (limit, offset, loader) => { - //console.log(loader); - //if (loader === undefined) box_conversations.appendChild(spinner); - - let conversations = []; - for (let i = 0; i < localStorage.length; i++) { - if (localStorage.key(i).startsWith("conversation:")) { - let conversation = localStorage.getItem(localStorage.key(i)); - conversations.push(JSON.parse(conversation)); + let conversations = []; + for (let i = 0; i < localStorage.length; i++) { + if (localStorage.key(i).startsWith("conversation:")) { + let conversation = localStorage.getItem(localStorage.key(i)); + conversations.push(JSON.parse(conversation)); + } } - } - //if (loader === undefined) spinner.parentNode.removeChild(spinner) - await clear_conversations(); + await clear_conversations(); - for (conversation of conversations) { - box_conversations.innerHTML += ` -
-
- - ${conversation.title} -
- - - -
- `; - } + for (conversation of conversations) { + box_conversations.innerHTML += ` +
+
+ + ${conversation.title} +
+ + + +
+ `; + } - document.querySelectorAll(`code`).forEach((el) => { - hljs.highlightElement(el); - }); + document.querySelectorAll(`code`).forEach((el) => { + hljs.highlightElement(el); + }); }; document.getElementById(`cancelButton`).addEventListener(`click`, async () => { - window.controller.abort(); - console.log(`aborted ${window.conversation_id}`); + window.controller.abort(); + console.log(`aborted ${window.conversation_id}`); }); -function h2a(str1) { - var hex = str1.toString(); - var str = ""; - - for (var n = 0; n < hex.length; n += 2) { - str += String.fromCharCode(parseInt(hex.substr(n, 2), 16)); - } - - return str; -} - const uuid = () => { - return `xxxxxxxx-xxxx-4xxx-yxxx-${Date.now().toString(16)}`.replace( - /[xy]/g, - function (c) { - var r = (Math.random() * 16) | 0, - v = c == "x" ? r : (r & 0x3) | 0x8; - return v.toString(16); - } - ); + return `xxxxxxxx-xxxx-4xxx-yxxx-${Date.now().toString(16)}`.replace( + /[xy]/g, + function (c) { + var r = (Math.random() * 16) | 0, + v = c == "x" ? r : (r & 0x3) | 0x8; + return v.toString(16); + } + ); }; const message_id = () => { - random_bytes = (Math.floor(Math.random() * 1338377565) + 2956589730).toString( - 2 - ); - unix = Math.floor(Date.now() / 1000).toString(2); + random_bytes = (Math.floor(Math.random() * 1338377565) + 2956589730).toString( + 2 + ); + unix = Math.floor(Date.now() / 1000).toString(2); - return BigInt(`0b${unix}${random_bytes}`).toString(); -}; - -window.onload = async () => { - load_settings_localstorage(); - - conversations = 0; - for (let i = 0; i < localStorage.length; i++) { - if (localStorage.key(i).startsWith("conversation:")) { - conversations += 1; - } - } - - if (conversations == 0) localStorage.clear(); - - await setTimeout(() => { - load_conversations(20, 0); - }, 1); - - if (!window.location.href.endsWith(`#`)) { - if (/{{chat_path}}\/.+/.test(window.location.href)) { - await load_conversation(window.conversation_id); - } - } - -message_input.addEventListener(`keydown`, async (evt) => { - if (prompt_lock) return; - if (evt.keyCode === 13 && !evt.shiftKey) { - evt.preventDefault(); - console.log('pressed enter'); - await handle_ask(); - } else { - message_input.style.removeProperty("height"); - message_input.style.height = message_input.scrollHeight + 4 + "px"; - } - }); - - send_button.addEventListener(`click`, async () => { - console.log("clicked send"); - if (prompt_lock) return; - await handle_ask(); - }); - - register_settings_localstorage(); + return BigInt(`0b${unix}${random_bytes}`).toString(); }; document.querySelector(".mobile-sidebar").addEventListener("click", (event) => { - const sidebar = document.querySelector(".conversations"); + const sidebar = document.querySelector(".conversations"); - if (sidebar.classList.contains("shown")) { - sidebar.classList.remove("shown"); - event.target.classList.remove("rotated"); - } else { - sidebar.classList.add("shown"); - event.target.classList.add("rotated"); - } + if (sidebar.classList.contains("shown")) { + sidebar.classList.remove("shown"); + event.target.classList.remove("rotated"); + } else { + sidebar.classList.add("shown"); + event.target.classList.add("rotated"); + } - window.scrollTo(0, 0); + window.scrollTo(0, 0); }); const register_settings_localstorage = async () => { - settings_ids = ["switch", "model", "jailbreak"]; - settings_elements = settings_ids.map((id) => document.getElementById(id)); - settings_elements.map((element) => - element.addEventListener(`change`, async (event) => { - switch (event.target.type) { - case "checkbox": - 
localStorage.setItem(event.target.id, event.target.checked); - break; - case "select-one": - localStorage.setItem(event.target.id, event.target.selectedIndex); - break; - default: - console.warn("Unresolved element type"); - } - }) - ); + settings_ids = ["switch", "model", "jailbreak"]; + settings_elements = settings_ids.map((id) => document.getElementById(id)); + settings_elements.map((element) => + element.addEventListener(`change`, async (event) => { + switch (event.target.type) { + case "checkbox": + localStorage.setItem(event.target.id, event.target.checked); + break; + case "select-one": + localStorage.setItem(event.target.id, event.target.selectedIndex); + break; + default: + console.warn("Unresolved element type"); + } + }) + ); }; const load_settings_localstorage = async () => { - settings_ids = ["switch", "model", "jailbreak"]; - settings_elements = settings_ids.map((id) => document.getElementById(id)); - settings_elements.map((element) => { - if (localStorage.getItem(element.id)) { - switch (element.type) { - case "checkbox": - element.checked = localStorage.getItem(element.id) === "true"; - break; - case "select-one": - element.selectedIndex = parseInt(localStorage.getItem(element.id)); - break; - default: - console.warn("Unresolved element type"); - } - } - }); + settings_ids = ["switch", "model", "jailbreak"]; + settings_elements = settings_ids.map((id) => document.getElementById(id)); + settings_elements.map((element) => { + if (localStorage.getItem(element.id)) { + switch (element.type) { + case "checkbox": + element.checked = localStorage.getItem(element.id) === "true"; + break; + case "select-one": + element.selectedIndex = parseInt(localStorage.getItem(element.id)); + break; + default: + console.warn("Unresolved element type"); + } + } + }); }; +const say_hello = async () => { + tokens = [`Hello`, `!`, ` How`, ` can`, ` I`, ` assist`, ` you`, ` today`, `?`] + + message_box.innerHTML += ` +
+
+ ${gpt_image} + +
+
+
+
+ `; + + content = `` + to_modify = document.querySelector(`.welcome-message`); + for (token of tokens) { + await new Promise(resolve => setTimeout(resolve, (Math.random() * (100 - 200) + 100))) + content += token; + to_modify.innerHTML = markdown.render(content); + } +} + // Theme storage for recurring viewers const storeTheme = function (theme) { - localStorage.setItem("theme", theme); + localStorage.setItem("theme", theme); }; // set theme when visitor returns const setTheme = function () { - const activeTheme = localStorage.getItem("theme"); - colorThemes.forEach((themeOption) => { - if (themeOption.id === activeTheme) { - themeOption.checked = true; - } - }); - // fallback for no :has() support - document.documentElement.className = activeTheme; + const activeTheme = localStorage.getItem("theme"); + colorThemes.forEach((themeOption) => { + if (themeOption.id === activeTheme) { + themeOption.checked = true; + } + }); + // fallback for no :has() support + document.documentElement.className = activeTheme; }; colorThemes.forEach((themeOption) => { - themeOption.addEventListener("click", () => { - storeTheme(themeOption.id); - // fallback for no :has() support - document.documentElement.className = themeOption.id; - }); + themeOption.addEventListener("click", () => { + storeTheme(themeOption.id); + // fallback for no :has() support + document.documentElement.className = themeOption.id; + }); }); -function deleteMessage(token) { - const messageDivUser = document.getElementById(`user_${token}`) - const messageDivGpt = document.getElementById(`gpt_${token}`) - if (messageDivUser) messageDivUser.parentNode.remove(); - if (messageDivGpt) messageDivGpt.parentNode.remove(); - const conversation = JSON.parse(localStorage.getItem(`conversation:${window.conversation_id}`)); - conversation.items = conversation.items.filter(item => item.token !== token); - localStorage.setItem(`conversation:${window.conversation_id}`, JSON.stringify(conversation)); - - const messages = document.getElementsByClassName("message"); - if (messages.length === 0) { - delete_conversation(window.conversation_id); - }; +function resizeTextarea(textarea) { + textarea.style.height = '80px'; + textarea.style.height = Math.min(textarea.scrollHeight, 200) + 'px'; } -document.onload = setTheme(); +window.onload = async () => { + load_settings_localstorage(); + setTheme(); + + conversations = 0; + for (let i = 0; i < localStorage.length; i++) { + if (localStorage.key(i).startsWith("conversation:")) { + conversations += 1; + } + } + + if (conversations == 0) localStorage.clear(); + + await setTimeout(() => { + load_conversations(20, 0); + }, 1); + + if (!window.location.href.endsWith(`#`)) { + if (/{{chat_path}}\/.+/.test(window.location.href)) { + await load_conversation(window.conversation_id); + } + } + + // await load_models(); + await say_hello() + + message_input.addEventListener(`keydown`, async (evt) => { + if (prompt_lock) return; + if (evt.keyCode === 13 && !evt.shiftKey) { + evt.preventDefault(); + console.log("pressed enter"); + await handle_ask(); + } else { + message_input.style.removeProperty("height"); + message_input.style.height = message_input.scrollHeight + "px"; + } + }); + + send_button.addEventListener(`click`, async () => { + console.log("clicked send"); + if (prompt_lock) return; + await handle_ask(); + }); + + register_settings_localstorage(); +}; + +const observer = new MutationObserver((mutationsList) => { + for (const mutation of mutationsList) { + if (mutation.type === 'attributes' && 
mutation.attributeName === 'style') { + const height = message_input.offsetHeight; + + let heightValues = { + 81: "20px", + 82: "20px", + 100: "30px", + 119: "39px", + 138: "49px", + 150: "55px" + } + + send_button.style.top = heightValues[height] || ''; + } + } +}); + +observer.observe(message_input, { attributes: true }); + +function deleteMessage(token) { + console.log(token) + const messageDivUser = document.getElementById(`user_${token}`) + const messageDivGpt = document.getElementById(`gpt_${token}`) + if (messageDivUser) messageDivUser.parentNode.remove(); + if (messageDivGpt) messageDivGpt.parentNode.remove(); + const conversation = JSON.parse(localStorage.getItem(`conversation:${window.conversation_id}`)); + console.log(conversation) + conversation.items = conversation.items.filter(item => item.token !== token); + console.log(conversation) + localStorage.setItem(`conversation:${window.conversation_id}`, JSON.stringify(conversation)); + + const messages = document.getElementsByClassName("message"); + if (messages.length === 0) { + delete_conversation(window.conversation_id); + }; +} diff --git a/chatgpt_microservice/include/free_gpt.h b/chatgpt_microservice/include/free_gpt.h index a167d3d..fc4f1c1 100644 --- a/chatgpt_microservice/include/free_gpt.h +++ b/chatgpt_microservice/include/free_gpt.h @@ -38,6 +38,7 @@ public: boost::asio::awaitable geekGpt(std::shared_ptr, nlohmann::json); boost::asio::awaitable chatGptAi(std::shared_ptr, nlohmann::json); boost::asio::awaitable fakeGpt(std::shared_ptr, nlohmann::json); + boost::asio::awaitable vercel(std::shared_ptr, nlohmann::json); private: boost::asio::awaitable, std::string>> diff --git a/chatgpt_microservice/src/free_gpt.cpp b/chatgpt_microservice/src/free_gpt.cpp index 164dfa6..28a0e59 100644 --- a/chatgpt_microservice/src/free_gpt.cpp +++ b/chatgpt_microservice/src/free_gpt.cpp @@ -3042,3 +3042,121 @@ boost::asio::awaitable FreeGpt::fakeGpt(std::shared_ptr ch, nlohm co_return; } } + +boost::asio::awaitable FreeGpt::vercel(std::shared_ptr ch, nlohmann::json json) { + co_await boost::asio::post(boost::asio::bind_executor(*m_thread_pool_ptr, boost::asio::use_awaitable)); + ScopeExit _exit{[=] { boost::asio::post(ch->get_executor(), [=] { ch->close(); }); }}; + boost::system::error_code err{}; + + auto prompt = json.at("meta").at("content").at("parts").at(0).at("content").get(); + + auto create_random_number = [] { + std::random_device rd; + std::mt19937 mt(rd()); + std::uniform_int_distribution distribution(99, 999); + int random_number = distribution(mt); + return random_number; + }; + constexpr std::string_view user_agent_str{ + R"(Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.{}.{} Safari/537.36)"}; + + std::unordered_multimap headers{ + {"Accept", "*/*"}, + {"authority", "sdk.vercel.ai"}, + {"content-type", "application/json"}, + {"referer", "https://sdk.vercel.ai/"}, + {"origin", "https://sdk.vercel.ai"}, + {"sec-ch-ua", R"("Google Chrome";v="117", "Not;A=Brand";v="8", "Chromium";v="117")"}, + {"sec-ch-ua-mobile", R"(?0)"}, + {"sec-ch-ua-platform", R"("macOS")"}, + {"cache-control", "no-cache"}, + {"pragma", "no-cache"}, + }; + headers.emplace("user-agent", std::format(user_agent_str, create_random_number(), create_random_number())); + + std::string recv; + Curl curl; + auto ret = curl.setUrl("https://sdk.vercel.ai/openai.jpeg") + .setProxy(m_cfg.http_proxy) + .setRecvHeadersCallback([](std::string) { return; }) + .setRecvBodyCallback([&](std::string str) mutable { + 
recv.append(str); + return; + }) + .clearHeaders() + .setHttpHeaders(headers) + .perform(); + if (ret.has_value()) { + SPDLOG_ERROR("https://sdk.vercel.ai/openai.jpeg: [{}]", ret.value()); + co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); + ch->try_send(err, ret.value()); + co_return; + } + nlohmann::json request; + request["data"] = std::move(recv); + recv.clear(); + auto vercel_rsp = callZeus(std::format("{}/vercel", m_cfg.zeus), request.dump()); + if (!vercel_rsp.has_value()) { + SPDLOG_ERROR("callZeus vercel error: {}", vercel_rsp.error()); + co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); + ch->try_send(err, vercel_rsp.error()); + co_return; + } + + headers.erase("custom-encoding"); + headers.erase("user-agent"); + headers.emplace("custom-encoding", vercel_rsp.value()["data"]); + headers.emplace("user-agent", std::format(user_agent_str, create_random_number(), create_random_number())); + + for (int i = 0; i < 20; i++) { + auto ret = + curl.setUrl("https://sdk.vercel.ai/api/generate") + .setProxy(m_cfg.http_proxy) + .setRecvHeadersCallback([](std::string) { return; }) + .setRecvBodyCallback([&](std::string str) mutable { + if (str == "Internal Server Error" || str == "Rate limit exceeded") { + SPDLOG_WARN("vercel: [{}]", str); + return; + } + boost::asio::post(ch->get_executor(), [=, str = std::move(str)] { ch->try_send(err, str); }); + return; + }) + .setBody([&] { + constexpr std::string_view json_str = R"({ + "model":"openai:gpt-3.5-turbo", + "messages":[ + { + "role":"user", + "content":"hello" + } + ], + "playgroundId":"403bce4c-7eb6-47b0-b1b5-0cb6b2469f70", + "chatIndex":0, + "temperature":0.7, + "maximumLength":4096, + "topP":1, + "topK":1, + "presencePenalty":1, + "frequencyPenalty":1, + "stopSequences":[] + })"; + nlohmann::json request = nlohmann::json::parse(json_str, nullptr, false); + request["messages"] = getConversationJson(json); + request["playgroundId"] = createUuidString(); + SPDLOG_INFO("request: [{}]", request.dump(2)); + return request.dump(); + }()) + .clearHeaders() + .setHttpHeaders(headers) + .perform(); + if (ret.has_value()) { + SPDLOG_WARN("https://sdk.vercel.ai/api/generate: [{}]", ret.value()); + co_await timeout(std::chrono::seconds(2)); + continue; + } + co_return; + } + co_await boost::asio::post(boost::asio::bind_executor(ch->get_executor(), boost::asio::use_awaitable)); + ch->try_send(err, "call sdk.vercel.ai error"); + co_return; +} diff --git a/chatgpt_microservice/src/main.cpp b/chatgpt_microservice/src/main.cpp index 1642fbd..27f7336 100644 --- a/chatgpt_microservice/src/main.cpp +++ b/chatgpt_microservice/src/main.cpp @@ -353,6 +353,7 @@ int main(int argc, char** argv) { ADD_METHOD("llama2", FreeGpt::llama2); ADD_METHOD("gpt-3.5-turbo-stream-chatGptAi", FreeGpt::chatGptAi); ADD_METHOD("gpt-3.5-turbo-stream-FakeGpt", FreeGpt::fakeGpt); + ADD_METHOD("gpt-3.5-turbo-stream-Vercel", FreeGpt::vercel); SPDLOG_INFO("active provider:"); for (auto& [provider, _] : gpt_function) diff --git a/chatgpt_microservice/tools/requirements.txt b/chatgpt_microservice/tools/requirements.txt index 2f97ffe..407ae70 100644 --- a/chatgpt_microservice/tools/requirements.txt +++ b/chatgpt_microservice/tools/requirements.txt @@ -2,3 +2,4 @@ Flask==3.0.0 PyExecJS==1.5.1 selenium==4.14.0 Werkzeug==3.0.0 +requests==2.31.0 diff --git a/chatgpt_microservice/tools/zeus.py b/chatgpt_microservice/tools/zeus.py index 5fbd28b..121c691 100644 --- 
a/chatgpt_microservice/tools/zeus.py +++ b/chatgpt_microservice/tools/zeus.py @@ -1,3 +1,4 @@ +import base64 import json import os import threading @@ -5,7 +6,7 @@ import time import traceback import execjs -from flask import Flask +from flask import Flask, request from selenium import webdriver from selenium.webdriver.support.ui import WebDriverWait from werkzeug.serving import ThreadedWSGIServer @@ -55,6 +56,26 @@ return o.toString() return json.dumps(dict) +# curl -X POST -d '{}' -H "Content-Type: application/json" http://127.0.0.1:8860/vercel +@app.route("/vercel", methods=["POST"]) +def get_anti_bot_token(): + request_body = json.loads(request.data) + raw_data = json.loads(base64.b64decode(request_body["data"], validate=True)) + + js_script = """const globalThis={marker:"mark"};String.prototype.fontcolor=function(){return `${this}`}; + return (%s)(%s)""" % ( + raw_data["c"], + raw_data["a"], + ) + + raw_token = json.dumps( + {"r": execjs.compile(js_script).call(""), "t": raw_data["t"]}, + separators=(",", ":"), + ) + dict = {"data": base64.b64encode(raw_token.encode("utf-16le")).decode()} + return json.dumps(dict) + + if __name__ == "__main__": thread = threading.Thread(target=deepai_refresh) thread.start()
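
Review note: a minimal Python sketch of the round trip the new provider performs — fetch the anti-bot challenge from sdk.vercel.ai, have the zeus sidecar (the `/vercel` route added above) solve it, then call the generate endpoint with the resulting `custom-encoding` header. This mirrors `FreeGpt::vercel` from the patch; the zeus address comes from the curl comment in zeus.py, and the trimmed-down header set is an assumption (the C++ code sends a fuller browser-like set). It is an illustration, not part of the patch.

```python
# Sketch of the gpt-3.5-turbo-stream-Vercel flow added in this patch.
# Assumes the zeus helper from tools/zeus.py is running on 127.0.0.1:8860
# and that `requests` (added to tools/requirements.txt here) is installed.
import requests

ZEUS = "http://127.0.0.1:8860"
UA = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 "
      "(KHTML, like Gecko) Chrome/117.0.100.100 Safari/537.36")

# 1. Fetch the base64-encoded anti-bot challenge.
challenge = requests.get(
    "https://sdk.vercel.ai/openai.jpeg",
    headers={"user-agent": UA, "referer": "https://sdk.vercel.ai/"},
).text

# 2. Let the zeus sidecar evaluate it; its response is used verbatim
#    as the "custom-encoding" request header.
token = requests.post(f"{ZEUS}/vercel", json={"data": challenge}).json()["data"]

# 3. Call the generate endpoint with the solved token and stream the reply.
body = {
    "model": "openai:gpt-3.5-turbo",
    "messages": [{"role": "user", "content": "hello"}],
    "playgroundId": "403bce4c-7eb6-47b0-b1b5-0cb6b2469f70",
    "chatIndex": 0,
    "temperature": 0.7,
    "maximumLength": 4096,
    "topP": 1,
    "topK": 1,
    "presencePenalty": 1,
    "frequencyPenalty": 1,
    "stopSequences": [],
}
rsp = requests.post(
    "https://sdk.vercel.ai/api/generate",
    headers={
        "user-agent": UA,
        "custom-encoding": token,
        "origin": "https://sdk.vercel.ai",
        "referer": "https://sdk.vercel.ai/",
    },
    json=body,
    stream=True,
)
for chunk in rsp.iter_content(chunk_size=None):
    if chunk:
        print(chunk.decode(errors="ignore"), end="")
```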