Mistivia 1 mese fa
parent
commit
21f890b6ae
5 file modificati con 198 aggiunte e 134 eliminazioni
  1. 57 70
      src/ai_api.js
  2. 40 0
      src/conversation.js
  3. 73 64
      src/main.js
  4. 0 0
      src/style.css
  5. 28 0
      src/utils.js

+ 57 - 70
src/ai_api.js

@@ -1,18 +1,7 @@
 import { createMessage } from './message';
 import { marked } from 'marked';
-import { unsetGenerating, getStopGenerating, resetStopGenerating } from './main';
-
-let context = [];
-let currentMessage = '';
-
-function appendUserMessageToCtx(msg) {
-    context.push({
-        role: 'user',
-        content: msg,
-    });
-}
-
-let renderCnt = 0;
+import { generationController, modelManager } from './main';
+import { conversationManager } from './conversation';
 
 function extractDelta(type, data) {
     if (type === 'openai') {
@@ -32,18 +21,29 @@ function isStop(type, rawData) {
     }
 }
 
-function onStreamEvent(type, elem, data, forceRender=false) {
+let renderCnt = 0;
+
+function shouldRender() {
+    renderCnt++;
+    if (renderCnt % 100 === 0) return true;
+    return false;
+}
+
+function onStreamEvent(model, conversation, domElem, data, forceRender=false) {
+    let deltaContent = extractDelta(model.type, data);
+    if (deltaContent === undefined) deltaContent = '';
+    conversation.currentMessage = conversation.currentMessage + deltaContent;
+    if (!shouldRender() && !forceRender) return;
+    renderMessage(domElem, conversation.currentMessage);
+}
+
+function renderMessage(domElem, message) {
     let container = document.getElementById('message-container');
     let shouldScroll = false;
     if (container.scrollTop + container.clientHeight >= container.scrollHeight - 100) {
         shouldScroll = true;
     }
-    let deltaContent = extractDelta(type, data);
-    if (deltaContent === undefined) deltaContent = '';
-    currentMessage = currentMessage + deltaContent;
-    renderCnt++;
-    if (renderCnt % 100 !== 0 && !forceRender) return;
-    let renderContent = currentMessage.replace('<think>', '<div class="think">\n\n');
+    let renderContent = message.replace('<think>', '<div class="think">\n\n');
     renderContent = renderContent.replace('</think>', '\n\n</div>').trim();
    renderContent = renderContent
         .replace(/(\\\[)([\s\S]*?)(\\\])/g, (match, p1, p2, p3) => {
@@ -53,61 +53,44 @@ function onStreamEvent(type, elem, data, forceRender=false) {
             return `\\\\\\\(${p2.trim()}\\\\\\\)`;
         })
     const html = marked.parse(renderContent);
-    elem.innerHTML = html;
+    domElem.innerHTML = html;
     if (shouldScroll) {
         container.scrollTop = container.scrollHeight;
     }
 }
 
-function onStreamEnd(type, elem) {
-    onStreamEvent(type, elem, {}, true)
-    context.push({
-        role: 'assistant',
-        content: currentMessage.replace(/<think>[\s\S]*?<\/think>/g, ''),
-    });
-    currentMessage = '';
-    unsetGenerating();
+function onStreamEnd(model, conversation, domElem) {
+    onStreamEvent(model, conversation, domElem, {}, true);
+    conversation.addMessage(
+        'assistant',
+        conversation.currentMessage.replace(/<think>[\s\S]*?<\/think>/g, ''));
+    conversation.currentMessage = '';
+    generationController.unsetGenerating();
     MathJax.typesetPromise();
 }
 
-function cleanContext(len) {
-    let getBytes = (str) => {
-        return new Blob([str]).size;
-    };
-    let sum = 0;
-    let splitIndex = context.length;
-    for (let i = context.length - 1; i >= 0; i--) {
-        const bytes = getBytes(context[i].content);
-        if (sum + bytes > len) {
-            splitIndex = i;
-            break;
-        }
-        sum += bytes;
-        splitIndex = 0;
-    }
-    context = context.slice(splitIndex);
-}
-
 async function fetchAiStream() {
+    let currentModel = modelManager.currentModel;
+    let conversation = conversationManager.currentConversation;
+
     const headers = {
         'Content-Type': 'application/json',
-        'Authorization': `Bearer ${globalCurrentModel.key}`
+        'Authorization': `Bearer ${currentModel.key}`
     };
-    cleanContext(globalCurrentModel.ctxLen);
+    conversation.cleanContext(currentModel.ctxLen);
     const requestBody = {
-        model: globalCurrentModel.model,
-        messages: context,
+        model: currentModel.model,
+        messages: conversation.context,
         stream: true,
-        max_tokens: globalCurrentModel.maxTokens,
+        max_tokens: currentModel.maxTokens,
     };
-    let elem = createMessage('', false);
-    let type = globalCurrentModel.type;
+    let domElem = createMessage('', false);
     let container = document.getElementById('message-container');
-    container.appendChild(elem);
+    container.appendChild(domElem);
     container.scrollTop = container.scrollHeight;
-    elem.innerHTML = '<p>Generating...</p>';
+    domElem.innerHTML = '<p>Generating...</p>';
     try {
-        const response = await fetch(globalCurrentModel.url, {
+        const response = await fetch(currentModel.url, {
             method: 'POST',
             headers,
             body: JSON.stringify(requestBody),
@@ -128,36 +111,40 @@ async function fetchAiStream() {
             for (const line of lines) {
                 if (!generationStarted) {
                     generationStarted = true;
-                    elem.innerHTML = '';
+                    domElem.innerHTML = '';
                 }
-                if (getStopGenerating()) {
-                    resetStopGenerating();
-                    onStreamEnd(type, elem);
+                if (generationController.getStopGenerating()) {
+                    generationController.resetStopGenerating();
+                    onStreamEnd(currentModel, conversation, domElem);
                     return;
                 }
                 if (line === '') continue;
-                if (isStop(type, line.trim())) break;
+                if (isStop(currentModel.type, line.trim())) break;
                 if (!line.startsWith('data: ')) {
                     continue;
                 }
                 try {
                     const data = JSON.parse(line.slice(6));
-                    onStreamEvent(type, elem, data);
+                    onStreamEvent(currentModel, conversation, domElem, data);
                 } catch (e) {
                     console.error('Error parsing stream data:', e);
                 }
             }
         }
-        onStreamEnd(type, elem);
+        onStreamEnd(currentModel, conversation, domElem);
     } catch (error) {
         console.error('Error:', error);
-        onStreamEnd(type, elem);
-        let p = document.createElement('p');
-        p.innerHTML = error;
-        p.style.color = 'red';
-        elem.appendChild(p);
+        onStreamEnd(currentModel, conversation, domElem);
+        domElem.appendChild(createErrorMsg(error));
         throw error;
     }
 }
 
-export { appendUserMessageToCtx, fetchAiStream };
+function createErrorMsg(error) {
+    let p = document.createElement('p');
+    p.innerHTML = error;
+    p.style.color = 'red';
+    return p;
+}
+
+export { fetchAiStream };

+ 40 - 0
src/conversation.js

@@ -0,0 +1,40 @@
+export { conversationManager };
+
+let Conversation = () => {
+    let self = {
+        context: [],
+        currentMessage: '',
+        addMessage: (role, content) => {
+            self.context.push({
+                role: role,
+                content: content,
+            });
+        },
+        cleanContext: (len) => {
+            let getBytes = (str) => {
+                return new Blob([str]).size;
+            };
+            let sum = 0;
+            let splitIndex = self.context.length;
+            for (let i = self.context.length - 1; i >= 0; i--) {
+                const bytes = getBytes(self.context[i].content);
+                if (sum + bytes > len) {
+                    splitIndex = i;
+                    break;
+                }
+                sum += bytes;
+                splitIndex = 0;
+            }
+            self.context = self.context.slice(splitIndex);
+        },
+    };
+    return self;
+};
+
+let conversationManager = (() => {
+    let self = {
+        currentConversation: {},
+    };
+    self.currentConversation = Conversation();
+    return self;
+})();

+ 73 - 64
src/main.js

@@ -1,79 +1,89 @@
-import './main_style.css';
+import './style.css';
 import { createMessage } from './message.js';
-import { fetchAiStream, appendUserMessageToCtx } from './ai_api.js';
+import { fetchAiStream } from './ai_api.js';
+import { conversationManager } from './conversation';
+import utils from './utils';
+
+export { generationController, modelManager };
+
 
 let isGenerating = false;
+let stopGenerating = false;
 
-function fetchJSONSync(url) {
-    try {
-        const xhr = new XMLHttpRequest();
-        xhr.open('GET', url, false);  // sync request
-        xhr.send();
+let generationController = {
+    setGenerating: () => {
+        isGenerating = true;
+        document.getElementById('send-button').innerHTML = 'Generating...';
+    },
+    unsetGenerating: () => {
+        isGenerating = false;
+        document.getElementById('send-button').innerHTML = 'Send';
+    },
+    resetStopGenerating: () => {
+        stopGenerating = false;
+    },
+    getStopGenerating: () => {
+        return stopGenerating;
+    }
+};
 
-        if (xhr.status !== 200) {
-            throw new Error(`HTTP Error: ${xhr.status}`);
+function sanitizeGlobalConfig(config) {
+    for (let key in config) {
+        if (config[key]["type"] === undefined) {
+            config[key].type = 'openai';
+        }
+        if (config[key]["ctxLen"] === undefined) {
+            config[key]["ctxLen"] = 100000;
+        }
+        if (config[key]["maxTokens"] === undefined) {
+            config[key]["maxTokens"] = 10000;
         }
-
-        const data = JSON.parse(xhr.responseText);
-        return data;
-    } catch (error) {
-        console.error('Error fetching JSON:', error);
-        throw error;
     }
+    return config;
 }
 
-const timestamp = new Date().getTime();
-globalConfig = fetchJSONSync('./config.json' + '?t=' + timestamp);
-
-for (let key in globalConfig) {
-    if (globalConfig[key]["type"] === undefined) {
-        globalConfig[key].type = 'openai';
-    }
-    if (globalConfig[key]["ctxLen"] === undefined) {
-        globalConfig[key]["ctxLen"] = 100000;
+function buildModelSelector(config) {
+    let modelSelector = document.getElementById('model-selector');
+    let modelKeys = Object.keys(config).sort((a, b) => a.localeCompare(b));
+    for (let key of modelKeys) {
+        let option = document.createElement('option');
+        option.value = key;
+        option.text = key;
+        modelSelector.add(option);
     }
-    if (globalConfig[key]["maxTokens"] === undefined) {
-        globalConfig[key]["maxTokens"] = 10000;
+    if (localStorage.getItem('lite-aichat-model') !== null) {
+        modelSelector.value = localStorage.getItem('lite-aichat-model');
     }
 }
 
-let modelSelector = document.getElementById('model-selector');
-let modelKeys = Object.keys(globalConfig).sort((a, b) => a.localeCompare(b));
-for (let key of modelKeys) {
-    let option = document.createElement('option');
-    option.value = key;
-    option.text = key;
-    modelSelector.add(option);
-}
-if (localStorage.getItem('lite-aichat-model') !== null) {
-    modelSelector.value = localStorage.getItem('lite-aichat-model');
-}
-globalCurrentModel = globalConfig[modelSelector.value];
-
-modelSelector.addEventListener('change', function() {
-    globalCurrentModel = globalConfig[modelSelector.value];
-    localStorage.setItem('lite-aichat-model', modelSelector.value);
+let ModelManager = () => {
+    let self = {
+        config: {},
+        currentModel: '',
+        init: () => {
+            let modelSelector = document.getElementById('model-selector');
+            self.config = utils.noCacheSyncJsonFetch('./config.json');
+            self.config = sanitizeGlobalConfig(self.config);
+            buildModelSelector(self.config);
+            self.currentModel = self.config[modelSelector.value];
+        },
+        setModel: (modelKey) => {
+            let modelSelector = document.getElementById('model-selector');
+            self.currentModel = self.config[modelKey];
+            localStorage.setItem('lite-aichat-model', modelSelector.value);
+        },
+    };
+    self.init();
+    return self;
+};
+
+let modelManager = ModelManager();
+
+document.getElementById('model-selector').addEventListener('change', function() {
+    let modelSelector = document.getElementById('model-selector');
+    modelManager.setModel(modelSelector.value);
 });
 
-function setGenerating() {
-    isGenerating = true;
-    document.getElementById('send-button').innerHTML = 'Generating...';
-}
-
-function unsetGenerating() {
-    isGenerating = false;
-    document.getElementById('send-button').innerHTML = 'Send';
-}
-
-let stopGenerating = false;
-
-function resetStopGenerating() {
-    stopGenerating = false;
-}
-
-function getStopGenerating() {
-    return stopGenerating;
-}
 
 function sendMessage() {
     if (isGenerating) return;
@@ -83,10 +93,10 @@ function sendMessage() {
         return;
     }
     let msgElem = createMessage(message, true);
-    appendUserMessageToCtx(message);
+    conversationManager.currentConversation.addMessage('user', message);
     let container = document.getElementById('message-container');
     container.appendChild(msgElem);
-    setGenerating();
+    generationController.setGenerating();
     container.scrollTop = container.scrollHeight;
     input.value = '';
     fetchAiStream()
@@ -113,5 +123,4 @@ document.getElementById('stop-button').addEventListener('click', () => {
     }
 });
 
-export { unsetGenerating, getStopGenerating, resetStopGenerating};
 

+ 0 - 0
src/main_style.css → src/style.css


+ 28 - 0
src/utils.js

@@ -0,0 +1,28 @@
+export default {
+    syncJsonFetch,
+    noCacheSyncJsonFetch,
+};
+
+function syncJsonFetch(url) {
+    try {
+        const xhr = new XMLHttpRequest();
+        xhr.open('GET', url, false);  // sync request
+        xhr.send();
+
+        if (xhr.status !== 200) {
+            throw new Error(`HTTP Error: ${xhr.status}`);
+        }
+
+        const data = JSON.parse(xhr.responseText);
+        return data;
+    } catch (error) {
+        console.error('Error fetching JSON:', error);
+        throw error;
+    }
+}
+
+function noCacheSyncJsonFetch(url) {
+    const timestamp = new Date().getTime();
+    return syncJsonFetch(url + '?t=' + timestamp);
+}
+