Mistivia 1 месяц назад
Родитель
Commit
7df4a80212
6 измененных файлов с 60 добавлено и 14 удалено
  1. 2 0
      .gitignore
  2. 3 1
      index.html
  3. 10 3
      public/config.json
  4. 20 8
      src/ai_api.js
  5. 16 1
      src/main.js
  6. 9 1
      src/main_style.css

+ 2 - 0
.gitignore

@@ -1,4 +1,5 @@
 # Logs
+publish/
 logs
 *.log
 npm-debug.log*
@@ -23,3 +24,4 @@ dist-ssr
 *.sln
 *.sw?
 public/config.json
+deploy.sh

+ 3 - 1
index.html

@@ -4,6 +4,7 @@
     <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
     <script>
         globalConfig = {};
+        globalCurrentModel = {};
     </script>
     <meta charset="UTF-8">
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
@@ -11,7 +12,8 @@
 </head>
 <body>
     <div class="header">
-        <p id="model-name"></p>
+    <select id="model-selector">
+    </select>
     </div>
 
     <div class="chat-container" id="message-container">

+ 10 - 3
public/config.json

@@ -1,5 +1,12 @@
 {
-    "model": "gpt-4o",
-    "url": "https://api.openai.com/v1/chat/completions",
-    "key": "xxxxxxxxxxxxxxxxx"
+    "GPT 4o": {
+        "model": "gpt-4o",
+        "url": "https://api.openai.com/v1/chat/completions",
+        "key": "xxxxxxxxxxxxxxxxxx"
+    },
+    "GPT 3.5": {
+        "model": "gpt-3.5",
+        "url": "https://api.openai.com/v1/chat/completions",
+        "key": "xxxxxxxxxxxxxxxxxx"
+    }
 }

+ 20 - 8
src/ai_api.js

@@ -12,7 +12,9 @@ function appendUserMessageToCtx(msg) {
     });
 }
 
-function onStreamEvent(elem, data) {
+let renderCnt = 0;
+
+function onStreamEvent(elem, data, forceRender=false) {
     let container = document.getElementById('message-container');
     let shouldScroll = false;
     if (container.scrollTop + container.clientHeight >= container.scrollHeight - 100) {
@@ -22,6 +24,8 @@ function onStreamEvent(elem, data) {
     let deltaContent = data.choices?.[0]?.delta?.content;
     if (deltaContent === undefined) deltaContent = '';
     currentMessage = currentMessage + deltaContent;
+    renderCnt++;
+    if (renderCnt % 10 !== 0 && !forceRender) return;
     let renderContent = currentMessage.replace('<think>', '<div class="think">\n\n');
     renderContent = renderContent.replace('</think>', '\n\n</div>').trim();
     renderContent = renderContent 
@@ -38,7 +42,8 @@ function onStreamEvent(elem, data) {
     }
 }
 
-function onStreamEnd() {
+function onStreamEnd(elem) {
+    onStreamEvent(elem, {}, true)
     context.push({
         role: 'assistant',
         content: currentMessage.replace(/<think>[\s\S]*?<\/think>/g, ''),
@@ -51,10 +56,10 @@ function onStreamEnd() {
 async function fetchAiStream() {
     const headers = {
         'Content-Type': 'application/json',
-        'Authorization': `Bearer ${globalConfig.key}`
+        'Authorization': `Bearer ${globalCurrentModel.key}`
     };
     const requestBody = {
-        model: globalConfig.model,
+        model: globalCurrentModel.model,
         messages: context,
         stream: true
     };
@@ -62,17 +67,19 @@ async function fetchAiStream() {
     let container = document.getElementById('message-container');
     container.appendChild(elem);
     container.scrollTop = container.scrollHeight;
+    elem.innerHTML = '<p>Generating...</p>';
     try {
-        const response = await fetch(globalConfig.url, {
+        const response = await fetch(globalCurrentModel.url, {
             method: 'POST',
             headers,
-            body: JSON.stringify(requestBody)
+            body: JSON.stringify(requestBody),
         });
         if (!response.ok) {
             throw new Error(`HTTP error! status: ${response.status}`);
         }
         const decoder = new TextDecoder();
         const reader = response.body.getReader();
+        let generationStarted = false;
         while (true) {
             const { done, value } = await reader.read();
             if (done) {
@@ -81,9 +88,13 @@ async function fetchAiStream() {
             const chunk = decoder.decode(value);
             const lines = chunk.split('\n');
             for (const line of lines) {
+                if (!generationStarted) {
+                    generationStarted = true;
+                    elem.innerHTML = '';
+                }
                 if (getStopGenerating()) {
                     resetStopGenerating();
-                    onStreamEnd();
+                    onStreamEnd(elem);
                     return;
                 }
                 if (line === '') continue;
@@ -96,9 +107,10 @@ async function fetchAiStream() {
                 }
             }
         }
-        onStreamEnd();
+        onStreamEnd(elem);
     } catch (error) {
         console.error('Error:', error);
+        onStreamEnd(elem);
         let p = document.createElement('p');
         p.innerHTML = error;
         p.style.color = 'red';

+ 16 - 1
src/main.js

@@ -23,6 +23,22 @@ function fetchJSONSync(url) {
 }
 
 globalConfig = fetchJSONSync('./config.json');
+let modelSelector = document.getElementById('model-selector');
+for (let key in globalConfig) {
+    let option = document.createElement('option');
+    option.value = key;
+    option.text = key;
+    modelSelector.add(option);
+}
+if (localStorage.getItem('lite-aichat-model') !== null) {
+    modelSelector.value = localStorage.getItem('lite-aichat-model');
+}
+globalCurrentModel = globalConfig[modelSelector.value];
+
+modelSelector.addEventListener('change', function() {
+    globalCurrentModel = globalConfig[modelSelector.value];
+    localStorage.setItem('lite-aichat-model', modelSelector.value);
+});
 
 function setGenerating() {
     isGenerating = true;
@@ -64,7 +80,6 @@ function sendMessage() {
         });
 }
 
-document.getElementById('model-name').textContent = globalConfig.model;
 
 document.getElementById('input-box').addEventListener('keydown', function(event) {
     if (event.ctrlKey && event.key === 'Enter') {

+ 9 - 1
src/main_style.css

@@ -60,7 +60,7 @@ body {
     background-color: #eee;
     color: #333;
     box-shadow: 0 2px 4px rgba(0,0,0,0.1);
-    max-width: 80%;
+    max-width: 50%;
     margin-left: auto;
 }
 
@@ -96,6 +96,14 @@ body {
     font-weight: bold;
 }
 
+#model-selector {
+    background-color: white;
+    padding: 10px;
+    border: 1px solid #ddd;
+    border-radius: 5px;
+    cursor: pointer;
+    font-size: 1.2em;
+}
 
 body figcaption {
   color: #666