Browse Source

add claude support

Mistivia 1 month ago
parent
commit
491977ecfc
6 changed files with 85 additions and 29 deletions
  1. 1 1
      .gitignore
  2. 2 2
      index.html
  3. 0 12
      public/config.json
  4. 14 0
      public/config.json.example
  5. 54 13
      src/ai_api.js
  6. 14 1
      src/main.js

+ 1 - 1
.gitignore

@@ -1,4 +1,4 @@
-# Logs
+release/
 publish/
 logs
 *.log

+ 2 - 2
index.html

@@ -7,7 +7,7 @@
         globalCurrentModel = {};
     </script>
     <meta charset="UTF-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
     <title></title>
 </head>
 <body>
@@ -17,7 +17,7 @@
     </div>
 
     <div class="chat-container" id="message-container">
-        <div class="system-prompt-container">
+        <div class="system-prompt-container" style="display:none;">
             <textarea class="system-prompt-box" placeholder="System Prompt..." rows="2" id="system-prompt"></textarea>
         </div>
     </div>

+ 0 - 12
public/config.json

@@ -1,12 +0,0 @@
-{
-    "GPT 4o": {
-        "model": "gpt-4o",
-        "url": "https://api.openai.com/v1/chat/completions",
-        "key": "xxxxxxxxxxxxxxxxxx"
-    },
-    "GPT 3.5": {
-        "model": "gpt-3.5",
-        "url": "https://api.openai.com/v1/chat/completions",
-        "key": "xxxxxxxxxxxxxxxxxx"
-    }
-}

+ 14 - 0
public/config.json.example

@@ -0,0 +1,14 @@
+{
+    "claude-haiku": {
+        "model": "claude-3-5-haiku-latest",
+        "url": "https://api.anthropic.com/v1/messages",
+        "key": "xxxxxxxxxxxxxxxxxxxx",
+        "maxTokens": 4096,
+        "type": "claude"
+    },
+    "gpt-4o": {
+        "model": "chatgpt-4o-latest",
+        "url": "https://api.openai.com/v1/chat/completions",
+        "key": "xxxxxxxxxxxxxxxxxxxxx"
+    }
+}

+ 54 - 13
src/ai_api.js

@@ -14,18 +14,35 @@ function appendUserMessageToCtx(msg) {
 
 let renderCnt = 0;
 
-function onStreamEvent(elem, data, forceRender=false) {
+function extractDelta(type, data) {
+    if (type === 'openai') {
+         return data.choices?.[0]?.delta?.content;
+    } else if (type === 'claude') {
+        if (data.type !== 'content_block_delta') return '';
+        if (data.delta.type !== 'text_delta') return '';
+        return data.delta.text;
+    }
+}
+
+function isStop(type, rawData) {
+    if (type === "openai") {
+        return rawData === 'data: [DONE]';
+    } else if (type === "claude") {
+        return rawData === 'event: message_stop'
+    }
+}
+
+function onStreamEvent(type, elem, data, forceRender=false) {
     let container = document.getElementById('message-container');
     let shouldScroll = false;
     if (container.scrollTop + container.clientHeight >= container.scrollHeight - 100) {
         shouldScroll = true;
     }
-
-    let deltaContent = data.choices?.[0]?.delta?.content;
+    let deltaContent = extractDelta(type, data);
     if (deltaContent === undefined) deltaContent = '';
     currentMessage = currentMessage + deltaContent;
     renderCnt++;
-    if (renderCnt % 10 !== 0 && !forceRender) return;
+    if (renderCnt % 100 !== 0 && !forceRender) return;
     let renderContent = currentMessage.replace('<think>', '<div class="think">\n\n');
     renderContent = renderContent.replace('</think>', '\n\n</div>').trim();
     renderContent = renderContent 
@@ -33,7 +50,7 @@ function onStreamEvent(elem, data, forceRender=false) {
             return `\$\$${p2.trim()}\$\$`;
         })
         .replace(/(\\\()([\s\S]*?)(\\\))/g, (match, p1, p2, p3) => {
-            return `\$${p2.trim()}\$`;
+            return `\\\\\\\(${p2.trim()}\\\\\\\)`;
         })
     const html = marked.parse(renderContent);
     elem.innerHTML = html;
@@ -42,8 +59,8 @@ function onStreamEvent(elem, data, forceRender=false) {
     }
 }
 
-function onStreamEnd(elem) {
-    onStreamEvent(elem, {}, true)
+function onStreamEnd(type, elem) {
+    onStreamEvent(type, elem, {}, true)
     context.push({
         role: 'assistant',
         content: currentMessage.replace(/<think>[\s\S]*?<\/think>/g, ''),
@@ -53,17 +70,38 @@ function onStreamEnd(elem) {
     MathJax.typesetPromise();
 }
 
+function cleanContext(len) {
+    let getBytes = (str) => {
+        return new Blob([str]).size;
+    };
+    let sum = 0;
+    let splitIndex = context.length;
+    for (let i = context.length - 1; i >= 0; i--) {
+        const bytes = getBytes(context[i].content);
+        if (sum + bytes > len) {
+            splitIndex = i;
+            break;
+        }
+        sum += bytes;
+        splitIndex = 0;
+    }
+    context = context.slice(splitIndex);
+}
+
 async function fetchAiStream() {
     const headers = {
         'Content-Type': 'application/json',
         'Authorization': `Bearer ${globalCurrentModel.key}`
     };
+    cleanContext(globalCurrentModel.ctxLen);
     const requestBody = {
         model: globalCurrentModel.model,
         messages: context,
-        stream: true
+        stream: true,
+        max_tokens: globalCurrentModel.maxTokens,
     };
     let elem = createMessage('', false);
+    let type = globalCurrentModel.type;
     let container = document.getElementById('message-container');
     container.appendChild(elem);
     container.scrollTop = container.scrollHeight;
@@ -94,23 +132,26 @@ async function fetchAiStream() {
                 }
                 if (getStopGenerating()) {
                     resetStopGenerating();
-                    onStreamEnd(elem);
+                    onStreamEnd(type, elem);
                     return;
                 }
                 if (line === '') continue;
-                if (line.trim() === 'data: [DONE]') break;
+                if (isStop(type, line.trim())) break;
+                if (!line.startsWith('data: ')) {
+                    continue;
+                }
                 try {
                     const data = JSON.parse(line.slice(6));
-                    onStreamEvent(elem, data);
+                    onStreamEvent(type, elem, data);
                 } catch (e) {
                     console.error('Error parsing stream data:', e);
                 }
             }
         }
-        onStreamEnd(elem);
+        onStreamEnd(type, elem);
     } catch (error) {
         console.error('Error:', error);
-        onStreamEnd(elem);
+        onStreamEnd(type, elem);
         let p = document.createElement('p');
         p.innerHTML = error;
         p.style.color = 'red';

+ 14 - 1
src/main.js

@@ -23,8 +23,21 @@ function fetchJSONSync(url) {
 }
 
 globalConfig = fetchJSONSync('./config.json');
-let modelSelector = document.getElementById('model-selector');
 for (let key in globalConfig) {
+    if (globalConfig[key]["type"] === undefined) {
+        globalConfig[key].type = 'openai';
+    }
+    if (globalConfig[key]["ctxLen"] === undefined) {
+        globalConfig[key]["ctxLen"] = 100000;
+    }
+    if (globalConfig[key]["maxTokens"] === undefined) {
+        globalConfig[key]["maxTokens"] = 10000;
+    }
+}
+
+let modelSelector = document.getElementById('model-selector');
+let modelKeys = Object.keys(globalConfig).sort((a, b) => a.localeCompare(b));
+for (let key of modelKeys) {
     let option = document.createElement('option');
     option.value = key;
     option.text = key;