From c392bd365e8239b926a7bb54ca11827de20c29a9 Mon Sep 17 00:00:00 2001
From: Justyn Shull
Date: Sat, 11 Jan 2025 21:26:24 -0600
Subject: [PATCH] Log sseEvent errors

---
 docs/Changelog.md       | 2 +-
 silverbullet-ai.plug.js | 2 +-
 src/providers/openai.ts | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/Changelog.md b/docs/Changelog.md
index 89ec48f..e5c1f6a 100644
--- a/docs/Changelog.md
+++ b/docs/Changelog.md
@@ -3,7 +3,7 @@ For the full changelog, please refer to the individual release notes on https://
 This page is a brief overview of each version.
 
 ## Unreleased
-- Nothing yet.
+- Better logging when SSE events have errors
 
 ---
 ## 0.4.1 (2024-11-15)

diff --git a/silverbullet-ai.plug.js b/silverbullet-ai.plug.js
index 28f74d9..7af5a50 100644
--- a/silverbullet-ai.plug.js
+++ b/silverbullet-ai.plug.js
@@ -11,7 +11,7 @@ var An=Object.defineProperty;var U=(e,t)=>{for(var r in t)An(e,r,{get:t[r],enume

[Generated bundle hunk omitted: silverbullet-ai.plug.js is the compiled, minified plug bundle, and this hunk only repeats the one-line logging change shown in src/providers/openai.ts below, replacing console.error("SSE error:", m) with console.error("SSE error sseEvent.data:", m.data, " ssEventObj:", m) inside the OpenAI provider's SSE "error" listener.]
diff --git a/src/providers/openai.ts b/src/providers/openai.ts
index 53ad819..4db7072 100644
--- a/src/providers/openai.ts
+++ b/src/providers/openai.ts
@@ -106,7 +106,7 @@ export class OpenAIProvider extends AbstractProvider {
       });
 
       source.addEventListener("error", (e: sseEvent) => {
-        console.error("SSE error:", e);
+        console.error("SSE error sseEvent.data:", e.data, " ssEventObj:", e);
         source.close();
       });
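
For context, below is a rough source-level sketch of the listener this patch touches. It assumes an SSE client like the one visible in the bundled plug code (exposing addEventListener, stream, and close); the sseEvent shape and the helper name attachErrorLogging are illustrative stand-ins, not code taken verbatim from the repo.

```typescript
// Minimal sketch only: "sseEvent" and "SseSource" approximate the event and client
// shapes suggested by the bundled code; names are illustrative, not verbatim.
interface sseEvent {
  data: string;
}

interface SseSource {
  addEventListener(type: string, cb: (e: sseEvent) => void): void;
  close(): void;
}

function attachErrorLogging(source: SseSource): void {
  source.addEventListener("error", (e: sseEvent) => {
    // Log the event payload separately from the event object: when the API sends an
    // error over the stream, e.data tends to carry the useful message, while the bare
    // event object often prints as something opaque in the console.
    console.error("SSE error sseEvent.data:", e.data, " ssEventObj:", e);
    source.close();
  });
}
```

As the changelog entry ("Better logging when SSE events have errors") suggests, the point of the change is simply to surface e.data alongside the raw event object when an SSE stream fails.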