diff --git a/README.md b/README.md
index b50c879..f23480d 100644
--- a/README.md
+++ b/README.md
@@ -22,8 +22,6 @@ The list below are the commands available in this plugin.
 
 - **AI: Summarize Note and open summary**: Uses a built-in prompt to ask the LLM for a summary of either the entire note, or the selected
 text. Opens the resulting summary in a temporary right pane.
-- **AI: Insert Summary**: Uses a built-in prompt to ask the LLM for a summary of either the entire note, or the selected
-text. Inserts the summary at the cursor's position.
 - **AI: Call OpenAI with Note as context**: Prompts the user for a custom prompt to send to the LLM. If the user has
 text selected, the selected text is used as the note content. If the user has no text selected, the entire note is used
 as the note content. The response is streamed to the cursor position.
diff --git a/silverbullet-ai.plug.yaml b/silverbullet-ai.plug.yaml
index 55830e9..ee32ff9 100644
--- a/silverbullet-ai.plug.yaml
+++ b/silverbullet-ai.plug.yaml
@@ -19,10 +19,6 @@ functions:
     path: sbai.ts:openSummaryPanel
     command:
       name: "AI: Summarize Note and open summary"
-  insertSummary:
-    path: sbai.ts:insertSummary
-    command:
-      name: "AI: Insert Summary"
   callOpenAI:
     path: sbai.ts:callOpenAIwithNote
     command:
diff --git a/src/openai.ts b/src/openai.ts
index fa30706..0a06449 100644
--- a/src/openai.ts
+++ b/src/openai.ts
@@ -44,7 +44,7 @@ export class OpenAIProvider extends AbstractProvider {
     }
   }
 
-  async streamChat(options: StreamChatOptions): Promise<void> {
+  async streamChat(options: StreamChatOptions): Promise<string> {
     const { messages, onDataReceived } = options;
 
     try {
@@ -70,15 +70,17 @@ export class OpenAIProvider extends AbstractProvider {
       };
 
       const source = new SSE(sseUrl, sseOptions);
+      let fullMsg = "";
 
       source.addEventListener("message", function (e) {
         try {
           if (e.data == "[DONE]") {
             source.close();
+            return fullMsg;
           } else {
             const data = JSON.parse(e.data);
             const msg = data.choices[0]?.delta?.content || "";
-            // TODO: Send msg to a callback that should be registered to the interface
+            fullMsg += msg;
             if (onDataReceived) {
               onDataReceived(msg);
             }
@@ -90,6 +92,7 @@ export class OpenAIProvider extends AbstractProvider {
 
       source.addEventListener("end", function () {
         source.close();
+        return fullMsg;
       });
 
       source.stream();
@@ -101,6 +104,7 @@ export class OpenAIProvider extends AbstractProvider {
       );
       throw error;
     }
+    return "";
   }
 
   async nonStreamingChat(messages: Array<ChatMessage>): Promise<string> {
@@ -146,6 +150,7 @@ export class OpenAIProvider extends AbstractProvider {
   }
 }
 
+// TODO: Make an interface for image generating models too
 export async function generateImageWithDallE(
   prompt: string,
   n: 1,
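
// Not part of the diff above: a hedged sketch only. The new streamChat signature
// suggests callers should eventually receive the accumulated text, but a `return`
// inside an SSE event listener only exits that listener, not streamChat itself.
// One way to surface fullMsg to the caller is to wrap the listeners in a Promise.
// Assumptions: the same sse.js `SSE` client, `sseUrl`/`sseOptions` values, "[DONE]"
// sentinel, and `onDataReceived` callback used in src/openai.ts; `streamToString`
// is a hypothetical helper name, not part of the plugin's API.
import { SSE } from "sse.js"; // assumed to match the import used by src/openai.ts

function streamToString(
  sseUrl: string,
  sseOptions: any, // shape depends on the sse.js options object built in streamChat
  onDataReceived?: (msg: string) => void,
): Promise<string> {
  return new Promise((resolve, reject) => {
    const source = new SSE(sseUrl, sseOptions);
    let fullMsg = "";

    source.addEventListener("message", (e: { data: string }) => {
      try {
        if (e.data == "[DONE]") {
          source.close();
          resolve(fullMsg); // resolve the wrapping Promise with the complete message
          return;
        }
        const data = JSON.parse(e.data);
        const msg = data.choices[0]?.delta?.content || "";
        fullMsg += msg;
        if (onDataReceived) {
          onDataReceived(msg); // still stream each delta to the UI callback
        }
      } catch (error) {
        source.close();
        reject(error);
      }
    });

    // Assumes sse.js dispatches an "error" event on stream failure.
    source.addEventListener("error", (e: unknown) => {
      source.close();
      reject(e);
    });

    source.stream();
  });
}

// Usage sketch: streamChat could then `return await streamToString(sseUrl, sseOptions,
// onDataReceived);` so its Promise<string> actually resolves with the full response.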