diff --git a/README.md b/README.md
index 4662aff..b50c879 100644
--- a/README.md
+++ b/README.md
@@ -63,6 +63,14 @@ ai:
   # Any openai compatible API _should_ be usable, but no testing is done on that currently
   openAIBaseUrl: "https://api.openai.com/v1",
   dallEBaseUrl: "https://api.openai.com/v1",
+
+  # Chat section is optional, but may help provide better results when using the Chat On Page command
+  chat:
+    userInformation: >
+      I'm a software developer who likes taking notes.
+    userInstructions: >
+      Please give short and concise responses. When providing code, do so in python unless requested otherwise.
+
 ```
 
 #### Ollama
@@ -103,6 +111,22 @@ ai:
 
 `OPENAI_API_KEY` also needs to be set in `SECRETS` to an API key generated from [their web console](https://www.perplexity.ai/settings/api).
 
+#### Chat Custom Instructions
+
+OpenAI introduced [custom instructions for ChatGPT](https://openai.com/blog/custom-instructions-for-chatgpt) a while back to help improve the responses from ChatGPT. We are emulating that feature by allowing a system prompt to be injected into each new chat session.
+
+The system prompt is rendered similarly to the one below; see the example config above for where to configure these settings:
+
+Always added:
+> This is an interactive chat session with a user in a note-taking tool called SilverBullet.
+
+If **userInformation** is set, this is added:
+> The user has provided the following information about their self: **${ai.chat.userInformation}**
+
+If **userInstructions** is set, this is added:
+> The user has provided the following instructions for the chat, follow them as closely as possible: **${ai.chat.userInstructions}**
+
+
 ### Templated Prompts
 
 **NOTE:** All built-in prompts will be replaced with templated prompts eventually.
diff --git a/deno.jsonc b/deno.jsonc
index b330635..4034b15 100644
--- a/deno.jsonc
+++ b/deno.jsonc
@@ -15,7 +15,8 @@
     "exclude": [
       "*.md",
       "**/*.md",
-      "*.plug.js"
+      "*.plug.js",
+      "**/*.plug.js"
     ]
   }
 }
diff --git a/import_map.json b/import_map.json
index 7745317..12b41af 100644
--- a/import_map.json
+++ b/import_map.json
@@ -28,14 +28,14 @@
     "turndown": "https://cdn.skypack.dev/turndown@7.1.1",
     "turndown-plugin-gfm": "https://cdn.skypack.dev/@joplin/turndown-plugin-gfm@1.0.45",
 
-    "$common/": "https://deno.land/x/silverbullet@0.7.1/common/",
+    "$common/": "https://deno.land/x/silverbullet@0.7.3/common/",
     "$lib/": "../silverbullet/lib/",
-    "$type/": "https://deno.land/x/silverbullet@0.7.1/type/",
+    "$type/": "https://deno.land/x/silverbullet@0.7.3/type/",
     "preact": "https://esm.sh/preact@10.11.1",
-    "$sb/": "https://deno.land/x/silverbullet@0.7.1/plug-api/",
-    "$sbplugs/": "https://deno.land/x/silverbullet@0.7.1/plugs/",
-    "$plugos/": "https://deno.land/x/silverbullet@0.7.1/plugos/",
+    "$sb/": "https://deno.land/x/silverbullet@0.7.3/plug-api/",
+    "$sbplugs/": "https://deno.land/x/silverbullet@0.7.3/plugs/",
+    "$plugos/": "https://deno.land/x/silverbullet@0.7.3/plugos/",
     "zod": "https://deno.land/x/zod@v3.22.4/mod.ts",
     "$std/": "https://deno.land/std@0.189.0/"
   }
 }
diff --git a/sbai.ts b/sbai.ts
index 3cba632..9acb3da 100644
--- a/sbai.ts
+++ b/sbai.ts
@@ -70,11 +70,14 @@ export async function callOpenAIwithNote() {
   });
 
   await streamChatWithOpenAI({
-    systemMessage:
-      "You are an AI note assistant. Follow all user instructions and use the note context and note content to help follow those instructions. Use Markdown for any formatting.",
-    userMessage:
-      `Note Context: Today is ${dayString}, ${dateString}. The current note name is "${noteName}".\nUser Prompt: ${userPrompt}\nNote Content:\n${selectedTextInfo.text}`,
-  }, selectedTextInfo.isWholeNote ? undefined : selectedTextInfo.to);
+    messages: {
+      systemMessage:
+        "You are an AI note assistant. Follow all user instructions and use the note context and note content to help follow those instructions. Use Markdown for any formatting.",
+      userMessage:
+        `Note Context: Today is ${dayString}, ${dateString}. The current note name is "${noteName}".\nUser Prompt: ${userPrompt}\nNote Content:\n${selectedTextInfo.text}`,
+    },
+    cursorStart: selectedTextInfo.isWholeNote ? undefined : selectedTextInfo.to,
+  });
 }
 
 /**
@@ -146,9 +149,11 @@ export async function streamOpenAIWithSelectionAsPrompt() {
   const selectedTextInfo = await getSelectedTextOrNote();
 
   await streamChatWithOpenAI({
-    systemMessage:
-      "You are an AI note assistant in a markdown-based note tool.",
-    userMessage: selectedTextInfo.text,
+    messages: {
+      systemMessage:
+        "You are an AI note assistant in a markdown-based note tool.",
+      userMessage: selectedTextInfo.text,
+    },
   });
 }
 
@@ -169,7 +174,12 @@ export async function streamChatOnPage() {
   const newPageLength = currentPageLength + "\n\n**assistant**: ".length;
   await editor.insertAtPos("\n\n**user**: ", newPageLength);
   await editor.moveCursor(newPageLength + "\n\n**user**: ".length);
-  await streamChatWithOpenAI(messages, newPageLength);
+  await streamChatWithOpenAI({
+    messages: messages,
+    cursorStart: newPageLength,
+    scrollIntoView: true,
+    includeChatSystemPrompt: true,
+  });
 }
 
 /**
diff --git a/src/init.ts b/src/init.ts
index 4f29b7c..53c18bd 100644
--- a/src/init.ts
+++ b/src/init.ts
@@ -1,9 +1,18 @@
-import { readSetting } from "$sb/lib/settings_page.ts";
 import { readSecret } from "$sb/lib/secrets_page.ts";
+import { readSetting } from "$sb/lib/settings_page.ts";
 import { editor } from "$sb/syscalls.ts";
 
-let apiKey: string;
-let aiSettings: {
+export type ChatMessage = {
+  content: string;
+  role: "user" | "assistant" | "system";
+};
+
+type ChatSettings = {
+  userInformation: string;
+  userInstructions: string;
+};
+
+type AISettings = {
   summarizePrompt: string;
   tagPrompt: string;
   imagePrompt: string;
@@ -13,8 +22,13 @@ let aiSettings: {
   openAIBaseUrl: string;
   dallEBaseUrl: string;
   requireAuth: boolean;
+  chat: ChatSettings;
 };
 
+let apiKey: string;
+let aiSettings: AISettings;
+let chatSystemPrompt: ChatMessage;
+
 async function initializeOpenAI() {
   const newApiKey = await readSecret("OPENAI_API_KEY");
   if (newApiKey !== apiKey) {
@@ -41,6 +55,7 @@ async function initializeOpenAI() {
     openAIBaseUrl: "https://api.openai.com/v1",
     dallEBaseUrl: "https://api.openai.com/v1",
     requireAuth: true,
+    chat: {},
   };
   const newSettings = await readSetting("ai", {});
   const newCombinedSettings = { ...defaultSettings, ...newSettings };
@@ -51,6 +66,20 @@ async function initializeOpenAI() {
   } else {
     console.log("aiSettings unchanged", aiSettings);
   }
+
+  chatSystemPrompt = {
+    role: "system",
+    content:
+      `This is an interactive chat session with a user in a markdown-based note-taking tool called SilverBullet.`,
+  };
+  if (aiSettings.chat.userInformation) {
+    chatSystemPrompt.content +=
+      `\nThe user has provided the following information about their self: ${aiSettings.chat.userInformation}`;
+  }
+  if (aiSettings.chat.userInstructions) {
+    chatSystemPrompt.content +=
+      `\nThe user has provided the following instructions for the chat, follow them as closely as possible: ${aiSettings.chat.userInstructions}`;
+  }
 }
 
-export { aiSettings, apiKey, initializeOpenAI };
+export { aiSettings, apiKey, chatSystemPrompt, initializeOpenAI };
diff --git a/src/openai.ts b/src/openai.ts
index 54e9b7c..9964a4d 100644
--- a/src/openai.ts
+++ b/src/openai.ts
@@ -1,28 +1,48 @@
+import "$sb/lib/native_fetch.ts";
 import { editor } from "$sb/syscalls.ts";
 import { SSE } from "npm:sse.js@2.2.0";
-import { aiSettings, apiKey, initializeOpenAI } from "./init.ts";
 import { getPageLength } from "./editorUtils.ts";
+import {
+  aiSettings,
+  apiKey,
+  ChatMessage,
+  chatSystemPrompt,
+  initializeOpenAI,
+} from "./init.ts";
 
-export async function streamChatWithOpenAI(
-  messages: Array<{ role: string; content: string }> | {
+type StreamChatOptions = {
+  messages: Array<ChatMessage> | {
     systemMessage: string;
     userMessage: string;
-  },
-  cursorStart: number | undefined = undefined,
-  cursorFollow: boolean = false,
-): Promise<void> {
+  };
+  cursorStart?: number;
+  cursorFollow?: boolean;
+  scrollIntoView?: boolean;
+  includeChatSystemPrompt?: boolean;
+};
+
+export async function streamChatWithOpenAI({
+  messages,
+  cursorStart = undefined,
+  cursorFollow = false,
+  scrollIntoView = true,
+  includeChatSystemPrompt = false,
+}: StreamChatOptions): Promise<void> {
   try {
     if (!apiKey) await initializeOpenAI();
 
     const sseUrl = `${aiSettings.openAIBaseUrl}/chat/completions`;
-    let payloadMessages;
+    let payloadMessages: ChatMessage[] = [];
+    if (includeChatSystemPrompt) {
+      payloadMessages.push(chatSystemPrompt);
+    }
     if ("systemMessage" in messages && "userMessage" in messages) {
-      payloadMessages = [
-        { role: "system", content: messages.systemMessage },
-        { role: "user", content: messages.userMessage },
-      ];
+      payloadMessages.push(
+        { role: "system", content: messages.systemMessage } as ChatMessage,
+        { role: "user", content: messages.userMessage } as ChatMessage,
+      );
     } else {
-      payloadMessages = messages;
+      payloadMessages.push(...messages);
    }
 
     var headers = {
@@ -90,6 +110,19 @@ export async function streamChatWithOpenAI(
         if (cursorFollow) {
           editor.moveCursor(cursorPos, true);
         }
+        if (scrollIntoView) {
+          // TODO:
+          // editor.dispatch({
+          //   effects: [
+          //     EditorView.scrollIntoView(
+          //       pos,
+          //       {
+          //         y: "center",
+          //       },
+          //     ),
+          //   ],
+          // });
+        }
       } catch (error) {
         console.error("Error processing message event:", error, e.data);
       }
@@ -126,25 +159,36 @@ export async function chatWithOpenAI(
     );
     throw new Error("API key or AI settings are not properly configured.");
   }
-  const response = await fetch(
+
+  const body = JSON.stringify({
+    model: aiSettings.defaultTextModel,
+    messages: [
+      { role: "system", content: systemMessage },
+      ...userMessages,
+    ],
+  });
+
+  console.log("Sending body", body);
+
+  const headers = {
+    "Authorization": `Bearer ${apiKey}`,
+    "Content-Type": "application/json",
+  };
+
+  console.log("Request headers:", headers);
+
+  const response = await nativeFetch(
     aiSettings.openAIBaseUrl + "/chat/completions",
     {
      method: "POST",
-      headers: {
-        "Authorization": `Bearer ${apiKey}`,
-        "Content-Type": "application/json",
-      },
-      body: JSON.stringify({
-        model: aiSettings.defaultTextModel,
-        messages: [
-          { role: "system", content: systemMessage },
-          ...userMessages,
-        ],
-      }),
+      headers: headers,
+      body: body,
     },
   );
 
   if (!response.ok) {
+    console.error("http response: ", response);
+    console.error("http response body: ", await response.json());
     throw new Error(`HTTP error, status: ${response.status}`);
   }
 
@@ -172,7 +216,7 @@ export async function generateImageWithDallE(
   try {
     if (!apiKey) await initializeOpenAI();
     await editor.flashNotification("Contacting DALL·E, please wait...");
-    const response = await fetch(
+    const response = await nativeFetch(
       aiSettings.dallEBaseUrl + "/images/generations",
       {
         method: "POST",
diff --git a/src/prompts.ts b/src/prompts.ts
index ae7bc44..e41cebe 100644
--- a/src/prompts.ts
+++ b/src/prompts.ts
@@ -145,7 +145,10 @@ export async function insertAiPromptFromTemplate(
   // console.log("Rendered template:", renderedTemplate);
 
   await streamChatWithOpenAI({
-    systemMessage: selectedTemplate.systemPrompt,
-    userMessage: renderedTemplate.text,
-  }, cursorPos);
+    messages: {
+      systemMessage: selectedTemplate.systemPrompt,
+      userMessage: renderedTemplate.text,
+    },
+    cursorStart: cursorPos,
+  });
 }
diff --git a/src/utils.ts b/src/utils.ts
index c99c1aa..439f8c9 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -1,4 +1,5 @@
 import { editor } from "$sb/syscalls.ts";
+import { ChatMessage } from "./init.ts";
 
 export function folderName(path: string) {
   return path.split("/").slice(0, -1).join("/");
@@ -11,12 +12,12 @@ export function folderName(path: string) {
  *
  * Valid roles are system, assistant, and user.
  *
- * @returns {Array<{ role: string; content: string }>}
+ * @returns {Array<ChatMessage>}
  */
 export async function convertPageToMessages() {
   const pageText = await editor.getText();
   const lines = pageText.split("\n");
-  const messages = [];
+  const messages: ChatMessage[] = [];
   let currentRole = "user";
   let contentBuffer = "";
 
@@ -25,7 +26,9 @@
     if (match) {
       const newRole = match[1].toLowerCase();
       if (currentRole && currentRole !== newRole) {
-        messages.push({ role: currentRole, content: contentBuffer.trim() });
+        messages.push(
+          { role: currentRole, content: contentBuffer.trim() } as ChatMessage,
+        );
         contentBuffer = "";
       }
       currentRole = newRole;
@@ -35,7 +38,9 @@
     }
   });
 
   if (contentBuffer && currentRole) {
-    messages.push({ role: currentRole, content: contentBuffer.trim() });
+    messages.push(
+      { role: currentRole, content: contentBuffer.trim() } as ChatMessage,
+    );
   }
   return messages;