Merge pull request #617 from chhoumann/dev
chhoumann authored Dec 3, 2023
2 parents 1c210dd + 05f0225 commit 8584d11
Showing 2 changed files with 75 additions and 56 deletions.
src/ai/OpenAIRequest.ts: 121 changes (65 additions & 56 deletions)
@@ -4,69 +4,78 @@ import type { OpenAIModelParameters } from "./OpenAIModelParameters";
 import { settingsStore } from "src/settingsStore";
 import { getTokenCount } from "./AIAssistant";
 import { getModelMaxTokens } from "./getModelMaxTokens";
+import { preventCursorChange } from "./preventCursorChange";
 
 type ReqResponse = {
-  id: string;
-  model: string;
-  object: string;
-  usage: {
-    prompt_tokens: number;
-    completion_tokens: number;
-    total_tokens: number;
-  };
-  choices: {
-    finish_reason: string;
-    index: number;
-    message: { content: string; role: string; };
-  }[];
-  created: number;
+  id: string;
+  model: string;
+  object: string;
+  usage: {
+    prompt_tokens: number;
+    completion_tokens: number;
+    total_tokens: number;
+  };
+  choices: {
+    finish_reason: string;
+    index: number;
+    message: { content: string; role: string };
+  }[];
+  created: number;
 };
 
 export function OpenAIRequest(
-  apiKey: string,
-  model: Model,
-  systemPrompt: string,
-  modelParams: Partial<OpenAIModelParameters> = {}
+  apiKey: string,
+  model: Model,
+  systemPrompt: string,
+  modelParams: Partial<OpenAIModelParameters> = {}
 ) {
-  return async function makeRequest(prompt: string) {
-    if (settingsStore.getState().disableOnlineFeatures) {
-      throw new Error(
-        "Blocking request to OpenAI: Online features are disabled in settings."
-      );
-    }
+  return async function makeRequest(prompt: string) {
+    if (settingsStore.getState().disableOnlineFeatures) {
+      throw new Error(
+        "Blocking request to OpenAI: Online features are disabled in settings."
+      );
+    }
 
-    const tokenCount = getTokenCount(prompt, model) + getTokenCount(systemPrompt, model);
-    const maxTokens = getModelMaxTokens(model);
+    const tokenCount =
+      getTokenCount(prompt, model) + getTokenCount(systemPrompt, model);
+    const maxTokens = getModelMaxTokens(model);
 
-    if (tokenCount > maxTokens) {
-      throw new Error(
-        `The ${model} API has a token limit of ${maxTokens}. Your prompt has ${tokenCount} tokens.`
-      );
-    }
+    if (tokenCount > maxTokens) {
+      throw new Error(
+        `The ${model} API has a token limit of ${maxTokens}. Your prompt has ${tokenCount} tokens.`
+      );
+    }
 
-    try {
-      const response = await requestUrl({
-        url: `https://api.openai.com/v1/chat/completions`,
-        method: "POST",
-        headers: {
-          "Content-Type": "application/json",
-          Authorization: `Bearer ${apiKey}`,
-        },
-        body: JSON.stringify({
-          model,
-          ...modelParams,
-          messages: [
-            { role: "system", content: systemPrompt },
-            { role: "user", content: prompt },
-          ],
-        }),
-      });
-      return response.json as ReqResponse;
-    } catch (error) {
-      console.log(error);
-      throw new Error(
-        `Error while making request to OpenAI API: ${(error as { message: string; }).message}`
-      );
-    }
-  };
+    try {
+      const restoreCursor = preventCursorChange();
+      const _response = requestUrl({
+        url: `https://api.openai.com/v1/chat/completions`,
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${apiKey}`,
+        },
+        body: JSON.stringify({
+          model,
+          ...modelParams,
+          messages: [
+            { role: "system", content: systemPrompt },
+            { role: "user", content: prompt },
+          ],
+        }),
+      });
+      restoreCursor();
+
+      const response = await _response;
+
+      return response.json as ReqResponse;
+    } catch (error) {
+      console.log(error);
+      throw new Error(
+        `Error while making request to OpenAI API: ${
+          (error as { message: string }).message
+        }`
+      );
+    }
+  };
 }
src/ai/preventCursorChange.ts: 10 changes (10 additions & 0 deletions)
@@ -0,0 +1,10 @@
+export function preventCursorChange(): () => void {
+  const cursor = app.workspace.activeEditor?.editor?.getCursor();
+  const selection = app.workspace.activeEditor?.editor?.listSelections();
+
+  return () => {
+    if (cursor) app.workspace.activeEditor?.editor?.setCursor(cursor);
+    if (selection)
+      app.workspace.activeEditor?.editor?.setSelections(selection);
+  };
+}
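The new helper snapshots the active editor's cursor position and selections through Obsidian's global app object and returns a closure that restores them; in OpenAIRequest above, the snapshot is taken just before requestUrl is dispatched and the restore runs immediately afterwards, before the response is awaited. A minimal sketch of the same capture/restore pattern follows; the import path and the placeholder function are for illustration only.

    import { preventCursorChange } from "src/ai/preventCursorChange";

    declare function someLongRunningCall(): Promise<unknown>; // hypothetical async work

    const restoreCursor = preventCursorChange(); // snapshot cursor and selections (restore is a no-op if no editor was active)
    const pending = someLongRunningCall();       // start the work without awaiting it yet
    restoreCursor();                             // put the cursor and selections back right away
    await pending;                               // then wait for the result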

1 comment on commit 8584d11

@vercel vercel bot commented on 8584d11 Dec 3, 2023

Successfully deployed to the following URLs:

quickadd – ./

quickadd.obsidian.guide
quickadd-git-master-chrisbbh.vercel.app
quickadd-chrisbbh.vercel.app
