
Commit

🐛 fix: lm studio
arvinxx committed Nov 10, 2024
1 parent 238e57f commit 5b4ddeb
Showing 3 changed files with 8 additions and 0 deletions.
3 changes: 3 additions & 0 deletions src/config/modelProviders/lmstudio.ts
@@ -24,6 +24,9 @@ const LMStudio: ModelProviderCard = {
   modelList: { showModelFetcher: true },
   modelsUrl: 'https://lmstudio.ai/models',
   name: 'LM Studio',
+  proxyUrl: {
+    placeholder: 'http://127.0.0.1:1234/v1',
+  },
   showApiKey: false,
   smoothing: {
     speed: 2,
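
The new proxyUrl.placeholder points at LM Studio's default local OpenAI-compatible endpoint on port 1234. As a rough sketch of how a placeholder like this is typically used, it can double as the fallback baseURL when the user leaves the endpoint field empty. The ProviderCardLike type and resolveBaseURL helper below are illustrative only, not part of this diff:

// Illustrative sketch (not from this commit): fall back to the card's
// proxyUrl.placeholder when the user has not configured an endpoint.
interface ProxyUrlConfig {
  placeholder: string;
}

interface ProviderCardLike {
  id: string;
  proxyUrl?: ProxyUrlConfig;
  showApiKey: boolean;
}

const resolveBaseURL = (card: ProviderCardLike, userBaseURL?: string): string | undefined =>
  // Prefer the user's explicit endpoint; otherwise use the card's placeholder.
  userBaseURL?.trim() || card.proxyUrl?.placeholder;

const lmStudioCard: ProviderCardLike = {
  id: 'lmstudio',
  proxyUrl: { placeholder: 'http://127.0.0.1:1234/v1' },
  showApiKey: false,
};

console.log(resolveBaseURL(lmStudioCard)); // -> 'http://127.0.0.1:1234/v1'
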
4 changes: 4 additions & 0 deletions src/config/modelProviders/ollama.ts
@@ -292,6 +292,10 @@ const Ollama: ModelProviderCard = {
   modelsUrl: 'https://ollama.com/library',
   name: 'Ollama',
   showApiKey: false,
+  smoothing: {
+    speed: 2,
+    text: true,
+  },
   url: 'https://ollama.com',
 };

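For context, smoothing settings of this shape (a speed value plus a text flag) are commonly used to throttle how quickly streamed tokens are painted in the UI. The generator below is a minimal sketch under that assumption; smoothStream and the 16 ms tick are illustrative and not taken from the repository:

// Minimal sketch (assumption): flush roughly `speed` characters per tick when
// `text` smoothing is enabled, otherwise pass chunks through untouched.
interface SmoothingConfig {
  speed: number;
  text: boolean;
}

async function* smoothStream(
  chunks: AsyncIterable<string>,
  { speed, text }: SmoothingConfig,
): AsyncGenerator<string> {
  if (!text) {
    yield* chunks;
    return;
  }

  let buffer = '';
  for await (const chunk of chunks) {
    buffer += chunk;
    // Drain the buffer a few characters at a time so output appears steadily.
    while (buffer.length >= speed) {
      yield buffer.slice(0, speed);
      buffer = buffer.slice(speed);
      await new Promise((resolve) => setTimeout(resolve, 16)); // ~one frame
    }
  }

  if (buffer) yield buffer;
}
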
1 change: 1 addition & 0 deletions src/const/settings/llm.ts
@@ -91,6 +91,7 @@ export const DEFAULT_LLM_CONFIG: UserModelProviderConfig = {
   lmstudio: {
     enabled: false,
     enabledModels: filterEnabledModels(LMStudioProviderCard),
+    fetchOnClient: true,
   },
   minimax: {
     enabled: false,
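
Defaulting fetchOnClient to true for LM Studio suggests requests should go straight from the browser to the locally running server rather than through the app's backend, since http://127.0.0.1:1234 is only reachable from the user's own machine. The routing below is an illustrative sketch of that idea; the /api/chat path, sendChatRequest helper, and request body are hypothetical:

// Hypothetical sketch: call the provider endpoint directly from the browser
// when fetchOnClient is set, because a local LM Studio server is not reachable
// from a remote backend. Endpoint paths here are illustrative.
interface ProviderRuntimeConfig {
  baseURL: string;
  fetchOnClient?: boolean;
}

const sendChatRequest = async (config: ProviderRuntimeConfig, body: unknown): Promise<Response> => {
  const target = config.fetchOnClient
    ? `${config.baseURL}/chat/completions` // browser -> local server, e.g. LM Studio
    : '/api/chat'; // otherwise proxy through the application backend
  return fetch(target, {
    body: JSON.stringify(body),
    headers: { 'Content-Type': 'application/json' },
    method: 'POST',
  });
};

// Usage: with the new default, LM Studio traffic stays on the local machine.
void sendChatRequest(
  { baseURL: 'http://127.0.0.1:1234/v1', fetchOnClient: true },
  { messages: [{ content: 'hello', role: 'user' }], model: 'local-model' },
);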
