diff --git a/docker/compose.cpu.ollama.yml b/docker/compose.cpu.ollama.yml
index 4fed391..1b9749e 100644
--- a/docker/compose.cpu.ollama.yml
+++ b/docker/compose.cpu.ollama.yml
@@ -29,12 +29,12 @@ services:
           echo 'ollama serve did not start in time'; \
           exit 1; \
         fi; \
-        ollama pull phi3.5 && ollama cp phi3.5 microsoft/Phi3.5-mini-instruct; \
+        ollama pull qwen2.5:3b && ollama cp qwen2.5:3b Qwen/Qwen2.5-3B-Instruct; \
         tail -f /dev/null",
       ]
     restart: unless-stopped
     healthcheck:
-      test: ["CMD", "sh", "-c", "ollama show microsoft/Phi3.5-mini-instruct || exit 1"]
+      test: ["CMD", "sh", "-c", "ollama show Qwen/Qwen2.5-3B-Instruct || exit 1"]
       interval: 20s
       timeout: 2s
       retries: 20
diff --git a/docker/ollama.yml b/docker/ollama.yml
index ce53422..8c0cc58 100644
--- a/docker/ollama.yml
+++ b/docker/ollama.yml
@@ -1,4 +1,4 @@
 services:
   owl:
     environment:
-      - OWL_MODELS_CONFIG="models_ollama.json"
+      - OWL_MODELS_CONFIG=models_ollama.json
diff --git a/services/api/src/owl/configs/models_ollama.json b/services/api/src/owl/configs/models_ollama.json
index 0ca99f0..00f1ed0 100644
--- a/services/api/src/owl/configs/models_ollama.json
+++ b/services/api/src/owl/configs/models_ollama.json
@@ -43,15 +43,15 @@
             ]
         },
         {
-            "id": "ellm/microsoft/Phi3.5-mini-instruct",
-            "name": "ELLM Phi3.5 mini instruct (3.8B)",
-            "context_length": 131072,
+            "id": "ellm/Qwen/Qwen2.5-3B-Instruct",
+            "name": "ELLM Qwen2.5 (3B)",
+            "context_length": 32000,
             "languages": ["en"],
             "capabilities": ["chat"],
             "deployments": [
                 {
-                    "litellm_id": "ollama_chat/microsoft/Phi3.5-mini-instruct",
-                    "api_base": "http://ollama:11434",
+                    "litellm_id": "openai/Qwen/Qwen2.5-3B-Instruct",
+                    "api_base": "http://ollama:11434/v1",
                     "provider": "ellm"
                 }
             ]
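
The deployment change swaps the native ollama_chat/ LiteLLM route for the openai/ route against Ollama's OpenAI-compatible API at http://ollama:11434/v1. A minimal smoke test for that path, written as a sketch: it assumes the Ollama container's port 11434 is published to the host as localhost:11434, and it uses the model alias created by the "ollama cp" step in the compose entrypoint.

    import requests

    # Sketch of a smoke test against Ollama's OpenAI-compatible endpoint
    # (assumes port 11434 is published on localhost; adjust host/port as needed).
    resp = requests.post(
        "http://localhost:11434/v1/chat/completions",
        json={
            "model": "Qwen/Qwen2.5-3B-Instruct",  # must match the "ollama cp" alias
            "messages": [{"role": "user", "content": "Reply with one word."}],
        },
        timeout=120,
    )
    resp.raise_for_status()
    print(resp.json()["choices"][0]["message"]["content"])

If this returns a completion, the new deployment entry should resolve through the same /v1 path from inside the compose network.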