From fa4f044782ab584e057f0bf8b01bee2adbd4677b Mon Sep 17 00:00:00 2001
From: bojiang
Date: Mon, 8 Jul 2024 15:45:51 +0800
Subject: [PATCH] fix: chat ui path

---
 source/llamacpp-chat/service.py | 2 +-
 source/vllm-chat/service.py     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/source/llamacpp-chat/service.py b/source/llamacpp-chat/service.py
index c93b0b3d..3cfd766d 100644
--- a/source/llamacpp-chat/service.py
+++ b/source/llamacpp-chat/service.py
@@ -72,7 +72,7 @@ async def catch_all(full_path: str):
     sys.modules.pop("prometheus_client")
 
 
-@bentoml.mount_asgi_app(ui_app, path="/ui")
+@bentoml.mount_asgi_app(ui_app, path="/chat")
 @bentoml.mount_asgi_app(openai_api_app, path="/v1")
 @bentoml.service(**SERVICE_CONFIG)
 class LlamaCppChat:
diff --git a/source/vllm-chat/service.py b/source/vllm-chat/service.py
index 94af0b0d..42152243 100644
--- a/source/vllm-chat/service.py
+++ b/source/vllm-chat/service.py
@@ -85,7 +85,7 @@ async def catch_all(full_path: str):
 
 
 @bentoml.mount_asgi_app(openai_api_app, path="/v1")
-@bentoml.mount_asgi_app(ui_app, path="/ui")
+@bentoml.mount_asgi_app(ui_app, path="/chat")
 @bentoml.service(**SERVICE_CONFIG)
 class VLLM:
     def __init__(self) -> None:
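
Both services stack `bentoml.mount_asgi_app` decorators to expose extra ASGI apps (the OpenAI-compatible API at `/v1`, the chat UI) on the same server, and this patch only moves the UI mount from `/ui` to `/chat`. Below is a minimal, self-contained sketch of that mounting pattern, assuming a FastAPI-based UI app; `ui_app`, `SERVICE_CONFIG`, `ChatService`, and the `ping` API are hypothetical stand-ins, not the actual contents of the two service.py files.

```python
# Minimal sketch of the mounting pattern touched by this patch, assuming a
# FastAPI-based UI app. `ui_app`, `SERVICE_CONFIG`, and `ping` are
# hypothetical placeholders; only the mount path "/chat" mirrors the patch.
import bentoml
from fastapi import FastAPI
from fastapi.responses import HTMLResponse

ui_app = FastAPI()


@ui_app.get("/")
async def chat_ui() -> HTMLResponse:
    # Stand-in page; the real services serve a prebuilt chat frontend here.
    return HTMLResponse("<h1>Chat UI</h1>")


SERVICE_CONFIG = {"traffic": {"timeout": 300}}  # assumed config shape


# The UI is mounted under /chat on the same server as the service's own APIs.
@bentoml.mount_asgi_app(ui_app, path="/chat")
@bentoml.service(**SERVICE_CONFIG)
class ChatService:
    @bentoml.api
    def ping(self) -> str:
        return "pong"
```

With this layout, a locally served Bento answers the UI page under the `/chat` prefix while the service's other mounts (such as the `/v1` OpenAI-compatible API in the patched files) keep their own paths.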