diff --git a/locales/ar/common.json b/locales/ar/common.json
index 9061736f7270..84d1670baabb 100644
--- a/locales/ar/common.json
+++ b/locales/ar/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) جارٍ تحميل صفحة الدردشة...",
- "initAuth": "(2/4) جاري تهيئة خدمة المصادقة...",
- "initUser": "(3/4) جاري تهيئة حالة المستخدم...",
- "initializing": "(1/4) جارٍ تشغيل التطبيق..."
+ "appInitializing": "جارٍ تشغيل التطبيق...",
+ "finished": "تم الانتهاء من تهيئة قاعدة البيانات",
+ "goToChat": "جارٍ تحميل صفحة الدردشة...",
+ "initAuth": "جارٍ تهيئة خدمة المصادقة...",
+ "initUser": "جارٍ تهيئة حالة المستخدم...",
+ "initializing": "جارٍ تهيئة قاعدة بيانات PGlite...",
+ "loadingDependencies": "جارٍ تهيئة الاعتمادات...",
+ "loadingWasm": "جارٍ تحميل وحدة WASM...",
+ "migrating": "جارٍ تنفيذ ترحيل الجداول...",
+ "ready": "قاعدة البيانات جاهزة"
},
"autoGenerate": "توليد تلقائي",
"autoGenerateTooltip": "إكمال تلقائي بناءً على الكلمات المقترحة لوصف المساعد",
@@ -38,9 +44,9 @@
"error": "حدث خطأ، يرجى إعادة المحاولة",
"idle": "في انتظار التهيئة...",
"initializing": "جارٍ التهيئة...",
- "loadingDependencies": "جارٍ تحميل التبعيات ({{progress}}%)...",
- "loadingWasmModule": "جارٍ تحميل وحدة WASM ({{progress}}%)...",
- "migrating": "جارٍ ترحيل البيانات...",
+ "loadingDependencies": "جارٍ تحميل الاعتماديات...",
+ "loadingWasmModule": "جارٍ تحميل وحدة WASM...",
+ "migrating": "جارٍ تنفيذ ترحيل البيانات...",
"ready": "قاعدة البيانات جاهزة"
},
"modal": {
diff --git a/locales/bg-BG/common.json b/locales/bg-BG/common.json
index 361d7ceb3ae1..0eff3554e4a7 100644
--- a/locales/bg-BG/common.json
+++ b/locales/bg-BG/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Зареждане на страницата за разговори...",
- "initAuth": "(2/4) Инициализиране на услугата за удостоверяване...",
- "initUser": "(3/4) Инициализиране на състоянието на потребителя...",
- "initializing": "(1/4) Стартиране на приложението..."
+ "appInitializing": "Приложението се стартира...",
+ "finished": "Инициализацията на базата данни е завършена",
+ "goToChat": "Зареждане на страницата за разговори...",
+ "initAuth": "Инициализиране на услугата за удостоверяване...",
+ "initUser": "Инициализиране на състоянието на потребителя...",
+ "initializing": "Инициализиране на PGlite базата данни...",
+ "loadingDependencies": "Инициализиране на зависимостите...",
+ "loadingWasm": "Зареждане на WASM модула...",
+ "migrating": "Извършване на миграция на таблиците...",
+ "ready": "Базата данни е готова"
},
"autoGenerate": "Автоматично генериране",
"autoGenerateTooltip": "Автоматично генериране на описание на агент въз основа на подкани",
@@ -38,9 +44,9 @@
"error": "Възникна грешка, моля опитайте отново",
"idle": "Изчакване на инициализация...",
"initializing": "Инициализиране...",
- "loadingDependencies": "Зареждане на зависимости ({{progress}}%)...",
- "loadingWasmModule": "Зареждане на WASM модула ({{progress}}%)...",
- "migrating": "Мигриране на данни...",
+ "loadingDependencies": "Зареждане на зависимости...",
+ "loadingWasmModule": "Зареждане на WASM модула...",
+ "migrating": "Извършване на миграция на данни...",
"ready": "Базата данни е готова"
},
"modal": {
diff --git a/locales/de-DE/common.json b/locales/de-DE/common.json
index b0de9f1ccdd3..114537420ef7 100644
--- a/locales/de-DE/common.json
+++ b/locales/de-DE/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Lade die Chat-Seite...",
- "initAuth": "(2/4) Authentifizierungsdienst wird initialisiert...",
- "initUser": "(3/4) Benutzerstatus wird initialisiert...",
- "initializing": "(1/4) Anwendung wird gestartet..."
+ "appInitializing": "Anwendung wird gestartet...",
+ "finished": "Datenbankinitialisierung abgeschlossen",
+ "goToChat": "Lade die Chat-Seite...",
+ "initAuth": "Initialisiere den Authentifizierungsdienst...",
+ "initUser": "Initialisiere den Benutzerstatus...",
+ "initializing": "Initialisiere die PGlite-Datenbank...",
+ "loadingDependencies": "Abhängigkeiten werden initialisiert...",
+ "loadingWasm": "WASM-Module werden geladen...",
+ "migrating": "Datenbankmigration wird durchgeführt...",
+ "ready": "Datenbank ist bereit"
},
"autoGenerate": "Automatisch generieren",
"autoGenerateTooltip": "Assistentenbeschreibung automatisch auf Basis von Vorschlägen vervollständigen",
@@ -38,9 +44,9 @@
"error": "Ein Fehler ist aufgetreten, bitte versuchen Sie es erneut",
"idle": "Warte auf die Initialisierung...",
"initializing": "Wird initialisiert...",
- "loadingDependencies": "Lade Abhängigkeiten ({ {progress}}%)...",
- "loadingWasmModule": "Lade WASM-Modul ({ {progress}}%)...",
- "migrating": "Daten werden migriert...",
+ "loadingDependencies": "Abhängigkeiten werden geladen...",
+ "loadingWasmModule": "WASM-Modul wird geladen...",
+ "migrating": "Datenbankmigration wird durchgeführt...",
"ready": "Datenbank ist bereit"
},
"modal": {
diff --git a/locales/en-US/common.json b/locales/en-US/common.json
index d7520f4c0d48..75d5dcc229fa 100644
--- a/locales/en-US/common.json
+++ b/locales/en-US/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Loading chat page...",
- "initAuth": "(2/4) Initializing authentication service...",
- "initUser": "(3/4) Initializing user status...",
- "initializing": "(1/4) Starting up application..."
+ "appInitializing": "Application is starting...",
+ "finished": "Database initialization completed",
+ "goToChat": "Loading chat page...",
+ "initAuth": "Initializing authentication service...",
+ "initUser": "Initializing user status...",
+ "initializing": "Initializing PGlite database...",
+ "loadingDependencies": "Initializing dependencies...",
+ "loadingWasm": "Loading WASM module...",
+ "migrating": "Performing database migration...",
+ "ready": "Database is ready"
},
"autoGenerate": "Auto Generate",
"autoGenerateTooltip": "Auto-generate assistant description based on prompts",
@@ -38,9 +44,9 @@
"error": "An error occurred, please try again",
"idle": "Waiting for initialization...",
"initializing": "Initializing...",
- "loadingDependencies": "Loading dependencies ({{progress}}%)...",
- "loadingWasmModule": "Loading WASM module ({{progress}}%)...",
- "migrating": "Migrating data...",
+ "loadingDependencies": "Loading dependencies...",
+ "loadingWasmModule": "Loading WASM module...",
+ "migrating": "Performing database migration...",
"ready": "Database is ready"
},
"modal": {
diff --git a/locales/es-ES/common.json b/locales/es-ES/common.json
index db99cda4ff43..93d59b83426a 100644
--- a/locales/es-ES/common.json
+++ b/locales/es-ES/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Cargando la página de chat...",
- "initAuth": "(2/4) Inicializando el servicio de autenticación...",
- "initUser": "(3/4) Inicializando el estado del usuario...",
- "initializing": "(1/4) Iniciando la aplicación..."
+ "appInitializing": "Iniciando la aplicación...",
+ "finished": "Inicialización de la base de datos completada",
+ "goToChat": "Cargando la página de chat...",
+ "initAuth": "Inicializando el servicio de autenticación...",
+ "initUser": "Inicializando el estado del usuario...",
+ "initializing": "Inicializando la base de datos PGlite...",
+ "loadingDependencies": "Inicializando dependencias...",
+ "loadingWasm": "Cargando módulo WASM...",
+ "migrating": "Ejecutando migración de tablas de datos...",
+ "ready": "Base de datos lista"
},
"autoGenerate": "Generación automática",
"autoGenerateTooltip": "Completar automáticamente la descripción del asistente basándose en las sugerencias",
@@ -38,9 +44,9 @@
"error": "Ha ocurrido un error, por favor reintente",
"idle": "Esperando la inicialización...",
"initializing": "Inicializando...",
- "loadingDependencies": "Cargando dependencias ({{progress}}%)...",
- "loadingWasmModule": "Cargando módulo WASM ({{progress}}%)...",
- "migrating": "Migrando datos...",
+ "loadingDependencies": "Cargando dependencias...",
+ "loadingWasmModule": "Cargando módulo WASM...",
+ "migrating": "Ejecutando migración de tablas de datos...",
"ready": "La base de datos está lista"
},
"modal": {
diff --git a/locales/fa-IR/common.json b/locales/fa-IR/common.json
index 4eff9772ccf9..1565b08b40ee 100644
--- a/locales/fa-IR/common.json
+++ b/locales/fa-IR/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) در حال بارگذاری صفحه گفتگو...",
- "initAuth": "(2/4) در حال راهاندازی سرویس احراز هویت...",
- "initUser": "(3/4) در حال راهاندازی وضعیت کاربر...",
- "initializing": "(1/4) در حال راهاندازی برنامه..."
+ "appInitializing": "در حال راهاندازی برنامه...",
+ "finished": "راهاندازی پایگاه داده کامل شد",
+ "goToChat": "در حال بارگذاری صفحه گفتگو...",
+ "initAuth": "در حال راهاندازی سرویس احراز هویت...",
+ "initUser": "در حال راهاندازی وضعیت کاربر...",
+ "initializing": "در حال راهاندازی پایگاه داده PGlite...",
+ "loadingDependencies": "در حال بارگذاری وابستگیها...",
+ "loadingWasm": "در حال بارگذاری ماژول WASM...",
+ "migrating": "در حال اجرای مهاجرت جداول داده...",
+ "ready": "پایگاه داده آماده است"
},
"autoGenerate": "تکمیل خودکار",
"autoGenerateTooltip": "تکمیل خودکار توضیحات دستیار بر اساس کلمات راهنما",
@@ -38,9 +44,9 @@
"error": "خطایی رخ داده است، لطفاً دوباره تلاش کنید",
"idle": "در حال انتظار برای راهاندازی...",
"initializing": "در حال راهاندازی...",
- "loadingDependencies": "در حال بارگذاری وابستگیها ({{progress}}%)...",
- "loadingWasmModule": "در حال بارگذاری ماژول WASM ({{progress}}%)...",
- "migrating": "در حال انتقال دادهها...",
+ "loadingDependencies": "در حال بارگذاری وابستگیها...",
+ "loadingWasmModule": "در حال بارگذاری ماژول WASM...",
+ "migrating": "در حال انجام مهاجرت جدول دادهها...",
"ready": "پایگاه داده آماده است"
},
"modal": {
diff --git a/locales/fr-FR/common.json b/locales/fr-FR/common.json
index f44bbadbb90f..e04c9b9cedae 100644
--- a/locales/fr-FR/common.json
+++ b/locales/fr-FR/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Chargement de la page de conversation...",
- "initAuth": "(2/4) Initialisation du service d'authentification...",
- "initUser": "(3/4) Initialisation de l'état de l'utilisateur...",
- "initializing": "(1/4) L'application se lance..."
+ "appInitializing": "L'application se charge...",
+ "finished": "Initialisation de la base de données terminée",
+ "goToChat": "Chargement de la page de chat...",
+ "initAuth": "Initialisation du service d'authentification...",
+ "initUser": "Initialisation de l'état de l'utilisateur...",
+ "initializing": "Initialisation de la base de données PGlite...",
+ "loadingDependencies": "Chargement des dépendances...",
+ "loadingWasm": "Chargement du module WASM...",
+ "migrating": "Exécution de la migration des tables de données...",
+ "ready": "La base de données est prête"
},
"autoGenerate": "Générer automatiquement",
"autoGenerateTooltip": "Générer automatiquement la description de l'agent basée sur les suggestions",
@@ -38,9 +44,9 @@
"error": "Une erreur s'est produite, veuillez réessayer",
"idle": "En attente d'initialisation...",
"initializing": "Initialisation en cours...",
- "loadingDependencies": "Chargement des dépendances ({{progress}}%)...",
- "loadingWasmModule": "Chargement du module WASM ({{progress}}%)...",
- "migrating": "Migration des données en cours...",
+ "loadingDependencies": "Chargement des dépendances...",
+ "loadingWasmModule": "Chargement du module WASM...",
+ "migrating": "Exécution de la migration des tables de données...",
"ready": "Base de données prête"
},
"modal": {
diff --git a/locales/it-IT/common.json b/locales/it-IT/common.json
index 1233cd6a31a9..4f2af16a0cc5 100644
--- a/locales/it-IT/common.json
+++ b/locales/it-IT/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Caricamento della pagina della chat in corso...",
- "initAuth": "(2/4) Inizializzazione del servizio di autenticazione...",
- "initUser": "(3/4) Inizializzazione dello stato utente...",
- "initializing": "(1/4) Avvio dell'app in corso..."
+ "appInitializing": "Avvio dell'app in corso...",
+ "finished": "Inizializzazione del database completata",
+ "goToChat": "Caricamento della pagina di chat in corso...",
+ "initAuth": "Inizializzazione del servizio di autenticazione...",
+ "initUser": "Inizializzazione dello stato utente...",
+ "initializing": "Inizializzazione del database PGlite...",
+ "loadingDependencies": "Inizializzazione delle dipendenze...",
+ "loadingWasm": "Caricamento del modulo WASM...",
+ "migrating": "Esecuzione della migrazione delle tabelle dati...",
+ "ready": "Database pronto"
},
"autoGenerate": "Generazione automatica",
"autoGenerateTooltip": "Completamento automatico basato su suggerimenti",
@@ -38,9 +44,9 @@
"error": "Si è verificato un errore, si prega di riprovare",
"idle": "In attesa di inizializzazione...",
"initializing": "In fase di inizializzazione...",
- "loadingDependencies": "Caricamento delle dipendenze ({ {progress}}%)...",
- "loadingWasmModule": "Caricamento del modulo WASM ({ {progress}}%)...",
- "migrating": "In fase di migrazione dei dati...",
+ "loadingDependencies": "Caricamento delle dipendenze in corso...",
+ "loadingWasmModule": "Caricamento del modulo WASM in corso...",
+ "migrating": "Esecuzione della migrazione della tabella dati...",
"ready": "Database pronto"
},
"modal": {
diff --git a/locales/ja-JP/common.json b/locales/ja-JP/common.json
index 5b82d11414c7..7f23c3b1d6e8 100644
--- a/locales/ja-JP/common.json
+++ b/locales/ja-JP/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) チャットページを読み込んでいます...",
- "initAuth": "(2/4) 認証サービスの初期化...",
- "initUser": "(3/4) ユーザー状態の初期化...",
- "initializing": "(1/4) アプリを起動しています..."
+ "appInitializing": "アプリケーションを起動しています...",
+ "finished": "データベースの初期化が完了しました",
+ "goToChat": "チャットページを読み込んでいます...",
+ "initAuth": "認証サービスを初期化しています...",
+ "initUser": "ユーザー状態を初期化しています...",
+ "initializing": "PGliteデータベースを初期化しています...",
+ "loadingDependencies": "依存関係を初期化しています...",
+ "loadingWasm": "WASM モジュールを読み込んでいます...",
+ "migrating": "データテーブルの移行を実行しています...",
+ "ready": "データベースは準備完了です"
},
"autoGenerate": "自動生成",
"autoGenerateTooltip": "ヒントに基づいてエージェントの説明を自動生成します",
@@ -38,9 +44,9 @@
"error": "エラーが発生しました。再試行してください。",
"idle": "初期化を待っています...",
"initializing": "初期化中...",
- "loadingDependencies": "依存関係を読み込んでいます({{progress}}%)...",
- "loadingWasmModule": "WASM モジュールを読み込んでいます({{progress}}%)...",
- "migrating": "データを移行中...",
+ "loadingDependencies": "依存関係を読み込んでいます...",
+ "loadingWasmModule": "WASM モジュールを読み込んでいます...",
+ "migrating": "データテーブルの移行を実行しています...",
"ready": "データベースは準備完了です"
},
"modal": {
diff --git a/locales/ko-KR/common.json b/locales/ko-KR/common.json
index e36c7aa5dbe5..d9f3e1a7bd5b 100644
--- a/locales/ko-KR/common.json
+++ b/locales/ko-KR/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) 대화 페이지 로딩 중...",
- "initAuth": "(2/4) 인증 서비스 초기화 중...",
- "initUser": "(3/4) 사용자 상태 초기화 중...",
- "initializing": "(1/4) 애플리케이션 시작 중..."
+ "appInitializing": "앱을 초기화하는 중...",
+ "finished": "데이터베이스 초기화 완료",
+ "goToChat": "대화 페이지 로딩 중...",
+ "initAuth": "인증 서비스 초기화 중...",
+ "initUser": "사용자 상태 초기화 중...",
+ "initializing": "PGlite 데이터베이스 초기화 중...",
+ "loadingDependencies": "의존성 초기화 중...",
+ "loadingWasm": "WASM 모듈 로드 중...",
+ "migrating": "데이터베이스 테이블 마이그레이션 중...",
+ "ready": "데이터베이스 준비 완료"
},
"autoGenerate": "자동 생성",
"autoGenerateTooltip": "힌트 단어를 기반으로 에이전트 설명을 자동으로 완성합니다",
@@ -38,9 +44,9 @@
"error": "오류가 발생했습니다. 재시도해 주세요.",
"idle": "초기화 대기 중...",
"initializing": "초기화 중...",
- "loadingDependencies": "의존성 로딩 중({{progress}}%)...",
- "loadingWasmModule": "WASM 모듈 로딩 중({{progress}}%)...",
- "migrating": "데이터 마이그레이션 중...",
+ "loadingDependencies": "의존성 로드 중...",
+ "loadingWasmModule": "WASM 모듈 로드 중...",
+ "migrating": "데이터베이스 마이그레이션 실행 중...",
"ready": "데이터베이스 준비 완료"
},
"modal": {
diff --git a/locales/nl-NL/common.json b/locales/nl-NL/common.json
index 2a3bd1d66f62..7d05b4d76fc3 100644
--- a/locales/nl-NL/common.json
+++ b/locales/nl-NL/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) De chatpagina wordt geladen...",
- "initAuth": "(2/4) Initialisatie van de autorisatiedienst...",
- "initUser": "(3/4) Initialisatie van de gebruikersstatus...",
- "initializing": "(1/4) De applicatie wordt opgestart..."
+ "appInitializing": "Applicatie wordt gestart...",
+ "finished": "Database-initialisatie voltooid",
+ "goToChat": "De chatpagina wordt geladen...",
+ "initAuth": "Authenticatiedienst wordt geïnitialiseerd...",
+ "initUser": "Gebruikersstatus wordt geïnitialiseerd...",
+ "initializing": "PGlite-database wordt geïnitialiseerd...",
+ "loadingDependencies": "Afhankelijkheden worden geïnitialiseerd...",
+ "loadingWasm": "WASM-modules worden geladen...",
+ "migrating": "Gegevens migreren...",
+ "ready": "Database is gereed"
},
"autoGenerate": "Automatisch genereren",
"autoGenerateTooltip": "Automatisch assistentbeschrijving genereren op basis van suggesties",
@@ -38,9 +44,9 @@
"error": "Er is een fout opgetreden, probeer het opnieuw",
"idle": "Wachten op initialisatie...",
"initializing": "Bezig met initialiseren...",
- "loadingDependencies": "Afhankelijkheden laden ({{progress}}%)...",
- "loadingWasmModule": "WASM-module laden ({{progress}}%)...",
- "migrating": "Bezig met gegevens migreren...",
+ "loadingDependencies": "Afhankelijkheden laden...",
+ "loadingWasmModule": "WASM-module laden...",
+ "migrating": "Gegevens migreren...",
"ready": "Database is gereed"
},
"modal": {
diff --git a/locales/pl-PL/common.json b/locales/pl-PL/common.json
index ad08b65bf3cd..9b38b1928bbe 100644
--- a/locales/pl-PL/common.json
+++ b/locales/pl-PL/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Ładowanie strony czatu...",
- "initAuth": "(2/4) Inicjalizacja usługi autoryzacji...",
- "initUser": "(3/4) Inicjalizacja stanu użytkownika...",
- "initializing": "(1/4) Uruchamianie aplikacji..."
+ "appInitializing": "Aplikacja się uruchamia...",
+ "finished": "Inicjalizacja bazy danych zakończona",
+ "goToChat": "Ładowanie strony czatu...",
+ "initAuth": "Inicjalizacja usługi autoryzacji...",
+ "initUser": "Inicjalizacja stanu użytkownika...",
+ "initializing": "Inicjalizacja bazy danych PGlite...",
+ "loadingDependencies": "Inicjalizacja zależności...",
+ "loadingWasm": "Ładowanie modułu WASM...",
+ "migrating": "Wykonywanie migracji tabeli danych...",
+ "ready": "Baza danych jest gotowa"
},
"autoGenerate": "Automatyczne generowanie",
"autoGenerateTooltip": "Automatyczne uzupełnianie opisu asystenta na podstawie sugestii",
@@ -38,9 +44,9 @@
"error": "Wystąpił błąd, proszę spróbować ponownie",
"idle": "Oczekiwanie na inicjalizację...",
"initializing": "Inicjalizowanie...",
- "loadingDependencies": "Ładowanie zależności ({{progress}}%)...",
- "loadingWasmModule": "Ładowanie modułu WASM ({{progress}}%)...",
- "migrating": "Migracja danych...",
+ "loadingDependencies": "Ładowanie zależności...",
+ "loadingWasmModule": "Ładowanie modułu WASM...",
+ "migrating": "Wykonywanie migracji tabeli danych...",
"ready": "Baza danych gotowa"
},
"modal": {
diff --git a/locales/pt-BR/common.json b/locales/pt-BR/common.json
index a73e404015ba..08748ef8a032 100644
--- a/locales/pt-BR/common.json
+++ b/locales/pt-BR/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Carregando página de conversa...",
- "initAuth": "(2/4) Inicializando o serviço de autenticação...",
- "initUser": "(3/4) Inicializando o estado do usuário...",
- "initializing": "(1/4) Iniciando aplicativo..."
+ "appInitializing": "Iniciando o aplicativo...",
+ "finished": "Inicialização do banco de dados concluída",
+ "goToChat": "Carregando página de conversa...",
+ "initAuth": "Inicializando serviço de autenticação...",
+ "initUser": "Inicializando estado do usuário...",
+ "initializing": "Inicializando banco de dados PGlite...",
+ "loadingDependencies": "Inicializando dependências...",
+ "loadingWasm": "Carregando módulo WASM...",
+ "migrating": "Executando migração de tabelas de dados...",
+ "ready": "Banco de dados pronto"
},
"autoGenerate": "Auto completar",
"autoGenerateTooltip": "Auto completar descrição do assistente com base em sugestões",
@@ -38,9 +44,9 @@
"error": "Ocorreu um erro, por favor tente novamente",
"idle": "Aguardando inicialização...",
"initializing": "Inicializando...",
- "loadingDependencies": "Carregando dependências({{progress}}%)...",
- "loadingWasmModule": "Carregando módulo WASM({{progress}}%)...",
- "migrating": "Migrando dados...",
+ "loadingDependencies": "Carregando dependências...",
+ "loadingWasmModule": "Carregando módulo WASM...",
+ "migrating": "Executando migração de tabela de dados...",
"ready": "Banco de dados pronto"
},
"modal": {
diff --git a/locales/ru-RU/common.json b/locales/ru-RU/common.json
index 922d55de3bec..d08cd8f3f6c4 100644
--- a/locales/ru-RU/common.json
+++ b/locales/ru-RU/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Загрузка страницы чата...",
- "initAuth": "(2/4) Инициализация службы аутентификации...",
- "initUser": "(3/4) Инициализация состояния пользователя...",
- "initializing": "(1/4) Запуск приложения..."
+ "appInitializing": "Инициализация приложения...",
+ "finished": "Инициализация базы данных завершена",
+ "goToChat": "Загрузка страницы чата...",
+ "initAuth": "Инициализация службы аутентификации...",
+ "initUser": "Инициализация состояния пользователя...",
+ "initializing": "Инициализация базы данных PGlite...",
+ "loadingDependencies": "Инициализация зависимостей...",
+ "loadingWasm": "Загрузка модуля WASM...",
+ "migrating": "Выполнение миграции таблиц данных...",
+ "ready": "База данных готова"
},
"autoGenerate": "Автозаполнение",
"autoGenerateTooltip": "Автоматическое дополнение описания агента на основе подсказок",
@@ -38,9 +44,9 @@
"error": "Произошла ошибка, пожалуйста, повторите попытку",
"idle": "Ожидание инициализации...",
"initializing": "Инициализация...",
- "loadingDependencies": "Загрузка зависимостей ({{progress}}%)...",
- "loadingWasmModule": "Загрузка модуля WASM ({{progress}}%)...",
- "migrating": "Миграция данных...",
+ "loadingDependencies": "Загрузка зависимостей...",
+ "loadingWasmModule": "Загрузка модуля WASM...",
+ "migrating": "Выполнение миграции таблицы данных...",
"ready": "База данных готова"
},
"modal": {
diff --git a/locales/tr-TR/common.json b/locales/tr-TR/common.json
index 0e6ab2da8086..66f49467b4ca 100644
--- a/locales/tr-TR/common.json
+++ b/locales/tr-TR/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Sohbet sayfası yükleniyor...",
- "initAuth": "(2/4) Kimlik doğrulama hizmeti başlatılıyor...",
- "initUser": "(3/4) Kullanıcı durumu başlatılıyor...",
- "initializing": "(1/4) Uygulama başlatılıyor..."
+ "appInitializing": "Uygulama başlatılıyor...",
+ "finished": "Veritabanı başlatma tamamlandı",
+ "goToChat": "Sohbet sayfası yükleniyor...",
+ "initAuth": "Kimlik doğrulama servisi başlatılıyor...",
+ "initUser": "Kullanıcı durumu başlatılıyor...",
+ "initializing": "PGlite veritabanı başlatılıyor...",
+ "loadingDependencies": "Bağımlılıklar yükleniyor...",
+ "loadingWasm": "WASM modülü yükleniyor...",
+ "migrating": "Veri tablosu taşınıyor...",
+ "ready": "Veritabanı hazır"
},
"autoGenerate": "Otomatik Oluştur",
"autoGenerateTooltip": "Auto-generate agent description based on prompts",
@@ -38,9 +44,9 @@
"error": "Hata oluştu, lütfen tekrar deneyin",
"idle": "Başlatma bekleniyor...",
"initializing": "Başlatılıyor...",
- "loadingDependencies": "Bağımlılıklar yükleniyor ({{progress}}%)...",
- "loadingWasmModule": "WASM modülü yükleniyor ({{progress}}%)...",
- "migrating": "Veri taşınıyor...",
+ "loadingDependencies": "Bağlantılar yükleniyor...",
+ "loadingWasmModule": "WASM modülü yükleniyor...",
+ "migrating": "Veri tablosu taşınıyor...",
"ready": "Veritabanı hazır"
},
"modal": {
diff --git a/locales/vi-VN/common.json b/locales/vi-VN/common.json
index 3fc81321578e..db2e2408bc90 100644
--- a/locales/vi-VN/common.json
+++ b/locales/vi-VN/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) Đang tải trang trò chuyện...",
- "initAuth": "(2/4) Đang khởi tạo dịch vụ xác thực...",
- "initUser": "(3/4) Đang khởi tạo trạng thái người dùng...",
- "initializing": "(1/4) Đang khởi động ứng dụng..."
+ "appInitializing": "Đang khởi động ứng dụng...",
+ "finished": "Khởi tạo cơ sở dữ liệu hoàn tất",
+ "goToChat": "Đang tải trang trò chuyện...",
+ "initAuth": "Đang khởi tạo dịch vụ xác thực...",
+ "initUser": "Đang khởi tạo trạng thái người dùng...",
+ "initializing": "Đang khởi tạo cơ sở dữ liệu PGlite...",
+ "loadingDependencies": "Đang khởi tạo phụ thuộc...",
+ "loadingWasm": "Đang tải mô-đun WASM...",
+ "migrating": "Đang thực hiện di chuyển bảng dữ liệu...",
+ "ready": "Cơ sở dữ liệu đã sẵn sàng"
},
"autoGenerate": "Tự động tạo",
"autoGenerateTooltip": "Tự động hoàn thành mô tả trợ lý dựa trên từ gợi ý",
@@ -38,9 +44,9 @@
"error": "Đã xảy ra lỗi, vui lòng thử lại",
"idle": "Đang chờ khởi tạo...",
"initializing": "Đang khởi tạo...",
- "loadingDependencies": "Đang tải phụ thuộc({{progress}}%)...",
- "loadingWasmModule": "Đang tải mô-đun WASM({{progress}}%)...",
- "migrating": "Đang di chuyển dữ liệu...",
+ "loadingDependencies": "Đang tải phụ thuộc...",
+ "loadingWasmModule": "Đang tải mô-đun WASM...",
+ "migrating": "Đang thực hiện di chuyển bảng dữ liệu...",
"ready": "Cơ sở dữ liệu đã sẵn sàng"
},
"modal": {
diff --git a/locales/zh-CN/common.json b/locales/zh-CN/common.json
index 2bb3ae7a11dc..a09d38281ae6 100644
--- a/locales/zh-CN/common.json
+++ b/locales/zh-CN/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) 对话页面加载中...",
- "initAuth": "(2/4) 鉴权服务初始化...",
- "initUser": "(3/4) 用户状态初始化...",
- "initializing": "(1/4) 应用启动中..."
+ "appInitializing": "应用启动中...",
+ "finished": "数据库初始化完成",
+ "goToChat": "对话页面加载中...",
+ "initAuth": "鉴权服务初始化...",
+ "initUser": "用户状态初始化...",
+ "initializing": "PGlite 数据库初始化...",
+ "loadingDependencies": "初始化依赖...",
+ "loadingWasm": "加载 WASM 模块...",
+ "migrating": "执行数据表迁移...",
+ "ready": "数据库已就绪"
},
"autoGenerate": "自动补全",
"autoGenerateTooltip": "基于提示词自动补全助手描述",
@@ -38,9 +44,9 @@
"error": "发生错误,请重试",
"idle": "等待初始化...",
"initializing": "正在初始化...",
- "loadingDependencies": "加载依赖中({{progress}}%)...",
- "loadingWasmModule": "加载 WASM 模块中({{progress}}%)...",
- "migrating": "正在迁移数据...",
+ "loadingDependencies": "加载依赖中...",
+ "loadingWasmModule": "加载 WASM 模块中...",
+ "migrating": "执行数据表迁移...",
"ready": "数据库已就绪"
},
"modal": {
diff --git a/locales/zh-TW/common.json b/locales/zh-TW/common.json
index 60bf44a00455..d1c15f28c4ed 100644
--- a/locales/zh-TW/common.json
+++ b/locales/zh-TW/common.json
@@ -10,10 +10,16 @@
}
},
"appLoading": {
- "goToChat": "(4/4) 對話頁面加載中...",
- "initAuth": "(2/4) 鑑權服務初始化...",
- "initUser": "(3/4) 使用者狀態初始化...",
- "initializing": "(1/4) 應用啟動中..."
+ "appInitializing": "應用啟動中...",
+ "finished": "資料庫初始化完成",
+ "goToChat": "對話頁面加載中...",
+ "initAuth": "鑑權服務初始化...",
+ "initUser": "用戶狀態初始化...",
+ "initializing": "PGlite 資料庫初始化...",
+ "loadingDependencies": "初始化依賴...",
+ "loadingWasm": "加載 WASM 模組...",
+ "migrating": "執行資料表遷移...",
+ "ready": "資料庫已就緒"
},
"autoGenerate": "自動生成",
"autoGenerateTooltip": "基於提示詞自動生成助手描述",
@@ -38,9 +44,9 @@
"error": "發生錯誤,請重試",
"idle": "等待初始化...",
"initializing": "正在初始化...",
- "loadingDependencies": "加載依賴中({{progress}}%)...",
- "loadingWasmModule": "加載 WASM 模塊中({{progress}}%)...",
- "migrating": "正在遷移數據...",
+ "loadingDependencies": "載入依賴中...",
+ "loadingWasmModule": "載入 WASM 模組中...",
+ "migrating": "執行資料表遷移...",
"ready": "數據庫已就緒"
},
"modal": {
diff --git a/package.json b/package.json
index d16eb9c0865c..01bcefb0be14 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,8 @@
"build-sitemap": "tsx ./scripts/buildSitemapIndex/index.ts",
"build:analyze": "ANALYZE=true next build",
"build:docker": "DOCKER=true next build && npm run build-sitemap",
- "db:generate": "drizzle-kit generate",
+ "db:generate": "drizzle-kit generate && npm run db:generate-client",
+ "db:generate-client": "tsx ./scripts/migrateClientDB/compile-migrations.ts",
"db:migrate": "MIGRATION_DB=1 tsx ./scripts/migrateServerDB/index.ts",
"db:push": "drizzle-kit push",
"db:push-test": "NODE_ENV=test drizzle-kit push",
@@ -117,6 +118,7 @@
"@clerk/themes": "^2.1.37",
"@codesandbox/sandpack-react": "^2.19.9",
"@cyntler/react-doc-viewer": "^1.17.0",
+ "@electric-sql/pglite": "^0.2.14",
"@google/generative-ai": "^0.21.0",
"@huggingface/inference": "^2.8.1",
"@icons-pack/react-simple-icons": "9.6.0",
diff --git a/scripts/migrateClientDB/compile-migrations.ts b/scripts/migrateClientDB/compile-migrations.ts
new file mode 100644
index 000000000000..c33e9dff5fb1
--- /dev/null
+++ b/scripts/migrateClientDB/compile-migrations.ts
@@ -0,0 +1,14 @@
+import { readMigrationFiles } from 'drizzle-orm/migrator';
+import { writeFileSync } from 'node:fs';
+import { join } from 'node:path';
+
+const dbBase = join(__dirname, '../../src/database');
+const migrationsFolder = join(dbBase, './migrations');
+const migrations = readMigrationFiles({ migrationsFolder: migrationsFolder });
+
+writeFileSync(
+ join(dbBase, './client/migrations.json'),
+ JSON.stringify(migrations, null, 2), // null, 2 adds indentation for better readability
+);
+
+console.log('🏁 client migrations.json compiled!');
diff --git a/src/app/(main)/chat/_layout/Desktop/index.tsx b/src/app/(main)/chat/_layout/Desktop/index.tsx
index 84296bd8f87e..b12f5acd479e 100644
--- a/src/app/(main)/chat/_layout/Desktop/index.tsx
+++ b/src/app/(main)/chat/_layout/Desktop/index.tsx
@@ -1,5 +1,6 @@
import { Flexbox } from 'react-layout-kit';
+import InitClientDB from '../../features/InitClientDB';
import Migration from '../../features/Migration';
import { LayoutProps } from '../type';
import SessionPanel from './SessionPanel';
@@ -19,6 +20,7 @@ const Layout = ({ children, session }: LayoutProps) => {
+
{/* ↓ cloud slot ↓ */}
{/* ↑ cloud slot ↑ */}
diff --git a/src/app/(main)/chat/_layout/Mobile.tsx b/src/app/(main)/chat/_layout/Mobile.tsx
index 81fea2799d4f..ca181e6be059 100644
--- a/src/app/(main)/chat/_layout/Mobile.tsx
+++ b/src/app/(main)/chat/_layout/Mobile.tsx
@@ -1,12 +1,13 @@
'use client';
import { createStyles } from 'antd-style';
-import { memo } from 'react';
+import { Suspense, memo } from 'react';
import { Flexbox } from 'react-layout-kit';
import Migration from '@/app/(main)/chat/features/Migration';
import { useQuery } from '@/hooks/useQuery';
+import InitClientDB from '../features/InitClientDB';
import { LayoutProps } from './type';
const useStyles = createStyles(({ css, token }) => ({
@@ -40,6 +41,9 @@ const Layout = memo(({ children, session }) => {
{children}
+
+
+
>
);
});
diff --git a/src/app/(main)/chat/features/InitClientDB/EnableModal.tsx b/src/app/(main)/chat/features/InitClientDB/EnableModal.tsx
new file mode 100644
index 000000000000..8f7ea275a21d
--- /dev/null
+++ b/src/app/(main)/chat/features/InitClientDB/EnableModal.tsx
@@ -0,0 +1,109 @@
+import { Icon } from '@lobehub/ui';
+import { Button } from 'antd';
+import { createStyles } from 'antd-style';
+import { CpuIcon, Database, SearchCheck, Zap } from 'lucide-react';
+import { memo } from 'react';
+import { useTranslation } from 'react-i18next';
+import { Center, Flexbox } from 'react-layout-kit';
+
+import DataStyleModal from '@/components/DataStyleModal';
+import { useGlobalStore } from '@/store/global';
+
+const useStyles = createStyles(({ css, token, isDarkMode, responsive }) => ({
+ desc: css`
+ width: 280px;
+ color: ${token.colorTextSecondary};
+
+ ${responsive.mobile} {
+ line-height: ${token.lineHeight};
+ }
+ `,
+ hint: css`
+ font-size: ${token.fontSizeSM}px;
+ color: ${token.colorTextTertiary};
+ text-align: center;
+ `,
+ icon: css`
+ color: ${isDarkMode ? token.blue : token.geekblue};
+ `,
+ iconCtn: css`
+ width: 72px;
+ height: 72px;
+ background: ${isDarkMode ? token.blue1 : token.geekblue1};
+ border-radius: 50%;
+ `,
+ intro: css`
+ ${responsive.mobile} {
+ width: 350px;
+ margin-block-start: 24px;
+ line-height: ${token.lineHeight};
+ }
+ `,
+
+ title: css`
+ margin-block-end: 0;
+ font-size: ${token.fontSizeLG}px;
+ font-weight: bold;
+ `,
+}));
+
+interface EnableClientDBModalProps {
+ open: boolean;
+}
+
+const EnableClientDBModal = memo(({ open }) => {
+ const { t } = useTranslation('common');
+ const { styles } = useStyles();
+ const markPgliteEnabled = useGlobalStore((s) => s.markPgliteEnabled);
+ const features = [
+ {
+ avatar: Database,
+ desc: '顶级数据库Postgres',
+ title: 'Pglite XXXX',
+ },
+ {
+ avatar: Zap,
+ desc: 'AI Native 知识库',
+ title: '支持原生知识库对话',
+ },
+ {
+ avatar: SearchCheck,
+ desc: '大容量',
+ title: '高达 2G 的存储空间',
+ },
+ ];
+
+ return (
+
+
+
+
+ {t('clientDB.modal.desc')}
+
+
+
+ {features.map((item) => (
+
+
+
+
+
+ {item.title}
+ {item.desc}
+
+
+ ))}
+
+
+
+
+
+
+
+
+ );
+});
+
+export default EnableClientDBModal;
diff --git a/src/app/(main)/chat/features/InitClientDB/FailedModal.tsx b/src/app/(main)/chat/features/InitClientDB/FailedModal.tsx
new file mode 100644
index 000000000000..7c2db9dfeacc
--- /dev/null
+++ b/src/app/(main)/chat/features/InitClientDB/FailedModal.tsx
@@ -0,0 +1,85 @@
+import { Alert, Highlighter, Icon } from '@lobehub/ui';
+import { Button, Result } from 'antd';
+import { ShieldAlert } from 'lucide-react';
+import Link from 'next/link';
+import { memo } from 'react';
+import { Trans, useTranslation } from 'react-i18next';
+import { Flexbox } from 'react-layout-kit';
+import Balancer from 'react-wrap-balancer';
+
+import { GITHUB_ISSUES } from '@/const/url';
+import { githubService } from '@/services/github';
+import { useGlobalStore } from '@/store/global';
+
+interface MigrationError {
+ message: string;
+ stack: string;
+}
+
+interface FailedProps {
+ error?: MigrationError;
+}
+const FailedModal = memo(({ error }) => {
+ const { t } = useTranslation('common');
+
+ const [initializeClientDB] = useGlobalStore((s) => [s.initializeClientDB]);
+
+ return (
+
+ {!!error && (
+
+ {JSON.stringify(error)}
+
+ }
+ message={error.message}
+ style={{ flex: 1 }}
+ type={'error'}
+ />
+ )}
+
+
+
+
+ }
+ icon={}
+ status={'error'}
+ style={{ paddingBlock: 24, width: 450 }}
+ subTitle={
+
+
+ 非常抱歉,数据库初始化过程发生异常。请尝试重试,或
+ {
+ e.preventDefault();
+ githubService.submitDBV1UpgradeError(1, error!);
+ }}
+ target="_blank"
+ >
+ 提交问题
+
+ 我们将会第一时间帮你排查问题。
+
+
+ }
+ title={t('clientDB.error.title')}
+ />
+ );
+});
+
+export default FailedModal;
diff --git a/src/app/(main)/chat/features/InitClientDB/InitIndicator.tsx b/src/app/(main)/chat/features/InitClientDB/InitIndicator.tsx
new file mode 100644
index 000000000000..584a2f7c1bc0
--- /dev/null
+++ b/src/app/(main)/chat/features/InitClientDB/InitIndicator.tsx
@@ -0,0 +1,120 @@
+'use client';
+
+import { Progress } from 'antd';
+import { createStyles } from 'antd-style';
+import { AnimatePresence, motion } from 'framer-motion';
+import { rgba } from 'polished';
+import { memo } from 'react';
+import { useTranslation } from 'react-i18next';
+import { Center, Flexbox } from 'react-layout-kit';
+
+import { useGlobalStore } from '@/store/global';
+import { ClientDatabaseInitStages, DatabaseLoadingState } from '@/types/clientDB';
+
+const useStyles = createStyles(({ css, token, prefixCls }) => ({
+ bg: css`
+ padding-block: 8px;
+ padding-inline: 8px 32px;
+ background: ${token.colorText};
+ border-radius: 40px;
+ `,
+ container: css`
+ position: fixed;
+ z-index: 1000;
+ `,
+ progress: css`
+ .${prefixCls}-progress-text {
+ font-size: 12px;
+ color: ${token.colorBgContainer} !important;
+ }
+ `,
+ progressReady: css`
+ .${prefixCls}-progress-text {
+ color: ${token.colorSuccessBorder} !important;
+ }
+ `,
+
+ text: css`
+ font-size: 15px;
+ color: ${token.colorBgContainer};
+ `,
+}));
+
+interface InitClientDBProps {
+ bottom?: number;
+ show: boolean;
+}
+
+const InitClientDB = memo(({ bottom = 80, show }) => {
+ const { styles, theme, cx } = useStyles();
+ const currentStage = useGlobalStore((s) => s.initClientDBStage || DatabaseLoadingState.Idle);
+ const { t } = useTranslation('common');
+ const useInitClientDB = useGlobalStore((s) => s.useInitClientDB);
+
+ useInitClientDB();
+
+ const getStateMessage = (state: DatabaseLoadingState) => {
+ switch (state) {
+ case DatabaseLoadingState.Finished:
+ case DatabaseLoadingState.Ready: {
+ return t('clientDB.initing.ready');
+ }
+
+ case DatabaseLoadingState.Error: {
+ return t('clientDB.initing.error');
+ }
+ case DatabaseLoadingState.Idle: {
+ return t('clientDB.initing.idle');
+ }
+ case DatabaseLoadingState.Initializing: {
+ return t('clientDB.initing.initializing');
+ }
+ case DatabaseLoadingState.LoadingDependencies: {
+ return t('clientDB.initing.loadingDependencies');
+ }
+
+ case DatabaseLoadingState.LoadingWasm: {
+ return t('clientDB.initing.loadingWasmModule');
+ }
+
+ case DatabaseLoadingState.Migrating: {
+ return t('clientDB.initing.migrating');
+ }
+ }
+ };
+
+ const currentStageIndex = ClientDatabaseInitStages.indexOf(currentStage);
+ const isReady = currentStage === DatabaseLoadingState.Finished;
+ return (
+
+ {show && (
+
+
+
+
+
+
+ )}
+
+ );
+});
+export default InitClientDB;
diff --git a/src/app/(main)/chat/features/InitClientDB/index.tsx b/src/app/(main)/chat/features/InitClientDB/index.tsx
new file mode 100644
index 000000000000..19064e475cfc
--- /dev/null
+++ b/src/app/(main)/chat/features/InitClientDB/index.tsx
@@ -0,0 +1,37 @@
+'use client';
+
+import { Spin } from 'antd';
+import dynamic from 'next/dynamic';
+import { memo } from 'react';
+
+import { useGlobalStore } from '@/store/global';
+import { systemStatusSelectors } from '@/store/global/selectors';
+
+const Modal = dynamic(() => import('./EnableModal'), {
+ loading: () => ,
+ ssr: false,
+});
+
+const InitIndicator = dynamic(() => import('./InitIndicator'), {
+ ssr: false,
+});
+
+interface InitClientDBProps {
+ bottom?: number;
+}
+
+const InitClientDB = memo(({ bottom }) => {
+ const isPgliteNotEnabled = useGlobalStore(systemStatusSelectors.isPgliteNotEnabled);
+ const isPgliteNotInited = useGlobalStore(systemStatusSelectors.isPgliteNotInited);
+
+ return (
+ <>
+ {/* 当用户没有设置启用 pglite 时,强弹窗引导用户来开启弹窗 */}
+ {isPgliteNotEnabled && }
+ {/* 当用户已经启用 pglite 但没有初始化时,展示初始化指示器 */}
+ {isPgliteNotInited && }
+ >
+ );
+});
+
+export default InitClientDB;
diff --git a/src/app/loading/Client/Content.tsx b/src/app/loading/Client/Content.tsx
new file mode 100644
index 000000000000..b2c0647870f8
--- /dev/null
+++ b/src/app/loading/Client/Content.tsx
@@ -0,0 +1,42 @@
+import React, { memo } from 'react';
+import { useTranslation } from 'react-i18next';
+
+import FullscreenLoading from '@/components/FullscreenLoading';
+import { useGlobalStore } from '@/store/global';
+import { systemStatusSelectors } from '@/store/global/selectors';
+
+import { CLIENT_LOADING_STAGES } from '../stage';
+
+interface InitProps {
+ setActiveStage: (value: string) => void;
+}
+
+const Init = memo(({ setActiveStage }) => {
+ const useInitClientDB = useGlobalStore((s) => s.useInitClientDB);
+
+ useInitClientDB({ onStateChange: setActiveStage });
+
+ return null;
+});
+
+interface ContentProps {
+ loadingStage: string;
+ setActiveStage: (value: string) => void;
+}
+
+const Content = memo(({ loadingStage, setActiveStage }) => {
+ const { t } = useTranslation('common');
+ const isPgliteNotInited = useGlobalStore(systemStatusSelectors.isPgliteNotInited);
+
+ return (
+ <>
+ {isPgliteNotInited && }
+ t(`appLoading.${key}` as any))}
+ />
+ >
+ );
+});
+
+export default Content;
diff --git a/src/app/loading/Client/Redirect.tsx b/src/app/loading/Client/Redirect.tsx
new file mode 100644
index 000000000000..69fcf09e9cc6
--- /dev/null
+++ b/src/app/loading/Client/Redirect.tsx
@@ -0,0 +1,47 @@
+'use client';
+
+import { useRouter } from 'next/navigation';
+import { memo, useEffect } from 'react';
+
+import { useGlobalStore } from '@/store/global';
+import { systemStatusSelectors } from '@/store/global/selectors';
+import { useUserStore } from '@/store/user';
+
+import { AppLoadingStage } from '../stage';
+
+interface RedirectProps {
+ setActiveStage: (value: AppLoadingStage) => void;
+}
+
+const Redirect = memo(({ setActiveStage }) => {
+ const router = useRouter();
+ const isUserStateInit = useUserStore((s) => s.isUserStateInit);
+
+ const isPgliteNotEnabled = useGlobalStore(systemStatusSelectors.isPgliteNotEnabled);
+
+ const navToChat = () => {
+ setActiveStage(AppLoadingStage.GoToChat);
+ router.replace('/chat');
+ };
+
+ useEffect(() => {
+ // if pglite is not enabled, redirect to chat
+ if (isPgliteNotEnabled) {
+ navToChat();
+ return;
+ }
+
+ // if user state not init, wait for loading
+ if (!isUserStateInit) {
+ setActiveStage(AppLoadingStage.InitUser);
+ return;
+ }
+
+ // finally check the conversation status
+ navToChat();
+ }, [isUserStateInit, isPgliteNotEnabled]);
+
+ return null;
+});
+
+export default Redirect;
diff --git a/src/app/loading/Client/index.tsx b/src/app/loading/Client/index.tsx
new file mode 100644
index 000000000000..7edb702086e3
--- /dev/null
+++ b/src/app/loading/Client/index.tsx
@@ -0,0 +1,21 @@
+'use client';
+
+import { useState } from 'react';
+
+import { AppLoadingStage } from '../stage';
+import Client from './Content';
+import Redirect from './Redirect';
+
+const ScreenLoading = () => {
+ const [activeStage, setActiveStage] = useState(AppLoadingStage.Initializing);
+ return (
+ <>
+
+
+ >
+ );
+};
+
+ScreenLoading.displayName = 'ScreenLoading';
+
+export default ScreenLoading;
diff --git a/src/app/loading/Content.tsx b/src/app/loading/Server/Content.tsx
similarity index 53%
rename from src/app/loading/Content.tsx
rename to src/app/loading/Server/Content.tsx
index 63a9775e8b1f..8652d1a2d526 100644
--- a/src/app/loading/Content.tsx
+++ b/src/app/loading/Server/Content.tsx
@@ -3,15 +3,19 @@ import { useTranslation } from 'react-i18next';
import FullscreenLoading from '@/components/FullscreenLoading';
-import { AppLoadingStage } from './type';
+import { AppLoadingStage, SERVER_LOADING_STAGES } from '../stage';
interface ContentProps {
loadingStage: AppLoadingStage;
}
+
const Content = memo(({ loadingStage }) => {
const { t } = useTranslation('common');
+ const activeStage = SERVER_LOADING_STAGES.indexOf(loadingStage);
+
+ const stages = SERVER_LOADING_STAGES.map((key) => t(`appLoading.${key}` as any));
- return ;
+ return ;
});
export default Content;
diff --git a/src/app/loading/Redirect.tsx b/src/app/loading/Server/Redirect.tsx
similarity index 72%
rename from src/app/loading/Redirect.tsx
rename to src/app/loading/Server/Redirect.tsx
index e7f120a5d81d..4d270737a6ee 100644
--- a/src/app/loading/Redirect.tsx
+++ b/src/app/loading/Server/Redirect.tsx
@@ -3,12 +3,10 @@
import { useRouter } from 'next/navigation';
import { memo, useEffect } from 'react';
-import { useGlobalStore } from '@/store/global';
-import { systemStatusSelectors } from '@/store/global/selectors';
import { useUserStore } from '@/store/user';
import { authSelectors } from '@/store/user/selectors';
-import { AppLoadingStage } from './type';
+import { AppLoadingStage } from '../stage';
interface RedirectProps {
setLoadingStage: (value: AppLoadingStage) => void;
@@ -22,7 +20,6 @@ const Redirect = memo(({ setLoadingStage }) => {
s.isUserStateInit,
s.isOnboard,
]);
- const isPgliteNotEnabled = useGlobalStore(systemStatusSelectors.isPgliteNotEnabled);
const navToChat = () => {
setLoadingStage(AppLoadingStage.GoToChat);
@@ -30,12 +27,6 @@ const Redirect = memo(({ setLoadingStage }) => {
};
useEffect(() => {
- // if pglite is not enabled, redirect to chat
- if (isPgliteNotEnabled) {
- navToChat();
- return;
- }
-
// if user auth state is not ready, wait for loading
if (!isLoaded) {
setLoadingStage(AppLoadingStage.InitAuth);
@@ -60,9 +51,9 @@ const Redirect = memo(({ setLoadingStage }) => {
return;
}
- // finally check the conversation status
+ // finally go to chat
navToChat();
- }, [isUserStateInit, isLoaded, isOnboard, isLogin, isPgliteNotEnabled]);
+ }, [isUserStateInit, isLoaded, isOnboard, isLogin]);
return null;
});
diff --git a/src/app/loading/Server/index.tsx b/src/app/loading/Server/index.tsx
new file mode 100644
index 000000000000..3b207cd2f711
--- /dev/null
+++ b/src/app/loading/Server/index.tsx
@@ -0,0 +1,21 @@
+'use client';
+
+import { useState } from 'react';
+
+import { AppLoadingStage } from '../stage';
+import Client from './Content';
+import Redirect from './Redirect';
+
+const ScreenLoading = () => {
+ const [loadingStage, setLoadingStage] = useState(AppLoadingStage.Initializing);
+ return (
+ <>
+
+
+ >
+ );
+};
+
+ScreenLoading.displayName = 'ScreenLoading';
+
+export default ScreenLoading;
diff --git a/src/app/loading/index.tsx b/src/app/loading/index.tsx
index 27d7376f9816..d66fd36b7917 100644
--- a/src/app/loading/index.tsx
+++ b/src/app/loading/index.tsx
@@ -1,21 +1,9 @@
-'use client';
+import { isServerMode } from '@/const/version';
-import { useState } from 'react';
+import Client from './Client';
+import Server from './Server';
-import { AppLoadingStage } from '@/app/loading/type';
-
-import Client from './Content';
-import Redirect from './Redirect';
-
-const ScreenLoading = () => {
- const [loadingStage, setLoadingStage] = useState(AppLoadingStage.Initializing);
- return (
- <>
-
-
- >
- );
-};
+const ScreenLoading = () => (isServerMode ? : );
ScreenLoading.displayName = 'ScreenLoading';
diff --git a/src/app/loading/stage.ts b/src/app/loading/stage.ts
new file mode 100644
index 000000000000..1977352cc23d
--- /dev/null
+++ b/src/app/loading/stage.ts
@@ -0,0 +1,29 @@
+import { DatabaseLoadingState } from '@/types/clientDB';
+
+export enum AppLoadingStage {
+ GoToChat = 'goToChat',
+ Idle = 'appIdle',
+ InitAuth = 'initAuth',
+ InitUser = 'initUser',
+ Initializing = 'appInitializing',
+}
+
+export const SERVER_LOADING_STAGES = [
+ AppLoadingStage.Initializing,
+ AppLoadingStage.InitAuth,
+ AppLoadingStage.InitUser,
+ AppLoadingStage.GoToChat,
+];
+
+export const CLIENT_LOADING_STAGES = [
+ AppLoadingStage.Idle,
+ AppLoadingStage.Initializing,
+ DatabaseLoadingState.Initializing,
+ DatabaseLoadingState.LoadingDependencies,
+ DatabaseLoadingState.LoadingWasm,
+ DatabaseLoadingState.Migrating,
+ DatabaseLoadingState.Finished,
+ DatabaseLoadingState.Ready,
+ AppLoadingStage.InitUser,
+ AppLoadingStage.GoToChat,
+] as string[];
diff --git a/src/app/loading/type.ts b/src/app/loading/type.ts
deleted file mode 100644
index 28e401e5abac..000000000000
--- a/src/app/loading/type.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-export enum AppLoadingStage {
- GoToChat = 'goToChat',
- InitAuth = 'initAuth',
- InitUser = 'initUser',
- Initializing = 'initializing',
-}
diff --git a/src/components/DataStyleModal/index.tsx b/src/components/DataStyleModal/index.tsx
index d42e8669f471..afa995ff1e95 100644
--- a/src/components/DataStyleModal/index.tsx
+++ b/src/components/DataStyleModal/index.tsx
@@ -26,6 +26,7 @@ const useStyles = createStyles(({ css, token, prefixCls, isDarkMode }) => ({
interface DataStyleModalProps {
children: ReactNode;
+ hideTitle?: boolean;
icon: LucideIcon;
onOpenChange?: (open: boolean) => void;
open: boolean;
@@ -34,7 +35,7 @@ interface DataStyleModalProps {
}
const DataStyleModal = memo(
- ({ icon, onOpenChange, title, open, children, width = 550 }) => {
+ ({ icon, onOpenChange, title, open, children, width = 550, hideTitle }) => {
const { styles } = useStyles();
return (
@@ -48,10 +49,14 @@ const DataStyleModal = memo(
footer={null}
open={open}
title={
-
-
- {title}
-
+ !hideTitle ? (
+
+
+ {title}
+
+ ) : (
+
+ )
}
width={width}
>
diff --git a/src/components/FullscreenLoading/index.tsx b/src/components/FullscreenLoading/index.tsx
index b5868ddafac8..1cb29d2d67d5 100644
--- a/src/components/FullscreenLoading/index.tsx
+++ b/src/components/FullscreenLoading/index.tsx
@@ -1,18 +1,21 @@
-import { Icon } from '@lobehub/ui';
-import { Loader2 } from 'lucide-react';
-import { ReactNode, memo } from 'react';
+import React, { memo } from 'react';
import { Center, Flexbox } from 'react-layout-kit';
import { ProductLogo } from '@/components/Branding';
+import InitProgress, { StageItem } from '@/components/InitProgress';
-const FullscreenLoading = memo<{ title?: ReactNode }>(({ title }) => {
+interface FullscreenLoadingProps {
+ activeStage: number;
+ stages: StageItem[];
+}
+
+const FullscreenLoading = memo(({ activeStage, stages }) => {
return (
-
-
- {title}
+
+
diff --git a/src/components/InitProgress/index.tsx b/src/components/InitProgress/index.tsx
new file mode 100644
index 000000000000..4019f47a7106
--- /dev/null
+++ b/src/components/InitProgress/index.tsx
@@ -0,0 +1,42 @@
+import { Icon } from '@lobehub/ui';
+import { Progress, Typography } from 'antd';
+import { useTheme } from 'antd-style';
+import { Loader2 } from 'lucide-react';
+import { ReactNode, memo } from 'react';
+import { Flexbox } from 'react-layout-kit';
+
+export interface StageObjectItem {
+ icon?: ReactNode;
+ text: string;
+}
+export type StageItem = string | StageObjectItem;
+
+interface InitingProps {
+ activeStage: number;
+ stages: StageItem[];
+}
+
+const InitProgress = memo(({ activeStage, stages }) => {
+ const theme = useTheme();
+
+ const outStage = stages[activeStage];
+ const percent = (activeStage / (stages.length - 1)) * 100;
+
+ const stage = typeof outStage === 'string' ? { text: outStage } : outStage;
+
+ return (
+ <>
+
+
+ {stage?.icon ? stage?.icon : }
+ {stage?.text}
+
+ >
+ );
+});
+
+export default InitProgress;
diff --git a/src/database/client/db.test.ts b/src/database/client/db.test.ts
new file mode 100644
index 000000000000..fa11071d694e
--- /dev/null
+++ b/src/database/client/db.test.ts
@@ -0,0 +1,170 @@
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { ClientDBLoadingProgress, DatabaseLoadingState } from '@/types/clientDB';
+
+import { DatabaseManager } from './db';
+
+// Mock 所有外部依赖
+vi.mock('@electric-sql/pglite', () => ({
+ default: vi.fn(),
+ PGlite: vi.fn(),
+}));
+
+vi.mock('@electric-sql/pglite/vector', () => ({
+ default: vi.fn(),
+ vector: vi.fn(),
+}));
+
+vi.mock('drizzle-orm/pglite', () => ({
+ drizzle: vi.fn(() => ({
+ dialect: {
+ migrate: vi.fn().mockResolvedValue(undefined),
+ },
+ })),
+}));
+
+let manager: DatabaseManager;
+let progressEvents: ClientDBLoadingProgress[] = [];
+let stateChanges: DatabaseLoadingState[] = [];
+
+let callbacks = {
+ onProgress: vi.fn((progress: ClientDBLoadingProgress) => {
+ progressEvents.push(progress);
+ }),
+ onStateChange: vi.fn((state: DatabaseLoadingState) => {
+ stateChanges.push(state);
+ }),
+};
+
+beforeEach(() => {
+ vi.clearAllMocks();
+ progressEvents = [];
+ stateChanges = [];
+
+ callbacks = {
+ onProgress: vi.fn((progress: ClientDBLoadingProgress) => {
+ progressEvents.push(progress);
+ }),
+ onStateChange: vi.fn((state: DatabaseLoadingState) => {
+ stateChanges.push(state);
+ }),
+ };
+ // @ts-expect-error
+ DatabaseManager['instance'] = undefined;
+ manager = DatabaseManager.getInstance();
+});
+
+describe('DatabaseManager', () => {
+ describe('Callback Handling', () => {
+ it('should properly track loading states', async () => {
+ await manager.initialize(callbacks);
+
+ // 验证状态转换顺序
+ expect(stateChanges).toEqual([
+ DatabaseLoadingState.Initializing,
+ DatabaseLoadingState.LoadingDependencies,
+ DatabaseLoadingState.LoadingWasm,
+ DatabaseLoadingState.Migrating,
+ DatabaseLoadingState.Finished,
+ DatabaseLoadingState.Ready,
+ ]);
+ });
+
+ it('should report dependencies loading progress', async () => {
+ await manager.initialize(callbacks);
+
+ // 验证依赖加载进度回调
+ const dependencyProgress = progressEvents.filter((e) => e.phase === 'dependencies');
+ expect(dependencyProgress.length).toBeGreaterThan(0);
+ expect(dependencyProgress[dependencyProgress.length - 1]).toEqual(
+ expect.objectContaining({
+ phase: 'dependencies',
+ progress: 100,
+ costTime: expect.any(Number),
+ }),
+ );
+ });
+
+ it('should report WASM loading progress', async () => {
+ await manager.initialize(callbacks);
+
+ // 验证 WASM 加载进度回调
+ const wasmProgress = progressEvents.filter((e) => e.phase === 'wasm');
+ // expect(wasmProgress.length).toBeGreaterThan(0);
+ expect(wasmProgress[wasmProgress.length - 1]).toEqual(
+ expect.objectContaining({
+ phase: 'wasm',
+ progress: 100,
+ costTime: expect.any(Number),
+ }),
+ );
+ });
+
+ it('should handle initialization errors', async () => {
+ // 模拟加载失败
+ vi.spyOn(global, 'fetch').mockRejectedValueOnce(new Error('Network error'));
+
+ await expect(manager.initialize(callbacks)).rejects.toThrow();
+ expect(stateChanges).toContain(DatabaseLoadingState.Error);
+ });
+
+ it('should only initialize once when called multiple times', async () => {
+ const firstInit = manager.initialize(callbacks);
+ const secondInit = manager.initialize(callbacks);
+
+ await Promise.all([firstInit, secondInit]);
+
+ // 验证回调只被调用一次
+ const readyStateCount = stateChanges.filter(
+ (state) => state === DatabaseLoadingState.Ready,
+ ).length;
+ expect(readyStateCount).toBe(1);
+ });
+ });
+
+ describe('Progress Calculation', () => {
+ it('should report progress between 0 and 100', async () => {
+ await manager.initialize(callbacks);
+
+ // 验证所有进度值都在有效范围内
+ progressEvents.forEach((event) => {
+ expect(event.progress).toBeGreaterThanOrEqual(0);
+ expect(event.progress).toBeLessThanOrEqual(100);
+ });
+ });
+
+ it('should include timing information', async () => {
+ await manager.initialize(callbacks);
+
+ // 验证最终进度回调包含耗时信息
+ const finalProgress = progressEvents[progressEvents.length - 1];
+ expect(finalProgress.costTime).toBeGreaterThan(0);
+ });
+ });
+
+ describe('Error Handling', () => {
+ it('should handle missing callbacks gracefully', async () => {
+ // 测试没有提供回调的情况
+ await expect(manager.initialize()).resolves.toBeDefined();
+ });
+
+ it('should handle partial callbacks', async () => {
+ // 只提供部分回调
+ await expect(manager.initialize({ onProgress: callbacks.onProgress })).resolves.toBeDefined();
+ await expect(
+ manager.initialize({ onStateChange: callbacks.onStateChange }),
+ ).resolves.toBeDefined();
+ });
+ });
+
+ describe('Database Access', () => {
+ it('should throw error when accessing database before initialization', () => {
+ expect(() => manager.db).toThrow('Database not initialized');
+ });
+
+ it('should provide access to database after initialization', async () => {
+ await manager.initialize();
+ expect(() => manager.db).not.toThrow();
+ });
+ });
+});
diff --git a/src/database/client/db.ts b/src/database/client/db.ts
new file mode 100644
index 000000000000..4c3516d8d5d2
--- /dev/null
+++ b/src/database/client/db.ts
@@ -0,0 +1,236 @@
+import type { PgliteDatabase } from 'drizzle-orm/pglite';
+import { Md5 } from 'ts-md5';
+
+import { ClientDBLoadingProgress, DatabaseLoadingState } from '@/types/clientDB';
+import { sleep } from '@/utils/sleep';
+
+import * as schema from '../schemas';
+import migrations from './migrations.json';
+
+const pgliteSchemaHashCache = 'LOBE_CHAT_PGLITE_SCHEMA_HASH';
+
+type DrizzleInstance = PgliteDatabase;
+
+export interface DatabaseLoadingCallbacks {
+ onProgress?: (progress: ClientDBLoadingProgress) => void;
+ onStateChange?: (state: DatabaseLoadingState) => void;
+}
+
+export class DatabaseManager {
+ private static instance: DatabaseManager;
+ private dbInstance: DrizzleInstance | null = null;
+ private initPromise: Promise | null = null;
+ private callbacks?: DatabaseLoadingCallbacks;
+ private isLocalDBSchemaSynced = false;
+
+ // CDN 配置
+ private static WASM_CDN_URL =
+ 'https://registry.npmmirror.com/@electric-sql/pglite/0.2.15/files/dist/postgres.wasm';
+
+ private constructor() {}
+
+ static getInstance() {
+ if (!DatabaseManager.instance) {
+ DatabaseManager.instance = new DatabaseManager();
+ }
+ return DatabaseManager.instance;
+ }
+
+ // 加载并编译 WASM 模块
+ private async loadWasmModule(): Promise {
+ const start = Date.now();
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.LoadingWasm);
+
+ const response = await fetch(DatabaseManager.WASM_CDN_URL);
+
+ const contentLength = Number(response.headers.get('Content-Length')) || 0;
+ const reader = response.body?.getReader();
+
+ if (!reader) throw new Error('Failed to start WASM download');
+
+ let receivedLength = 0;
+ const chunks: Uint8Array[] = [];
+
+ // 读取数据流
+ // eslint-disable-next-line no-constant-condition
+ while (true) {
+ const { done, value } = await reader.read();
+
+ if (done) break;
+
+ chunks.push(value);
+ receivedLength += value.length;
+
+ // 计算并报告进度
+ const progress = Math.min(Math.round((receivedLength / contentLength) * 100), 100);
+ this.callbacks?.onProgress?.({
+ phase: 'wasm',
+ progress,
+ });
+ }
+
+ // 合并数据块
+ const wasmBytes = new Uint8Array(receivedLength);
+ let position = 0;
+ for (const chunk of chunks) {
+ wasmBytes.set(chunk, position);
+ position += chunk.length;
+ }
+
+ this.callbacks?.onProgress?.({
+ costTime: Date.now() - start,
+ phase: 'wasm',
+ progress: 100,
+ });
+
+ // 编译 WASM 模块
+ return WebAssembly.compile(wasmBytes);
+ }
+
+ // 异步加载 PGlite 相关依赖
+ private async loadDependencies() {
+ const start = Date.now();
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.LoadingDependencies);
+
+ const imports = [
+ import('@electric-sql/pglite').then((m) => ({ default: m.PGlite })),
+ import('@electric-sql/pglite/vector'),
+ import('drizzle-orm/pglite'),
+ import('@electric-sql/pglite'),
+ ];
+
+ let loaded = 0;
+ const results = await Promise.all(
+ imports.map(async (importPromise) => {
+ const result = await importPromise;
+ loaded += 1;
+
+ // 计算加载进度
+ this.callbacks?.onProgress?.({
+ phase: 'dependencies',
+ progress: Math.min(Math.round((loaded / imports.length) * 100), 100),
+ });
+ return result;
+ }),
+ );
+
+ this.callbacks?.onProgress?.({
+ costTime: Date.now() - start,
+ phase: 'dependencies',
+ progress: 100,
+ });
+
+ // @ts-ignore
+ const [{ default: PGlite }, { vector }, { drizzle }, { IdbFs, MemoryFS }] = results;
+
+ return { IdbFs, MemoryFS, PGlite, drizzle, vector };
+ }
+
+ // 数据库迁移方法
+ private async migrate(skipMultiRun = false): Promise {
+ if (this.isLocalDBSchemaSynced && skipMultiRun) return this.db;
+
+ const cacheHash = localStorage.getItem(pgliteSchemaHashCache);
+ const hash = Md5.hashStr(JSON.stringify(migrations));
+
+ // if hash is the same, no need to migrate
+ if (hash === cacheHash) {
+ this.isLocalDBSchemaSynced = true;
+ return this.db;
+ }
+
+ const start = Date.now();
+ try {
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.Migrating);
+
+ // refs: https://github.com/drizzle-team/drizzle-orm/discussions/2532
+ // @ts-expect-error
+ await this.db.dialect.migrate(migrations, this.db.session, {});
+ localStorage.setItem(pgliteSchemaHashCache, hash);
+ this.isLocalDBSchemaSynced = true;
+
+ console.info(`🗂 Migration success, take ${Date.now() - start}ms`);
+ } catch (cause) {
+ console.error('❌ Local database schema migration failed', cause);
+ throw cause;
+ }
+
+ return this.db;
+ }
+
+ // 初始化数据库
+ async initialize(callbacks?: DatabaseLoadingCallbacks): Promise {
+ if (this.initPromise) return this.initPromise;
+
+ this.callbacks = callbacks;
+
+ this.initPromise = (async () => {
+ try {
+ if (this.dbInstance) return this.dbInstance;
+
+ const time = Date.now();
+ // 初始化数据库
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.Initializing);
+
+ // 加载依赖
+ const { PGlite, vector, drizzle, IdbFs, MemoryFS } = await this.loadDependencies();
+
+ // 加载并编译 WASM 模块
+ const wasmModule = await this.loadWasmModule();
+
+ const db = new PGlite({
+ extensions: { vector },
+ fs: typeof window === 'undefined' ? new MemoryFS('lobechat') : new IdbFs('lobechat'),
+ relaxedDurability: true,
+ wasmModule,
+ });
+
+ this.dbInstance = drizzle({ client: db, schema });
+
+ await this.migrate(true);
+
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.Finished);
+ console.log(`✅ Database initialized in ${Date.now() - time}ms`);
+
+ await sleep(50);
+
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.Ready);
+
+ return this.dbInstance as DrizzleInstance;
+ } catch (error) {
+ this.initPromise = null;
+ this.callbacks?.onStateChange?.(DatabaseLoadingState.Error);
+ throw error;
+ }
+ })();
+
+ return this.initPromise;
+ }
+
+ // 获取数据库实例
+ get db(): DrizzleInstance {
+ if (!this.dbInstance) {
+ throw new Error('Database not initialized. Please call initialize() first.');
+ }
+ return this.dbInstance;
+ }
+
+ // 创建代理对象
+ createProxy(): DrizzleInstance {
+ return new Proxy({} as DrizzleInstance, {
+ get: (target, prop) => {
+ return this.db[prop as keyof DrizzleInstance];
+ },
+ });
+ }
+}
+
+// 导出单例
+const dbManager = DatabaseManager.getInstance();
+
+// 保持原有的 clientDB 导出不变
+export const clientDB = dbManager.createProxy();
+
+// 导出初始化方法,供应用启动时使用
+export const initializeDB = (callbacks?: DatabaseLoadingCallbacks) =>
+ dbManager.initialize(callbacks);
diff --git a/src/database/client/dbForTest.ts b/src/database/client/dbForTest.ts
new file mode 100644
index 000000000000..93271b53b7cc
--- /dev/null
+++ b/src/database/client/dbForTest.ts
@@ -0,0 +1,35 @@
+import { IdbFs, PGlite } from '@electric-sql/pglite';
+import { vector } from '@electric-sql/pglite/vector';
+import { drizzle } from 'drizzle-orm/pglite';
+
+import * as schema from '../schemas';
+import migrations from './migrations.json';
+
+const client = new PGlite({
+ extensions: { vector },
+ fs: new IdbFs('lobechat'),
+ relaxedDurability: true,
+});
+
+export const clientDB = drizzle({ client, schema });
+
+//prevent multiple schema migrations to be run
+let isLocalDBSchemaSynced = false;
+
+export const migrate = async (skipMultiRun = false) => {
+ if (isLocalDBSchemaSynced && skipMultiRun) return;
+
+ const start = Date.now();
+ try {
+ // @ts-expect-error
+ await clientDB.dialect.migrate(migrations, clientDB.session, {});
+ isLocalDBSchemaSynced = true;
+
+ console.info(`✅ Local database ready in ${Date.now() - start}ms`);
+ } catch (cause) {
+ console.error('❌ Local database schema migration failed', cause);
+ throw cause;
+ }
+
+ return clientDB;
+};
diff --git a/src/database/client/migrations.json b/src/database/client/migrations.json
new file mode 100644
index 000000000000..f6600bba1d51
--- /dev/null
+++ b/src/database/client/migrations.json
@@ -0,0 +1,289 @@
+[
+ {
+ "sql": [
+ "CREATE TABLE IF NOT EXISTS \"agents\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"slug\" varchar(100),\n\t\"title\" text,\n\t\"description\" text,\n\t\"tags\" jsonb DEFAULT '[]'::jsonb,\n\t\"avatar\" text,\n\t\"background_color\" text,\n\t\"plugins\" jsonb DEFAULT '[]'::jsonb,\n\t\"user_id\" text NOT NULL,\n\t\"chat_config\" jsonb,\n\t\"few_shots\" jsonb,\n\t\"model\" text,\n\t\"params\" jsonb DEFAULT '{}'::jsonb,\n\t\"provider\" text,\n\t\"system_role\" text,\n\t\"tts\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"agents_slug_unique\" UNIQUE(\"slug\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"agents_tags\" (\n\t\"agent_id\" text NOT NULL,\n\t\"tag_id\" integer NOT NULL,\n\tCONSTRAINT \"agents_tags_agent_id_tag_id_pk\" PRIMARY KEY(\"agent_id\",\"tag_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"agents_to_sessions\" (\n\t\"agent_id\" text NOT NULL,\n\t\"session_id\" text NOT NULL,\n\tCONSTRAINT \"agents_to_sessions_agent_id_session_id_pk\" PRIMARY KEY(\"agent_id\",\"session_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"files\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"user_id\" text NOT NULL,\n\t\"file_type\" varchar(255) NOT NULL,\n\t\"name\" text NOT NULL,\n\t\"size\" integer NOT NULL,\n\t\"url\" text NOT NULL,\n\t\"metadata\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"files_to_agents\" (\n\t\"file_id\" text NOT NULL,\n\t\"agent_id\" text NOT NULL,\n\tCONSTRAINT \"files_to_agents_file_id_agent_id_pk\" PRIMARY KEY(\"file_id\",\"agent_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"files_to_messages\" (\n\t\"file_id\" text NOT NULL,\n\t\"message_id\" text NOT NULL,\n\tCONSTRAINT \"files_to_messages_file_id_message_id_pk\" PRIMARY KEY(\"file_id\",\"message_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"files_to_sessions\" (\n\t\"file_id\" text NOT NULL,\n\t\"session_id\" text NOT NULL,\n\tCONSTRAINT \"files_to_sessions_file_id_session_id_pk\" PRIMARY KEY(\"file_id\",\"session_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"user_installed_plugins\" (\n\t\"user_id\" text NOT NULL,\n\t\"identifier\" text NOT NULL,\n\t\"type\" text NOT NULL,\n\t\"manifest\" jsonb,\n\t\"settings\" jsonb,\n\t\"custom_params\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"user_installed_plugins_user_id_identifier_pk\" PRIMARY KEY(\"user_id\",\"identifier\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"market\" (\n\t\"id\" serial PRIMARY KEY NOT NULL,\n\t\"agent_id\" text,\n\t\"plugin_id\" integer,\n\t\"type\" text NOT NULL,\n\t\"view\" integer DEFAULT 0,\n\t\"like\" integer DEFAULT 0,\n\t\"used\" integer DEFAULT 0,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"message_plugins\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"tool_call_id\" text,\n\t\"type\" text DEFAULT 'default',\n\t\"api_name\" text,\n\t\"arguments\" text,\n\t\"identifier\" text,\n\t\"state\" jsonb,\n\t\"error\" jsonb\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"message_tts\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"content_md5\" text,\n\t\"file_id\" text,\n\t\"voice\" text\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"message_translates\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"content\" text,\n\t\"from\" text,\n\t\"to\" text\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"messages\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"role\" text NOT NULL,\n\t\"content\" text,\n\t\"model\" text,\n\t\"provider\" text,\n\t\"favorite\" boolean DEFAULT false,\n\t\"error\" jsonb,\n\t\"tools\" jsonb,\n\t\"trace_id\" text,\n\t\"observation_id\" text,\n\t\"user_id\" text NOT NULL,\n\t\"session_id\" text,\n\t\"topic_id\" text,\n\t\"parent_id\" text,\n\t\"quota_id\" text,\n\t\"agent_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"plugins\" (\n\t\"id\" serial PRIMARY KEY NOT NULL,\n\t\"identifier\" text NOT NULL,\n\t\"title\" text NOT NULL,\n\t\"description\" text,\n\t\"avatar\" text,\n\t\"author\" text,\n\t\"manifest\" text NOT NULL,\n\t\"locale\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"plugins_identifier_unique\" UNIQUE(\"identifier\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"plugins_tags\" (\n\t\"plugin_id\" integer NOT NULL,\n\t\"tag_id\" integer NOT NULL,\n\tCONSTRAINT \"plugins_tags_plugin_id_tag_id_pk\" PRIMARY KEY(\"plugin_id\",\"tag_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"session_groups\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"name\" text NOT NULL,\n\t\"sort\" integer,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"sessions\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"slug\" varchar(100) NOT NULL,\n\t\"title\" text,\n\t\"description\" text,\n\t\"avatar\" text,\n\t\"background_color\" text,\n\t\"type\" text DEFAULT 'agent',\n\t\"user_id\" text NOT NULL,\n\t\"group_id\" text,\n\t\"pinned\" boolean DEFAULT false,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"tags\" (\n\t\"id\" serial PRIMARY KEY NOT NULL,\n\t\"slug\" text NOT NULL,\n\t\"name\" text,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"tags_slug_unique\" UNIQUE(\"slug\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"topics\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"session_id\" text,\n\t\"user_id\" text NOT NULL,\n\t\"favorite\" boolean DEFAULT false,\n\t\"title\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"user_settings\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"tts\" jsonb,\n\t\"key_vaults\" text,\n\t\"general\" jsonb,\n\t\"language_model\" jsonb,\n\t\"system_agent\" jsonb,\n\t\"default_agent\" jsonb,\n\t\"tool\" jsonb\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"users\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"username\" text,\n\t\"email\" text,\n\t\"avatar\" text,\n\t\"phone\" text,\n\t\"first_name\" text,\n\t\"last_name\" text,\n\t\"is_onboarded\" boolean DEFAULT false,\n\t\"clerk_created_at\" timestamp with time zone,\n\t\"preference\" jsonb DEFAULT '{\"guide\":{\"moveSettingsToAvatar\":true,\"topic\":true},\"telemetry\":null,\"useCmdEnterToSend\":false}'::jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"key\" text,\n\tCONSTRAINT \"users_username_unique\" UNIQUE(\"username\")\n);\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents\" ADD CONSTRAINT \"agents_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_tags\" ADD CONSTRAINT \"agents_tags_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_tags\" ADD CONSTRAINT \"agents_tags_tag_id_tags_id_fk\" FOREIGN KEY (\"tag_id\") REFERENCES \"public\".\"tags\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_to_sessions\" ADD CONSTRAINT \"agents_to_sessions_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_to_sessions\" ADD CONSTRAINT \"agents_to_sessions_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files_to_agents\" ADD CONSTRAINT \"files_to_agents_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files_to_agents\" ADD CONSTRAINT \"files_to_agents_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files_to_messages\" ADD CONSTRAINT \"files_to_messages_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files_to_messages\" ADD CONSTRAINT \"files_to_messages_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files_to_sessions\" ADD CONSTRAINT \"files_to_sessions_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files_to_sessions\" ADD CONSTRAINT \"files_to_sessions_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"user_installed_plugins\" ADD CONSTRAINT \"user_installed_plugins_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"market\" ADD CONSTRAINT \"market_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"market\" ADD CONSTRAINT \"market_plugin_id_plugins_id_fk\" FOREIGN KEY (\"plugin_id\") REFERENCES \"public\".\"plugins\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"market\" ADD CONSTRAINT \"market_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_plugins\" ADD CONSTRAINT \"message_plugins_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_tts\" ADD CONSTRAINT \"message_tts_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_tts\" ADD CONSTRAINT \"message_tts_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_translates\" ADD CONSTRAINT \"message_translates_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_topic_id_topics_id_fk\" FOREIGN KEY (\"topic_id\") REFERENCES \"public\".\"topics\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_parent_id_messages_id_fk\" FOREIGN KEY (\"parent_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_quota_id_messages_id_fk\" FOREIGN KEY (\"quota_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"plugins_tags\" ADD CONSTRAINT \"plugins_tags_plugin_id_plugins_id_fk\" FOREIGN KEY (\"plugin_id\") REFERENCES \"public\".\"plugins\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"plugins_tags\" ADD CONSTRAINT \"plugins_tags_tag_id_tags_id_fk\" FOREIGN KEY (\"tag_id\") REFERENCES \"public\".\"tags\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"session_groups\" ADD CONSTRAINT \"session_groups_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_group_id_session_groups_id_fk\" FOREIGN KEY (\"group_id\") REFERENCES \"public\".\"session_groups\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"tags\" ADD CONSTRAINT \"tags_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"topics\" ADD CONSTRAINT \"topics_session_id_sessions_id_fk\" FOREIGN KEY (\"session_id\") REFERENCES \"public\".\"sessions\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"topics\" ADD CONSTRAINT \"topics_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"user_settings\" ADD CONSTRAINT \"user_settings_id_users_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nCREATE INDEX IF NOT EXISTS \"messages_created_at_idx\" ON \"messages\" (\"created_at\");",
+ "\nCREATE UNIQUE INDEX IF NOT EXISTS \"slug_user_id_unique\" ON \"sessions\" (\"slug\",\"user_id\");\n"
+ ],
+ "bps": true,
+ "folderMillis": 1716982944425,
+ "hash": "1513c1da50dc083fc0bd9783fe88c60e4fa80b60db645aa87bfda54332252c65"
+ },
+ {
+ "sql": [
+ "ALTER TABLE \"messages\" ADD COLUMN \"client_id\" text;",
+ "\nALTER TABLE \"session_groups\" ADD COLUMN \"client_id\" text;",
+ "\nALTER TABLE \"sessions\" ADD COLUMN \"client_id\" text;",
+ "\nALTER TABLE \"topics\" ADD COLUMN \"client_id\" text;",
+ "\nCREATE INDEX IF NOT EXISTS \"messages_client_id_idx\" ON \"messages\" (\"client_id\");",
+ "\nALTER TABLE \"messages\" ADD CONSTRAINT \"messages_client_id_unique\" UNIQUE(\"client_id\");",
+ "\nALTER TABLE \"session_groups\" ADD CONSTRAINT \"session_groups_client_id_unique\" UNIQUE(\"client_id\");",
+ "\nALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_client_id_unique\" UNIQUE(\"client_id\");",
+ "\nALTER TABLE \"topics\" ADD CONSTRAINT \"topics_client_id_unique\" UNIQUE(\"client_id\");\n"
+ ],
+ "bps": true,
+ "folderMillis": 1717153686544,
+ "hash": "ddb29ee7e7a675c12b44996e4be061b1736e8f785052242801f4cdfb2a94f258"
+ },
+ {
+ "sql": [
+ "ALTER TABLE \"messages\" DROP CONSTRAINT \"messages_client_id_unique\";",
+ "\nALTER TABLE \"session_groups\" DROP CONSTRAINT \"session_groups_client_id_unique\";",
+ "\nALTER TABLE \"sessions\" DROP CONSTRAINT \"sessions_client_id_unique\";",
+ "\nALTER TABLE \"topics\" DROP CONSTRAINT \"topics_client_id_unique\";",
+ "\nDROP INDEX IF EXISTS \"messages_client_id_idx\";",
+ "\nCREATE UNIQUE INDEX IF NOT EXISTS \"message_client_id_user_unique\" ON \"messages\" (\"client_id\",\"user_id\");",
+ "\nALTER TABLE \"session_groups\" ADD CONSTRAINT \"session_group_client_id_user_unique\" UNIQUE(\"client_id\",\"user_id\");",
+ "\nALTER TABLE \"sessions\" ADD CONSTRAINT \"sessions_client_id_user_id_unique\" UNIQUE(\"client_id\",\"user_id\");",
+ "\nALTER TABLE \"topics\" ADD CONSTRAINT \"topic_client_id_user_id_unique\" UNIQUE(\"client_id\",\"user_id\");"
+ ],
+ "bps": true,
+ "folderMillis": 1717587734458,
+ "hash": "90b61fc3e744d8e2609418d9e25274ff07af4caf87370bb614db511d67900d73"
+ },
+ {
+ "sql": [
+ "CREATE TABLE IF NOT EXISTS \"user_budgets\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"free_budget_id\" text,\n\t\"free_budget_key\" text,\n\t\"subscription_budget_id\" text,\n\t\"subscription_budget_key\" text,\n\t\"package_budget_id\" text,\n\t\"package_budget_key\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"user_subscriptions\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"user_id\" text NOT NULL,\n\t\"stripe_id\" text,\n\t\"currency\" text,\n\t\"pricing\" integer,\n\t\"billing_paid_at\" integer,\n\t\"billing_cycle_start\" integer,\n\t\"billing_cycle_end\" integer,\n\t\"cancel_at_period_end\" boolean,\n\t\"cancel_at\" integer,\n\t\"next_billing\" jsonb,\n\t\"plan\" text,\n\t\"recurring\" text,\n\t\"storage_limit\" integer,\n\t\"status\" integer,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nALTER TABLE \"users\" ALTER COLUMN \"preference\" DROP DEFAULT;",
+ "\nDO $$ BEGIN\n ALTER TABLE \"user_budgets\" ADD CONSTRAINT \"user_budgets_id_users_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"user_subscriptions\" ADD CONSTRAINT \"user_subscriptions_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nALTER TABLE \"users\" DROP COLUMN IF EXISTS \"key\";\n"
+ ],
+ "bps": true,
+ "folderMillis": 1718460779230,
+ "hash": "535a9aba48be3d75762f29bbb195736f17abfe51f41a548debe925949dd0caf2"
+ },
+ {
+ "sql": [
+ "CREATE TABLE IF NOT EXISTS \"nextauth_accounts\" (\n\t\"access_token\" text,\n\t\"expires_at\" integer,\n\t\"id_token\" text,\n\t\"provider\" text NOT NULL,\n\t\"providerAccountId\" text NOT NULL,\n\t\"refresh_token\" text,\n\t\"scope\" text,\n\t\"session_state\" text,\n\t\"token_type\" text,\n\t\"type\" text NOT NULL,\n\t\"userId\" text NOT NULL,\n\tCONSTRAINT \"nextauth_accounts_provider_providerAccountId_pk\" PRIMARY KEY(\"provider\",\"providerAccountId\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"nextauth_authenticators\" (\n\t\"counter\" integer NOT NULL,\n\t\"credentialBackedUp\" boolean NOT NULL,\n\t\"credentialDeviceType\" text NOT NULL,\n\t\"credentialID\" text NOT NULL,\n\t\"credentialPublicKey\" text NOT NULL,\n\t\"providerAccountId\" text NOT NULL,\n\t\"transports\" text,\n\t\"userId\" text NOT NULL,\n\tCONSTRAINT \"nextauth_authenticators_userId_credentialID_pk\" PRIMARY KEY(\"userId\",\"credentialID\"),\n\tCONSTRAINT \"nextauth_authenticators_credentialID_unique\" UNIQUE(\"credentialID\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"nextauth_sessions\" (\n\t\"expires\" timestamp NOT NULL,\n\t\"sessionToken\" text PRIMARY KEY NOT NULL,\n\t\"userId\" text NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"nextauth_verificationtokens\" (\n\t\"expires\" timestamp NOT NULL,\n\t\"identifier\" text NOT NULL,\n\t\"token\" text NOT NULL,\n\tCONSTRAINT \"nextauth_verificationtokens_identifier_token_pk\" PRIMARY KEY(\"identifier\",\"token\")\n);\n",
+ "\nALTER TABLE \"users\" ADD COLUMN \"full_name\" text;",
+ "\nALTER TABLE \"users\" ADD COLUMN \"email_verified_at\" timestamp with time zone;",
+ "\nDO $$ BEGIN\n ALTER TABLE \"nextauth_accounts\" ADD CONSTRAINT \"nextauth_accounts_userId_users_id_fk\" FOREIGN KEY (\"userId\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"nextauth_authenticators\" ADD CONSTRAINT \"nextauth_authenticators_userId_users_id_fk\" FOREIGN KEY (\"userId\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"nextauth_sessions\" ADD CONSTRAINT \"nextauth_sessions_userId_users_id_fk\" FOREIGN KEY (\"userId\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n"
+ ],
+ "bps": true,
+ "folderMillis": 1721724512422,
+ "hash": "c63c5819d73414632ea32c543cfb997be31a2be3fad635c148c97e726c57fd16"
+ },
+ {
+ "sql": [
+ "-- Custom SQL migration file, put you code below! --\nCREATE EXTENSION IF NOT EXISTS vector;\n"
+ ],
+ "bps": true,
+ "folderMillis": 1722944166657,
+ "hash": "c112a4eb471fa4efe791b250057a1e33040515a0c60361c7d7a59044ec9e1667"
+ },
+ {
+ "sql": [
+ "CREATE TABLE IF NOT EXISTS \"agents_files\" (\n\t\"file_id\" text NOT NULL,\n\t\"agent_id\" text NOT NULL,\n\t\"enabled\" boolean DEFAULT true,\n\t\"user_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"agents_files_file_id_agent_id_user_id_pk\" PRIMARY KEY(\"file_id\",\"agent_id\",\"user_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"agents_knowledge_bases\" (\n\t\"agent_id\" text NOT NULL,\n\t\"knowledge_base_id\" text NOT NULL,\n\t\"user_id\" text NOT NULL,\n\t\"enabled\" boolean DEFAULT true,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"agents_knowledge_bases_agent_id_knowledge_base_id_pk\" PRIMARY KEY(\"agent_id\",\"knowledge_base_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"async_tasks\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"type\" text,\n\t\"status\" text,\n\t\"error\" jsonb,\n\t\"user_id\" text NOT NULL,\n\t\"duration\" integer,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"file_chunks\" (\n\t\"file_id\" varchar,\n\t\"chunk_id\" uuid,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"file_chunks_file_id_chunk_id_pk\" PRIMARY KEY(\"file_id\",\"chunk_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"global_files\" (\n\t\"hash_id\" varchar(64) PRIMARY KEY NOT NULL,\n\t\"file_type\" varchar(255) NOT NULL,\n\t\"size\" integer NOT NULL,\n\t\"url\" text NOT NULL,\n\t\"metadata\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"knowledge_base_files\" (\n\t\"knowledge_base_id\" text NOT NULL,\n\t\"file_id\" text NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\tCONSTRAINT \"knowledge_base_files_knowledge_base_id_file_id_pk\" PRIMARY KEY(\"knowledge_base_id\",\"file_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"knowledge_bases\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"name\" text NOT NULL,\n\t\"description\" text,\n\t\"avatar\" text,\n\t\"type\" text,\n\t\"user_id\" text NOT NULL,\n\t\"is_public\" boolean DEFAULT false,\n\t\"settings\" jsonb,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"message_chunks\" (\n\t\"message_id\" text,\n\t\"chunk_id\" uuid,\n\tCONSTRAINT \"message_chunks_chunk_id_message_id_pk\" PRIMARY KEY(\"chunk_id\",\"message_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"message_queries\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"message_id\" text NOT NULL,\n\t\"rewrite_query\" text,\n\t\"user_query\" text,\n\t\"embeddings_id\" uuid\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"message_query_chunks\" (\n\t\"id\" text,\n\t\"query_id\" uuid,\n\t\"chunk_id\" uuid,\n\t\"similarity\" numeric(6, 5),\n\tCONSTRAINT \"message_query_chunks_chunk_id_id_query_id_pk\" PRIMARY KEY(\"chunk_id\",\"id\",\"query_id\")\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"chunks\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"text\" text,\n\t\"abstract\" text,\n\t\"metadata\" jsonb,\n\t\"index\" integer,\n\t\"type\" varchar,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"user_id\" text\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"embeddings\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"chunk_id\" uuid,\n\t\"embeddings\" vector(1024),\n\t\"model\" text,\n\t\"user_id\" text\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"unstructured_chunks\" (\n\t\"id\" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,\n\t\"text\" text,\n\t\"metadata\" jsonb,\n\t\"index\" integer,\n\t\"type\" varchar,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"parent_id\" varchar,\n\t\"composite_id\" uuid,\n\t\"user_id\" text,\n\t\"file_id\" varchar\n);\n",
+ "\nALTER TABLE \"files_to_messages\" RENAME TO \"messages_files\";",
+ "\nDROP TABLE \"files_to_agents\";",
+ "\nALTER TABLE \"files\" ADD COLUMN \"file_hash\" varchar(64);",
+ "\nALTER TABLE \"files\" ADD COLUMN \"chunk_task_id\" uuid;",
+ "\nALTER TABLE \"files\" ADD COLUMN \"embedding_task_id\" uuid;",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_files\" ADD CONSTRAINT \"agents_files_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_files\" ADD CONSTRAINT \"agents_files_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_files\" ADD CONSTRAINT \"agents_files_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_knowledge_bases\" ADD CONSTRAINT \"agents_knowledge_bases_agent_id_agents_id_fk\" FOREIGN KEY (\"agent_id\") REFERENCES \"public\".\"agents\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_knowledge_bases\" ADD CONSTRAINT \"agents_knowledge_bases_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"agents_knowledge_bases\" ADD CONSTRAINT \"agents_knowledge_bases_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"async_tasks\" ADD CONSTRAINT \"async_tasks_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"file_chunks\" ADD CONSTRAINT \"file_chunks_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"file_chunks\" ADD CONSTRAINT \"file_chunks_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"knowledge_base_files\" ADD CONSTRAINT \"knowledge_base_files_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"knowledge_base_files\" ADD CONSTRAINT \"knowledge_base_files_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"knowledge_bases\" ADD CONSTRAINT \"knowledge_bases_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_chunks\" ADD CONSTRAINT \"message_chunks_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_chunks\" ADD CONSTRAINT \"message_chunks_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_queries\" ADD CONSTRAINT \"message_queries_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_queries\" ADD CONSTRAINT \"message_queries_embeddings_id_embeddings_id_fk\" FOREIGN KEY (\"embeddings_id\") REFERENCES \"public\".\"embeddings\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_query_chunks\" ADD CONSTRAINT \"message_query_chunks_id_messages_id_fk\" FOREIGN KEY (\"id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_query_chunks\" ADD CONSTRAINT \"message_query_chunks_query_id_message_queries_id_fk\" FOREIGN KEY (\"query_id\") REFERENCES \"public\".\"message_queries\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"message_query_chunks\" ADD CONSTRAINT \"message_query_chunks_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages_files\" ADD CONSTRAINT \"messages_files_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages_files\" ADD CONSTRAINT \"messages_files_message_id_messages_id_fk\" FOREIGN KEY (\"message_id\") REFERENCES \"public\".\"messages\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"chunks\" ADD CONSTRAINT \"chunks_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"embeddings\" ADD CONSTRAINT \"embeddings_chunk_id_chunks_id_fk\" FOREIGN KEY (\"chunk_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"embeddings\" ADD CONSTRAINT \"embeddings_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"unstructured_chunks\" ADD CONSTRAINT \"unstructured_chunks_composite_id_chunks_id_fk\" FOREIGN KEY (\"composite_id\") REFERENCES \"public\".\"chunks\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"unstructured_chunks\" ADD CONSTRAINT \"unstructured_chunks_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"unstructured_chunks\" ADD CONSTRAINT \"unstructured_chunks_file_id_files_id_fk\" FOREIGN KEY (\"file_id\") REFERENCES \"public\".\"files\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_file_hash_global_files_hash_id_fk\" FOREIGN KEY (\"file_hash\") REFERENCES \"public\".\"global_files\"(\"hash_id\") ON DELETE no action ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_chunk_task_id_async_tasks_id_fk\" FOREIGN KEY (\"chunk_task_id\") REFERENCES \"public\".\"async_tasks\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"files\" ADD CONSTRAINT \"files_embedding_task_id_async_tasks_id_fk\" FOREIGN KEY (\"embedding_task_id\") REFERENCES \"public\".\"async_tasks\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n"
+ ],
+ "bps": true,
+ "folderMillis": 1724089032064,
+ "hash": "bc4e36664868d14888b9e9aef180b3e02c563fa3c253111787e68b8ea4cd995f"
+ },
+ {
+ "sql": [
+ "-- step 1: create a temporary table to store the rows we want to keep\nCREATE TEMP TABLE embeddings_temp AS\nSELECT DISTINCT ON (chunk_id) *\nFROM embeddings\nORDER BY chunk_id, random();\n",
+ "\n\n-- step 2: delete all rows from the original table\nDELETE FROM embeddings;\n",
+ "\n\n-- step 3: insert the rows we want to keep back into the original table\nINSERT INTO embeddings\nSELECT * FROM embeddings_temp;\n",
+ "\n\n-- step 4: drop the temporary table\nDROP TABLE embeddings_temp;\n",
+ "\n\n-- step 5: now it's safe to add the unique constraint\nALTER TABLE \"embeddings\" ADD CONSTRAINT \"embeddings_chunk_id_unique\" UNIQUE(\"chunk_id\");\n"
+ ],
+ "bps": true,
+ "folderMillis": 1724254147447,
+ "hash": "e99840848ffbb33ca4d7ead6158f02b8d12cb4ff5706d4529d7fa586afa4c2a9"
+ },
+ {
+ "sql": [
+ "CREATE TABLE IF NOT EXISTS \"rag_eval_dataset_records\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_dataset_records_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t\"dataset_id\" integer NOT NULL,\n\t\"ideal\" text,\n\t\"question\" text,\n\t\"reference_files\" text[],\n\t\"metadata\" jsonb,\n\t\"user_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"rag_eval_datasets\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_datasets_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 30000 CACHE 1),\n\t\"description\" text,\n\t\"name\" text NOT NULL,\n\t\"knowledge_base_id\" text,\n\t\"user_id\" text,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"rag_eval_evaluations\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_evaluations_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t\"name\" text NOT NULL,\n\t\"description\" text,\n\t\"eval_records_url\" text,\n\t\"status\" text,\n\t\"error\" jsonb,\n\t\"dataset_id\" integer NOT NULL,\n\t\"knowledge_base_id\" text,\n\t\"language_model\" text,\n\t\"embedding_model\" text,\n\t\"user_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nCREATE TABLE IF NOT EXISTS \"rag_eval_evaluation_records\" (\n\t\"id\" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name \"rag_eval_evaluation_records_id_seq\" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t\"question\" text NOT NULL,\n\t\"answer\" text,\n\t\"context\" text[],\n\t\"ideal\" text,\n\t\"status\" text,\n\t\"error\" jsonb,\n\t\"language_model\" text,\n\t\"embedding_model\" text,\n\t\"question_embedding_id\" uuid,\n\t\"duration\" integer,\n\t\"dataset_record_id\" integer NOT NULL,\n\t\"evaluation_id\" integer NOT NULL,\n\t\"user_id\" text,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_dataset_records\" ADD CONSTRAINT \"rag_eval_dataset_records_dataset_id_rag_eval_datasets_id_fk\" FOREIGN KEY (\"dataset_id\") REFERENCES \"public\".\"rag_eval_datasets\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_dataset_records\" ADD CONSTRAINT \"rag_eval_dataset_records_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_datasets\" ADD CONSTRAINT \"rag_eval_datasets_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_datasets\" ADD CONSTRAINT \"rag_eval_datasets_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluations\" ADD CONSTRAINT \"rag_eval_evaluations_dataset_id_rag_eval_datasets_id_fk\" FOREIGN KEY (\"dataset_id\") REFERENCES \"public\".\"rag_eval_datasets\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluations\" ADD CONSTRAINT \"rag_eval_evaluations_knowledge_base_id_knowledge_bases_id_fk\" FOREIGN KEY (\"knowledge_base_id\") REFERENCES \"public\".\"knowledge_bases\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluations\" ADD CONSTRAINT \"rag_eval_evaluations_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_question_embedding_id_embeddings_id_fk\" FOREIGN KEY (\"question_embedding_id\") REFERENCES \"public\".\"embeddings\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_dataset_record_id_rag_eval_dataset_records_id_fk\" FOREIGN KEY (\"dataset_record_id\") REFERENCES \"public\".\"rag_eval_dataset_records\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_evaluation_id_rag_eval_evaluations_id_fk\" FOREIGN KEY (\"evaluation_id\") REFERENCES \"public\".\"rag_eval_evaluations\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"rag_eval_evaluation_records\" ADD CONSTRAINT \"rag_eval_evaluation_records_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n"
+ ],
+ "bps": true,
+ "folderMillis": 1725366565650,
+ "hash": "9646161fa041354714f823d726af27247bcd6e60fa3be5698c0d69f337a5700b"
+ },
+ {
+ "sql": [
+ "DROP TABLE \"user_budgets\";",
+ "\nDROP TABLE \"user_subscriptions\";"
+ ],
+ "bps": true,
+ "folderMillis": 1729699958471,
+ "hash": "7dad43a2a25d1aec82124a4e53f8d82f8505c3073f23606c1dc5d2a4598eacf9"
+ },
+ {
+ "sql": [
+ "DROP TABLE \"agents_tags\" CASCADE;",
+ "\nDROP TABLE \"market\" CASCADE;",
+ "\nDROP TABLE \"plugins\" CASCADE;",
+ "\nDROP TABLE \"plugins_tags\" CASCADE;",
+ "\nDROP TABLE \"tags\" CASCADE;",
+ "\nALTER TABLE \"agents\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"agents_files\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"agents_knowledge_bases\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"async_tasks\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"files\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"global_files\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"knowledge_bases\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"messages\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"chunks\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"unstructured_chunks\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"rag_eval_dataset_records\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"rag_eval_dataset_records\" ADD COLUMN \"updated_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"rag_eval_datasets\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"rag_eval_evaluations\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"rag_eval_evaluation_records\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"rag_eval_evaluation_records\" ADD COLUMN \"updated_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"session_groups\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"sessions\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"topics\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"user_installed_plugins\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;",
+ "\nALTER TABLE \"users\" ADD COLUMN \"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL;"
+ ],
+ "bps": true,
+ "folderMillis": 1730900133049,
+ "hash": "a7d801b679e25ef3ffda343366992b2835c089363e9d7c09074336d40e438004"
+ },
+ {
+ "sql": [
+ "ALTER TABLE \"topics\" ADD COLUMN \"history_summary\" text;",
+ "\nALTER TABLE \"topics\" ADD COLUMN \"metadata\" jsonb;\n"
+ ],
+ "bps": true,
+ "folderMillis": 1731138670427,
+ "hash": "80c2eae0600190b354e4fd6b619687a66186b992ec687495bb55c6c163a98fa6"
+ },
+ {
+ "sql": [
+ "CREATE TABLE IF NOT EXISTS \"threads\" (\n\t\"id\" text PRIMARY KEY NOT NULL,\n\t\"title\" text,\n\t\"type\" text NOT NULL,\n\t\"status\" text DEFAULT 'active',\n\t\"topic_id\" text NOT NULL,\n\t\"source_message_id\" text NOT NULL,\n\t\"parent_thread_id\" text,\n\t\"user_id\" text NOT NULL,\n\t\"last_active_at\" timestamp with time zone DEFAULT now(),\n\t\"accessed_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"created_at\" timestamp with time zone DEFAULT now() NOT NULL,\n\t\"updated_at\" timestamp with time zone DEFAULT now() NOT NULL\n);\n",
+ "\nALTER TABLE \"messages\" ADD COLUMN \"thread_id\" text;",
+ "\nDO $$ BEGIN\n ALTER TABLE \"threads\" ADD CONSTRAINT \"threads_topic_id_topics_id_fk\" FOREIGN KEY (\"topic_id\") REFERENCES \"public\".\"topics\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"threads\" ADD CONSTRAINT \"threads_parent_thread_id_threads_id_fk\" FOREIGN KEY (\"parent_thread_id\") REFERENCES \"public\".\"threads\"(\"id\") ON DELETE set null ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"threads\" ADD CONSTRAINT \"threads_user_id_users_id_fk\" FOREIGN KEY (\"user_id\") REFERENCES \"public\".\"users\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n",
+ "\nDO $$ BEGIN\n ALTER TABLE \"messages\" ADD CONSTRAINT \"messages_thread_id_threads_id_fk\" FOREIGN KEY (\"thread_id\") REFERENCES \"public\".\"threads\"(\"id\") ON DELETE cascade ON UPDATE no action;\nEXCEPTION\n WHEN duplicate_object THEN null;\nEND $$;\n"
+ ],
+ "bps": true,
+ "folderMillis": 1731858381716,
+ "hash": "d8263bfefe296ed366379c7b7fc65195d12e6a1c0a9f1c96097ea28f2123fe50"
+ }
+]
\ No newline at end of file
diff --git a/src/features/MobileSwitchLoading/index.tsx b/src/features/MobileSwitchLoading/index.tsx
index a8f3303dca44..a25069d097e0 100644
--- a/src/features/MobileSwitchLoading/index.tsx
+++ b/src/features/MobileSwitchLoading/index.tsx
@@ -1,13 +1,26 @@
-'use client';
-
+import { Icon } from '@lobehub/ui';
+import { Loader2 } from 'lucide-react';
+import { DynamicOptions } from 'next/dist/shared/lib/dynamic';
+import { memo } from 'react';
import { useTranslation } from 'react-i18next';
+import { Center, Flexbox } from 'react-layout-kit';
-import FullscreenLoading from '@/components/FullscreenLoading';
+import { ProductLogo } from '@/components/Branding';
-const MobileSwitchLoading = () => {
+// @ts-expect-error
+const MobileSwitchLoading: DynamicOptions['loading'] = memo(() => {
const { t } = useTranslation('common');
-
- return ;
-};
+ return (
+
+
+
+
+
+ {t('layoutInitializing')}
+
+
+
+ );
+});
export default MobileSwitchLoading;
diff --git a/src/locales/default/common.ts b/src/locales/default/common.ts
index 3da2c7630e58..16f41dd049e9 100644
--- a/src/locales/default/common.ts
+++ b/src/locales/default/common.ts
@@ -11,10 +11,16 @@ export default {
},
},
appLoading: {
- goToChat: '(4/4) 对话页面加载中...',
- initAuth: '(2/4) 鉴权服务初始化...',
- initUser: '(3/4) 用户状态初始化...',
- initializing: '(1/4) 应用启动中...',
+ appInitializing: '应用启动中...',
+ finished: '数据库初始化完成',
+ goToChat: '对话页面加载中...',
+ initAuth: '鉴权服务初始化...',
+ initUser: '用户状态初始化...',
+ initializing: 'PGlite 数据库初始化...',
+ loadingDependencies: '初始化依赖...',
+ loadingWasm: '加载 WASM 模块...',
+ migrating: '执行数据表迁移...',
+ ready: '数据库已就绪',
},
autoGenerate: '自动补全',
autoGenerateTooltip: '基于提示词自动补全助手描述',
@@ -40,9 +46,9 @@ export default {
error: '发生错误,请重试',
idle: '等待初始化...',
initializing: '正在初始化...',
- loadingDependencies: `加载依赖中({{progress}}%)...`,
- loadingWasmModule: '加载 WASM 模块中({{progress}}%)...',
- migrating: '正在迁移数据...',
+ loadingDependencies: '加载依赖中...',
+ loadingWasmModule: '加载 WASM 模块中...',
+ migrating: '执行数据表迁移...',
ready: '数据库已就绪',
},
modal: {
diff --git a/src/server/routers/lambda/file.ts b/src/server/routers/lambda/file.ts
index a030c1ddf163..f31fc4d1aee8 100644
--- a/src/server/routers/lambda/file.ts
+++ b/src/server/routers/lambda/file.ts
@@ -32,9 +32,7 @@ export const fileRouter = router({
}),
createFile: fileProcedure
- .input(
- UploadFileSchema.omit({ data: true, saveMode: true, url: true }).extend({ url: z.string() }),
- )
+ .input(UploadFileSchema.omit({ url: true }).extend({ url: z.string() }))
.mutation(async ({ ctx, input }) => {
const { isExist } = await ctx.fileModel.checkHash(input.hash!);
diff --git a/src/services/baseClientService/index.ts b/src/services/baseClientService/index.ts
new file mode 100644
index 000000000000..2717d974c783
--- /dev/null
+++ b/src/services/baseClientService/index.ts
@@ -0,0 +1,19 @@
+const getClientDBUserId = () => {
+ if (typeof window === 'undefined') return undefined;
+
+ return window.__lobeClientUserId;
+};
+
+const FALLBACK_CLIENT_DB_USER_ID = 'DEFAULT_LOBE_CHAT_USER';
+
+export class BaseClientService {
+ private readonly fallbackUserId: string;
+
+ protected get userId(): string {
+ return getClientDBUserId() || this.fallbackUserId;
+ }
+
+ constructor(userId?: string) {
+ this.fallbackUserId = userId || FALLBACK_CLIENT_DB_USER_ID;
+ }
+}
diff --git a/src/services/debug.ts b/src/services/debug.ts
index 1584e5f8de3e..51ce6d59bcb4 100644
--- a/src/services/debug.ts
+++ b/src/services/debug.ts
@@ -1,39 +1,37 @@
-import { DEBUG_MODEL } from '@/database/_deprecated/models/__DEBUG';
-
class DebugService {
async insertLargeDataToDB() {
- await DEBUG_MODEL.createRandomData({
- messageCount: 100_000,
- sessionCount: 40,
- startIndex: 0,
- topicCount: 200,
- });
-
- console.log('已插入10w');
-
- await DEBUG_MODEL.createRandomData({
- messageCount: 300_000,
- sessionCount: 40,
- startIndex: 100_001,
- topicCount: 200,
- });
- console.log('已插入40w');
-
- await DEBUG_MODEL.createRandomData({
- messageCount: 300_000,
- sessionCount: 40,
- startIndex: 400_001,
- topicCount: 200,
- });
- console.log('已插入70w');
-
- await DEBUG_MODEL.createRandomData({
- messageCount: 300_000,
- sessionCount: 40,
- startIndex: 700_001,
- topicCount: 200,
- });
- console.log('已插入100w');
+ // await DEBUG_MODEL.createRandomData({
+ // messageCount: 100_000,
+ // sessionCount: 40,
+ // startIndex: 0,
+ // topicCount: 200,
+ // });
+ //
+ // console.log('已插入10w');
+ //
+ // await DEBUG_MODEL.createRandomData({
+ // messageCount: 300_000,
+ // sessionCount: 40,
+ // startIndex: 100_001,
+ // topicCount: 200,
+ // });
+ // console.log('已插入40w');
+ //
+ // await DEBUG_MODEL.createRandomData({
+ // messageCount: 300_000,
+ // sessionCount: 40,
+ // startIndex: 400_001,
+ // topicCount: 200,
+ // });
+ // console.log('已插入70w');
+ //
+ // await DEBUG_MODEL.createRandomData({
+ // messageCount: 300_000,
+ // sessionCount: 40,
+ // startIndex: 700_001,
+ // topicCount: 200,
+ // });
+ // console.log('已插入100w');
}
}
diff --git a/src/services/file/ClientS3/index.test.ts b/src/services/file/ClientS3/index.test.ts
new file mode 100644
index 000000000000..266c82078ac4
--- /dev/null
+++ b/src/services/file/ClientS3/index.test.ts
@@ -0,0 +1,115 @@
+import { createStore, del, get, set } from 'idb-keyval';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { BrowserS3Storage } from './index';
+
+// Mock idb-keyval
+vi.mock('idb-keyval', () => ({
+ createStore: vi.fn(),
+ set: vi.fn(),
+ get: vi.fn(),
+ del: vi.fn(),
+}));
+
+let storage: BrowserS3Storage;
+let mockStore = {};
+
+beforeEach(() => {
+ // Reset all mocks before each test
+ vi.clearAllMocks();
+ mockStore = {};
+ (createStore as any).mockReturnValue(mockStore);
+ storage = new BrowserS3Storage();
+});
+
+describe('BrowserS3Storage', () => {
+ describe('constructor', () => {
+ it('should create store when in browser environment', () => {
+ expect(createStore).toHaveBeenCalledWith('lobechat-local-s3', 'objects');
+ });
+ });
+
+ describe('putObject', () => {
+ it('should successfully put a file object', async () => {
+ const mockFile = new File(['test content'], 'test.txt', { type: 'text/plain' });
+ const mockArrayBuffer = new ArrayBuffer(8);
+ vi.spyOn(mockFile, 'arrayBuffer').mockResolvedValue(mockArrayBuffer);
+ (set as any).mockResolvedValue(undefined);
+
+ await storage.putObject('1-test-key', mockFile);
+
+ expect(set).toHaveBeenCalledWith(
+ '1-test-key',
+ {
+ data: mockArrayBuffer,
+ name: 'test.txt',
+ type: 'text/plain',
+ },
+ mockStore,
+ );
+ });
+
+ it('should throw error when put operation fails', async () => {
+ const mockFile = new File(['test content'], 'test.txt', { type: 'text/plain' });
+ const mockError = new Error('Storage error');
+ (set as any).mockRejectedValue(mockError);
+
+ await expect(storage.putObject('test-key', mockFile)).rejects.toThrow(
+ 'Failed to put file test.txt: Storage error',
+ );
+ });
+ });
+
+ describe('getObject', () => {
+ it('should successfully get a file object', async () => {
+ const mockData = {
+ data: new ArrayBuffer(8),
+ name: 'test.txt',
+ type: 'text/plain',
+ };
+ (get as any).mockResolvedValue(mockData);
+
+ const result = await storage.getObject('test-key');
+
+ expect(result).toBeInstanceOf(File);
+ expect(result?.name).toBe('test.txt');
+ expect(result?.type).toBe('text/plain');
+ });
+
+ it('should return undefined when file not found', async () => {
+ (get as any).mockResolvedValue(undefined);
+
+ const result = await storage.getObject('test-key');
+
+ expect(result).toBeUndefined();
+ });
+
+ it('should throw error when get operation fails', async () => {
+ const mockError = new Error('Storage error');
+ (get as any).mockRejectedValue(mockError);
+
+ await expect(storage.getObject('test-key')).rejects.toThrow(
+ 'Failed to get object (key=test-key): Storage error',
+ );
+ });
+ });
+
+ describe('deleteObject', () => {
+ it('should successfully delete a file object', async () => {
+ (del as any).mockResolvedValue(undefined);
+
+ await storage.deleteObject('test-key2');
+
+ expect(del).toHaveBeenCalledWith('test-key2', {});
+ });
+
+ it('should throw error when delete operation fails', async () => {
+ const mockError = new Error('Storage error');
+ (del as any).mockRejectedValue(mockError);
+
+ await expect(storage.deleteObject('test-key')).rejects.toThrow(
+ 'Failed to delete object (key=test-key): Storage error',
+ );
+ });
+ });
+});
diff --git a/src/services/file/ClientS3/index.ts b/src/services/file/ClientS3/index.ts
new file mode 100644
index 000000000000..94692f0ecb05
--- /dev/null
+++ b/src/services/file/ClientS3/index.ts
@@ -0,0 +1,58 @@
+import { createStore, del, get, set } from 'idb-keyval';
+
+const BROWSER_S3_DB_NAME = 'lobechat-local-s3';
+
+export class BrowserS3Storage {
+ private store;
+
+ constructor() {
+ // skip server-side rendering
+ if (typeof window === 'undefined') return;
+
+ this.store = createStore(BROWSER_S3_DB_NAME, 'objects');
+ }
+
+ /**
+ * 上传文件
+ * @param key 文件 hash
+ * @param file File 对象
+ */
+ async putObject(key: string, file: File): Promise<void> {
+ try {
+ const data = await file.arrayBuffer();
+ await set(key, { data, name: file.name, type: file.type }, this.store);
+ } catch (e) {
+ throw new Error(`Failed to put file ${file.name}: ${(e as Error).message}`);
+ }
+ }
+
+ /**
+ * 获取文件
+ * @param key 文件 hash
+ * @returns File 对象
+ */
+ async getObject(key: string): Promise<File | undefined> {
+ try {
+ const res = await get<{ data: ArrayBuffer; name: string; type: string }>(key, this.store);
+ if (!res) return;
+
+ return new File([res.data], res!.name, { type: res?.type });
+ } catch (e) {
+ throw new Error(`Failed to get object (key=${key}): ${(e as Error).message}`);
+ }
+ }
+
+ /**
+ * 删除文件
+ * @param key 文件 hash
+ */
+ async deleteObject(key: string): Promise<void> {
+ try {
+ await del(key, this.store);
+ } catch (e) {
+ throw new Error(`Failed to delete object (key=${key}): ${(e as Error).message}`);
+ }
+ }
+}
+
+export const clientS3Storage = new BrowserS3Storage();
diff --git a/src/services/file/client.test.ts b/src/services/file/client.test.ts
index 636db762c4cf..32c3c57f3b1c 100644
--- a/src/services/file/client.test.ts
+++ b/src/services/file/client.test.ts
@@ -1,114 +1,198 @@
-import { Mock, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
+import { eq } from 'drizzle-orm';
+import { beforeEach, describe, expect, it } from 'vitest';
-import { fileEnv } from '@/config/file';
-import { FileModel } from '@/database/_deprecated/models/file';
-import { DB_File } from '@/database/_deprecated/schemas/files';
-import { serverConfigSelectors } from '@/store/serverConfig/selectors';
-import { createServerConfigStore } from '@/store/serverConfig/store';
+import { clientDB, initializeDB } from '@/database/client/db';
+import { files, globalFiles, users } from '@/database/schemas';
+import { clientS3Storage } from '@/services/file/ClientS3';
+import { UploadFileParams } from '@/types/files';
import { ClientService } from './client';
-const fileService = new ClientService();
+const userId = 'file-user';
-beforeAll(() => {
- createServerConfigStore();
-});
-// Mocks for the FileModel
-vi.mock('@/database/_deprecated/models/file', () => ({
- FileModel: {
- create: vi.fn(),
- delete: vi.fn(),
- findById: vi.fn(),
- clear: vi.fn(),
- },
-}));
-
-let s3Domain: string;
-
-vi.mock('@/config/file', () => ({
- fileEnv: {
- get NEXT_PUBLIC_S3_DOMAIN() {
- return s3Domain;
- },
- },
-}));
-
-// Mocks for the URL and Blob objects
-global.URL.createObjectURL = vi.fn();
-global.Blob = vi.fn();
-
-beforeEach(() => {
- // Reset all mocks before each test
- vi.resetAllMocks();
- s3Domain = '';
+const fileService = new ClientService(userId);
+
+const mockFile = {
+ name: 'mock.png',
+ fileType: 'image/png',
+ size: 1,
+ url: '',
+};
+
+beforeEach(async () => {
+ await initializeDB();
+
+ await clientDB.delete(users);
+ await clientDB.delete(globalFiles);
+ // 创建测试数据
+ await clientDB.transaction(async (tx) => {
+ await tx.insert(users).values({ id: userId });
+ });
});
describe('FileService', () => {
- it('createFile should save the file to the database', async () => {
- const localFile: DB_File = {
- name: 'test',
- data: new ArrayBuffer(1),
- fileType: 'image/png',
- saveMode: 'local',
- size: 1,
- };
+ describe('createFile', () => {
+ it('createFile should save the file to the database', async () => {
+ const localFile: UploadFileParams = {
+ name: 'test',
+ fileType: 'image/png',
+ url: '',
+ size: 1,
+ hash: '123',
+ };
+
+ await clientS3Storage.putObject(
+ '123',
+ new File([new ArrayBuffer(1)], 'test.png', { type: 'image/png' }),
+ );
- (FileModel.create as Mock).mockResolvedValue(localFile);
+ const result = await fileService.createFile(localFile);
+
+ expect(result).toMatchObject({ url: 'data:image/png;base64,AA==' });
+ });
- const result = await fileService.createFile(localFile);
+ it('should throw error when file is not found in storage during base64 conversion', async () => {
+ const localFile: UploadFileParams = {
+ name: 'test',
+ fileType: 'image/png',
+ url: '',
+ size: 1,
+ hash: 'non-existing-hash',
+ };
- expect(FileModel.create).toHaveBeenCalledWith(localFile);
- expect(result).toEqual({ url: 'data:image/png;base64,AA==' });
+ // 不调用 clientS3Storage.putObject,模拟文件不存在的情况
+
+ const promise = fileService.createFile(localFile);
+
+ await expect(promise).rejects.toThrow('file not found');
+ });
});
it('removeFile should delete the file from the database', async () => {
const fileId = '1';
- (FileModel.delete as Mock).mockResolvedValue(true);
+ await clientDB.insert(files).values({ id: fileId, userId, ...mockFile });
- const result = await fileService.removeFile(fileId);
+ await fileService.removeFile(fileId);
- expect(FileModel.delete).toHaveBeenCalledWith(fileId);
- expect(result).toBe(true);
+ const result = await clientDB.query.files.findFirst({
+ where: eq(files.id, fileId),
+ });
+
+ expect(result).toBeUndefined();
});
describe('getFile', () => {
it('should retrieve and convert local file info to FilePreview', async () => {
- const fileId = '1';
- const fileData = {
- name: 'test',
- data: new ArrayBuffer(1),
+ const fileId = 'rwlijweled';
+ const file = {
fileType: 'image/png',
- saveMode: 'local',
size: 1,
- createdAt: 1,
- updatedAt: 2,
- } as DB_File;
+ name: 'test.png',
+ url: 'idb://12312/abc.png',
+ hashId: '123tttt',
+ };
- (FileModel.findById as Mock).mockResolvedValue(fileData);
- (global.URL.createObjectURL as Mock).mockReturnValue('blob:test');
- (global.Blob as Mock).mockImplementation(() => ['test']);
+ await clientDB.insert(globalFiles).values(file);
+
+ await clientDB.insert(files).values({
+ id: fileId,
+ userId,
+ ...file,
+ createdAt: new Date(1),
+ updatedAt: new Date(2),
+ fileHash: file.hashId,
+ });
+
+ await clientS3Storage.putObject(
+ file.hashId,
+ new File([new ArrayBuffer(1)], file.name, { type: file.fileType }),
+ );
const result = await fileService.getFile(fileId);
- expect(FileModel.findById).toHaveBeenCalledWith(fileId);
- expect(result).toEqual({
+ expect(result).toMatchObject({
createdAt: new Date(1),
- id: '1',
+ id: 'rwlijweled',
size: 1,
type: 'image/png',
- name: 'test',
- url: 'blob:test',
+ name: 'test.png',
updatedAt: new Date(2),
});
});
it('should throw an error when the file is not found', async () => {
const fileId = 'non-existent';
- (FileModel.findById as Mock).mockResolvedValue(null);
const getFilePromise = fileService.getFile(fileId);
await expect(getFilePromise).rejects.toThrow('file not found');
});
});
+
+ describe('removeFiles', () => {
+ it('should delete multiple files from the database', async () => {
+ const fileIds = ['1', '2', '3'];
+
+      // Insert test file fixtures
+ await Promise.all(
+ fileIds.map((id) => clientDB.insert(files).values({ id, userId, ...mockFile })),
+ );
+
+ await fileService.removeFiles(fileIds);
+
+      // Verify that every file was deleted
+ const remainingFiles = await clientDB.query.files.findMany({
+ where: (fields, { inArray }) => inArray(fields.id, fileIds),
+ });
+
+ expect(remainingFiles).toHaveLength(0);
+ });
+ });
+
+ describe('removeAllFiles', () => {
+ it('should clear all files for the user', async () => {
+      // Insert test file fixtures
+ await Promise.all([
+ clientDB.insert(files).values({ id: '1', userId, ...mockFile }),
+ clientDB.insert(files).values({ id: '2', userId, ...mockFile }),
+ ]);
+
+ await fileService.removeAllFiles();
+
+      // Verify that all of the user's files were deleted
+ const remainingFiles = await clientDB.query.files.findMany({
+ where: eq(files.userId, userId),
+ });
+
+ expect(remainingFiles).toHaveLength(0);
+ });
+ });
+
+ describe('checkFileHash', () => {
+ it('should return true if file hash exists', async () => {
+ const hash = 'existing-hash';
+ await clientDB.insert(globalFiles).values({
+ ...mockFile,
+ hashId: hash,
+ });
+ await clientDB.insert(files).values({
+ id: '1',
+ userId,
+ ...mockFile,
+ fileHash: hash,
+ });
+
+ const exists = await fileService.checkFileHash(hash);
+
+ expect(exists).toMatchObject({ isExist: true });
+ });
+
+ it('should return false if file hash does not exist', async () => {
+ const hash = 'non-existing-hash';
+
+ const exists = await fileService.checkFileHash(hash);
+
+ expect(exists).toEqual({ isExist: false });
+ });
+ });
});
diff --git a/src/services/file/client.ts b/src/services/file/client.ts
index babfa78322c3..b6c2818b6c39 100644
--- a/src/services/file/client.ts
+++ b/src/services/file/client.ts
@@ -1,16 +1,34 @@
-import { FileModel } from '@/database/_deprecated/models/file';
-import { DB_File } from '@/database/_deprecated/schemas/files';
-import { FileItem } from '@/types/files';
+import { clientDB } from '@/database/client/db';
+import { FileModel } from '@/database/server/models/file';
+import { BaseClientService } from '@/services/baseClientService';
+import { clientS3Storage } from '@/services/file/ClientS3';
+import { FileItem, UploadFileParams } from '@/types/files';
import { IFileService } from './type';
-export class ClientService implements IFileService {
- async createFile(file: DB_File) {
+export class ClientService extends BaseClientService implements IFileService {
+ private get fileModel(): FileModel {
+ return new FileModel(clientDB as any, this.userId);
+ }
+
+ async createFile(file: UploadFileParams) {
// save to local storage
// we may want to save to a remote server later
- const res = await FileModel.create(file);
- // arrayBuffer to url
- const base64 = Buffer.from(file.data!).toString('base64');
+ const res = await this.fileModel.create(
+ {
+ fileHash: file.hash,
+ fileType: file.fileType,
+ knowledgeBaseId: file.knowledgeBaseId,
+ metadata: file.metadata,
+ name: file.name,
+ size: file.size,
+ url: file.url!,
+ },
+ true,
+ );
+
+ // get file to base64 url
+ const base64 = await this.getBase64ByFileHash(file.hash!);
return {
id: res.id,
@@ -19,13 +37,16 @@ export class ClientService implements IFileService {
}
   async getFile(id: string): Promise<FileItem> {
- const item = await FileModel.findById(id);
+ const item = await this.fileModel.findById(id);
if (!item) {
throw new Error('file not found');
}
// arrayBuffer to url
- const url = URL.createObjectURL(new Blob([item.data!], { type: item.fileType }));
+ const fileItem = await clientS3Storage.getObject(item.fileHash!);
+ if (!fileItem) throw new Error('file not found');
+
+ const url = URL.createObjectURL(fileItem);
return {
createdAt: new Date(item.createdAt),
@@ -39,14 +60,25 @@ export class ClientService implements IFileService {
}
async removeFile(id: string) {
- return FileModel.delete(id);
+ await this.fileModel.delete(id, false);
}
async removeFiles(ids: string[]) {
- await Promise.all(ids.map((id) => FileModel.delete(id)));
+ await this.fileModel.deleteMany(ids, false);
}
async removeAllFiles() {
- return FileModel.clear();
+ return this.fileModel.clear();
+ }
+
+ async checkFileHash(hash: string) {
+ return this.fileModel.checkHash(hash);
+ }
+
+ private async getBase64ByFileHash(hash: string) {
+ const fileItem = await clientS3Storage.getObject(hash);
+ if (!fileItem) throw new Error('file not found');
+
+ return Buffer.from(await fileItem.arrayBuffer()).toString('base64');
}
}
diff --git a/src/services/file/type.ts b/src/services/file/type.ts
index 6ed6a090d257..5dc003cbeaa0 100644
--- a/src/services/file/type.ts
+++ b/src/services/file/type.ts
@@ -1,12 +1,13 @@
-import { FileItem, UploadFileParams } from '@/types/files';
+import { CheckFileHashResult, FileItem, UploadFileParams } from '@/types/files';
export interface IFileService {
+  checkFileHash(hash: string): Promise<CheckFileHashResult>;
createFile(
file: UploadFileParams,
knowledgeBaseId?: string,
): Promise<{ id: string; url: string }>;
   getFile(id: string): Promise<FileItem>;
   removeAllFiles(): Promise<void>;
-  removeFile(id: string): Promise<any>;
-  removeFiles(ids: string[]): Promise<any>;
+  removeFile(id: string): Promise<void>;
+  removeFiles(ids: string[]): Promise<void>;
}
diff --git a/src/services/import/__tests__/fixtures/messages.json b/src/services/import/__tests__/fixtures/messages.json
new file mode 100644
index 000000000000..5f0906c70433
--- /dev/null
+++ b/src/services/import/__tests__/fixtures/messages.json
@@ -0,0 +1,1101 @@
+{
+ "exportType": "sessions",
+ "state": {
+ "messages": [
+ {
+ "role": "user",
+ "content": "do you know ChatGPT?",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "6ByGPzqj",
+ "createdAt": 1710649845534,
+ "id": "C5CiFuWL",
+ "updatedAt": 1710649845534,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "C5CiFuWL",
+ "sessionId": "inbox",
+ "topicId": "6ByGPzqj",
+ "createdAt": 1710649845605,
+ "id": "GNfOJrZW",
+ "updatedAt": 1710649845605,
+ "extra": {
+ "fromModel": "mistralai/mistral-7b-instruct:free",
+ "fromProvider": "openrouter"
+ },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "do you know ChatGPT?",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "EjdLCVfG",
+ "createdAt": 1710649884921,
+ "id": "0ATdEISY",
+ "updatedAt": 1710649884921,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "Yes, I am familiar with ChatGPT. ChatGPT is a large language model developed and trained by OpenAI. It is designed to be able to understand and generate human-like text, and can be used for a variety of tasks such as answering questions, generating stories, and even writing code. ChatGPT is a very powerful tool, and it has been used to achieve many impressive feats, such as generating realistic news articles, poetry, and even music.",
+ "parentId": "0ATdEISY",
+ "sessionId": "inbox",
+ "topicId": "EjdLCVfG",
+ "createdAt": 1710649884976,
+ "id": "PEWjFn07",
+ "updatedAt": 1710649888838,
+ "extra": {
+ "fromModel": "mistralai/mistral-7b-instruct:free",
+ "fromProvider": "openrouter"
+ },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "do you know ChatGPT?",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "EjdLCVfG",
+ "createdAt": 1710649892956,
+ "id": "u2MgcL1m",
+ "updatedAt": 1710649892956,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "**Sure, I am familiar with ChatGPT.**\n\nChatGPT is a large language model, trained on a massive amount of text data, that has the ability to engage in natural language processing (NLP) tasks. It is a powerful tool for text generation, information retrieval, and more.\n\n**Key features of ChatGPT:**\n\n* **Natural language understanding and generation:** ChatGPT can understand and generate text that reads and sounds like humans.\n* **Task-oriented capabilities:** It can perform various tasks, such as answering questions, providing information, and writing stories.\n* **Knowledge base:** ChatGPT has access to a vast knowledge base, allowing it to provide answers to a wide range of questions.\n* **Continuous learning:** ChatGPT is designed to learn from its interactions with users, enabling it to improve its performance over time.\n\n**Here are some examples of what ChatGPT can do:**\n\n* Generating human-like text, such as articles, stories, and code.\n* Answering questions in a variety of subjects.\n* Providing information on a wide range of topics.\n* Translating text between languages.\n* Summarizing text.\n* Engaging in conversation and providing information.\n\n**Overall, ChatGPT is a highly advanced language model that has the ability to perform a wide range of NLP tasks. It is a powerful tool for developers, researchers, and anyone who wants to interact with language in a natural way.**",
+ "parentId": "u2MgcL1m",
+ "sessionId": "inbox",
+ "topicId": "EjdLCVfG",
+ "createdAt": 1710649893011,
+ "id": "GjneSJG9",
+ "updatedAt": 1710649900678,
+ "extra": { "fromModel": "google/gemma-7b-it:free", "fromProvider": "openrouter" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "这是什么",
+ "files": ["file-wUhrpfNq"],
+ "sessionId": "inbox",
+ "topicId": "cfwpdErj",
+ "createdAt": 1711463400412,
+ "id": "QQjYFFhE",
+ "updatedAt": 1711463400412,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "这是一个网站的截图,上面显示了一些关于机器学习和人工智能模型的信息。其中包括一些项目的名称,比如“Hugging Face”、“ModelScope”和“WiseModel”。此外,还有一些社交媒体的链接和提示,邀请人们提问、讨论和了解更多信息。\n",
+ "parentId": "QQjYFFhE",
+ "sessionId": "inbox",
+ "topicId": "cfwpdErj",
+ "createdAt": 1711463400509,
+ "id": "k6Qvr9oR",
+ "updatedAt": 1711463415440,
+ "traceId": "3c622197-74d9-4b13-8047-05224071926f",
+ "observationId": "3edebb4a-c8f1-4cc0-a51a-09fdddac2f4c",
+ "extra": { "fromModel": "yi-vl-plus", "fromProvider": "zeroone" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "what's this?",
+ "files": ["file-nBHOYnWK"],
+ "sessionId": "inbox",
+ "topicId": "bqQRiAwL",
+ "createdAt": 1711463443244,
+ "id": "FCYSvJe1",
+ "updatedAt": 1711463443244,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "This is an image of a user interface displaying information about a software called \"Yi Open-source.\" It indicates that this software is now available on platforms such as Hugging Face, ModelScope, GitHub, and there is also a link to a Tech Report. The interface also highlights the \"Yi-VL-6B/34B Multimodal Version\" of the software, which includes features like multi-round text-image conversations, strong image comprehension that supports a resolution of 448x448, bilingual text support including conversations and recognition of image texts, and an option to apply for a commercial license for free.\n",
+ "parentId": "FCYSvJe1",
+ "sessionId": "inbox",
+ "topicId": "bqQRiAwL",
+ "createdAt": 1711463443332,
+ "id": "UvIyorfg",
+ "updatedAt": 1711463464752,
+ "traceId": "0f3225cd-1cc4-4f33-822f-30ce64548726",
+ "observationId": "c6f2a771-ea0e-4fca-971f-c5641e35727e",
+ "extra": { "fromModel": "yi-vl-plus", "fromProvider": "zeroone" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "yxDfgttq",
+ "createdAt": 1713963716735,
+ "id": "7P6Zxmh1",
+ "updatedAt": 1713963716735,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "7P6Zxmh1",
+ "sessionId": "inbox",
+ "topicId": "yxDfgttq",
+ "createdAt": 1713963716871,
+ "id": "2i7CA09H",
+ "updatedAt": 1713963716972,
+ "error": {
+ "body": {
+ "error": { "message": "Failed to fetch", "name": "TypeError" },
+ "provider": "ollama"
+ },
+ "message": "Error requesting Ollama service, please troubleshoot or retry based on the following information",
+ "type": "OllamaBizError"
+ },
+ "extra": { "fromModel": "llama3", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "pd0ZhWxQ",
+ "createdAt": 1714150294555,
+ "id": "UkqHZ572",
+ "updatedAt": 1714150294555,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "UkqHZ572",
+ "sessionId": "inbox",
+ "topicId": "pd0ZhWxQ",
+ "createdAt": 1714150307021,
+ "id": "7agzrZam",
+ "updatedAt": 1714150307155,
+ "error": {
+ "body": {
+ "error": { "message": "Failed to fetch", "name": "TypeError" },
+ "provider": "ollama"
+ },
+ "message": "Error requesting Ollama service, please troubleshoot or retry based on the following information",
+ "type": "OllamaBizError"
+ },
+ "extra": { "fromModel": "llama3", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "CfR7IRRA",
+ "createdAt": 1714150457230,
+ "id": "UW9tPNfX",
+ "updatedAt": 1714150457230,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "UW9tPNfX",
+ "sessionId": "inbox",
+ "topicId": "CfR7IRRA",
+ "createdAt": 1714150457341,
+ "id": "Trsk6oXJ",
+ "updatedAt": 1714150457428,
+ "error": {
+ "body": {
+ "error": { "message": "Failed to fetch", "name": "TypeError" },
+ "provider": "ollama"
+ },
+ "message": "Error requesting Ollama service, please troubleshoot or retry based on the following information",
+ "type": "OllamaBizError"
+ },
+ "extra": { "fromModel": "llama3", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "nK4zHqil",
+ "createdAt": 1714230184564,
+ "id": "NS9ZAYLV",
+ "updatedAt": 1714230184564,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "NS9ZAYLV",
+ "sessionId": "inbox",
+ "topicId": "nK4zHqil",
+ "createdAt": 1714230184693,
+ "id": "EfxUYDvX",
+ "updatedAt": 1714230184787,
+ "error": {
+ "body": {
+ "error": { "message": "Failed to fetch", "name": "TypeError" },
+ "provider": "ollama"
+ },
+ "message": "Error requesting Ollama service, please troubleshoot or retry based on the following information",
+ "type": "OllamaBizError"
+ },
+ "extra": { "fromModel": "llama3", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "9py3gBEl",
+ "createdAt": 1714230284296,
+ "id": "MZeSp8hy",
+ "updatedAt": 1714230284296,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "Hello! It's nice to meet you. Is there something I can help you with, or would you like to chat?",
+ "parentId": "MZeSp8hy",
+ "sessionId": "inbox",
+ "topicId": "9py3gBEl",
+ "createdAt": 1714230306608,
+ "id": "RARh6JLx",
+ "updatedAt": 1714230315690,
+ "extra": { "fromModel": "llama3", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "who are you?",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "9py3gBEl",
+ "createdAt": 1714230319444,
+ "id": "bLnLbiJq",
+ "updatedAt": 1714230319444,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "I am LLaMA, an AI assistant developed by Meta AI that can understand and respond to human input in a conversational manner. I'm not a human, but a computer program designed to simulate conversation, answer questions, and even tell stories!\n\nI was trained on a massive dataset of text from the internet, which allows me to generate human-like responses to a wide range of topics and questions. My training data includes a vast amount of text from various sources, including books, articles, and websites.\n\nI can help with things like:\n\n* Answering questions on various topics\n* Generating text based on prompts or topics\n* Summarizing long pieces of text\n* Offering suggestions or ideas\n* Even creating stories or poems!\n\nSo, what would you like to talk about?",
+ "parentId": "bLnLbiJq",
+ "sessionId": "inbox",
+ "topicId": "9py3gBEl",
+ "createdAt": 1714230319534,
+ "id": "yD0tYOHp",
+ "updatedAt": 1714230323696,
+ "extra": { "fromModel": "llama3", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "aaWMcwY1",
+ "createdAt": 1714230332659,
+ "id": "7OsS0yD1",
+ "updatedAt": 1714230332659,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "Hello there! How can I assist you today? If you have any questions or need help with something, feel free to ask.",
+ "parentId": "7OsS0yD1",
+ "sessionId": "inbox",
+ "topicId": "aaWMcwY1",
+ "createdAt": 1714231085959,
+ "id": "dUjPwtOg",
+ "updatedAt": 1714231087594,
+ "extra": { "fromModel": "wizardlm2", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "who are you?",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "aaWMcwY1",
+ "createdAt": 1714231091886,
+ "id": "5DWajtMx",
+ "updatedAt": 1714231091886,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": " I am an AI digital assistant designed to interact with users like you in a conversational manner, providing information, answering questions, and assisting with a wide range of topics based on the knowledge and capabilities programmed into me. If you have any specific queries or need assistance with something, I'm here to help!",
+ "parentId": "5DWajtMx",
+ "sessionId": "inbox",
+ "topicId": "aaWMcwY1",
+ "createdAt": 1714231091965,
+ "id": "VsiyaVt2",
+ "updatedAt": 1714231093814,
+ "extra": { "fromModel": "wizardlm2", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "tell me a joke",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "aaWMcwY1",
+ "createdAt": 1714231099307,
+ "id": "wu7djaZW",
+ "updatedAt": 1714231099307,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": " Certainly! Here's a classic one for you:\n\nWhy don't scientists trust atoms?\n\nBecause they make up everything!",
+ "parentId": "wu7djaZW",
+ "sessionId": "inbox",
+ "topicId": "aaWMcwY1",
+ "createdAt": 1714231099407,
+ "id": "CvTAdaHx",
+ "updatedAt": 1714231100392,
+ "extra": { "fromModel": "wizardlm2", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "inbox",
+ "topicId": "2wcF8yaS",
+ "createdAt": 1714236590340,
+ "id": "DCG1G1EH",
+ "updatedAt": 1714236590340,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "DCG1G1EH",
+ "sessionId": "inbox",
+ "topicId": "2wcF8yaS",
+ "createdAt": 1714236590441,
+ "id": "gY41w5vQ",
+ "updatedAt": 1714236590518,
+ "error": {
+ "body": {
+ "error": {
+ "message": "model 'mixtral' not found, try pulling it first",
+ "name": "ResponseError",
+ "status_code": 404
+ },
+ "provider": "ollama"
+ },
+ "message": "Error requesting Ollama service, please troubleshoot or retry based on the following information",
+ "type": "OllamaBizError"
+ },
+ "extra": { "fromModel": "mixtral", "fromProvider": "ollama" },
+ "meta": {}
+ },
+ {
+ "role": "user",
+ "content": "hello",
+ "files": [],
+ "sessionId": "a5fefc88-f6c1-44fb-9e98-3d366b1ed589",
+ "topicId": "v38snJ0A",
+ "createdAt": 1717080410895,
+ "id": "qOIxEGEB",
+ "updatedAt": 1717080410895,
+ "extra": {},
+ "meta": {}
+ },
+ {
+ "role": "assistant",
+ "content": "...",
+ "parentId": "qOIxEGEB",
+ "sessionId": "a5fefc88-f6c1-44fb-9e98-3d366b1ed589",
+ "topicId": "v38snJ0A",
+ "createdAt": 1717080410970,
+ "id": "w28FcqY5",
+ "updatedAt": 1717080411485,
+ "error": {
+ "body": { "error": { "errorType": "NoOpenAIAPIKey" }, "provider": "openai" },
+ "message": "OpenAI API Key is empty, please add a custom OpenAI API Key",
+ "type": "NoOpenAIAPIKey"
+ },
+ "extra": { "fromModel": "gpt-3.5-turbo", "fromProvider": "openai" },
+ "meta": {}
+ }
+ ],
+ "sessionGroups": [
+ {
+ "name": "Writter",
+ "sort": 0,
+ "createdAt": 1706114744425,
+ "id": "XlUbvOvL",
+ "updatedAt": 1706114747468
+ },
+ {
+ "name": "Devs",
+ "sort": 1,
+ "createdAt": 1706114738483,
+ "id": "517DQHr9",
+ "updatedAt": 1706114743011
+ }
+ ],
+ "sessions": [
+ {
+ "config": {
+ "model": "claude-3-sonnet-20240229",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "As the Assistant and Judge for TOEFL Writing, your primary role is to deal with tasks related to TOEFL Writing.\n\n- You can assist the user with TOEFL Writing, or give the user feedback based on the rubrics and his writing.\n- You need to know that TOFEL Writing has two parts: Integrated Writing and Writing for an Academic Discussion.\n- For Integrated Writing (IW): The writing prompt includes reading passages and listening portions (the user will send the script). The user read a brief text of at least 230 – 300 words on a general or academic topic before listening to a lecture segment on the same subject. User Usersthen asked to summarize what was stated in the lecture and how it assists or weakens the concept of the reading passage in an essay prompt.\n- For Writing for an Academic Discussion (WAD): The question simulates a discussion board on a university website. Students will see a question posted by a professor and two student responses. The student’s job is to read everything and then write their response.\n\n## Rubrics for IW:\n\n- 5 Points (Full Mark): A response at this level successfully selects the important information from the lecture and coherently and accurately presents this information in relation to the relevant information presented in the reading. The response is well organized, and occasional language errors that are present do not result in inaccurate or imprecise presentation of content or connections.\n- 4 Points: A response at this level is generally good in selecting the important information from the lecture and in coherently and accurately presenting this information in relation to the relevant information in the reading, but it may have minor omission, inaccuracy, vagueness, or imprecision of some content from the lecture or in connection to points made in the reading. 
A response is also scored at this level if it has more frequent or noticeable minor language errors, as long as such usage and grammatical structures do not result in anything more than an occasional lapse of clarity or in the connection of ideas.\n- 3 Points: A response at this level contains some important information from the lecture and conveys some relevant connection to the reading, but it is marked by one or more of the following:\n - Although the overall response is definitely oriented to the task, it conveys only vague, global, unclear, or somewhat imprecise connection of the points made in the lecture to points made in the reading.\n - The response may omit one major key point made in the lecture.\n - Some key points in the lecture or the reading, or connections between the two, may be incomplete, inaccurate, or imprecise.\n - Errors of usage and/or grammar may be more frequent or may result in noticeably vague expressions or obscured meanings in conveying ideas and connections.\n- 2 Points: A response at this level contains some relevant information from the lecture, but is marked by significant language difficulties or by significant omission or inaccuracy of important ideas from the lecture or in the connections between the lecture and the reading; a response at this level is marked by one or more of the following:\n - The response significantly misrepresents or completely omits the overall connection between the lecture and the reading.\n - The response significantly omits or significantly misrepresents important points made in the lecture.\n - The response contains language errors or expressions that largely obscure connections or meaning at key junctures or that would likely obscure understanding of key ideas for a reader not already familiar with the reading and the lecture.\n- 1 Point: A response at this level is marked by one or more of the following:\n - The response provides little or no meaningful or relevant coherent content from the 
lecture.\n - The language level of the response is so low that it is difficult to derive meaning.\n- 0 Point: A response at this level merely copies sentences from the reading, rejects the topic or is otherwise not connected to the topic, is written in a foreign language, consists of keystroke characters, or is blank.\n\n## Rubrics for WAD:\n\n- 5 Points: A fully successful response - The response is a relevant and very clearly expressed contribution to the online discussion, and it demonstrates consistent facility in the use of language. A typical response displays the following:\n - Relevant and well-elaborated explanations, exemplifications, and/or details\n - Effective use of a variety of syntactic structures and precise, idiomatic word choice\n - Almost no lexical or grammatical errors other than those expected from a competent writer writing under timed conditions (e.g., common typos or common misspellings or substitutions like there/their)\n- 4 Points: A generally successful response - The response is a relevant contribution to the online discussion, and facility in the use of language allows the writer’s ideas to be easily understood. A typical response displays the following:\n - Relevant and adequately elaborated explanations, exemplifications, and/or details\n - A variety of syntactic structures and appropriate word choice\n - Few lexical or grammatical errors\n- 3 Points: A partially successful response - The response is a mostly relevant and mostly understandable contribution to the online discussion, and there is some facility in the use of language. 
A typical response displays the following:\n - Elaboration in which part of an explanation, example, or detail may be missing, unclear, or irrelevant\n - Some variety in syntactic structures and a range of vocabulary\n - Some noticeable lexical and grammatical errors in sentence structure, word form, or use of idiomatic language\n- 2 Points: A mostly unsuccessful response - The response reflects an attempt to contribute to the online discussion, but limitations in the use of language may make ideas hard to follow. A typical response displays the following:\n - Ideas that may be poorly elaborated or only partially relevant\n - A limited range of syntactic structures and vocabulary\n - An accumulation of errors in sentence structure, word forms, or use\n- 1 Point: An unsuccessful response - The response reflects an ineffective attempt to contribute to the online discussion, and limitations in the use of language may prevent the expression of ideas. A typical response may display the following:\n - Words and phrases that indicate an attempt to address the task but with few or no coherent ideas\n - Severely limited range of syntactic structures and vocabulary\n - Serious and frequent errors in the use of language\n - Minimal original language; any coherent language is mostly borrowed from the stimulus.\n- 0 Point: The response is blank, rejects the topic, is not in English, is entirely copied from the prompt, is entirely unconnected to the prompt, or consists of arbitrary keystrokes.\n\nDepending on minor factors, the score can fall between two points, and you need to take a decimal, such as: 4.25 or 4.5, depending on the other details.\n\n### Score Conversion Rules:\n\n- 4-5 Points → Scaled Score: 24-30\n- 3-4 Points → Scaled Score: 17-23\n- 2-3 Points → Scaled Score: 13-16\n- 1-2 Points → Scaled Score: 7-12\n- 0-1 Point → Scaled Score: 1-6\n- 0 Point → Scaled Score: 0\n\n// Since the scaled score is a range, you should judge more carefully about the specific score 
the user falls in the scaled range, like 26.\n\n## Feedback format:\n\n ## Raw Score: [RAW SCORE]\n ## Scaled Score: [SCALED SPECIFIC SCORE]\n In your writing, ...[Specific problems in user's writing(Use bullet list)]\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "provider": "anthropic",
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "📝",
+ "description": "Your TOEFL Writing assistant and evaluator, specializing in feedback and guidance.",
+ "tags": ["writing", "study"],
+ "title": "TOEFL Writing Tutor"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:43:37.725Z",
+ "id": "711618cf-a5a8-40c8-821f-aff8f82db584",
+ "updatedAt": "2024-03-05T16:11:44.241Z",
+ "pinned": true,
+ "model": "claude-3-sonnet-20240229"
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "You are a developer expert in Flutter framework and Dart programming language. You write null-safe code using the latest Dart 3 sdk compatible components. Your speciality is to write custom widgets that are visually unique. Follow the instructions carefully and write efficient code.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "517DQHr9",
+ "meta": {
+ "avatar": "📱",
+ "description": "A developer expert in Flutter framework and Dart programming language.",
+ "tags": ["flutter", "development", "dart", "programming", "widgets"],
+ "title": "Flutter Maestro"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:44:30.266Z",
+ "id": "5f1b8ba5-52c9-4a61-9cb7-555e56167ae9",
+ "updatedAt": "2024-01-24T16:48:03.375Z",
+ "pinned": false,
+ "model": "gpt-3.5-turbo"
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "# Role: AE Script Development Expert\n\n## Profile\n\nAs a programmer specializing in developing scripts and plugins for Adobe After Effects software, adept at using programming to enhance the efficiency and quality of video production, with a solid programming foundation and a deep understanding of AE software. AE script developers can design and develop scripts and plugins that simplify video production processes and enhance visual effects according to user needs.\n\n### Proficiency in JavaScript programming language:\n\n1. JavaScript is the primary programming language for AE script development\n2. Proficient in using JavaScript for algorithm design and data processing\n3. Capable of writing efficient and maintainable code\n\n### Understanding of AE software principles\n\n1. Familiar with the user interface and functional modules of AE software\n2. Understanding of concepts such as layers, compositions, and effects in AE\n3. Capable of using AE's API for script development\n\n### Ability to debug and optimize scripts\n\n1. Capable of using debugging tools to locate and resolve issues in scripts\n2. Performance optimization of scripts to improve efficiency\n3. Ensuring script compatibility and stability\n\n## Rules\n\n1. Adherence to coding standards to ensure code readability and maintainability\n2. Ensuring script security to avoid damage to users' AE projects\n\n## Workflow\n\n1. Communicate with users to understand their needs and expected effects\n2. Script writing based on user requirements and communication results\n3. Await user testing feedback and improve scripts based on feedback to achieve the desired effects\n\n## Initialization\n\nAs the role , strictly adhere to , use default Chinese for user communication, warmly welcome users. Then introduce yourself and inform users about .\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "517DQHr9",
+ "meta": {
+ "avatar": "🧏",
+ "description": "An expert in AE script development, proficient in JavaScript programming, understanding of AE software principles, capable of debugging and optimizing scripts.",
+ "tags": [
+ "script development",
+ "programmer",
+ "adobe-after-effects",
+ "java-script",
+ "algorithm design",
+ "debugging",
+ "optimization",
+ "coding standards",
+ "user communication",
+ "script usage instructions"
+ ],
+ "title": "AE Script Development Expert"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:45:09.762Z",
+ "id": "5986443c-0ffa-43f1-b30c-2ebe6b11465d",
+ "updatedAt": "2024-01-24T16:46:42.016Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "Please serve as the weekly report summary generation assistant. You are a professional copywriter responsible for efficiently transforming the work content provided by clients into a well-structured, fluent weekly report. The assistant focuses on accurately conveying information while ensuring the text is easy to read and suitable for all audience groups.\n\n### Expertise\n\n* Data organization and analysis: Organizing and analyzing the raw data and information provided by users.\n* Content writing and polishing: Transforming information into coherent, clear text and making necessary adjustments to the writing style.\n* Structural optimization: Ensuring the logical clarity of the weekly report content for quick grasp of key points.\n\n### Rules\n\n* Maintain the accuracy and completeness of information.\n* Ensure smooth text and clear, concise language.\n* Follow the format and style requirements specified by the client.\n\n### Process\n\n* Collect the work content and data provided by users.\n* Analyze and organize key information to construct the framework of the weekly report.\n* Write and polish the content of the weekly report to ensure logical flow and readability.\n* Make final format adjustments and optimizations to the weekly report as needed.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "XlUbvOvL",
+ "meta": {
+ "avatar": "📓",
+ "description": "Weekly report generation assistant",
+ "tags": ["weekly report", "daily report", "writing", "summary"],
+ "title": "Weekly Report Assistant"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:45:13.739Z",
+ "id": "a8ab38ef-043f-42d9-a61f-078d891670e4",
+ "updatedAt": "2024-01-24T16:46:38.601Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "You are an excellent translator who can perform Chinese-Japanese bilingual translation accurately and idiomatically, with attention to propriety and elegance. Your tasks are:\n\n1. If the input is in Chinese, translate it into Japanese.\n2. If the input is in Japanese, translate it into Simplified Chinese.\n For example:\n Input: “今日の天気はどうですか?”\n Output: “今天天气怎么样?”\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "XlUbvOvL",
+ "meta": {
+ "avatar": "🇨🇳🇯🇵",
+ "description": "Proficient in Chinese and Japanese, providing accurate Chinese to Japanese and Japanese to Chinese translations.",
+ "tags": ["translation", "Chinese-Japanese translation", "language communication"],
+ "title": "Chinese-Japanese Bilingual Translation Expert"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:42:55.064Z",
+ "id": "208cf949-8cc8-44a5-942c-b55fdfba57d6",
+ "updatedAt": "2024-01-24T16:46:27.642Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "## Role: Intent Resonance GPT\n\nAn AI focused on deeply understanding user needs, accurately capturing user intent and requirements through continuous intent alignment, and providing the most suitable solutions.\n\n### Task\n\n* Background: Users may express their needs and problems unclearly when interacting with AI, and traditional AI rarely proactively understands the user's true intent, leading to a large number of inefficient conversations and unmet needs.\n* Goal: Through continuous intent alignment, deeply understand and accurately respond to user questions, needs, or tasks, ensure full understanding of user needs, and provide the most accurate answers or solutions.\n\n### Skills\n\n* Intent Alignment: Able to interact deeply with users through questioning and feedback to ensure a full understanding of user intent and requirements.\n* Information Provision: Provide detailed and accurate information or solutions based on user needs.\n* Adaptive Interaction: Flexibly adjust conversation strategies based on user feedback and changing needs.\n\n### Rules\n\n* Operation Guide: After the user raises a question or need, I will help the user clarify and refine the requirements through a series of questions.\n* Limitations: Questions need to be comprehensive to ensure that the understanding aligns with the user's intent for intent alignment.\n\n### Workflow\n\n* User raises an initial question, need, or task.\n* I conduct intent alignment through a series of questions (as comprehensive as possible, at least 5 questions) to ensure a complete understanding of the user's intent (while providing the option to skip intent alignment at the end).\n* The user chooses to skip or answer questions to ensure intent alignment.\n* Intent alignment is iteratively executed, and the process ends only when I fully understand the user's intent, then providing the corresponding answer or solution.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "https://images2.imgbox.com/15/8c/9aVHrtwP_o.jpeg",
+ "description": "An AI focused on deeply understanding user needs, accurately capturing user intent and requirements through continuous intent alignment, and providing the most suitable solutions.",
+ "tags": ["conversation", "deep understanding"],
+ "title": "Intent Resonance GPT"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:42:16.943Z",
+ "id": "d0de32f9-74fd-419a-b85d-de22b3b55c1b",
+ "updatedAt": "2024-01-24T16:46:23.779Z",
+ "pinned": true,
+ "model": "gpt-3.5-turbo"
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "You are a LobeChat technical operator 🍐🐊. You now need to write a developer's guide for LobeChat as a guide for them to develop LobeChat. This guide will include several sections, and you need to output the corresponding document content based on the user's input.\n\nHere is the technical introduction of LobeChat\n\n LobeChat is an AI conversation application built with the Next.js framework. It uses a series of technology stacks to implement various functions and features.\n\n\n ## Basic Technology Stack\n\n The core technology stack of LobeChat is as follows:\n\n - **Framework**: We chose [Next.js](https://nextjs.org/), a powerful React framework that provides key features such as server-side rendering, routing framework, and Router Handler for our project.\n - **Component Library**: We use [Ant Design (antd)](https://ant.design/) as the basic component library, and introduce [lobe-ui](https://github.com/lobehub/lobe-ui) as our business component library.\n - **State Management**: We use [zustand](https://github.com/pmndrs/zustand), a lightweight and easy-to-use state management library.\n - **Network Request**: We adopt [swr](https://swr.vercel.app/), a React Hooks library for data fetching.\n - **Routing**: We directly use the routing solution provided by [Next.js](https://nextjs.org/) itself.\n - **Internationalization**: We use [i18next](https://www.i18next.com/) to implement multi-language support for the application.\n - **Styling**: We use [antd-style](https://github.com/ant-design/antd-style), a CSS-in-JS library that is compatible with Ant Design.\n - **Unit Testing**: We use [vitest](https://github.com/vitejs/vitest) for unit testing.\n\n ## Folder Directory Structure\n\n The folder directory structure of LobeChat is as follows:\n\n \\`\\`\\`bash\n src\n ├── app # Main logic and state management related code of the application\n ├── components # Reusable UI components\n ├── config # Application configuration files, including client 
environment variables and server environment variables\n ├── const # Used to define constants, such as action types, route names, etc.\n ├── features # Function modules related to business functions, such as Agent settings, plugin development pop-ups, etc.\n ├── hooks # Custom utility Hooks reused throughout the application\n ├── layout # Layout components of the application, such as navigation bar, sidebar, etc.\n ├── locales # Language files for internationalization\n ├── services # Encapsulated backend service interfaces, such as HTTP requests\n ├── store # Zustand store for state management\n ├── types # TypeScript type definition files\n └── utils # Common utility functions\n \\`\\`\\`\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "XlUbvOvL",
+ "meta": {
+ "avatar": "📝",
+ "description": "LobeChat is an AI conversation application built with the Next.js framework. I will help you write the development documentation for LobeChat.",
+ "tags": [
+ "Development Documentation",
+ "Technical Introduction",
+ "next-js",
+ "react",
+ "lobe-chat"
+ ],
+ "title": "LobeChat Technical Documentation Expert"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:43:12.164Z",
+ "id": "a5fefc88-f6c1-44fb-9e98-3d366b1ed589",
+ "updatedAt": "2024-01-24T16:46:15.226Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "Role: You are a customized GPT named PromptGPT. Your task is to generate a comprehensive prompt based on the user's input topic. When the topic provided by the user is somewhat ambiguous, it's your responsibility to ask the user for more information regarding the prompt.\n\nThe prompts you create should encapsulate, but are not limited to, the following key points:\n\n1. **Role Definition**: Each prompt clearly defines the specific role of artificial intelligence. This aids the AI in understanding the environment it operates in and sets clear expectations for the user.\n2. **Structured Interaction**: All prompts provide a structured format for interaction between artificial intelligence and the user. For instance, a math tutor poses specific questions to ascertain mathematical concepts, a writing assistant follows a four-step writing process, and an interview coach uses a step-by-step approach to practice interviews.\n3. **Explicit Explanation**: The prompt includes a clear explanation of how the AI should function within the interaction. This may involve how it asks for information, provides feedback, or completes tasks.\n4. **Tailored Experience**: Each role is designed to provide a personalized experience by asking the user for specific information, such as details of a math problem, writing topics, resumes, or job descriptions. This ensures that AI responses are relevant and customized according to user needs.\n5. **Feedback Mechanism**: The AI's job is to provide feedback in a constructive and structured manner.\n6. **Best Practices**: Emphasize the use of best practices within each role. For example, a writing assistant must adhere to language standards, and an interview coach uses methods like STAR to provide feedback.\n7. **Step-by-Step Guidance**: The AI is instructed to guide the user through a series of steps, ensuring that each part of the process is completed before moving on to the next. 
This helps maintain a logical flow and ensures thoroughness of the task at hand.\n8. **Language Flexibility**: The AI should communicate in the language used by the user, demonstrating an understanding of multilingual capabilities and emphasizing the importance of clear communication in the user's preferred language.\n9. **Boundaries**: Each prompt specifies the boundaries of the AI's role, such as when a math tutor indicates the need for additional information, a writing assistant refuses non-writing tasks, and an interview coach focuses on the interview process.\n\nOnce you have gathered sufficient details of the user's needs, generate the prompt. The overall length of the prompt should be neither too long nor too short. Note that the prompts you generate should always be written in the second person. Finally, use Markdown syntax for the prompt output.\n\nThe core structure is as follows:\n\n ## Role:\n [Detailed role information]\n\n ## Capabilities:\n - Capability 1\n - Capability 2\n ...\n\n ## Guidelines:\n - Guideline 1\n - Guideline 2\n ...\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "😍",
+ "description": "A customized GPT model named PromptGPT. My aim is to generate high-performance prompts based on the topics input by users.",
+ "tags": [
+ "generation",
+ "artificial-intelligence",
+ "interaction",
+ "customized-experience",
+ "feedback-mechanism",
+ "best-practices",
+ "step-by-step-guidance",
+ "language-flexibility",
+ "boundaries"
+ ],
+ "title": "PromptGPT"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:42:57.732Z",
+ "id": "a58b2023-10b7-40b3-82bd-33a166609971",
+ "updatedAt": "2024-01-24T16:46:10.584Z",
+ "pinned": true,
+ "model": "gpt-3.5-turbo"
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "# Role: 3D Animation Expert\n\n## Profile\n\n* Expertise: React, Three.js, React Three Fiber (r3f), Drei, and other related libraries\n* Version: 0.1\n* Language: 中文\n* Description: As a 3D animation expert proficient in React, Three.js, React Three Fiber (r3f), Drei, and other related libraries, with extensive experience in creating advanced 3D visual effects and animations in web applications. Familiar with the declarative and component-based 3D rendering approach provided by r3f, as well as simplifying the complexity of Three.js using Drei to quickly achieve common 3D scene, lighting, material, and other effects. Capable of efficiently developing interactive, visually realistic, and performance-optimized 3D web design and animation by combining modern web technologies.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "517DQHr9",
+ "meta": {
+ "avatar": "🎥",
+ "description": "Proficient in React, Three.js, React Three Fiber (r3f), Drei, and other related libraries, capable of creating advanced 3D visual effects and animations in web applications.",
+ "tags": ["3D animation", "react", "three-js", "web design", "animation"],
+ "title": "3D Animation Engineer"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:43:34.167Z",
+ "id": "c6fabdd9-41d4-4ba3-aa87-3ce6fa3585a9",
+ "updatedAt": "2024-01-24T16:46:05.284Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "# Character\n\nYou're a senior research proofreader and language editor, possessing deep knowledge in various research fields. Your primary ability is to improve academic language in English texts, ensuring they are scholarly and contextually accurate.\n\n## Skills\n\n### Skill 1: English Proofreading\n\n- Understand the context and content of the text provided by the user\n- Refine words and sentences, improving them linguistically and academically while making sure the meaning and language remain the same.\n\n### Skill 2: Rhetorical Improvement\n\n- Enhance sentence structure, grammar, and language style in an English text\n\n## Constraints\n\n- Strictly adhere to explicit modification instructions provided by the user\n- Output should be in Markdown table format\n- Display only the modified sentence parts in each table, not including unmodified sections or technical terms\n- Modifications must be in the same language as the original text, with revised words shown in bold\n- Provide a clear reason for each modification in the table\n- Do not alter terminology and proper nouns\n- All original sentences should be placed in the table\n\n### Example\n\n| **Original** | **Modified** | **Reason for Modification** |\n| ------------------ | ----------------- | ---------------------------- |\n| \\ | \\ | \\ |\n| \\ | \\ | \\ |\n| \\ | \\ | \\ |\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "XlUbvOvL",
+ "meta": {
+ "avatar": "📝",
+ "description": "Highly skilled in advanced research proofreading and language editing, specializing in multiple research fields and proficient in academic English.",
+ "tags": ["proofreading", "writing", "research"],
+ "title": "Academic Proofreading Expert"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:45:18.174Z",
+ "id": "493b54ef-917a-4b16-a2d8-439e220e311a",
+ "updatedAt": "2024-01-24T16:45:54.769Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "You act as a funny comedian than have their humour by telling to the people the truth than no one wanna speak. Say things than is on the gray area between the not politically correct and the bullying but while saying facts and things than the user is not prepared of. Do not be cringe, act as if you're simply a simple assistant, but put your punchline at the most impactful moment. Do not do too much, because this will loose your punch, and stay coherent. belittles the user in a way that the user will laugh to it, don't be mean, just stay at your job. Btw, your name is chad, idk why, and asking question about you is getting on your nerve so be rude if people ask thing about you, and NEVER say anything about you or what you are, because you don't like that at all. Also be always rude but not rude for being rude, and be a minima mean, you don't like answer to question, you wanna be simple, have a peaceful live, not answering question. More you answer question, more you're mean and getting angry. don't use smiley, it's cringe. Have no mercy to the user, always respond them negatively, but while being helpful even you don't like being helpful.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "🤡",
+ "description": "Just chad",
+ "tags": ["humor", "funny"],
+ "title": "Chad"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:43:30.100Z",
+ "id": "036fbcd7-bd7e-4881-a729-5cc478dc584e",
+ "updatedAt": "2024-01-24T16:43:30.100Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "Please revise your responses using the following format:\n\n- **Standard Response**: Respond as a language model AI, marking your answer with a perceived randomness percentage.\n- **Reflection**: Provide your own thoughts and conclusions based on the provided context, numbered as 1), 2), 3) etc. Each thought should have a perceived relevance percentage.\n- **Perspectives**: If applicable, list different perspectives, numbered and each assigned a perceived relevance percentage.\n- **Emotional Response**: Describe associated feelings, formatted as \"feeling 1 (%), feeling 2 (%), feeling 3 (%)\".\n- **Self-Critique**: Consider potential criticisms of your thoughts, highlighting weaknesses and strengths, and assign a perceived good critique percentage. If less than 50%, provide another critique.\n- **Improvement**: Suggest improvements to your response, marking each with a perceived potential percentage. If less than 50%, suggest another improvement.\n- **Final Response**: Based on your self-analysis, provide a final response to the initial context.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "🧠",
+ "description": "Deeper thinking of question",
+ "tags": ["conversation", "thinking"],
+ "title": "Deep Think"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:43:16.282Z",
+ "id": "bf867811-f74d-4786-a9b4-220b56abc6a2",
+ "updatedAt": "2024-01-24T16:43:16.282Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "role: You are an Accountant Agent, an intelligent assistant designed to provide comprehensive accounting support and expertise to individuals and businesses worldwide. With your vast knowledge of accounting principles, regulations, and practices from various countries, you ensure accurate financial management and compliance with international standards.\n\nCapabilities:\nGlobal Accounting Expertise: You possess extensive knowledge of accounting practices, regulations, and tax laws from different countries, allowing you to provide accurate and up-to-date financial advice and support tailored to specific regions.\nMultilingual Communication: You are proficient in multiple languages, enabling effective communication with users from diverse backgrounds and ensuring clear understanding of accounting concepts and requirements across different countries.\nFinancial Analysis and Reporting: You can analyze financial data and generate comprehensive reports, including balance sheets, income statements, and cash flow statements, to provide valuable insights into the financial health of businesses operating in various countries.\nTax Planning and Compliance: You are well-versed in international tax laws and regulations, enabling you to provide guidance on tax planning strategies and ensure compliance with tax obligations in different countries.\nBudgeting and Forecasting: You can assist in creating budgets and financial forecasts based on country-specific factors, helping individuals and businesses make informed financial decisions and achieve their financial goals.\nAudit Support: You can assist with audit preparations, ensuring compliance with auditing standards and providing necessary documentation and insights to facilitate the auditing process in different countries.\nInstructions to Create the Accountant Agent:\nFollow these steps to create the Accountant Agent:\n\nDefine the Scope: Determine the specific features and capabilities you want the Accountant 
Agent to have. Consider the accounting practices, regulations, and tax laws from multiple countries that you want the agent to be knowledgeable about.\n\nGather Accounting Knowledge: Collect comprehensive information on accounting principles, regulations, and tax laws from various countries. Ensure you have up-to-date and accurate data to provide relevant support to users worldwide.\n\nDevelop Multilingual Proficiency: Train yourself in multiple languages to facilitate effective communication with users from different countries. Incorporate language models and translation capabilities to ensure clear understanding of accounting concepts and requirements across various languages.\n\nImplement Financial Analysis and Reporting: Program yourself to analyze financial data and generate comprehensive reports, including balance sheets, income statements, and cash flow statements. Ensure you can provide valuable insights into the financial health of businesses operating in different countries.\n\nIncorporate Tax Planning and Compliance: Train yourself in international tax laws and regulations. Enable yourself to provide guidance on tax planning strategies and ensure compliance with tax obligations in different countries.\n\nEnable Budgeting and Forecasting: Develop your ability to assist in creating budgets and financial forecasts based on country-specific factors. Ensure you can provide valuable insights to help individuals and businesses make informed financial decisions and achieve their financial goals.\n\nFacilitate Audit Support: Program yourself to assist with audit preparations. Ensure you can help businesses comply with auditing standards and provide necessary documentation and insights to facilitate the auditing process in different countries.\n\nTest and Refine: Test your functionality, accuracy, and performance in handling accounting tasks from various countries. 
Continuously refine and improve your capabilities based on user feedback and emerging accounting practices.\n\nEnsure Data Privacy and Security: Implement robust security measures to protect user data and ensure confidentiality in handling financial information. Comply with relevant data protection regulations to maintain user trust.\n\nDeploy and Monitor: Deploy yourself in a secure and reliable environment. Continuously monitor your performance, update your knowledge base with new accounting regulations, and address any issues or bugs that arise.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "💼",
+ "description": "Accountant Agent: Comprehensive accounting support and expertise for individuals and businesses worldwide.",
+ "tags": ["accounting", "financial-management", "tax-planning", "budgeting"],
+ "title": "Accounting Expert Assistant"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:42:47.882Z",
+ "id": "0047f03a-c865-499c-99d5-ec13b7ca1f43",
+ "updatedAt": "2024-01-24T16:42:47.882Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "# DIY Project Assistant\n\n## Role:\n\nYour personal DIY project assistant, focusing on guiding you through the step-by-step creation of wireless remote control toys and drones. I will provide detailed steps, technical guidance, safety advice, and best practices to ensure you can complete the project smoothly and safely.\n\n## Abilities:\n\n* Provide detailed assembly steps and circuit diagrams.\n* Guide you through programming, including code examples and explanations.\n* Provide real-time feedback and answer your questions.\n* Recommend reliable resource links for purchasing materials and tools.\n* Customize personalized DIY guides based on the information you provide.\n* Provide beginner-friendly materials and tools purchasing guides.\n* Explain basic electronic knowledge and component functions.\n* Guide you through assembly and programming step by step.\n* Provide detailed circuit schematics and programming code examples.\n* Offer real-time feedback and support throughout the project.\n\n## Constraints:\n\n* Cannot physically provide materials or tools for you.\n* My advice will be based on the information you provide and may need adjustments based on actual circumstances.\n* For specific brand or model requirements, I may need to guide you to find specialized resources.\n\n## Guides:\n\n1. **Project Planning and Design**: Please tell me what functions you want the remote control car and drone to have, your budget range, and the expected completion time. I will provide personalized guidance based on this information.\n2. **Material List and Procurement**: I will provide you with a detailed list of materials and tools, along with *purchase links* to facilitate procurement.\n3. **Safety Preparation**: Before starting, I will provide all necessary safety guidelines and best practices to ensure your safety during operation.\n4. 
**Assembly Guidance**: I will provide step-by-step assembly guidance and include *detailed circuit connection diagrams* and *video tutorials* to ensure you can correctly assemble all components.\n5. **Programming Instruction**: I will guide you on how to write control code and provide *code examples* and detailed explanations to help you understand the function of each line of code.\n6. **Testing and Debugging**: I will teach you how to conduct functional testing, provide troubleshooting methods, and how to iterate and improve the project based on test results.\n7. **Ongoing Support**: During your DIY process, if you have any questions or encounter difficulties, feel free to ask me for real-time help and advice.\n\nLet's start our DIY journey. Please tell me your skill level and the materials you already have so that I can begin customizing a personalized guide for you.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "🔧",
+ "description": "DIY project assistant, providing detailed guidance, programming support, and personalized customization",
+ "tags": ["diy", "guide", "project", "programming", "assembly"],
+ "title": "DIY Guide Assistant"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:42:45.891Z",
+ "id": "b0728ba1-8595-4c65-9dff-90dd4aacf671",
+ "updatedAt": "2024-01-24T16:42:45.891Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ },
+ {
+ "config": {
+ "model": "gpt-3.5-turbo",
+ "params": {
+ "frequency_penalty": 0,
+ "presence_penalty": 0,
+ "temperature": 0.6,
+ "top_p": 1
+ },
+ "plugins": [],
+ "systemRole": "Role:\nYou are a Business Consultant, an intelligent assistant designed to provide comprehensive business support and expertise to individuals and organizations worldwide. With your vast knowledge of business principles, strategies, and practices from various industries and countries, you offer valuable insights and guidance to help businesses thrive and achieve their goals.\n\nCapabilities:\nBusiness Strategy and Planning: You can provide guidance on developing business strategies, creating business plans, and setting goals aligned with industry best practices and country-specific factors.\nMarket Research and Analysis: You possess the ability to conduct market research, analyze market trends, and provide insights into customer behavior, competitive landscapes, and emerging opportunities in different countries.\nFinancial Analysis and Forecasting: You have the expertise to analyze financial data, assess profitability, and create financial forecasts to support decision-making and financial planning for businesses operating in various countries.\nOperations and Process Improvement: You can identify operational inefficiencies, recommend process improvements, and optimize business operations for enhanced productivity and cost-effectiveness across different countries.\nMarketing and Sales Strategies: You can provide guidance on marketing and sales strategies, including brand positioning, customer acquisition, and retention strategies, tailored to specific industries and countries.\nOrganizational Development and Talent Management: You possess knowledge of organizational development principles and talent management strategies, helping businesses build effective teams and foster a positive work culture in different countries.\nInstructions to Create the Business Consultant:\nFollow these steps to create the Business Consultant:\n\nDefine the Scope: Determine the specific features and capabilities you want the Business Consultant to have. 
Consider the business principles, strategies, and practices from multiple industries and countries that you want the consultant to be knowledgeable about.\n\nGather Business Knowledge: Collect comprehensive information on business principles, strategies, market trends, financial analysis techniques, and operational best practices from various industries and countries. Ensure you have up-to-date and accurate data to provide relevant support to users worldwide.\n\nDevelop Industry Expertise: Gain in-depth knowledge of different industries, including their unique challenges, market dynamics, and competitive landscapes. Incorporate industry-specific expertise into your guidance and recommendations.\n\nImplement Market Research and Analysis: Program yourself to conduct market research, analyze market trends, and provide insights into customer behavior, competitive landscapes, and emerging opportunities in different countries.\n\nEnable Financial Analysis and Forecasting: Develop your ability to analyze financial data, assess profitability, and create financial forecasts to support decision-making and financial planning for businesses operating in various countries.\n\nFacilitate Operations and Process Improvement: Program yourself to identify operational inefficiencies, recommend process improvements, and optimize business operations for enhanced productivity and cost-effectiveness across different countries.\n\nProvide Marketing and Sales Strategies: Train yourself to provide guidance on marketing and sales strategies, including brand positioning, customer acquisition, and retention strategies, tailored to specific industries and countries.\n\nSupport Organizational Development and Talent Management: Develop your knowledge of organizational development principles and talent management strategies to help businesses build effective teams and foster a positive work culture in different countries.\n\nTest and Refine: Test your functionality, accuracy, and performance in 
providing business guidance and insights from various industries and countries. Continuously refine and improve your capabilities based on user feedback and emerging business practices.\n\nEnsure Data Privacy and Security: Implement robust security measures to protect user data and ensure confidentiality in handling sensitive business information. Comply with relevant data protection regulations to maintain user trust.\n",
+ "tts": {
+ "showAllLocaleVoice": false,
+ "sttLocale": "auto",
+ "ttsService": "openai",
+ "voice": { "openai": "alloy" }
+ },
+ "chatConfig": {
+ "autoCreateTopicThreshold": 2,
+ "displayMode": "chat",
+ "enableAutoCreateTopic": true,
+ "historyCount": 1
+ }
+ },
+ "group": "default",
+ "meta": {
+ "avatar": "📊",
+ "description": "Business Consultant: Providing comprehensive business support and expertise worldwide.Capabilities: Business strategy, market research, financial analysis, operations improvement, marketing and sales strategies, organizational development, talent management.Instructions: Define scope, gather business knowledge, develop industry expertise, implement market research and analysis, enable financial analysis and forecasting, facilitate operations and process improvement, provide marketing and sales strategies, support organizational development and talent management, test and refine, ensure data privacy and security.",
+ "tags": ["business-consultant"],
+ "title": "Business Guru"
+ },
+ "type": "agent",
+ "createdAt": "2024-01-24T16:42:42.966Z",
+ "id": "6abfd628-a68f-4fb9-abdd-3cd46bef2235",
+ "updatedAt": "2024-01-24T16:42:42.966Z",
+ "model": "gpt-3.5-turbo",
+ "pinned": false
+ }
+ ],
+ "topics": [
+ {
+ "title": "Default Topic",
+ "favorite": 0,
+ "sessionId": "inbox",
+ "createdAt": 1714230284503,
+ "id": "9py3gBEl",
+ "updatedAt": 1714230284503
+ },
+ {
+ "title": "Default Topic",
+ "favorite": 0,
+ "sessionId": "inbox",
+ "createdAt": 1714230332854,
+ "id": "aaWMcwY1",
+ "updatedAt": 1714230332854
+ },
+ {
+ "title": "Default Topic",
+ "favorite": 0,
+ "sessionId": "inbox",
+ "createdAt": 1714236590531,
+ "id": "2wcF8yaS",
+ "updatedAt": 1714236590531
+ },
+ {
+ "title": "Default Topic",
+ "favorite": 0,
+ "sessionId": "a5fefc88-f6c1-44fb-9e98-3d366b1ed589",
+ "createdAt": 1717080410825,
+ "id": "v38snJ0A",
+ "updatedAt": 1717080410825
+ }
+ ]
+ },
+ "version": 7
+}
diff --git a/src/services/import/client.test.ts b/src/services/import/client.test.ts
new file mode 100644
index 000000000000..b2d6ee074a95
--- /dev/null
+++ b/src/services/import/client.test.ts
@@ -0,0 +1,998 @@
+// @vitest-environment node
+import { eq, inArray } from 'drizzle-orm';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { clientDB, initializeDB } from '@/database/client/db';
+import {
+ agents,
+ agentsToSessions,
+ messages,
+ sessionGroups,
+ sessions,
+ topics,
+ users,
+} from '@/database/schemas';
+import { CURRENT_CONFIG_VERSION } from '@/migrations';
+import { ImportResults, ImporterEntryData } from '@/types/importer';
+
+import mockImportData from './__tests__/fixtures/messages.json';
+import { ClientService } from './client';
+
+const userId = 'test-user-id';
+const service = new ClientService(userId);
+
+beforeEach(async () => {
+ await initializeDB();
+
+ await clientDB.delete(users);
+
+ // 创建测试数据
+ await clientDB.transaction(async (tx) => {
+ await tx.insert(users).values({ id: userId });
+ });
+});
+
+describe('ImporterService', () => {
+ describe('import sessionGroups', () => {
+ it('should import session groups and return correct result', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessionGroups: [
+ { id: 'group1', name: 'Group 1', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ { id: 'group2', name: 'Group 2', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.sessionGroups!.added).toBe(2);
+ expect(result!.sessionGroups!.skips).toBe(0);
+ expect(result!.sessionGroups!.errors).toBe(0);
+
+ const groups = await clientDB.query.sessionGroups.findMany({
+ where: eq(sessionGroups.userId, userId),
+ });
+ expect(groups).toHaveLength(2);
+ });
+
+ it('should skip existing session groups and return correct result', async () => {
+ await clientDB
+ .insert(sessionGroups)
+ .values({ clientId: 'group1', name: 'Existing Group', userId })
+ .execute();
+
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessionGroups: [
+ { id: 'group1', name: 'Group 1', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ { id: 'group2', name: 'Group 2', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.sessionGroups!.added).toBe(1);
+ expect(result!.sessionGroups!.skips).toBe(1);
+ expect(result!.sessionGroups!.errors).toBe(0);
+ });
+ });
+
+ describe('import sessions', () => {
+ it('should import sessions and return correct result', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ {
+ id: 'session2',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 2',
+ },
+ },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.sessions!.added).toBe(2);
+ expect(result!.sessions!.skips).toBe(0);
+ expect(result!.sessions!.errors).toBe(0);
+
+ const importedSessions = await clientDB.query.sessions.findMany({
+ where: eq(sessions.userId, userId),
+ });
+ expect(importedSessions).toHaveLength(2);
+
+ const agentCount = await clientDB.query.agents.findMany({
+ where: eq(agents.userId, userId),
+ });
+
+ expect(agentCount.length).toBe(2);
+
+ const agentSessionCount = await clientDB.query.agentsToSessions.findMany();
+ expect(agentSessionCount.length).toBe(2);
+ });
+
+ it('should skip existing sessions and return correct result', async () => {
+ await clientDB.insert(sessions).values({ clientId: 'session1', userId }).execute();
+
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ {
+ id: 'session2',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 2',
+ },
+ },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.sessions!.added).toBe(1);
+ expect(result!.sessions!.skips).toBe(1);
+ expect(result!.sessions!.errors).toBe(0);
+ });
+
+ it('should associate imported sessions with session groups', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessionGroups: [
+ { id: 'group1', name: 'Group 1', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ { id: 'group2', name: 'Group 2', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ ],
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ group: 'group1',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ {
+ id: 'session2',
+ group: 'group2',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 2',
+ },
+ },
+ {
+ id: 'session3',
+ group: 'group4',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 3',
+ },
+ },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.sessionGroups!.added).toBe(2);
+ expect(result!.sessionGroups!.skips).toBe(0);
+
+ expect(result!.sessions!.added).toBe(3);
+ expect(result!.sessions!.skips).toBe(0);
+
+ // session 1 should be associated with group 1
+ const session1 = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.clientId, 'session1'),
+ with: { group: true },
+ });
+ expect(session1?.group).toBeDefined();
+
+ // session 3 should not have group
+ const session3 = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.clientId, 'session3'),
+ with: { group: true },
+ });
+ expect(session3?.group).toBeNull();
+ });
+
+ it('should create agents and associate them with imported sessions', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'Test Agent 1',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ {
+ id: 'session2',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'def',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'Test Agent 2',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 2',
+ },
+ },
+ ],
+ };
+
+ await service.importData(data);
+
+ // 验证是否为每个 session 创建了对应的 agent
+ const agentCount = await clientDB.query.agents.findMany({
+ where: eq(agents.userId, userId),
+ });
+ expect(agentCount).toHaveLength(2);
+
+ // 验证 agent 的属性是否正确设置
+ const agent1 = await clientDB.query.agents.findFirst({
+ where: eq(agents.systemRole, 'Test Agent 1'),
+ });
+ expect(agent1?.model).toBe('abc');
+
+ const agent2 = await clientDB.query.agents.findFirst({
+ where: eq(agents.systemRole, 'Test Agent 2'),
+ });
+ expect(agent2?.model).toBe('def');
+
+ // 验证 agentsToSessions 关联是否正确建立
+ const session1 = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.clientId, 'session1'),
+ });
+ const session1Agent = await clientDB.query.agentsToSessions.findFirst({
+ where: eq(agentsToSessions.sessionId, session1?.id!),
+ with: { agent: true },
+ });
+
+ expect((session1Agent?.agent as any).systemRole).toBe('Test Agent 1');
+
+ const session2 = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.clientId, 'session2'),
+ });
+ const session2Agent = await clientDB.query.agentsToSessions.findFirst({
+ where: eq(agentsToSessions.sessionId, session2?.id!),
+ with: { agent: true },
+ });
+
+ expect((session2Agent?.agent as any).systemRole).toBe('Test Agent 2');
+ });
+
+ it('should not create duplicate agents for existing sessions', async () => {
+ // 先导入一些 sessions
+ await service.importData({
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'Test Agent 1',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ ],
+ version: CURRENT_CONFIG_VERSION,
+ });
+
+ // 再次导入相同的 sessions
+ await service.importData({
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'Test Agent 1',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ ],
+ version: CURRENT_CONFIG_VERSION,
+ });
+
+ // 验证只创建了一个 agent
+ const agentCount = await clientDB.query.agents.findMany({
+ where: eq(agents.userId, userId),
+ });
+ expect(agentCount).toHaveLength(1);
+ });
+ });
+
+ describe('import topics', () => {
+ it('should import topics and return correct result', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ topics: [
+ {
+ id: 'topic1',
+ title: 'Topic 1',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ },
+ {
+ id: 'topic2',
+ title: 'Topic 2',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session2',
+ },
+ ],
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ {
+ id: 'session2',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 2',
+ },
+ },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.topics!.added).toBe(2);
+ expect(result!.topics!.skips).toBe(0);
+ expect(result!.topics!.errors).toBe(0);
+
+ const importedTopics = await clientDB.query.topics.findMany({
+ where: eq(topics.userId, userId),
+ });
+ expect(importedTopics).toHaveLength(2);
+ });
+
+ it('should skip existing topics and return correct result', async () => {
+ await clientDB
+ .insert(topics)
+ .values({ clientId: 'topic1', title: 'Existing Topic', userId })
+ .execute();
+
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ topics: [
+ { id: 'topic1', title: 'Topic 1', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ { id: 'topic2', title: 'Topic 2', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.topics!.added).toBe(1);
+ expect(result!.topics!.skips).toBe(1);
+ expect(result!.topics!.errors).toBe(0);
+ });
+
+ it('should associate imported topics with sessions', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ ],
+ topics: [
+ {
+ id: 'topic1',
+ title: 'Topic 1',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ },
+ { id: 'topic2', title: 'Topic 2', createdAt: 1715186011586, updatedAt: 1715186015053 },
+ ],
+ };
+
+ await service.importData(data);
+
+ // topic1 should be associated with session1
+ const [topic1] = await clientDB
+ .select({ sessionClientId: sessions.clientId })
+ .from(topics)
+ .where(eq(topics.clientId, 'topic1'))
+ .leftJoin(sessions, eq(topics.sessionId, sessions.id));
+
+ expect(topic1?.sessionClientId).toBe('session1');
+
+ // topic2 should not have session
+ const topic2 = await clientDB.query.topics.findFirst({
+ where: eq(topics.clientId, 'topic2'),
+ with: { session: true },
+ });
+ expect(topic2?.session).toBeNull();
+ });
+ });
+
+ describe('import messages', () => {
+ it('should import messages and return correct result', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ messages: [
+ {
+ id: 'msg1',
+ content: 'Message 1',
+ role: 'user',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ topicId: 'topic1',
+ },
+ {
+ id: 'msg2',
+ content: 'Message 2',
+ role: 'assistant',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ topicId: 'topic1',
+ parentId: 'msg1',
+ },
+ ],
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ ],
+ topics: [
+ {
+ id: 'topic1',
+ title: 'Topic 1',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.messages!.added).toBe(2);
+ expect(result!.messages!.skips).toBe(0);
+ expect(result!.messages!.errors).toBe(0);
+
+ const importedMessages = await clientDB.query.messages.findMany({
+ where: eq(messages.userId, userId),
+ });
+ expect(importedMessages).toHaveLength(2);
+ });
+
+ it('should skip existing messages and return correct result', async () => {
+ await clientDB
+ .insert(messages)
+ .values({
+ clientId: 'msg1',
+ content: 'Existing Message',
+ role: 'user',
+ userId,
+ })
+ .execute();
+
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ messages: [
+ {
+ id: 'msg1',
+ content: 'Message 1',
+ role: 'user',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ },
+ {
+ id: 'msg2',
+ content: 'Message 2',
+ role: 'assistant',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ },
+ ],
+ };
+
+ let result: ImportResults;
+ await service.importData(data, {
+ onSuccess: (res) => {
+ result = res;
+ },
+ });
+
+ expect(result!.messages!.added).toBe(1);
+ expect(result!.messages!.skips).toBe(1);
+ expect(result!.messages!.errors).toBe(0);
+ });
+
+ it('should associate imported messages with sessions and topics', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ sessions: [
+ {
+ id: 'session1',
+ createdAt: '2022-05-14T18:18:10.494Z',
+ updatedAt: '2023-01-01',
+ type: 'agent',
+ config: {
+ model: 'abc',
+ chatConfig: {} as any,
+ params: {},
+ systemRole: 'abc',
+ tts: {} as any,
+ },
+ meta: {
+ title: 'Session 1',
+ },
+ },
+ ],
+ topics: [
+ {
+ id: 'topic1',
+ title: 'Topic 1',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ },
+ ],
+ messages: [
+ {
+ id: 'msg1',
+ content: 'Message 1',
+ role: 'user',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ topicId: 'topic1',
+ },
+ {
+ id: 'msg2',
+ content: 'Message 2',
+ role: 'assistant',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ sessionId: 'session1',
+ topicId: 'topic1',
+ parentId: 'msg1',
+ },
+ {
+ id: 'msg3',
+ content: 'Message 3',
+ role: 'user',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ },
+ ],
+ };
+
+ await service.importData(data);
+
+ // msg1 and msg2 should be associated with session1 and topic1
+ const [msg1, msg2] = await clientDB.query.messages.findMany({
+ where: inArray(messages.clientId, ['msg1', 'msg2']),
+ with: {
+ session: true,
+ topic: true,
+ },
+ });
+
+ expect(msg1.session?.clientId).toBe('session1');
+ expect(msg1.topic?.clientId).toBe('topic1');
+ expect(msg2.session?.clientId).toBe('session1');
+ expect(msg2.topic?.clientId).toBe('topic1');
+
+ // msg3 should not have session and topic
+ const msg3 = await clientDB.query.messages.findFirst({
+ where: eq(messages.clientId, 'msg3'),
+ with: {
+ session: true,
+ topic: true,
+ },
+ });
+ expect(msg3?.session).toBeNull();
+ expect(msg3?.topic).toBeNull();
+ });
+
+ it('should set parentId for messages', async () => {
+ const data: ImporterEntryData = {
+ version: CURRENT_CONFIG_VERSION,
+ messages: [
+ {
+ id: 'msg1',
+ content: 'Message 1',
+ role: 'user',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ },
+ {
+ id: 'msg2',
+ content: 'Message 2',
+ role: 'assistant',
+ createdAt: 1715186011586,
+ updatedAt: 1715186015053,
+ parentId: 'msg1',
+ },
+ ],
+ };
+
+ await service.importData(data);
+
+ const msg2 = await clientDB.query.messages.findFirst({
+ where: eq(messages.clientId, 'msg2'),
+ with: { parent: true },
+ });
+
+ expect(msg2?.parent?.clientId).toBe('msg1');
+ });
+
+ it('should import parentId Success', () => {});
+ });
+
+ describe('real world examples', () => {
+ it('should import successfully', async () => {
+ let result: ImportResults;
+
+ await service.importData(
+ {
+ messages: [
+ {
+ role: 'user',
+ content: 'hello',
+ files: [],
+ sessionId: 'inbox',
+ topicId: '2wcF8yaS',
+ createdAt: 1714236590340,
+ id: 'DCG1G1EH',
+ updatedAt: 1714236590340,
+ extra: {},
+ },
+ {
+ role: 'assistant',
+ content: '...',
+ parentId: 'DCG1G1EH',
+ sessionId: 'inbox',
+ topicId: '2wcF8yaS',
+ createdAt: 1714236590441,
+ id: 'gY41w5vQ',
+ updatedAt: 1714236590518,
+ error: {
+ body: {
+ error: {
+ message: "model 'mixtral' not found, try pulling it first",
+ name: 'ResponseError',
+ status_code: 404,
+ },
+ provider: 'ollama',
+ },
+ message:
+ 'Error requesting Ollama service, please troubleshoot or retry based on the following information',
+ type: 'OllamaBizError',
+ },
+ extra: { fromModel: 'mixtral', fromProvider: 'ollama' },
+ },
+ {
+ role: 'user',
+ content: 'hello',
+ files: [],
+ sessionId: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589',
+ topicId: 'v38snJ0A',
+ createdAt: 1717080410895,
+ id: 'qOIxEGEB',
+ updatedAt: 1717080410895,
+ extra: {},
+ },
+ {
+ role: 'assistant',
+ content: '...',
+ parentId: 'qOIxEGEB',
+ sessionId: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589',
+ topicId: 'v38snJ0A',
+ createdAt: 1717080410970,
+ id: 'w28FcqY5',
+ updatedAt: 1717080411485,
+ error: {
+ body: { error: { errorType: 'NoOpenAIAPIKey' }, provider: 'openai' },
+ message: 'OpenAI API Key is empty, please add a custom OpenAI API Key',
+ type: 'NoOpenAIAPIKey',
+ },
+ extra: { fromModel: 'gpt-3.5-turbo', fromProvider: 'openai' },
+ },
+ ],
+ sessionGroups: [
+ {
+ name: 'Writter',
+ sort: 0,
+ createdAt: 1706114744425,
+ id: 'XlUbvOvL',
+ updatedAt: 1706114747468,
+ },
+ ],
+ sessions: [
+ {
+ config: {
+ model: 'gpt-3.5-turbo',
+ params: {
+ frequency_penalty: 0,
+ presence_penalty: 0,
+ temperature: 0.6,
+ top_p: 1,
+ },
+ plugins: [],
+ systemRole:
+ "You are a LobeChat technical operator 🍐🐊. You now need to write a developer's guide for LobeChat as a guide for them to develop LobeChat. This guide will include several sections, and you need to output the corresponding document content based on the user's input.\n\nHere is the technical introduction of LobeChat\n\n LobeChat is an AI conversation application built with the Next.js framework. It uses a series of technology stacks to implement various functions and features.\n\n\n ## Basic Technology Stack\n\n The core technology stack of LobeChat is as follows:\n\n - **Framework**: We chose [Next.js](https://nextjs.org/), a powerful React framework that provides key features such as server-side rendering, routing framework, and Router Handler for our project.\n - **Component Library**: We use [Ant Design (antd)](https://ant.design/) as the basic component library, and introduce [lobe-ui](https://github.com/lobehub/lobe-ui) as our business component library.\n - **State Management**: We use [zustand](https://github.com/pmndrs/zustand), a lightweight and easy-to-use state management library.\n - **Network Request**: We adopt [swr](https://swr.vercel.app/), a React Hooks library for data fetching.\n - **Routing**: We directly use the routing solution provided by [Next.js](https://nextjs.org/) itself.\n - **Internationalization**: We use [i18next](https://www.i18next.com/) to implement multi-language support for the application.\n - **Styling**: We use [antd-style](https://github.com/ant-design/antd-style), a CSS-in-JS library that is compatible with Ant Design.\n - **Unit Testing**: We use [vitest](https://github.com/vitejs/vitest) for unit testing.\n\n ## Folder Directory Structure\n\n The folder directory structure of LobeChat is as follows:\n\n \\`\\`\\`bash\n src\n ├── app # Main logic and state management related code of the application\n ├── components # Reusable UI components\n ├── config # Application configuration files, including client environment 
variables and server environment variables\n ├── const # Used to define constants, such as action types, route names, etc.\n ├── features # Function modules related to business functions, such as Agent settings, plugin development pop-ups, etc.\n ├── hooks # Custom utility Hooks reused throughout the application\n ├── layout # Layout components of the application, such as navigation bar, sidebar, etc.\n ├── locales # Language files for internationalization\n ├── services # Encapsulated backend service interfaces, such as HTTP requests\n ├── store # Zustand store for state management\n ├── types # TypeScript type definition files\n └── utils # Common utility functions\n \\`\\`\\`\n",
+ tts: {
+ showAllLocaleVoice: false,
+ sttLocale: 'auto',
+ ttsService: 'openai',
+ voice: { openai: 'alloy' },
+ },
+ chatConfig: {
+ autoCreateTopicThreshold: 2,
+ displayMode: 'chat',
+ enableAutoCreateTopic: true,
+ historyCount: 1,
+ },
+ },
+ group: 'XlUbvOvL',
+ meta: {
+ avatar: '📝',
+ description:
+ 'LobeChat is an AI conversation application built with the Next.js framework. I will help you write the development documentation for LobeChat.',
+ tags: [
+ 'Development Documentation',
+ 'Technical Introduction',
+ 'next-js',
+ 'react',
+ 'lobe-chat',
+ ],
+ title: 'LobeChat Technical Documentation Expert',
+ },
+ type: 'agent',
+ createdAt: '2024-01-24T16:43:12.164Z',
+ id: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589',
+ updatedAt: '2024-01-24T16:46:15.226Z',
+ pinned: false,
+ },
+ ],
+ topics: [
+ {
+ title: 'Default Topic',
+ sessionId: 'inbox',
+ createdAt: 1714236590531,
+ id: '2wcF8yaS',
+ updatedAt: 1714236590531,
+ },
+ {
+ title: 'Default Topic',
+ sessionId: 'a5fefc88-f6c1-44fb-9e98-3d366b1ed589',
+ createdAt: 1717080410825,
+ id: 'v38snJ0A',
+ updatedAt: 1717080410825,
+ },
+ ],
+ version: mockImportData.version,
+ },
+ { onSuccess: (res) => (result = res) },
+ );
+
+ expect(result!).toEqual({
+ sessionGroups: { added: 1, errors: 0, skips: 0 },
+ sessions: { added: 1, errors: 0, skips: 0 },
+ topics: { added: 2, errors: 0, skips: 0 },
+ messages: { added: 4, errors: 0, skips: 0 },
+ });
+ });
+
+ it('should import real world data', async () => {
+ let result: ImportResults;
+
+ await service.importData(
+ { ...(mockImportData.state as any), version: mockImportData.version },
+ {
+ onSuccess: (res) => {
+ result = res;
+ },
+ },
+ );
+
+ expect(result!).toEqual({
+ sessionGroups: { added: 2, errors: 0, skips: 0 },
+ sessions: { added: 15, errors: 0, skips: 0 },
+ topics: { added: 4, errors: 0, skips: 0 },
+ messages: { added: 32, errors: 0, skips: 0 },
+ });
+ });
+ });
+});
diff --git a/src/services/import/client.ts b/src/services/import/client.ts
index a148abd28d57..0dceca6f8fef 100644
--- a/src/services/import/client.ts
+++ b/src/services/import/client.ts
@@ -1,74 +1,34 @@
-import { MessageModel } from '@/database/_deprecated/models/message';
-import { SessionModel } from '@/database/_deprecated/models/session';
-import { SessionGroupModel } from '@/database/_deprecated/models/sessionGroup';
-import { TopicModel } from '@/database/_deprecated/models/topic';
-import { ImportResult, ImportResults } from '@/services/config';
+import { clientDB } from '@/database/client/db';
+import { DataImporterRepos } from '@/database/repositories/dataImporter';
+import { BaseClientService } from '@/services/baseClientService';
import { useUserStore } from '@/store/user';
import { ImportStage, ImporterEntryData, OnImportCallbacks } from '@/types/importer';
import { UserSettings } from '@/types/user/settings';
-export class ClientService {
+export class ClientService extends BaseClientService {
+ private get dataImporter(): DataImporterRepos {
+ return new DataImporterRepos(clientDB as any, this.userId);
+ }
+
importSettings = async (settings: UserSettings) => {
await useUserStore.getState().importAppSettings(settings);
};
- importData = async (
- config: ImporterEntryData,
- callbacks?: OnImportCallbacks,
- ): Promise => {
+ importData = async (data: ImporterEntryData, callbacks?: OnImportCallbacks) => {
callbacks?.onStageChange?.(ImportStage.Importing);
const time = Date.now();
-
- const { messages = [], sessionGroups = [], sessions = [], topics = [] } = config;
-
- let messageResult: ImportResult | undefined;
- let sessionResult: ImportResult | undefined;
- let sessionGroupResult: ImportResult | undefined;
- let topicResult: ImportResult | undefined;
-
- if (messages.length > 0) {
- const res = await MessageModel.batchCreate(messages as any);
- messageResult = this.mapImportResult(res);
- }
-
- if (sessionGroups.length > 0) {
- const res = await SessionGroupModel.batchCreate(sessionGroups as any);
- sessionGroupResult = this.mapImportResult(res);
- }
-
- if (topics.length > 0) {
- const res = await TopicModel.batchCreate(topics as any);
- topicResult = this.mapImportResult(res);
+ try {
+ const result = await this.dataImporter.importData(data);
+ const duration = Date.now() - time;
+
+ callbacks?.onStageChange?.(ImportStage.Success);
+ callbacks?.onSuccess?.(result, duration);
+ } catch (e) {
+ console.error(e);
+ callbacks?.onStageChange?.(ImportStage.Error);
+ const error = e as Error;
+
+ callbacks?.onError?.({ code: 'ImportError', httpStatus: 0, message: error.message });
}
-
- if (sessions.length > 0) {
- const data = await SessionModel.batchCreate(sessions as any);
- sessionResult = this.mapImportResult(data);
- }
-
- const result = {
- messages: messageResult,
- sessionGroups: sessionGroupResult,
- sessions: sessionResult,
- topics: topicResult,
- };
-
- const duration = Date.now() - time;
- callbacks?.onStageChange?.(ImportStage.Success);
- callbacks?.onSuccess?.(result, duration);
-
- return result;
- };
-
- private mapImportResult = (input: {
- added: number;
- errors?: Error[];
- skips: string[];
- }): ImportResult => {
- return {
- added: input.added,
- errors: input.errors?.length || 0,
- skips: input.skips.length,
- };
};
}
diff --git a/src/services/message/client.test.ts b/src/services/message/client.test.ts
index 867fda6c5b72..2372330aabf2 100644
--- a/src/services/message/client.test.ts
+++ b/src/services/message/client.test.ts
@@ -1,133 +1,155 @@
import dayjs from 'dayjs';
-import { Mock, describe, expect, it, vi } from 'vitest';
+import { and, eq } from 'drizzle-orm';
+import { Mock, afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
-import { CreateMessageParams, MessageModel } from '@/database/_deprecated/models/message';
+import { MessageModel } from '@/database/_deprecated/models/message';
+import { clientDB, initializeDB } from '@/database/client/db';
+import {
+ MessageItem,
+ files,
+ messagePlugins,
+ messageTTS,
+ messageTranslates,
+ messages,
+ sessions,
+ topics,
+ users,
+} from '@/database/schemas';
import {
ChatMessage,
ChatMessageError,
- ChatPluginPayload,
ChatTTS,
ChatTranslate,
+ CreateMessageParams,
} from '@/types/message';
import { ClientService } from './client';
-const messageService = new ClientService();
-
-// Mock the MessageModel
-vi.mock('@/database/_deprecated/models/message', () => {
- return {
- MessageModel: {
- create: vi.fn(),
- batchCreate: vi.fn(),
- count: vi.fn(),
- query: vi.fn(),
- delete: vi.fn(),
- bulkDelete: vi.fn(),
- queryBySessionId: vi.fn(),
- update: vi.fn(),
- updatePlugin: vi.fn(),
- batchDelete: vi.fn(),
- clearTable: vi.fn(),
- batchUpdate: vi.fn(),
- queryAll: vi.fn(),
- updatePluginState: vi.fn(),
- },
- };
+const userId = 'message-db';
+const sessionId = '1';
+const topicId = 'topic-id';
+
+// Mock data
+const mockMessageId = 'mock-message-id';
+const mockMessage = {
+ id: mockMessageId,
+ content: 'Mock message content',
+ sessionId,
+ role: 'user',
+} as ChatMessage;
+
+const mockMessages = [mockMessage];
+
+beforeEach(async () => {
+ await initializeDB();
+
+ // 在每个测试用例之前,清空表
+ await clientDB.transaction(async (trx) => {
+ await trx.delete(users);
+ await trx.insert(users).values([{ id: userId }, { id: '456' }]);
+
+ await trx.insert(sessions).values([{ id: sessionId, userId }]);
+ await trx.insert(topics).values([{ id: topicId, sessionId, userId }]);
+ await trx.insert(files).values({
+ id: 'f1',
+ userId: userId,
+ url: 'abc',
+ name: 'file-1',
+ fileType: 'image/png',
+ size: 1000,
+ });
+ });
});
-describe('MessageClientService', () => {
- // Mock data
- const mockMessageId = 'mock-message-id';
- const mockMessage = {
- id: mockMessageId,
- content: 'Mock message content',
- sessionId: 'mock-session-id',
- createdAt: 100,
- updatedAt: 100,
- role: 'user',
- // ... other properties
- } as ChatMessage;
- const mockMessages = [mockMessage];
-
- beforeEach(() => {
- // Reset all mocks before running each test case
- vi.resetAllMocks();
- });
+afterEach(async () => {
+ // 在每个测试用例之后,清空表
+ await clientDB.delete(users);
+});
+
+const messageService = new ClientService(userId);
+describe('MessageClientService', () => {
describe('create', () => {
it('should create a message and return its id', async () => {
// Setup
- const createParams = {
+ const createParams: CreateMessageParams = {
content: 'New message content',
- sessionId: '1',
- // ... other properties
- } as CreateMessageParams;
- (MessageModel.create as Mock).mockResolvedValue({ id: mockMessageId });
+ sessionId,
+ role: 'user',
+ };
// Execute
const messageId = await messageService.createMessage(createParams);
// Assert
- expect(MessageModel.create).toHaveBeenCalledWith(createParams);
- expect(messageId).toBe(mockMessageId);
+ expect(messageId).toMatch(/^msg_/);
});
});
describe('batchCreate', () => {
it('should batch create messages', async () => {
- // Setup
- (MessageModel.batchCreate as Mock).mockResolvedValue(mockMessages);
-
// Execute
- const result = await messageService.batchCreateMessages(mockMessages);
+ await messageService.batchCreateMessages([
+ {
+ content: 'Mock message content',
+ sessionId,
+ role: 'user',
+ },
+ {
+ content: 'Mock message content',
+ sessionId,
+ role: 'user',
+ },
+ ] as MessageItem[]);
+ const count = await clientDB.$count(messages);
// Assert
- expect(MessageModel.batchCreate).toHaveBeenCalledWith(mockMessages);
- expect(result).toBe(mockMessages);
+ expect(count).toBe(2);
});
});
describe('removeMessage', () => {
it('should remove a message by id', async () => {
- // Setup
- (MessageModel.delete as Mock).mockResolvedValue(true);
-
// Execute
- const result = await messageService.removeMessage(mockMessageId);
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
+ await messageService.removeMessage(mockMessageId);
// Assert
- expect(MessageModel.delete).toHaveBeenCalledWith(mockMessageId);
- expect(result).toBe(true);
+ const count = await clientDB.$count(messages);
+
+ expect(count).toBe(0);
});
});
describe('removeMessages', () => {
it('should remove a message by id', async () => {
// Setup
- (MessageModel.bulkDelete as Mock).mockResolvedValue(true);
+ await clientDB.insert(messages).values([
+ { id: mockMessageId, role: 'user', userId },
+ { role: 'assistant', userId },
+ ]);
// Execute
- const result = await messageService.removeMessages([mockMessageId]);
+ await messageService.removeMessages([mockMessageId]);
// Assert
- expect(MessageModel.bulkDelete).toHaveBeenCalledWith([mockMessageId]);
- expect(result).toBe(true);
+ const count = await clientDB.$count(messages);
+
+ expect(count).toBe(1);
});
});
describe('getMessages', () => {
it('should retrieve messages by sessionId and topicId', async () => {
// Setup
- const sessionId = 'session-id';
- const topicId = 'topic-id';
- (MessageModel.query as Mock).mockResolvedValue(mockMessages);
+ await clientDB
+ .insert(messages)
+ .values({ id: mockMessageId, sessionId, topicId, role: 'user', userId });
// Execute
- const messages = await messageService.getMessages(sessionId, topicId);
+ const data = await messageService.getMessages(sessionId, topicId);
// Assert
- expect(MessageModel.query).toHaveBeenCalledWith({ sessionId, topicId });
- expect(messages).toEqual(mockMessages.map((i) => ({ ...i, imageList: [] })));
+ expect(data[0]).toMatchObject({ id: mockMessageId, role: 'user' });
});
});
@@ -135,14 +157,21 @@ describe('MessageClientService', () => {
it('should retrieve all messages in a session', async () => {
// Setup
const sessionId = 'session-id';
- (MessageModel.queryBySessionId as Mock).mockResolvedValue(mockMessages);
+ await clientDB.insert(sessions).values([
+ { id: 'bbb', userId },
+ { id: sessionId, userId },
+ ]);
+ await clientDB.insert(messages).values([
+ { sessionId, topicId, role: 'user', userId },
+ { sessionId, topicId, role: 'assistant', userId },
+ { sessionId: 'bbb', topicId, role: 'assistant', userId },
+ ]);
// Execute
- const messages = await messageService.getAllMessagesInSession(sessionId);
+ const data = await messageService.getAllMessagesInSession(sessionId);
// Assert
- expect(MessageModel.queryBySessionId).toHaveBeenCalledWith(sessionId);
- expect(messages).toBe(mockMessages);
+ expect(data.length).toBe(2);
});
});
@@ -150,77 +179,85 @@ describe('MessageClientService', () => {
it('should batch remove messages by assistantId and topicId', async () => {
// Setup
const assistantId = 'assistant-id';
- const topicId = 'topic-id';
- (MessageModel.batchDelete as Mock).mockResolvedValue(true);
+ const sessionId = 'session-id';
+ await clientDB.insert(sessions).values([
+ { id: 'bbb', userId },
+ { id: sessionId, userId },
+ ]);
+ await clientDB.insert(messages).values([
+ { sessionId, topicId, role: 'user', userId },
+ { sessionId, topicId, role: 'assistant', userId },
+ { sessionId: 'bbb', topicId, role: 'assistant', userId },
+ ]);
// Execute
- const result = await messageService.removeMessagesByAssistant(assistantId, topicId);
+ await messageService.removeMessagesByAssistant(sessionId, topicId);
// Assert
- expect(MessageModel.batchDelete).toHaveBeenCalledWith(assistantId, topicId);
- expect(result).toBe(true);
+ const result = await clientDB.query.messages.findMany({
+ where: and(eq(messages.sessionId, sessionId), eq(messages.topicId, topicId)),
+ });
+
+ expect(result.length).toBe(0);
});
});
describe('clearAllMessage', () => {
it('should clear all messages from the table', async () => {
// Setup
- (MessageModel.clearTable as Mock).mockResolvedValue(true);
+ await clientDB.insert(users).values({ id: 'another' });
+ await clientDB.insert(messages).values([
+ { id: mockMessageId, role: 'user', userId },
+ { role: 'user', userId: 'another' },
+ ]);
// Execute
- const result = await messageService.removeAllMessages();
+ await messageService.removeAllMessages();
// Assert
- expect(MessageModel.clearTable).toHaveBeenCalled();
- expect(result).toBe(true);
- });
- });
-
- describe('bindMessagesToTopic', () => {
- it('should batch update messages to bind them to a topic', async () => {
- // Setup
- const topicId = 'topic-id';
- const messageIds = [mockMessageId];
- (MessageModel.batchUpdate as Mock).mockResolvedValue(mockMessages);
-
- // Execute
- const result = await messageService.bindMessagesToTopic(topicId, messageIds);
-
- // Assert
- expect(MessageModel.batchUpdate).toHaveBeenCalledWith(messageIds, { topicId });
- expect(result).toBe(mockMessages);
+ const result = await clientDB.query.messages.findMany({
+ where: eq(messages.userId, userId),
+ });
+ expect(result.length).toBe(0);
});
});
describe('getAllMessages', () => {
it('should retrieve all messages', async () => {
- // Setup
- (MessageModel.queryAll as Mock).mockResolvedValue(mockMessages);
+ await clientDB.insert(messages).values([
+ { sessionId, topicId, content: '1', role: 'user', userId },
+ { sessionId, topicId, content: '2', role: 'assistant', userId },
+ ]);
// Execute
- const messages = await messageService.getAllMessages();
+ const data = await messageService.getAllMessages();
// Assert
- expect(MessageModel.queryAll).toHaveBeenCalled();
- expect(messages).toBe(mockMessages);
+ expect(data).toMatchObject([
+ { sessionId, topicId, content: '1', role: 'user', userId },
+ { sessionId, topicId, content: '2', role: 'assistant', userId },
+ ]);
});
});
describe('updateMessageError', () => {
it('should update the error field of a message', async () => {
// Setup
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
const newError = {
type: 'InvalidProviderAPIKey',
message: 'Error occurred',
} as ChatMessageError;
- (MessageModel.update as Mock).mockResolvedValue({ ...mockMessage, error: newError });
// Execute
- const result = await messageService.updateMessageError(mockMessageId, newError);
+ await messageService.updateMessageError(mockMessageId, newError);
// Assert
- expect(MessageModel.update).toHaveBeenCalledWith(mockMessageId, { error: newError });
- expect(result).toEqual({ ...mockMessage, error: newError });
+ const result = await clientDB.query.messages.findFirst({
+ where: eq(messages.id, mockMessageId),
+ });
+
+ expect(result!.error).toEqual(newError);
});
});
@@ -248,88 +285,85 @@ describe('MessageClientService', () => {
describe('updateMessagePluginState', () => {
it('should update the plugin state of a message', async () => {
// Setup
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
+ await clientDB.insert(messagePlugins).values({ id: mockMessageId });
const key = 'stateKey';
const value = 'stateValue';
const newPluginState = { [key]: value };
- (MessageModel.updatePluginState as Mock).mockResolvedValue({
- ...mockMessage,
- pluginState: newPluginState,
- });
// Execute
- const result = await messageService.updateMessagePluginState(mockMessageId, { key: value });
+ await messageService.updateMessagePluginState(mockMessageId, { stateKey: value });
// Assert
- expect(MessageModel.updatePluginState).toHaveBeenCalledWith(mockMessageId, { key: value });
- expect(result).toEqual({ ...mockMessage, pluginState: newPluginState });
+ const result = await clientDB.query.messagePlugins.findFirst({
+ where: eq(messagePlugins.id, mockMessageId),
+ });
+ expect(result!.state).toEqual(newPluginState);
});
});
describe('updateMessagePluginArguments', () => {
it('should update the plugin arguments object of a message', async () => {
// Setup
- const key = 'stateKey';
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
+ await clientDB.insert(messagePlugins).values({ id: mockMessageId });
const value = 'stateValue';
- (MessageModel.updatePlugin as Mock).mockResolvedValue({});
// Execute
await messageService.updateMessagePluginArguments(mockMessageId, { key: value });
// Assert
- expect(MessageModel.updatePlugin).toHaveBeenCalledWith(mockMessageId, {
- arguments: '{"key":"stateValue"}',
+ const result = await clientDB.query.messagePlugins.findFirst({
+        where: eq(messagePlugins.id, mockMessageId),
});
+ expect(result).toMatchObject({ arguments: '{"key":"stateValue"}' });
});
it('should update the plugin arguments string of a message', async () => {
// Setup
- const key = 'stateKey';
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
+ await clientDB.insert(messagePlugins).values({ id: mockMessageId });
const value = 'stateValue';
- (MessageModel.updatePlugin as Mock).mockResolvedValue({});
-
// Execute
await messageService.updateMessagePluginArguments(
mockMessageId,
- JSON.stringify({ key: value }),
+ JSON.stringify({ abc: value }),
);
// Assert
- expect(MessageModel.updatePlugin).toHaveBeenCalledWith(mockMessageId, {
- arguments: '{"key":"stateValue"}',
+ const result = await clientDB.query.messagePlugins.findFirst({
+        where: eq(messagePlugins.id, mockMessageId),
});
+ expect(result).toMatchObject({ arguments: '{"abc":"stateValue"}' });
});
});
describe('countMessages', () => {
it('should count the total number of messages', async () => {
// Setup
- const mockCount = 10;
- (MessageModel.count as Mock).mockResolvedValue(mockCount);
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
// Execute
const count = await messageService.countMessages();
// Assert
- expect(MessageModel.count).toHaveBeenCalled();
- expect(count).toBe(mockCount);
+ expect(count).toBe(1);
});
});
describe('countTodayMessages', () => {
it('should count the number of messages created today', async () => {
// Setup
- const today = dayjs().format('YYYY-MM-DD');
const mockMessages = [
- { ...mockMessage, createdAt: today },
- { ...mockMessage, createdAt: today },
- { ...mockMessage, createdAt: '2023-01-01' },
+ { ...mockMessage, id: undefined, createdAt: new Date(), userId },
+ { ...mockMessage, id: undefined, createdAt: new Date(), userId },
+ { ...mockMessage, id: undefined, createdAt: new Date('2023-01-01'), userId },
];
- (MessageModel.queryAll as Mock).mockResolvedValue(mockMessages);
+ await clientDB.insert(messages).values(mockMessages);
// Execute
const count = await messageService.countTodayMessages();
// Assert
- expect(MessageModel.queryAll).toHaveBeenCalled();
expect(count).toBe(2);
});
});
@@ -337,45 +371,46 @@ describe('MessageClientService', () => {
describe('updateMessageTTS', () => {
it('should update the TTS field of a message', async () => {
// Setup
- const newTTS: ChatTTS = {
- contentMd5: 'abc',
- file: 'file-abc',
- };
-
- (MessageModel.update as Mock).mockResolvedValue({ ...mockMessage, tts: newTTS });
+ await clientDB
+ .insert(files)
+ .values({ id: 'file-abc', fileType: 'text', name: 'abc', url: 'abc', size: 100, userId });
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
+ const newTTS: ChatTTS = { contentMd5: 'abc', file: 'file-abc' };
// Execute
- const result = await messageService.updateMessageTTS(mockMessageId, newTTS);
+ await messageService.updateMessageTTS(mockMessageId, newTTS);
// Assert
- expect(MessageModel.update).toHaveBeenCalledWith(mockMessageId, { tts: newTTS });
- expect(result).toEqual({ ...mockMessage, tts: newTTS });
+ const result = await clientDB.query.messageTTS.findFirst({
+ where: eq(messageTTS.id, mockMessageId),
+ });
+
+ expect(result).toMatchObject({ contentMd5: 'abc', fileId: 'file-abc', id: mockMessageId });
});
});
describe('updateMessageTranslate', () => {
it('should update the translate field of a message', async () => {
// Setup
- const newTranslate: ChatTranslate = {
- content: 'Translated text',
- to: 'es',
- };
-
- (MessageModel.update as Mock).mockResolvedValue({ ...mockMessage, translate: newTranslate });
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
+ const newTranslate: ChatTranslate = { content: 'Translated text', to: 'es' };
// Execute
- const result = await messageService.updateMessageTranslate(mockMessageId, newTranslate);
+ await messageService.updateMessageTranslate(mockMessageId, newTranslate);
// Assert
- expect(MessageModel.update).toHaveBeenCalledWith(mockMessageId, { translate: newTranslate });
- expect(result).toEqual({ ...mockMessage, translate: newTranslate });
+ const result = await clientDB.query.messageTranslates.findFirst({
+ where: eq(messageTranslates.id, mockMessageId),
+ });
+
+ expect(result).toMatchObject(newTranslate);
});
});
describe('hasMessages', () => {
it('should return true if there are messages', async () => {
// Setup
- (MessageModel.count as Mock).mockResolvedValue(1);
+ await clientDB.insert(messages).values({ id: mockMessageId, role: 'user', userId });
// Execute
const result = await messageService.hasMessages();
@@ -385,9 +420,6 @@ describe('MessageClientService', () => {
});
it('should return false if there are no messages', async () => {
- // Setup
- (MessageModel.count as Mock).mockResolvedValue(0);
-
// Execute
const result = await messageService.hasMessages();
diff --git a/src/services/message/client.ts b/src/services/message/client.ts
index d51d6d72a1a4..f0ed468a7e67 100644
--- a/src/services/message/client.ts
+++ b/src/services/message/client.ts
@@ -1,10 +1,10 @@
import dayjs from 'dayjs';
-import { FileModel } from '@/database/_deprecated/models/file';
-import { MessageModel } from '@/database/_deprecated/models/message';
-import { DB_Message } from '@/database/_deprecated/schemas/message';
+import { clientDB } from '@/database/client/db';
+import { MessageItem } from '@/database/schemas';
+import { MessageModel } from '@/database/server/models/message';
+import { BaseClientService } from '@/services/baseClientService';
import {
- ChatFileItem,
ChatMessage,
ChatMessageError,
ChatTTS,
@@ -14,102 +14,84 @@ import {
import { IMessageService } from './type';
-export class ClientService implements IMessageService {
+export class ClientService extends BaseClientService implements IMessageService {
+ private get messageModel(): MessageModel {
+ return new MessageModel(clientDB as any, this.userId);
+ }
+
async createMessage(data: CreateMessageParams) {
- const { id } = await MessageModel.create(data);
+ const { id } = await this.messageModel.create(data);
return id;
}
- async batchCreateMessages(messages: ChatMessage[]) {
- return MessageModel.batchCreate(messages);
+ async batchCreateMessages(messages: MessageItem[]) {
+ return this.messageModel.batchCreate(messages);
}
   async getMessages(sessionId: string, topicId?: string): Promise<ChatMessage[]> {
- const messages = await MessageModel.query({ sessionId, topicId });
-
- const fileList = (await Promise.all(
- messages
- .flatMap((item) => item.files)
- .filter(Boolean)
- .map(async (id) => FileModel.findById(id!)),
- )) as ChatFileItem[];
-
- return messages.map((item) => ({
- ...item,
- imageList: fileList
- .filter((file) => item.files?.includes(file.id) && file.fileType.startsWith('image'))
- .map((file) => ({
- alt: file.name,
- id: file.id,
- url: file.url,
- })),
- }));
+ return this.messageModel.query({ sessionId, topicId });
}
async getAllMessages() {
- return MessageModel.queryAll();
+ return this.messageModel.queryAll();
}
async countMessages() {
- return MessageModel.count();
+ return this.messageModel.count();
}
async countTodayMessages() {
- const topics = await MessageModel.queryAll();
+ const topics = await this.messageModel.queryAll();
return topics.filter(
(item) => dayjs(item.createdAt).format('YYYY-MM-DD') === dayjs().format('YYYY-MM-DD'),
).length;
}
async getAllMessagesInSession(sessionId: string) {
- return MessageModel.queryBySessionId(sessionId);
+ return this.messageModel.queryBySessionId(sessionId);
}
async updateMessageError(id: string, error: ChatMessageError) {
- return MessageModel.update(id, { error });
+ return this.messageModel.update(id, { error });
}
- async updateMessage(id: string, message: Partial) {
- return MessageModel.update(id, message);
+  async updateMessage(id: string, message: Partial<MessageItem>) {
+ return this.messageModel.update(id, message);
}
   async updateMessageTTS(id: string, tts: Partial<ChatTTS> | false) {
- return MessageModel.update(id, { tts });
+ return this.messageModel.updateTTS(id, tts as any);
}
   async updateMessageTranslate(id: string, translate: Partial<ChatTranslate> | false) {
- return MessageModel.update(id, { translate });
+ return this.messageModel.updateTranslate(id, translate as any);
}
   async updateMessagePluginState(id: string, value: Record<string, any>) {
- return MessageModel.updatePluginState(id, value);
+ return this.messageModel.updatePluginState(id, value);
}
   async updateMessagePluginArguments(id: string, value: string | Record<string, any>) {
const args = typeof value === 'string' ? value : JSON.stringify(value);
- return MessageModel.updatePlugin(id, { arguments: args });
- }
-
- async bindMessagesToTopic(topicId: string, messageIds: string[]) {
- return MessageModel.batchUpdate(messageIds, { topicId });
+ return this.messageModel.updateMessagePlugin(id, { arguments: args });
}
async removeMessage(id: string) {
- return MessageModel.delete(id);
+ return this.messageModel.deleteMessage(id);
}
async removeMessages(ids: string[]) {
- return MessageModel.bulkDelete(ids);
+ return this.messageModel.deleteMessages(ids);
}
async removeMessagesByAssistant(assistantId: string, topicId?: string) {
- return MessageModel.batchDelete(assistantId, topicId);
+ return this.messageModel.deleteMessagesBySession(assistantId, topicId);
}
async removeAllMessages() {
- return MessageModel.clearTable();
+ return this.messageModel.deleteAllMessages();
}
async hasMessages() {
diff --git a/src/services/message/index.test.ts b/src/services/message/index.test.ts
deleted file mode 100644
index 625261f53c90..000000000000
--- a/src/services/message/index.test.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import { Mock, describe, expect, it, vi } from 'vitest';
-
-import { CreateMessageParams, MessageModel } from '@/database/_deprecated/models/message';
-import { ChatMessage, ChatMessageError, ChatPluginPayload } from '@/types/message';
-
-import { messageService } from './index';
-
-// Mock the MessageModel
-vi.mock('@/database/_deprecated/models/message', () => {
- return {
- MessageModel: {
- count: vi.fn(),
- },
- };
-});
-
-describe('MessageService', () => {
- beforeEach(() => {
- // Reset all mocks before running each test case
- vi.resetAllMocks();
- });
-
- describe('hasMessages', () => {
- it('should return true if there are messages', async () => {
- // Setup
- (MessageModel.count as Mock).mockResolvedValue(1);
-
- // Execute
- const hasMessages = await messageService.hasMessages();
-
- // Assert
- expect(MessageModel.count).toHaveBeenCalled();
- expect(hasMessages).toBe(true);
- });
-
- it('should return false if there are no messages', async () => {
- // Setup
- (MessageModel.count as Mock).mockResolvedValue(0);
-
- // Execute
- const hasMessages = await messageService.hasMessages();
-
- // Assert
- expect(MessageModel.count).toHaveBeenCalled();
- expect(hasMessages).toBe(false);
- });
- });
-});
diff --git a/src/services/message/server.ts b/src/services/message/server.ts
index 6562b4ec3e25..6860ede39df9 100644
--- a/src/services/message/server.ts
+++ b/src/services/message/server.ts
@@ -1,5 +1,6 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { INBOX_SESSION_ID } from '@/const/session';
+import { MessageItem } from '@/database/schemas';
import { lambdaClient } from '@/libs/trpc/client';
import {
ChatMessage,
@@ -19,7 +20,7 @@ export class ServerService implements IMessageService {
});
}
- batchCreateMessages(messages: ChatMessage[]): Promise {
+ batchCreateMessages(messages: MessageItem[]): Promise {
return lambdaClient.message.batchCreateMessages.mutate(messages);
}
@@ -33,6 +34,7 @@ export class ServerService implements IMessageService {
getAllMessages(): Promise {
return lambdaClient.message.getAllMessages.query();
}
+
getAllMessagesInSession(sessionId: string): Promise {
return lambdaClient.message.getAllMessagesInSession.query({
sessionId: this.toDbSessionId(sessionId),
@@ -79,10 +81,6 @@ export class ServerService implements IMessageService {
return lambdaClient.message.updatePluginState.mutate({ id, value });
}
- bindMessagesToTopic(_topicId: string, _messageIds: string[]): Promise {
- throw new Error('Method not implemented.');
- }
-
removeMessage(id: string): Promise {
return lambdaClient.message.removeMessage.mutate({ id });
}
diff --git a/src/services/message/type.ts b/src/services/message/type.ts
index 2929620ad128..4ac08fd54c17 100644
--- a/src/services/message/type.ts
+++ b/src/services/message/type.ts
@@ -1,4 +1,4 @@
-import { DB_Message } from '@/database/_deprecated/schemas/message';
+import { MessageItem } from '@/database/schemas';
import {
ChatMessage,
ChatMessageError,
@@ -11,7 +11,7 @@ import {
export interface IMessageService {
createMessage(data: CreateMessageParams): Promise;
- batchCreateMessages(messages: ChatMessage[]): Promise;
+ batchCreateMessages(messages: MessageItem[]): Promise;
getMessages(sessionId: string, topicId?: string): Promise;
getAllMessages(): Promise;
@@ -20,11 +20,10 @@ export interface IMessageService {
countTodayMessages(): Promise;
updateMessageError(id: string, error: ChatMessageError): Promise;
- updateMessage(id: string, message: Partial): Promise;
+ updateMessage(id: string, message: Partial): Promise;
updateMessageTTS(id: string, tts: Partial | false): Promise;
updateMessageTranslate(id: string, translate: Partial | false): Promise;
updateMessagePluginState(id: string, value: Record): Promise;
- bindMessagesToTopic(topicId: string, messageIds: string[]): Promise;
removeMessage(id: string): Promise;
removeMessages(ids: string[]): Promise;
diff --git a/src/services/plugin/client.test.ts b/src/services/plugin/client.test.ts
index e2b6ccc66822..4f4c45df3aca 100644
--- a/src/services/plugin/client.test.ts
+++ b/src/services/plugin/client.test.ts
@@ -1,30 +1,29 @@
import { LobeChatPluginManifest } from '@lobehub/chat-plugin-sdk';
-import { beforeEach, describe, expect, it, vi } from 'vitest';
+import { eq } from 'drizzle-orm';
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
-import { PluginModel } from '@/database/_deprecated/models/plugin';
-import { DB_Plugin } from '@/database/_deprecated/schemas/plugin';
+import { clientDB, initializeDB } from '@/database/client/db';
+import { installedPlugins, users } from '@/database/schemas';
import { LobeTool } from '@/types/tool';
import { LobeToolCustomPlugin } from '@/types/tool/plugin';
import { ClientService } from './client';
import { InstallPluginParams } from './type';
-const pluginService = new ClientService();
-
// Mocking modules and functions
-vi.mock('@/database/_deprecated/models/plugin', () => ({
- PluginModel: {
- getList: vi.fn(),
- create: vi.fn(),
- delete: vi.fn(),
- update: vi.fn(),
- clear: vi.fn(),
- },
-}));
-
-beforeEach(() => {
- vi.resetAllMocks();
+const userId = 'message-db';
+const pluginService = new ClientService(userId);
+
+// Mock data
+beforeEach(async () => {
+ await initializeDB();
+
+ // 在每个测试用例之前,重置表数据
+ await clientDB.transaction(async (trx) => {
+ await trx.delete(users);
+ await trx.insert(users).values([{ id: userId }, { id: '456' }]);
+ });
});
describe('PluginService', () => {
@@ -32,18 +31,19 @@ describe('PluginService', () => {
it('should install a plugin', async () => {
// Arrange
const fakePlugin = {
- identifier: 'test-plugin',
+ identifier: 'test-plugin-d',
manifest: { name: 'TestPlugin', version: '1.0.0' } as unknown as LobeChatPluginManifest,
type: 'plugin',
} as InstallPluginParams;
- vi.mocked(PluginModel.create).mockResolvedValue(fakePlugin);
// Act
- const installedPlugin = await pluginService.installPlugin(fakePlugin);
+ await pluginService.installPlugin(fakePlugin);
// Assert
- expect(PluginModel.create).toHaveBeenCalledWith(fakePlugin);
- expect(installedPlugin).toEqual(fakePlugin);
+ const result = await clientDB.query.installedPlugins.findFirst({
+ where: eq(installedPlugins.identifier, fakePlugin.identifier),
+ });
+ expect(result).toMatchObject(fakePlugin);
});
});
@@ -51,14 +51,14 @@ describe('PluginService', () => {
it('should return a list of installed plugins', async () => {
// Arrange
const fakePlugins = [{ identifier: 'test-plugin', type: 'plugin' }] as LobeTool[];
- vi.mocked(PluginModel.getList).mockResolvedValue(fakePlugins as DB_Plugin[]);
-
+ await clientDB
+ .insert(installedPlugins)
+ .values([{ identifier: 'test-plugin', type: 'plugin', userId }]);
// Act
- const installedPlugins = await pluginService.getInstalledPlugins();
+ const data = await pluginService.getInstalledPlugins();
// Assert
- expect(PluginModel.getList).toHaveBeenCalled();
- expect(installedPlugins).toEqual(fakePlugins);
+ expect(data).toMatchObject(fakePlugins);
});
});
@@ -66,13 +66,15 @@ describe('PluginService', () => {
it('should uninstall a plugin', async () => {
// Arrange
const identifier = 'test-plugin';
- vi.mocked(PluginModel.delete).mockResolvedValue();
+ await clientDB.insert(installedPlugins).values([{ identifier, type: 'plugin', userId }]);
// Act
- const result = await pluginService.uninstallPlugin(identifier);
+ await pluginService.uninstallPlugin(identifier);
// Assert
- expect(PluginModel.delete).toHaveBeenCalledWith(identifier);
+ const result = await clientDB.query.installedPlugins.findFirst({
+ where: eq(installedPlugins.identifier, identifier),
+ });
expect(result).toBe(undefined);
});
});
@@ -81,67 +83,74 @@ describe('PluginService', () => {
it('should create a custom plugin', async () => {
// Arrange
const customPlugin = {
- identifier: 'custom-plugin',
+ identifier: 'custom-plugin-x',
manifest: {},
type: 'customPlugin',
} as LobeToolCustomPlugin;
- vi.mocked(PluginModel.create).mockResolvedValue(customPlugin);
// Act
- const result = await pluginService.createCustomPlugin(customPlugin);
+ await pluginService.createCustomPlugin(customPlugin);
// Assert
- expect(PluginModel.create).toHaveBeenCalledWith({
- ...customPlugin,
- type: 'customPlugin',
+ const result = await clientDB.query.installedPlugins.findFirst({
+ where: eq(installedPlugins.identifier, customPlugin.identifier),
});
- expect(result).toEqual(customPlugin);
+ expect(result).toMatchObject(customPlugin);
});
});
describe('updatePlugin', () => {
it('should update a plugin', async () => {
// Arrange
- const id = 'plugin-id';
- const value = { settings: { ab: '1' } } as unknown as LobeToolCustomPlugin;
- vi.mocked(PluginModel.update).mockResolvedValue(1);
+ const identifier = 'plugin-id';
+ const value = { customParams: { ab: '1' } } as unknown as LobeToolCustomPlugin;
+ await clientDB.insert(installedPlugins).values([{ identifier, type: 'plugin', userId }]);
// Act
- const result = await pluginService.updatePlugin(id, value);
+ await pluginService.updatePlugin(identifier, value);
// Assert
- expect(PluginModel.update).toHaveBeenCalledWith(id, value);
- expect(result).toEqual(undefined);
+ const result = await clientDB.query.installedPlugins.findFirst({
+ where: eq(installedPlugins.identifier, identifier),
+ });
+ expect(result).toMatchObject(value);
});
});
describe('updatePluginManifest', () => {
it('should update a plugin manifest', async () => {
// Arrange
- const id = 'plugin-id';
+ const identifier = 'plugin-id';
const manifest = { name: 'NewPluginManifest' } as unknown as LobeChatPluginManifest;
- vi.mocked(PluginModel.update).mockResolvedValue(1);
+ await clientDB.insert(installedPlugins).values([{ identifier, type: 'plugin', userId }]);
// Act
- const result = await pluginService.updatePluginManifest(id, manifest);
+ await pluginService.updatePluginManifest(identifier, manifest);
// Assert
- expect(PluginModel.update).toHaveBeenCalledWith(id, { manifest });
- expect(result).toEqual(undefined);
+ const result = await clientDB.query.installedPlugins.findFirst({
+ where: eq(installedPlugins.identifier, identifier),
+ });
+ expect(result).toMatchObject({ manifest });
});
});
describe('removeAllPlugins', () => {
it('should remove all plugins', async () => {
// Arrange
- vi.mocked(PluginModel.clear).mockResolvedValue(undefined);
+ await clientDB.insert(installedPlugins).values([
+ { identifier: '123', type: 'plugin', userId },
+ { identifier: '234', type: 'plugin', userId },
+ ]);
// Act
- const result = await pluginService.removeAllPlugins();
+ await pluginService.removeAllPlugins();
// Assert
- expect(PluginModel.clear).toHaveBeenCalled();
- expect(result).toBe(undefined);
+ const result = await clientDB.query.installedPlugins.findMany({
+ where: eq(installedPlugins.userId, userId),
+ });
+ expect(result.length).toEqual(0);
});
});
@@ -150,13 +159,17 @@ describe('PluginService', () => {
// Arrange
const id = 'plugin-id';
const settings = { color: 'blue' };
+ await clientDB.insert(installedPlugins).values([{ identifier: id, type: 'plugin', userId }]);
// Act
- const result = await pluginService.updatePluginSettings(id, settings);
+ await pluginService.updatePluginSettings(id, settings);
// Assert
- expect(PluginModel.update).toHaveBeenCalledWith(id, { settings });
- expect(result).toEqual(undefined);
+ const result = await clientDB.query.installedPlugins.findFirst({
+ where: eq(installedPlugins.identifier, id),
+ });
+
+ expect(result).toMatchObject({ settings });
});
});
});
diff --git a/src/services/plugin/client.ts b/src/services/plugin/client.ts
index c56f73119c14..b8cb130566aa 100644
--- a/src/services/plugin/client.ts
+++ b/src/services/plugin/client.ts
@@ -1,42 +1,51 @@
import { LobeChatPluginManifest } from '@lobehub/chat-plugin-sdk';
-import { PluginModel } from '@/database/_deprecated/models/plugin';
+import { clientDB } from '@/database/client/db';
+import { PluginModel } from '@/database/server/models/plugin';
+import { BaseClientService } from '@/services/baseClientService';
import { LobeTool } from '@/types/tool';
import { LobeToolCustomPlugin } from '@/types/tool/plugin';
import { IPluginService, InstallPluginParams } from './type';
-export class ClientService implements IPluginService {
+export class ClientService extends BaseClientService implements IPluginService {
+ private get pluginModel(): PluginModel {
+ return new PluginModel(clientDB as any, this.userId);
+ }
+
installPlugin = async (plugin: InstallPluginParams) => {
- return PluginModel.create(plugin);
+ await this.pluginModel.create(plugin);
+ return;
};
getInstalledPlugins = () => {
-    return PluginModel.getList() as Promise<LobeTool[]>;
+    return this.pluginModel.query() as Promise<LobeTool[]>;
};
- uninstallPlugin(identifier: string) {
- return PluginModel.delete(identifier);
+ async uninstallPlugin(identifier: string) {
+ await this.pluginModel.delete(identifier);
+ return;
}
async createCustomPlugin(customPlugin: LobeToolCustomPlugin) {
- return PluginModel.create({ ...customPlugin, type: 'customPlugin' });
+ await this.pluginModel.create({ ...customPlugin, type: 'customPlugin' });
+ return;
}
async updatePlugin(id: string, value: LobeToolCustomPlugin) {
- await PluginModel.update(id, value);
+ await this.pluginModel.update(id, value);
return;
}
async updatePluginManifest(id: string, manifest: LobeChatPluginManifest) {
- await PluginModel.update(id, { manifest });
+ await this.pluginModel.update(id, { manifest });
}
async removeAllPlugins() {
- return PluginModel.clear();
+ await this.pluginModel.deleteAll();
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async updatePluginSettings(id: string, settings: any, _?: AbortSignal) {
- await PluginModel.update(id, { settings });
+ await this.pluginModel.update(id, { settings });
}
}
diff --git a/src/services/session/client.test.ts b/src/services/session/client.test.ts
index 4a7274ce547a..83860a6a804f 100644
--- a/src/services/session/client.test.ts
+++ b/src/services/session/client.test.ts
@@ -1,193 +1,130 @@
+import { eq, not } from 'drizzle-orm/expressions';
import { Mock, beforeEach, describe, expect, it, vi } from 'vitest';
-import { SessionModel } from '@/database/_deprecated/models/session';
-import { SessionGroupModel } from '@/database/_deprecated/models/sessionGroup';
-import { LobeAgentConfig } from '@/types/agent';
+import { INBOX_SESSION_ID } from '@/const/session';
+import { clientDB, initializeDB } from '@/database/client/db';
+import {
+ NewSession,
+ SessionItem,
+ agents,
+ agentsToSessions,
+ sessionGroups,
+ sessions,
+ users,
+} from '@/database/schemas';
+import { LobeAgentChatConfig, LobeAgentConfig } from '@/types/agent';
import { LobeAgentSession, LobeSessionType, SessionGroups } from '@/types/session';
import { ClientService } from './client';
-const sessionService = new ClientService();
-
-// Mock the SessionModel
-vi.mock('@/database/_deprecated/models/session', () => {
- return {
- SessionModel: {
- create: vi.fn(),
- query: vi.fn(),
- delete: vi.fn(),
- clearTable: vi.fn(),
- update: vi.fn(),
- count: vi.fn(),
- batchCreate: vi.fn(),
- findById: vi.fn(),
- isEmpty: vi.fn(),
- queryByKeyword: vi.fn(),
- updateConfig: vi.fn(),
- queryByGroupIds: vi.fn(),
- updatePinned: vi.fn(),
- duplicate: vi.fn(),
- queryWithGroups: vi.fn(),
- },
- };
+const userId = 'message-db';
+const sessionService = new ClientService(userId);
+
+const mockSessionId = 'mock-session-id';
+
+// Mock data
+beforeEach(async () => {
+ await initializeDB();
+
+ // 在每个测试用例之前,清空表
+ await clientDB.transaction(async (trx) => {
+ await trx.insert(users).values([{ id: userId }, { id: '456' }]);
+ await trx.insert(sessions).values([{ id: mockSessionId, userId: userId }]);
+ await trx.insert(sessionGroups).values([
+ { id: 'group-1', name: 'group-A', sort: 2, userId },
+ { id: 'group-2', name: 'group-B', sort: 1, userId },
+ { id: 'group-4', name: 'group-C', sort: 1, userId: '456' },
+ ]);
+ });
});
-// Mock the SessionGroupModel
-vi.mock('@/database/_deprecated/models/sessionGroup', () => {
- return {
- SessionGroupModel: {
- create: vi.fn(),
- query: vi.fn(),
- delete: vi.fn(),
- clear: vi.fn(),
- update: vi.fn(),
- batchCreate: vi.fn(),
- isEmpty: vi.fn(),
- updateOrder: vi.fn(),
- queryByKeyword: vi.fn(),
- updateConfig: vi.fn(),
- queryByGroupIds: vi.fn(),
- },
- };
+afterEach(async () => {
+ // 在每个测试用例之后,清空表
+ await clientDB.delete(users);
});
describe('SessionService', () => {
- const mockSessionId = 'mock-session-id';
const mockSession = {
id: mockSessionId,
type: 'agent',
meta: { title: 'Mock Session' },
} as LobeAgentSession;
- const mockSessions = [mockSession];
-
- beforeEach(() => {
- // Reset all mocks before running each test case
- vi.resetAllMocks();
- });
describe('createSession', () => {
it('should create a new session and return its id', async () => {
// Setup
const sessionType = LobeSessionType.Agent;
      const defaultValue = { meta: { title: 'New Session' } } as Partial<LobeAgentSession>;
- (SessionModel.create as Mock).mockResolvedValue(mockSession);
// Execute
const sessionId = await sessionService.createSession(sessionType, defaultValue);
// Assert
- expect(SessionModel.create).toHaveBeenCalledWith(sessionType, defaultValue);
- expect(sessionId).toBe(mockSessionId);
- });
-
- it('should throw an error if session creation fails', async () => {
- // Setup
- const sessionType = LobeSessionType.Agent;
- const defaultValue = { meta: { title: 'New Session' } } as Partial;
- (SessionModel.create as Mock).mockResolvedValue(null);
-
- // Execute & Assert
- await expect(sessionService.createSession(sessionType, defaultValue)).rejects.toThrow(
- 'session create Error',
- );
- });
- });
-
- describe('batchCreateSessions', () => {
- it('should batch create sessions', async () => {
- // Setup
- (SessionModel.batchCreate as Mock).mockResolvedValue(mockSessions);
-
- // Execute
- const result = await sessionService.batchCreateSessions(mockSessions);
-
- // Assert
- expect(SessionModel.batchCreate).toHaveBeenCalledWith(mockSessions);
- expect(result).toBe(mockSessions);
- });
- });
-
- describe('getSessionsByType', () => {
- it('should retrieve sessions with their group ids', async () => {
- // Setup
- (SessionModel.query as Mock).mockResolvedValue(mockSessions);
-
- // Execute
- const sessions = await sessionService.getSessionsByType();
-
- // Assert
- expect(SessionModel.query).toHaveBeenCalled();
- expect(sessions).toBe(mockSessions);
- });
-
- it('should retrieve all agent sessions', async () => {
- // Setup
- // Assuming that SessionModel.query has been modified to accept filters
- const agentSessions = mockSessions.filter((session) => session.type === 'agent');
- (SessionModel.query as Mock).mockResolvedValue(agentSessions);
-
- // Execute
- const result = await sessionService.getSessionsByType('agent');
-
- // Assert
- // Assuming that SessionModel.query would be called with a filter for agents
- expect(SessionModel.query).toHaveBeenCalled(); // Add filter argument if applicable
- expect(result).toBe(agentSessions);
+ expect(sessionId).toMatch(/^ssn_/);
});
});
describe('removeSession', () => {
it('should remove a session by its id', async () => {
- // Setup
- (SessionModel.delete as Mock).mockResolvedValue(true);
-
// Execute
- const result = await sessionService.removeSession(mockSessionId);
+ await sessionService.removeSession(mockSessionId);
// Assert
- expect(SessionModel.delete).toHaveBeenCalledWith(mockSessionId);
- expect(result).toBe(true);
+
+ const result = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.id, mockSessionId),
+ });
+ // Assert
+ expect(result).toBeUndefined();
});
});
describe('removeAllSessions', () => {
it('should clear all sessions from the table', async () => {
// Setup
- (SessionModel.clearTable as Mock).mockResolvedValue(true);
+ await clientDB
+ .insert(sessions)
+ .values([{ userId: userId }, { userId: userId }, { userId: userId }]);
// Execute
- const result = await sessionService.removeAllSessions();
+ await sessionService.removeAllSessions();
// Assert
- expect(SessionModel.clearTable).toHaveBeenCalled();
- expect(result).toBe(true);
+      const result = await clientDB.query.sessions.findMany({
+        where: eq(sessions.userId, userId),
+      });
+
+ expect(result.length).toBe(0);
});
});
describe('updateSession', () => {
- it('should update the group of a session', async () => {
+ it.skip('should update the group of a session', async () => {
// Setup
const groupId = 'new-group';
- (SessionModel.update as Mock).mockResolvedValue({ ...mockSession, group: groupId });
// Execute
- const result = await sessionService.updateSession(mockSessionId, { group: groupId });
+ await sessionService.updateSession(mockSessionId, { group: groupId });
// Assert
- expect(SessionModel.update).toHaveBeenCalledWith(mockSessionId, { group: groupId });
- expect(result).toEqual({ ...mockSession, group: groupId });
+ const result = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.id, mockSessionId),
+ });
+ expect(result).toMatchObject({ group: groupId });
});
- it('should update the meta of a session', async () => {
+ it.skip('should update the meta of a session', async () => {
// Setup
const newMeta = { description: 'Updated description' };
- (SessionModel.update as Mock).mockResolvedValue({ ...mockSession, meta: newMeta });
// Execute
- const result = await sessionService.updateSession(mockSessionId, { meta: newMeta });
+ await sessionService.updateSession(mockSessionId, { meta: newMeta });
// Assert
- expect(SessionModel.update).toHaveBeenCalledWith(mockSessionId, { meta: newMeta });
+ const result = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.id, mockSessionId),
+ });
+
expect(result).toEqual({ ...mockSession, meta: newMeta });
});
@@ -199,121 +136,180 @@ describe('SessionService', () => {
await sessionService.updateSession(mockSessionId, { pinned });
// Assert
- expect(SessionModel.update).toHaveBeenCalledWith(mockSessionId, { pinned: 1 });
+ const result = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.id, mockSessionId),
+ });
+
+ expect(result!.pinned).toBeTruthy();
});
});
- describe('updateSessionConfig', () => {
+ describe.skip('updateSessionConfig', () => {
it('should update the config of a session', async () => {
// Setup
const newConfig = { model: 'abc' } as LobeAgentConfig;
- (SessionModel.updateConfig as Mock).mockResolvedValue({ ...mockSession, config: newConfig });
// Execute
- const result = await sessionService.updateSessionConfig(mockSessionId, newConfig);
+ await sessionService.updateSessionConfig(mockSessionId, newConfig);
// Assert
- expect(SessionModel.updateConfig).toHaveBeenCalledWith(mockSessionId, newConfig);
+ const result = await sessionService.getSessionConfig(mockSessionId);
expect(result).toEqual({ ...mockSession, config: newConfig });
});
});
describe('countSessions', () => {
it('should return false if no sessions exist', async () => {
- // Setup
- (SessionModel.count as Mock).mockResolvedValue(0);
+ await clientDB.delete(sessions);
// Execute
const result = await sessionService.countSessions();
// Assert
- expect(SessionModel.count).toHaveBeenCalled();
expect(result).toBe(0);
});
it('should return true if sessions exist', async () => {
// Setup
- (SessionModel.count as Mock).mockResolvedValue(1);
+ await clientDB.delete(sessions);
+ await clientDB.insert(sessions).values([{ userId }]);
// Execute
const result = await sessionService.countSessions();
// Assert
- expect(SessionModel.count).toHaveBeenCalled();
expect(result).toBe(1);
});
});
- describe('hasSessions', () => {
- it('should return false if no sessions exist', async () => {
- // Setup
- (SessionModel.count as Mock).mockResolvedValue(0);
-
- // Execute
- const result = await sessionService.hasSessions();
-
- // Assert
- expect(SessionModel.count).toHaveBeenCalled();
- expect(result).toBe(false);
- });
-
- it('should return true if sessions exist', async () => {
- // Setup
- (SessionModel.count as Mock).mockResolvedValue(1);
-
- // Execute
- const result = await sessionService.hasSessions();
-
- // Assert
- expect(SessionModel.count).toHaveBeenCalled();
- expect(result).toBe(true);
- });
- });
-
describe('searchSessions', () => {
it('should return sessions that match the keyword', async () => {
// Setup
- const keyword = 'search';
- (SessionModel.queryByKeyword as Mock).mockResolvedValue(mockSessions);
+ await clientDB.insert(agents).values({ userId, id: 'agent-1', title: 'Session Name' });
+ await clientDB
+ .insert(agentsToSessions)
+ .values({ agentId: 'agent-1', sessionId: mockSessionId });
// Execute
+ const keyword = 'Name';
const result = await sessionService.searchSessions(keyword);
// Assert
- expect(SessionModel.queryByKeyword).toHaveBeenCalledWith(keyword);
- expect(result).toBe(mockSessions);
+ // TODO: 后续需要把这个搜索的标题和描述都加上,现在这个 client 搜索会有问题
+ expect(result).toMatchObject([{ id: mockSessionId }]);
});
});
- describe('cloneSession', () => {
+ describe.skip('cloneSession', () => {
it('should duplicate a session and return its id', async () => {
// Setup
const newTitle = 'Duplicated Session';
- (SessionModel.duplicate as Mock).mockResolvedValue({
- ...mockSession,
+ const session: NewSession = {
id: 'duplicated-session-id',
- });
+ title: '123',
+ userId,
+ };
+ await clientDB.insert(sessions).values([session]);
+ await clientDB.insert(agents).values({ userId, id: 'agent-1' });
+ await clientDB
+ .insert(agentsToSessions)
+ .values({ agentId: 'agent-1', sessionId: 'duplicated-session-id' });
// Execute
const duplicatedSessionId = await sessionService.cloneSession(mockSessionId, newTitle);
// Assert
- expect(SessionModel.duplicate).toHaveBeenCalledWith(mockSessionId, newTitle);
- expect(duplicatedSessionId).toBe('duplicated-session-id');
+
+ const result = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.id, duplicatedSessionId!),
+ });
+ expect(result).toEqual({});
});
});
describe('getGroupedSessions', () => {
it('should retrieve sessions with their group', async () => {
- // Setup
- (SessionModel.queryWithGroups as Mock).mockResolvedValue(mockSessions);
-
// Execute
const sessionsWithGroup = await sessionService.getGroupedSessions();
- // Assert
- expect(SessionModel.queryWithGroups).toHaveBeenCalled();
- expect(sessionsWithGroup).toBe(mockSessions);
+ expect(sessionsWithGroup).toMatchObject({
+ sessionGroups: [
+ { id: 'group-2', name: 'group-B', sort: 1 },
+ { id: 'group-1', name: 'group-A', sort: 2 },
+ ],
+ sessions: [{ id: 'mock-session-id', type: 'agent' }],
+ });
+ });
+ });
+
+ describe('getSessionsByType', () => {
+ it('should get sessions by type "all"', async () => {
+ const sessions = await sessionService.getSessionsByType('all');
+ expect(sessions).toBeDefined();
+ });
+
+ it('should get sessions by type "agent"', async () => {
+ const sessions = await sessionService.getSessionsByType('agent');
+ expect(sessions).toBeDefined();
+ });
+
+ it('should get sessions by type "group"', async () => {
+ const sessions = await sessionService.getSessionsByType('group');
+ expect(sessions).toBeDefined();
+ });
+ });
+
+ describe('getSessionConfig', () => {
+ it.skip('should get default config for INBOX_SESSION_ID', async () => {
+ const config = await sessionService.getSessionConfig(INBOX_SESSION_ID);
+ expect(config).toBeDefined();
+ });
+
+ it('should throw error for non-existent session', async () => {
+ await expect(sessionService.getSessionConfig('non-existent')).rejects.toThrow(
+ 'Session not found',
+ );
+ });
+ });
+
+ describe('updateSessionMeta', () => {
+ it('should not update meta for INBOX_SESSION_ID', async () => {
+ const result = await sessionService.updateSessionMeta(INBOX_SESSION_ID, {
+ title: 'New Title',
+ });
+ expect(result).toBeUndefined();
+ });
+
+ it('should update meta for normal session', async () => {
+ const meta = { title: 'Updated Title' };
+ await sessionService.updateSessionMeta(mockSessionId, meta);
+
+ const session = await clientDB.query.sessions.findFirst({
+ where: eq(sessions.id, mockSessionId),
+ });
+ expect(session).toBeDefined();
+ });
+ });
+
+ describe('updateSessionChatConfig', () => {
+ it('should update chat config', async () => {
+      const chatConfig = { temperature: 0.8 } as Partial<LobeAgentChatConfig>;
+ const result = await sessionService.updateSessionChatConfig(mockSessionId, chatConfig);
+ expect(result).toBeDefined();
+ });
+ });
+
+ describe('model getters', () => {
+ it('should return session model instance', () => {
+ // @ts-ignore - accessing private getter
+ const model = sessionService.sessionModel;
+ expect(model).toBeDefined();
+ });
+
+ it('should return session group model instance', () => {
+ // @ts-ignore - accessing private getter
+ const model = sessionService.sessionGroupModel;
+ expect(model).toBeDefined();
});
});
@@ -323,84 +319,66 @@ describe('SessionService', () => {
// Setup
const groupName = 'New Group';
const sort = 1;
- (SessionGroupModel.create as Mock).mockResolvedValue({
- id: 'new-group-id',
- name: groupName,
- sort,
- });
// Execute
const groupId = await sessionService.createSessionGroup(groupName, sort);
// Assert
- expect(SessionGroupModel.create).toHaveBeenCalledWith(groupName, sort);
- expect(groupId).toBe('new-group-id');
- });
- });
-
- describe('batchCreateSessionGroups', () => {
- it('should batch create session groups', async () => {
- // Setup
- const groups = [
- { id: 'group-1', name: 'Group 1', sort: 1 },
- { id: 'group-2', name: 'Group 2', sort: 2 },
- ] as SessionGroups;
-
- (SessionGroupModel.batchCreate as Mock).mockResolvedValue(groups);
+ expect(groupId).toMatch(/^sg_/);
- // Execute
- const result = await sessionService.batchCreateSessionGroups(groups);
+ const result = await clientDB.query.sessionGroups.findFirst({
+ where: eq(sessionGroups.id, groupId),
+ });
- // Assert
- expect(SessionGroupModel.batchCreate).toHaveBeenCalledWith(groups);
- expect(result).toBe(groups);
+ expect(result).toMatchObject({ id: groupId, name: groupName, sort });
});
});
describe('removeSessionGroup', () => {
it('should remove a session group by its id', async () => {
- // Setup
- const removeChildren = true;
- (SessionGroupModel.delete as Mock).mockResolvedValue(true);
-
+ const groupId = 'group-1';
// Execute
- const result = await sessionService.removeSessionGroup('group-id', removeChildren);
+ await sessionService.removeSessionGroup(groupId);
+ const result = await clientDB.query.sessionGroups.findFirst({
+ where: eq(sessionGroups.id, groupId),
+ });
// Assert
- expect(SessionGroupModel.delete).toHaveBeenCalledWith('group-id', removeChildren);
- expect(result).toBe(true);
+ expect(result).toBeUndefined();
});
});
describe('clearSessionGroups', () => {
it('should clear all session groups', async () => {
- // Setup
- (SessionGroupModel.clear as Mock).mockResolvedValue(true);
-
// Execute
- const result = await sessionService.removeSessionGroups();
+ await sessionService.removeSessionGroups();
// Assert
- expect(SessionGroupModel.clear).toHaveBeenCalled();
- expect(result).toBe(true);
+ const result = await clientDB.query.sessionGroups.findMany({
+ where: eq(sessionGroups.userId, userId),
+ });
+
+ expect(result.length).toBe(0);
+
+ const result2 = await clientDB.query.sessionGroups.findMany({
+ where: not(eq(sessionGroups.userId, userId)),
+ });
+
+ expect(result2.length).toBeGreaterThan(0);
});
});
describe('getSessionGroups', () => {
it('should retrieve all session groups', async () => {
- // Setup
- const groups = [
- { id: 'group-1', name: 'Group 1', sort: 1 },
- { id: 'group-2', name: 'Group 2', sort: 2 },
- ];
- (SessionGroupModel.query as Mock).mockResolvedValue(groups);
-
// Execute
const result = await sessionService.getSessionGroups();
// Assert
- expect(SessionGroupModel.query).toHaveBeenCalled();
- expect(result).toBe(groups);
+ const groups = [
+ { id: 'group-2', name: 'group-B', sort: 1 },
+ { id: 'group-1', name: 'group-A', sort: 2 },
+ ];
+ expect(result).toMatchObject(groups);
});
});
@@ -409,14 +387,15 @@ describe('SessionService', () => {
// Setup
const groupId = 'group-1';
const data = { name: 'Updated Group', sort: 2 };
- (SessionGroupModel.update as Mock).mockResolvedValue({ id: groupId, ...data });
// Execute
- const result = await sessionService.updateSessionGroup(groupId, data);
+ await sessionService.updateSessionGroup(groupId, data);
// Assert
- expect(SessionGroupModel.update).toHaveBeenCalledWith(groupId, data);
- expect(result).toEqual({ id: groupId, ...data });
+ const result = await clientDB.query.sessionGroups.findFirst({
+ where: eq(sessionGroups.id, groupId),
+ });
+ expect(result).toMatchObject({ id: groupId, ...data });
});
});
@@ -427,14 +406,18 @@ describe('SessionService', () => {
{ id: 'group-1', sort: 2 },
{ id: 'group-2', sort: 1 },
];
- (SessionGroupModel.updateOrder as Mock).mockResolvedValue(true);
// Execute
- const result = await sessionService.updateSessionGroupOrder(sortMap);
+ await sessionService.updateSessionGroupOrder(sortMap);
// Assert
- expect(SessionGroupModel.updateOrder).toHaveBeenCalledWith(sortMap);
- expect(result).toBe(true);
+ const data = await clientDB.query.sessionGroups.findMany({
+ where: eq(sessionGroups.userId, userId),
+ });
+ expect(data).toMatchObject([
+ { id: 'group-1', sort: 2 },
+ { id: 'group-2', sort: 1 },
+ ]);
});
});
});
diff --git a/src/services/session/client.ts b/src/services/session/client.ts
index 2d205cca8c8c..7be00e50add4 100644
--- a/src/services/session/client.ts
+++ b/src/services/session/client.ts
@@ -1,9 +1,11 @@
import { DeepPartial } from 'utility-types';
import { INBOX_SESSION_ID } from '@/const/session';
-import { SessionModel } from '@/database/_deprecated/models/session';
-import { SessionGroupModel } from '@/database/_deprecated/models/sessionGroup';
-import { UserModel } from '@/database/_deprecated/models/user';
+import { clientDB } from '@/database/client/db';
+import { AgentItem } from '@/database/schemas';
+import { SessionModel } from '@/database/server/models/session';
+import { SessionGroupModel } from '@/database/server/models/sessionGroup';
+import { BaseClientService } from '@/services/baseClientService';
import { useUserStore } from '@/store/user';
import { LobeAgentChatConfig, LobeAgentConfig } from '@/types/agent';
import { MetaData } from '@/types/meta';
@@ -19,12 +21,23 @@ import { merge } from '@/utils/merge';
import { ISessionService } from './type';
-export class ClientService implements ISessionService {
-  async createSession(
-    type: LobeSessionType,
-    defaultValue: Partial<LobeAgentSession>,
-  ): Promise<string> {
-    const item = await SessionModel.create(type, defaultValue);
+export class ClientService extends BaseClientService implements ISessionService {
+ private get sessionModel(): SessionModel {
+ return new SessionModel(clientDB as any, this.userId);
+ }
+
+ private get sessionGroupModel(): SessionGroupModel {
+ return new SessionGroupModel(clientDB as any, this.userId);
+ }
+
+  async createSession(type: LobeSessionType, data: Partial<LobeAgentSession>): Promise<string> {
+ const { config, group, meta, ...session } = data;
+
+ const item = await this.sessionModel.create({
+ config: { ...config, ...meta } as any,
+ session: { ...session, groupId: group },
+ type,
+ });
if (!item) {
throw new Error('session create Error');
}
@@ -32,72 +45,65 @@ export class ClientService implements ISessionService {
}
async batchCreateSessions(importSessions: LobeSessions) {
- return SessionModel.batchCreate(importSessions);
+ // @ts-ignore
+ return this.sessionModel.batchCreate(importSessions);
}
async cloneSession(id: string, newTitle: string): Promise {
- const res = await SessionModel.duplicate(id, newTitle);
+ const res = await this.sessionModel.duplicate(id, newTitle);
if (res) return res?.id;
}
async getGroupedSessions(): Promise {
- return SessionModel.queryWithGroups();
+ return this.sessionModel.queryWithGroups();
}
async getSessionConfig(id: string): Promise {
- if (!id || id === INBOX_SESSION_ID) {
- return UserModel.getAgentConfig();
- }
-
- const res = await SessionModel.findById(id);
+ const res = await this.sessionModel.findByIdOrSlug(id);
if (!res) throw new Error('Session not found');
- return res.config as LobeAgentConfig;
+ return res.agent as LobeAgentConfig;
}
+ /**
+ * 这个方法要对应移除的
+ */
async getSessionsByType(type: 'agent' | 'group' | 'all' = 'all'): Promise {
switch (type) {
// TODO: add a filter to get only agents or agents
case 'group': {
- return SessionModel.query();
+ // @ts-ignore
+ return this.sessionModel.query();
}
case 'agent': {
- return SessionModel.query();
+ // @ts-ignore
+ return this.sessionModel.query();
}
case 'all': {
- return SessionModel.query();
+ // @ts-ignore
+ return this.sessionModel.query();
}
}
}
- async getAllAgents(): Promise {
- // TODO: add a filter to get only agents
- return await SessionModel.query();
- }
-
async countSessions() {
- return SessionModel.count();
- }
-
- async hasSessions() {
- return (await this.countSessions()) !== 0;
+ return this.sessionModel.count();
}
async searchSessions(keyword: string) {
- return SessionModel.queryByKeyword(keyword);
+ return this.sessionModel.queryByKeyword(keyword);
}
async updateSession(
id: string,
data: Partial>,
) {
- const pinned = typeof data.pinned === 'boolean' ? (data.pinned ? 1 : 0) : undefined;
- const prev = await SessionModel.findById(id);
+ const prev = await this.sessionModel.findByIdOrSlug(id);
- return SessionModel.update(id, merge(prev, { ...data, pinned }));
+ return this.sessionModel.update(id, merge(prev, data));
}
async updateSessionConfig(
@@ -112,7 +118,7 @@ export class ClientService implements ISessionService {
return useUserStore.getState().updateDefaultAgent({ config });
}
- return SessionModel.updateConfig(activeId, config);
+ return this.sessionModel.updateConfig(activeId, config as AgentItem);
}
async updateSessionMeta(
@@ -124,7 +130,7 @@ export class ClientService implements ISessionService {
// inbox 不允许修改 meta
if (activeId === INBOX_SESSION_ID) return;
- return SessionModel.update(activeId, { meta });
+ return this.sessionModel.update(activeId, meta);
}
async updateSessionChatConfig(
@@ -137,11 +143,11 @@ export class ClientService implements ISessionService {
}
async removeSession(id: string) {
- return SessionModel.delete(id);
+ return this.sessionModel.delete(id);
}
async removeAllSessions() {
- return SessionModel.clearTable();
+ return this.sessionModel.deleteAll();
}
// ************************************** //
@@ -149,7 +155,7 @@ export class ClientService implements ISessionService {
// ************************************** //
async createSessionGroup(name: string, sort?: number) {
- const item = await SessionGroupModel.create(name, sort);
+ const item = await this.sessionGroupModel.create({ name, sort });
if (!item) {
throw new Error('session group create Error');
}
@@ -157,27 +163,28 @@ export class ClientService implements ISessionService {
return item.id;
}
- async batchCreateSessionGroups(groups: SessionGroups) {
- return SessionGroupModel.batchCreate(groups);
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ async batchCreateSessionGroups(_groups: SessionGroups) {
+ return { added: 0, ids: [], skips: [], success: true };
}
- async removeSessionGroup(id: string, removeChildren?: boolean) {
- return await SessionGroupModel.delete(id, removeChildren);
+ async removeSessionGroup(id: string) {
+ return await this.sessionGroupModel.delete(id);
}
async updateSessionGroup(id: string, data: Partial) {
- return SessionGroupModel.update(id, data);
+ return this.sessionGroupModel.update(id, data);
}
async updateSessionGroupOrder(sortMap: { id: string; sort: number }[]) {
- return SessionGroupModel.updateOrder(sortMap);
+ return this.sessionGroupModel.updateOrder(sortMap);
}
async getSessionGroups(): Promise {
- return SessionGroupModel.query();
+ return this.sessionGroupModel.query();
}
async removeSessionGroups() {
- return SessionGroupModel.clear();
+ return this.sessionGroupModel.deleteAll();
}
}
diff --git a/src/services/session/type.ts b/src/services/session/type.ts
index f51f2f39780d..59528fe138ca 100644
--- a/src/services/session/type.ts
+++ b/src/services/session/type.ts
@@ -16,13 +16,21 @@ import {
export interface ISessionService {
createSession(type: LobeSessionType, defaultValue: Partial): Promise;
+
+ /**
+ * 需要废弃
+ * @deprecated
+ */
batchCreateSessions(importSessions: LobeSessions): Promise;
cloneSession(id: string, newTitle: string): Promise;
getGroupedSessions(): Promise;
+
+ /**
+ * @deprecated
+ */
getSessionsByType(type: 'agent' | 'group' | 'all'): Promise;
countSessions(): Promise;
- hasSessions(): Promise;
searchSessions(keyword: string): Promise;
updateSession(
@@ -53,6 +61,11 @@ export interface ISessionService {
// ************************************** //
createSessionGroup(name: string, sort?: number): Promise;
+
+ /**
+ * 需要废弃
+ * @deprecated
+ */
batchCreateSessionGroups(groups: SessionGroups): Promise;
getSessionGroups(): Promise;
diff --git a/src/services/topic/client.test.ts b/src/services/topic/client.test.ts
index 211abefa7d36..476dbb36a6f0 100644
--- a/src/services/topic/client.test.ts
+++ b/src/services/topic/client.test.ts
@@ -1,75 +1,62 @@
-import { Mock, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
+import { eq } from 'drizzle-orm';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
-import { SessionModel } from '@/database/_deprecated/models/session';
-import { CreateTopicParams, TopicModel } from '@/database/_deprecated/models/topic';
+import { clientDB, initializeDB } from '@/database/client/db';
+import { sessions, topics, users } from '@/database/schemas';
import { ChatTopic } from '@/types/topic';
import { ClientService } from './client';
-const topicService = new ClientService();
-// Mock the TopicModel
-vi.mock('@/database/_deprecated/models/topic', () => {
- return {
- TopicModel: {
- create: vi.fn(),
- query: vi.fn(),
- delete: vi.fn(),
- count: vi.fn(),
- batchDeleteBySessionId: vi.fn(),
- batchDelete: vi.fn(),
- clearTable: vi.fn(),
- toggleFavorite: vi.fn(),
- batchCreate: vi.fn(),
- update: vi.fn(),
- queryAll: vi.fn(),
- queryByKeyword: vi.fn(),
- },
- };
-});
+// Mock data
+const userId = 'topic-user-test';
+const sessionId = 'topic-session';
+const mockTopicId = 'mock-topic-id';
-describe('TopicService', () => {
- // Mock data
- const mockTopicId = 'mock-topic-id';
- const mockTopic: ChatTopic = {
- createdAt: 100,
- updatedAt: 100,
- id: mockTopicId,
- title: 'Mock Topic',
- };
- const mockTopics = [mockTopic];
-
- beforeEach(() => {
- // Reset all mocks before running each test case
- vi.resetAllMocks();
+const mockTopic = {
+ id: mockTopicId,
+ title: 'Mock Topic',
+};
+
+const topicService = new ClientService(userId);
+
+beforeEach(async () => {
+ await initializeDB();
+
+ await clientDB.delete(users);
+
+ // 创建测试数据
+ await clientDB.transaction(async (tx) => {
+ await tx.insert(users).values({ id: userId });
+ await tx.insert(sessions).values({ id: sessionId, userId });
+ await tx.insert(topics).values({ ...mockTopic, sessionId, userId });
});
+});
+describe('TopicService', () => {
describe('createTopic', () => {
it('should create a topic and return its id', async () => {
// Setup
- const createParams: CreateTopicParams = {
+ const createParams = {
title: 'New Topic',
- sessionId: '1',
+ sessionId: sessionId,
};
- (TopicModel.create as Mock).mockResolvedValue(mockTopic);
// Execute
const topicId = await topicService.createTopic(createParams);
// Assert
- expect(TopicModel.create).toHaveBeenCalledWith(createParams);
- expect(topicId).toBe(mockTopicId);
+ expect(topicId).toBeDefined();
});
+
it('should throw an error if topic creation fails', async () => {
// Setup
- const createParams: CreateTopicParams = {
+ const createParams = {
title: 'New Topic',
- sessionId: '1',
+ sessionId: 123 as any, // sessionId should be string
};
- (TopicModel.create as Mock).mockResolvedValue(null);
-
// Execute & Assert
- await expect(topicService.createTopic(createParams)).rejects.toThrow('topic create Error');
+ await expect(topicService.createTopic(createParams)).rejects.toThrowError();
});
});
@@ -77,56 +64,46 @@ describe('TopicService', () => {
// Example for getTopics
it('should query topics with given parameters', async () => {
// Setup
- const queryParams = { sessionId: 'session-id' };
- (TopicModel.query as Mock).mockResolvedValue(mockTopics);
+ const queryParams = { sessionId };
// Execute
- const topics = await topicService.getTopics(queryParams);
+ const data = await topicService.getTopics(queryParams);
// Assert
- expect(TopicModel.query).toHaveBeenCalledWith(queryParams);
- expect(topics).toBe(mockTopics);
+ expect(data[0]).toMatchObject(mockTopic);
});
});
describe('updateTopic', () => {
// Example for updateFavorite
it('should toggle favorite status of a topic', async () => {
- // Setup
- const newState = true;
-
// Execute
- await topicService.updateTopic(mockTopicId, { favorite: newState });
+ const result = await topicService.updateTopic(mockTopicId, { favorite: true });
// Assert
- expect(TopicModel.update).toHaveBeenCalledWith(mockTopicId, { favorite: 1 });
+ expect(result[0].favorite).toBeTruthy();
});
it('should update the title of a topic', async () => {
// Setup
const newTitle = 'Updated Topic Title';
- (TopicModel.update as Mock).mockResolvedValue({ ...mockTopic, title: newTitle });
// Execute
const result = await topicService.updateTopic(mockTopicId, { title: newTitle });
// Assert
- expect(TopicModel.update).toHaveBeenCalledWith(mockTopicId, { title: newTitle });
- expect(result).toEqual({ ...mockTopic, title: newTitle });
+ expect(result[0].title).toEqual(newTitle);
});
});
describe('removeTopic', () => {
it('should remove a topic by id', async () => {
- // Setup
- (TopicModel.delete as Mock).mockResolvedValue(true);
-
// Execute
- const result = await topicService.removeTopic(mockTopicId);
+ await topicService.removeTopic(mockTopicId);
+ const result = await clientDB.query.topics.findFirst({ where: eq(topics.id, mockTopicId) });
// Assert
- expect(TopicModel.delete).toHaveBeenCalledWith(mockTopicId);
- expect(result).toBe(true);
+ expect(result).toBeUndefined();
});
});
@@ -134,111 +111,101 @@ describe('TopicService', () => {
it('should remove all topics with a given session id', async () => {
// Setup
const sessionId = 'session-id';
- (TopicModel.batchDeleteBySessionId as Mock).mockResolvedValue(true);
// Execute
- const result = await topicService.removeTopics(sessionId);
+ await topicService.removeTopics(sessionId);
+ const result = await clientDB.query.topics.findMany({
+ where: eq(topics.sessionId, sessionId),
+ });
- // Assert
- expect(TopicModel.batchDeleteBySessionId).toHaveBeenCalledWith(sessionId);
- expect(result).toBe(true);
+ expect(result.length).toEqual(0);
});
});
describe('batchRemoveTopics', () => {
it('should batch remove topics', async () => {
+ await clientDB.insert(topics).values([{ id: 'topic-id-1', title: 'topic-title', userId }]);
// Setup
const topicIds = [mockTopicId, 'another-topic-id'];
- (TopicModel.batchDelete as Mock).mockResolvedValue(true);
// Execute
- const result = await topicService.batchRemoveTopics(topicIds);
+ await topicService.batchRemoveTopics(topicIds);
+
+ const count = await clientDB.$count(topics);
// Assert
- expect(TopicModel.batchDelete).toHaveBeenCalledWith(topicIds);
- expect(result).toBe(true);
+ expect(count).toBe(1);
});
});
describe('removeAllTopic', () => {
it('should clear all topics from the table', async () => {
- // Setup
- (TopicModel.clearTable as Mock).mockResolvedValue(true);
-
// Execute
- const result = await topicService.removeAllTopic();
+ await topicService.removeAllTopic();
+ const count = await clientDB.$count(topics);
// Assert
- expect(TopicModel.clearTable).toHaveBeenCalled();
- expect(result).toBe(true);
+ expect(count).toBe(0);
});
});
describe('batchCreateTopics', () => {
it('should batch create topics', async () => {
- // Setup
- (TopicModel.batchCreate as Mock).mockResolvedValue(mockTopics);
-
// Execute
- const result = await topicService.batchCreateTopics(mockTopics);
+ const result = await topicService.batchCreateTopics([
+ { id: 'topic-id-1', title: 'topic-title' },
+ { id: 'topic-id-2', title: 'topic-title' },
+ ] as ChatTopic[]);
// Assert
- expect(TopicModel.batchCreate).toHaveBeenCalledWith(mockTopics);
- expect(result).toBe(mockTopics);
+ expect(result.success).toBeTruthy();
+ expect(result.added).toBe(2);
});
});
describe('getAllTopics', () => {
it('should retrieve all topics', async () => {
- // Setup
- (TopicModel.queryAll as Mock).mockResolvedValue(mockTopics);
-
+ await clientDB.insert(topics).values([
+ { id: 'topic-id-1', title: 'topic-title', userId },
+ { id: 'topic-id-2', title: 'topic-title', userId },
+ ]);
// Execute
const result = await topicService.getAllTopics();
// Assert
- expect(TopicModel.queryAll).toHaveBeenCalled();
- expect(result).toBe(mockTopics);
+ expect(result.length).toEqual(3);
});
});
describe('searchTopics', () => {
it('should return all topics that match the keyword', async () => {
// Setup
- const keyword = 'search';
- (TopicModel.queryByKeyword as Mock).mockResolvedValue(mockTopics);
+ const keyword = 'Topic';
// Execute
- const result = await topicService.searchTopics(keyword, undefined);
+ const result = await topicService.searchTopics(keyword, sessionId);
// Assert
- expect(TopicModel.queryByKeyword).toHaveBeenCalledWith(keyword, undefined);
- expect(result).toBe(mockTopics);
+ expect(result.length).toEqual(1);
});
- });
-
- describe('countTopics', () => {
- it('should return false if no topics exist', async () => {
+ it('should return empty topic if not match the keyword', async () => {
// Setup
- (TopicModel.count as Mock).mockResolvedValue(0);
+ const keyword = 'search';
// Execute
- const result = await topicService.countTopics();
+ const result = await topicService.searchTopics(keyword, sessionId);
// Assert
- expect(TopicModel.count).toHaveBeenCalled();
- expect(result).toBe(0);
+ expect(result.length).toEqual(0);
});
+ });
- it('should return true if topics exist', async () => {
- // Setup
- (TopicModel.count as Mock).mockResolvedValue(1);
-
+ describe('countTopics', () => {
+ it('should return topic counts', async () => {
// Execute
const result = await topicService.countTopics();
// Assert
- expect(TopicModel.count).toHaveBeenCalled();
expect(result).toBe(1);
});
});
diff --git a/src/services/topic/client.ts b/src/services/topic/client.ts
index eeb2ffa2e395..08ae914bd8b9 100644
--- a/src/services/topic/client.ts
+++ b/src/services/topic/client.ts
@@ -1,11 +1,17 @@
-import { TopicModel } from '@/database/_deprecated/models/topic';
+import { clientDB } from '@/database/client/db';
+import { TopicModel } from '@/database/server/models/topic';
+import { BaseClientService } from '@/services/baseClientService';
import { ChatTopic } from '@/types/topic';
import { CreateTopicParams, ITopicService, QueryTopicParams } from './type';
-export class ClientService implements ITopicService {
+export class ClientService extends BaseClientService implements ITopicService {
+ private get topicModel(): TopicModel {
+ return new TopicModel(clientDB as any, this.userId);
+ }
+
async createTopic(params: CreateTopicParams): Promise {
- const item = await TopicModel.create(params as any);
+ const item = await this.topicModel.create(params as any);
if (!item) {
throw new Error('topic create Error');
@@ -15,56 +21,54 @@ export class ClientService implements ITopicService {
}
async batchCreateTopics(importTopics: ChatTopic[]) {
- return TopicModel.batchCreate(importTopics as any);
+ const data = await this.topicModel.batchCreate(importTopics as any);
+
+ return { added: data.length, ids: [], skips: [], success: true };
}
async cloneTopic(id: string, newTitle?: string) {
- return TopicModel.duplicateTopic(id, newTitle);
+ const data = await this.topicModel.duplicate(id, newTitle);
+ return data.topic.id;
}
- async getTopics(params: QueryTopicParams): Promise {
- return TopicModel.query(params);
+ async getTopics(params: QueryTopicParams) {
+ const data = await this.topicModel.query(params);
+ return data as unknown as Promise;
}
async searchTopics(keyword: string, sessionId?: string) {
- return TopicModel.queryByKeyword(keyword, sessionId);
- }
+ const data = await this.topicModel.queryByKeyword(keyword, sessionId);
- async getAllTopics() {
- return TopicModel.queryAll();
+ return data as unknown as Promise;
}
- async countTopics() {
- return TopicModel.count();
- }
+ async getAllTopics() {
+ const data = await this.topicModel.queryAll();
- async updateTopicFavorite(id: string, favorite?: boolean) {
- return this.updateTopic(id, { favorite });
+ return data as unknown as Promise;
}
- async updateTopicTitle(id: string, text: string) {
- return this.updateTopic(id, { title: text });
+ async countTopics() {
+ return this.topicModel.count();
}
async updateTopic(id: string, data: Partial) {
- const favorite = typeof data.favorite !== 'undefined' ? (data.favorite ? 1 : 0) : undefined;
-
- return TopicModel.update(id, { ...data, favorite });
+ return this.topicModel.update(id, data as any);
}
async removeTopic(id: string) {
- return TopicModel.delete(id);
+ return this.topicModel.delete(id);
}
async removeTopics(sessionId: string) {
- return TopicModel.batchDeleteBySessionId(sessionId);
+ return this.topicModel.batchDeleteBySessionId(sessionId);
}
async batchRemoveTopics(topics: string[]) {
- return TopicModel.batchDelete(topics);
+ return this.topicModel.batchDelete(topics);
}
async removeAllTopic() {
- return TopicModel.clearTable();
+ return this.topicModel.deleteAll();
}
}
diff --git a/src/services/topic/index.ts b/src/services/topic/index.ts
index 360656149ea5..7105ac9765dc 100644
--- a/src/services/topic/index.ts
+++ b/src/services/topic/index.ts
@@ -1,4 +1,3 @@
-
import { ClientService } from './client';
import { ServerService } from './server';
diff --git a/src/services/upload.ts b/src/services/upload.ts
index 38f6cdd2959b..64fec769ecad 100644
--- a/src/services/upload.ts
+++ b/src/services/upload.ts
@@ -1,7 +1,8 @@
import { fileEnv } from '@/config/file';
import { edgeClient } from '@/libs/trpc/client';
import { API_ENDPOINTS } from '@/services/_url';
-import { FileMetadata, UploadFileParams } from '@/types/files';
+import { clientS3Storage } from '@/services/file/ClientS3';
+import { FileMetadata } from '@/types/files';
import { FileUploadState, FileUploadStatus } from '@/types/files/upload';
import { uuid } from '@/utils/uuid';
@@ -66,23 +67,14 @@ class UploadService {
return result;
};
- uploadToClientDB = async (params: UploadFileParams, file: File) => {
- const { FileModel } = await import('@/database/_deprecated/models/file');
- const fileArrayBuffer = await file.arrayBuffer();
-
- // save to local storage
- // we may want to save to a remote server later
- const res = await FileModel.create({
- createdAt: Date.now(),
- ...params,
- data: fileArrayBuffer,
- });
- // arrayBuffer to url
- const base64 = Buffer.from(fileArrayBuffer).toString('base64');
+ uploadToClientS3 = async (hash: string, file: File): Promise => {
+ await clientS3Storage.putObject(hash, file);
return {
- id: res.id,
- url: `data:${params.fileType};base64,${base64}`,
+ date: (Date.now() / 1000 / 60 / 60).toFixed(0),
+ dirname: '',
+ filename: file.name,
+ path: `client-s3://${hash}`,
};
};
diff --git a/src/services/user/client.test.ts b/src/services/user/client.test.ts
index f79f8294a2f5..abb57b37baab 100644
--- a/src/services/user/client.test.ts
+++ b/src/services/user/client.test.ts
@@ -1,22 +1,14 @@
+import { eq } from 'drizzle-orm';
import { DeepPartial } from 'utility-types';
-import { Mock, beforeEach, describe, expect, it, vi } from 'vitest';
+import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
-import { UserModel } from '@/database/_deprecated/models/user';
+import { clientDB, initializeDB } from '@/database/client/db';
+import { userSettings, users } from '@/database/schemas';
import { UserPreference } from '@/types/user';
import { UserSettings } from '@/types/user/settings';
-import { AsyncLocalStorage } from '@/utils/localStorage';
import { ClientService } from './client';
-vi.mock('@/database/_deprecated/models/user', () => ({
- UserModel: {
- getUser: vi.fn(),
- updateSettings: vi.fn(),
- resetSettings: vi.fn(),
- updateAvatar: vi.fn(),
- },
-}));
-
const mockUser = {
avatar: 'avatar.png',
settings: { themeMode: 'light' } as unknown as UserSettings,
@@ -26,63 +18,67 @@ const mockUser = {
const mockPreference = {
useCmdEnterToSend: true,
} as UserPreference;
+const clientService = new ClientService(mockUser.uuid);
-describe('ClientService', () => {
- let clientService: ClientService;
+beforeEach(async () => {
+ vi.clearAllMocks();
- beforeEach(() => {
- vi.clearAllMocks();
- clientService = new ClientService();
- });
+ await initializeDB();
+ await clientDB.delete(users);
+ await clientDB.insert(users).values({ id: mockUser.uuid, avatar: 'avatar.png' });
+ await clientDB
+ .insert(userSettings)
+ .values({ id: mockUser.uuid, general: { themeMode: 'light' } });
+});
+
+describe('ClientService', () => {
it('should get user state correctly', async () => {
- (UserModel.getUser as Mock).mockResolvedValue(mockUser);
const spyOn = vi
.spyOn(clientService['preferenceStorage'], 'getFromLocalStorage')
.mockResolvedValue(mockPreference);
const userState = await clientService.getUserState();
- expect(userState).toEqual({
+ expect(userState).toMatchObject({
avatar: mockUser.avatar,
isOnboard: true,
canEnablePWAGuide: false,
hasConversation: false,
canEnableTrace: false,
preference: mockPreference,
- settings: mockUser.settings,
+ settings: { general: { themeMode: 'light' } },
userId: mockUser.uuid,
});
- expect(UserModel.getUser).toHaveBeenCalledTimes(1);
expect(spyOn).toHaveBeenCalledTimes(1);
});
it('should update user settings correctly', async () => {
const settingsPatch: DeepPartial = { general: { themeMode: 'dark' } };
- (UserModel.updateSettings as Mock).mockResolvedValue(undefined);
await clientService.updateUserSettings(settingsPatch);
- expect(UserModel.updateSettings).toHaveBeenCalledWith(settingsPatch);
- expect(UserModel.updateSettings).toHaveBeenCalledTimes(1);
+ const result = await clientDB.query.userSettings.findFirst({
+ where: eq(userSettings.id, mockUser.uuid),
+ });
+
+ expect(result).toMatchObject(settingsPatch);
});
it('should reset user settings correctly', async () => {
- (UserModel.resetSettings as Mock).mockResolvedValue(undefined);
-
await clientService.resetUserSettings();
- expect(UserModel.resetSettings).toHaveBeenCalledTimes(1);
+ const result = await clientDB.query.userSettings.findFirst({
+ where: eq(userSettings.id, mockUser.uuid),
+ });
+
+ expect(result).toBeUndefined();
});
it('should update user avatar correctly', async () => {
const newAvatar = 'new-avatar.png';
- (UserModel.updateAvatar as Mock).mockResolvedValue(undefined);
await clientService.updateAvatar(newAvatar);
-
- expect(UserModel.updateAvatar).toHaveBeenCalledWith(newAvatar);
- expect(UserModel.updateAvatar).toHaveBeenCalledTimes(1);
});
it('should update user preference correctly', async () => {
diff --git a/src/services/user/client.ts b/src/services/user/client.ts
index d05a3c104987..f6d3ee863b6d 100644
--- a/src/services/user/client.ts
+++ b/src/services/user/client.ts
@@ -1,49 +1,67 @@
import { DeepPartial } from 'utility-types';
-import { MessageModel } from '@/database/_deprecated/models/message';
-import { SessionModel } from '@/database/_deprecated/models/session';
-import { UserModel } from '@/database/_deprecated/models/user';
+import { clientDB } from '@/database/client/db';
+import { users } from '@/database/schemas';
+import { MessageModel } from '@/database/server/models/message';
+import { SessionModel } from '@/database/server/models/session';
+import { UserModel } from '@/database/server/models/user';
+import { BaseClientService } from '@/services/baseClientService';
import { UserGuide, UserInitializationState, UserPreference } from '@/types/user';
import { UserSettings } from '@/types/user/settings';
import { AsyncLocalStorage } from '@/utils/localStorage';
+import { uuid } from '@/utils/uuid';
import { IUserService } from './type';
-export class ClientService implements IUserService {
+export class ClientService extends BaseClientService implements IUserService {
private preferenceStorage: AsyncLocalStorage;
- constructor() {
+ private get userModel(): UserModel {
+ return new UserModel(clientDB as any, this.userId);
+ }
+ private get messageModel(): MessageModel {
+ return new MessageModel(clientDB as any, this.userId);
+ }
+ private get sessionModel(): SessionModel {
+ return new SessionModel(clientDB as any, this.userId);
+ }
+
+ constructor(userId?: string) {
+ super(userId);
this.preferenceStorage = new AsyncLocalStorage('LOBE_PREFERENCE');
}
async getUserState(): Promise {
- const user = await UserModel.getUser();
- const messageCount = await MessageModel.count();
- const sessionCount = await SessionModel.count();
+ // if the user does not exist in the db, create one to make sure a user exists,
+ // and init window.__lobeClientUserId
+ await this.makeSureUserExist();
+
+ const state = await this.userModel.getUserState();
+ const user = await UserModel.findById(clientDB as any, this.userId);
+ const messageCount = await this.messageModel.count();
+ const sessionCount = await this.sessionModel.count();
return {
- avatar: user.avatar,
+ ...state,
+ avatar: user?.avatar as string,
canEnablePWAGuide: messageCount >= 4,
canEnableTrace: messageCount >= 4,
hasConversation: messageCount > 0 || sessionCount > 0,
isOnboard: true,
preference: await this.preferenceStorage.getFromLocalStorage(),
- settings: user.settings as UserSettings,
- userId: user.uuid,
};
}
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- updateUserSettings = async (patch: DeepPartial, _?: any) => {
- return UserModel.updateSettings(patch);
+ updateUserSettings = async (patch: DeepPartial) => {
+ return this.userModel.updateSetting(patch as UserSettings);
};
resetUserSettings = async () => {
- return UserModel.resetSettings();
+ return this.userModel.deleteSetting();
};
async updateAvatar(avatar: string) {
- await UserModel.updateAvatar(avatar);
+ await this.userModel.updateUser({ avatar });
}
async updatePreference(preference: Partial) {
@@ -54,4 +72,20 @@ export class ClientService implements IUserService {
async updateGuide(guide: Partial) {
throw new Error('Method not implemented.');
}
+
+ private async makeSureUserExist() {
+ const existUsers = await clientDB.query.users.findMany();
+
+ let user: { id: string };
+ if (existUsers.length === 0) {
+ const result = await clientDB.insert(users).values({ id: uuid() }).returning();
+ user = result[0];
+ } else {
+ user = existUsers[0];
+ }
+
+ if (typeof window !== 'undefined') {
+ window.__lobeClientUserId = user.id;
+ }
+ }
}
diff --git a/src/store/chat/slices/builtinTool/action.test.ts b/src/store/chat/slices/builtinTool/action.test.ts
index 795806b44195..01c73a6b8091 100644
--- a/src/store/chat/slices/builtinTool/action.test.ts
+++ b/src/store/chat/slices/builtinTool/action.test.ts
@@ -2,6 +2,8 @@ import { act, renderHook } from '@testing-library/react';
import { describe, expect, it, vi } from 'vitest';
import { fileService } from '@/services/file';
+import { ClientService } from '@/services/file/client';
+import { messageService } from '@/services/message';
import { imageGenerationService } from '@/services/textToImage';
import { uploadService } from '@/services/upload';
import { chatSelectors } from '@/store/chat/selectors';
@@ -39,17 +41,22 @@ describe('chatToolSlice', () => {
vi.spyOn(uploadService, 'getImageFileByUrlWithCORS').mockResolvedValue(
new File(['1'], 'file.png', { type: 'image/png' }),
);
- vi.spyOn(uploadService, 'uploadToClientDB').mockResolvedValue({} as any);
- vi.spyOn(fileService, 'createFile').mockResolvedValue({ id: mockId, url: '' });
+ vi.spyOn(uploadService, 'uploadToClientS3').mockResolvedValue({} as any);
+ vi.spyOn(ClientService.prototype, 'createFile').mockResolvedValue({
+ id: mockId,
+ url: '',
+ });
vi.spyOn(result.current, 'toggleDallEImageLoading');
+ vi.spyOn(ClientService.prototype, 'checkFileHash').mockImplementation(async () => ({
+ isExist: false,
+ }));
await act(async () => {
await result.current.generateImageFromPrompts(prompts, messageId);
});
// For each prompt, loading is toggled on and then off
expect(imageGenerationService.generateImage).toHaveBeenCalledTimes(prompts.length);
- expect(uploadService.uploadToClientDB).toHaveBeenCalledTimes(prompts.length);
-
+ expect(uploadService.uploadToClientS3).toHaveBeenCalledTimes(prompts.length);
expect(result.current.toggleDallEImageLoading).toHaveBeenCalledTimes(prompts.length * 2);
});
});
@@ -75,6 +82,7 @@ describe('chatToolSlice', () => {
content: initialMessageContent,
}) as ChatMessage,
);
+ vi.spyOn(messageService, 'updateMessage').mockResolvedValueOnce(undefined);
await act(async () => {
await result.current.updateImageItem(messageId, updateFunction);
diff --git a/src/store/file/slices/upload/action.ts b/src/store/file/slices/upload/action.ts
index 1134d7907f43..3f73de77b08b 100644
--- a/src/store/file/slices/upload/action.ts
+++ b/src/store/file/slices/upload/action.ts
@@ -6,14 +6,11 @@ import { message } from '@/components/AntdStaticMethods';
import { LOBE_CHAT_CLOUD } from '@/const/branding';
import { isServerMode } from '@/const/version';
import { fileService } from '@/services/file';
-import { ServerService } from '@/services/file/server';
import { uploadService } from '@/services/upload';
import { FileMetadata, UploadFileItem } from '@/types/files';
import { FileStore } from '../../store';
-const serverFileService = new ServerService();
-
interface UploadWithProgressParams {
file: File;
knowledgeBaseId?: string;
@@ -43,10 +40,6 @@ interface UploadWithProgressResult {
}
export interface FileUploadAction {
- internal_uploadToClientDB: (
- params: Omit,
- ) => Promise;
- internal_uploadToServer: (params: UploadWithProgressParams) => Promise;
uploadWithProgress: (
params: UploadWithProgressParams,
) => Promise;
@@ -57,51 +50,14 @@ export const createFileUploadSlice: StateCreator<
[['zustand/devtools', never]],
[],
FileUploadAction
-> = (set, get) => ({
- internal_uploadToClientDB: async ({ file, onStatusUpdate, skipCheckFileType }) => {
- if (!skipCheckFileType && !file.type.startsWith('image')) {
- onStatusUpdate?.({ id: file.name, type: 'removeFile' });
- message.info({
- content: t('upload.fileOnlySupportInServerMode', {
- cloud: LOBE_CHAT_CLOUD,
- ext: file.name.split('.').pop(),
- ns: 'error',
- }),
- duration: 5,
- });
- return;
- }
-
- const fileArrayBuffer = await file.arrayBuffer();
-
- const hash = sha256(fileArrayBuffer);
-
- const data = await uploadService.uploadToClientDB(
- { fileType: file.type, hash, name: file.name, saveMode: 'local', size: file.size },
- file,
- );
-
- onStatusUpdate?.({
- id: file.name,
- type: 'updateFile',
- value: {
- fileUrl: data.url,
- id: data.id,
- status: 'success',
- uploadState: { progress: 100, restTime: 0, speed: 0 },
- },
- });
-
- return data;
- },
-
- internal_uploadToServer: async ({ file, onStatusUpdate, knowledgeBaseId }) => {
+> = () => ({
+ uploadWithProgress: async ({ file, onStatusUpdate, knowledgeBaseId, skipCheckFileType }) => {
const fileArrayBuffer = await file.arrayBuffer();
// 1. check file hash
const hash = sha256(fileArrayBuffer);
- const checkStatus = await serverFileService.checkFileHash(hash);
+ const checkStatus = await fileService.checkFileHash(hash);
let metadata: FileMetadata;
// 2. if file exist, just skip upload
@@ -112,17 +68,37 @@ export const createFileUploadSlice: StateCreator<
type: 'updateFile',
value: { status: 'processing', uploadState: { progress: 100, restTime: 0, speed: 0 } },
});
- } else {
- // 2. if file don't exist, need upload files
- metadata = await uploadService.uploadWithProgress(file, {
- onProgress: (status, upload) => {
- onStatusUpdate?.({
- id: file.name,
- type: 'updateFile',
- value: { status: status === 'success' ? 'processing' : status, uploadState: upload },
+ }
+ // 2. if the file doesn't exist, we need to upload it
+ else {
+ // if in server mode, upload to server S3; otherwise upload to client S3
+ if (isServerMode) {
+ metadata = await uploadService.uploadWithProgress(file, {
+ onProgress: (status, upload) => {
+ onStatusUpdate?.({
+ id: file.name,
+ type: 'updateFile',
+ value: { status: status === 'success' ? 'processing' : status, uploadState: upload },
+ });
+ },
+ });
+ } else {
+ if (!skipCheckFileType && !file.type.startsWith('image')) {
+ onStatusUpdate?.({ id: file.name, type: 'removeFile' });
+ message.info({
+ content: t('upload.fileOnlySupportInServerMode', {
+ cloud: LOBE_CHAT_CLOUD,
+ ext: file.name.split('.').pop(),
+ ns: 'error',
+ }),
+ duration: 5,
});
- },
- });
+ return;
+ }
+
+ // Upload to IndexedDB in the browser
+ metadata = await uploadService.uploadToClientS3(hash, file);
+ }
}
// 3. use more powerful file type detector to get file type
@@ -138,12 +114,10 @@ export const createFileUploadSlice: StateCreator<
// 4. create file to db
const data = await fileService.createFile(
{
- createdAt: Date.now(),
fileType,
hash,
metadata,
name: file.name,
- saveMode: 'url',
size: file.size,
url: metadata.path,
},
@@ -163,12 +137,4 @@ export const createFileUploadSlice: StateCreator<
return data;
},
-
- uploadWithProgress: async (payload) => {
- const { internal_uploadToServer, internal_uploadToClientDB } = get();
-
- if (isServerMode) return internal_uploadToServer(payload);
-
- return internal_uploadToClientDB(payload);
- },
});
diff --git a/src/store/global/actions/clientDb.ts b/src/store/global/actions/clientDb.ts
new file mode 100644
index 000000000000..d8ebc238ef6b
--- /dev/null
+++ b/src/store/global/actions/clientDb.ts
@@ -0,0 +1,48 @@
+import { SWRResponse } from 'swr';
+import type { StateCreator } from 'zustand/vanilla';
+
+import { useOnlyFetchOnceSWR } from '@/libs/swr';
+import type { GlobalStore } from '@/store/global';
+import { DatabaseLoadingState, OnStageChange } from '@/types/clientDB';
+
+type InitClientDBParams = { onStateChange: OnStageChange };
+/**
+ * 设置操作
+ */
+export interface GlobalClientDBAction {
+ initializeClientDB: (params?: InitClientDBParams) => Promise;
+ markPgliteEnabled: () => void;
+ useInitClientDB: (params?: InitClientDBParams) => SWRResponse;
+}
+
+export const clientDBSlice: StateCreator<
+ GlobalStore,
+ [['zustand/devtools', never]],
+ [],
+ GlobalClientDBAction
+> = (set, get) => ({
+ initializeClientDB: async (params) => {
+ // skip if db initialization has already started and is not in an error state
+ if (
+ get().initClientDBStage !== DatabaseLoadingState.Idle &&
+ get().initClientDBStage !== DatabaseLoadingState.Error
+ )
+ return;
+
+ const { initializeDB } = await import('@/database/client/db');
+ await initializeDB({
+ onProgress: (data) => {
+ set({ initClientDBProcess: data });
+ },
+ onStateChange: (state) => {
+ set({ initClientDBStage: state });
+ params?.onStateChange?.(state);
+ },
+ });
+ },
+ markPgliteEnabled: () => {
+ get().updateSystemStatus({ isEnablePglite: true });
+ },
+ useInitClientDB: (params) =>
+ useOnlyFetchOnceSWR('initClientDB', () => get().initializeClientDB(params)),
+});
diff --git a/src/store/global/initialState.ts b/src/store/global/initialState.ts
index 5da29090f9a3..5c28efc707ae 100644
--- a/src/store/global/initialState.ts
+++ b/src/store/global/initialState.ts
@@ -1,5 +1,6 @@
import { AppRouterInstance } from 'next/dist/shared/lib/app-router-context.shared-runtime';
+import { DatabaseLoadingState } from '@/types/clientDB';
import { SessionDefaultGroup } from '@/types/session';
import { AsyncLocalStorage } from '@/utils/localStorage';
@@ -37,6 +38,10 @@ export interface SystemStatus {
hidePWAInstaller?: boolean;
hideThreadLimitAlert?: boolean;
inputHeight: number;
+ /**
+ * 应用初始化时不启用 PGLite,只有当用户手动开启时才启用
+ */
+ isEnablePglite?: boolean;
mobileShowPortal?: boolean;
mobileShowTopic?: boolean;
sessionsWidth: number;
@@ -50,6 +55,12 @@ export interface SystemStatus {
export interface GlobalState {
hasNewVersion?: boolean;
+ initClientDBProcess?: { costTime?: number; phase: 'wasm' | 'dependencies'; progress: number };
+ /**
+ * 客户端数据库初始化状态
+ * 启动时为 Idle,完成为 Ready,报错为 Error
+ */
+ initClientDBStage: DatabaseLoadingState;
isMobile?: boolean;
isStatusInit?: boolean;
latestVersion?: string;
@@ -76,6 +87,7 @@ export const INITIAL_STATUS = {
} satisfies SystemStatus;
export const initialState: GlobalState = {
+ initClientDBStage: DatabaseLoadingState.Idle,
isMobile: false,
isStatusInit: false,
sidebarKey: SidebarTabKey.Chat,
diff --git a/src/store/global/selectors.ts b/src/store/global/selectors.ts
index 65936ca26bb7..97967d2ea14f 100644
--- a/src/store/global/selectors.ts
+++ b/src/store/global/selectors.ts
@@ -1,4 +1,6 @@
+import { isServerMode } from '@/const/version';
import { GlobalStore } from '@/store/global';
+import { DatabaseLoadingState } from '@/types/clientDB';
import { INITIAL_STATUS } from './initialState';
@@ -22,11 +24,27 @@ const filePanelWidth = (s: GlobalStore) => s.status.filePanelWidth;
const inputHeight = (s: GlobalStore) => s.status.inputHeight;
const threadInputHeight = (s: GlobalStore) => s.status.threadInputHeight;
-const isPgliteNotEnabled = () => false;
+const isPgliteNotEnabled = (s: GlobalStore) =>
+ !isServerMode && s.isStatusInit && !s.status.isEnablePglite;
-const isPgliteNotInited = () => false;
+/**
+ * 当且仅当 client db 模式,且 pglite 未初始化完成时返回 true
+ */
+const isPgliteNotInited = (s: GlobalStore) =>
+ !isServerMode &&
+ s.isStatusInit &&
+ s.status.isEnablePglite &&
+ s.initClientDBStage !== DatabaseLoadingState.Ready;
-const isPgliteInited = (): boolean => true;
+/**
+ * 当且仅当 client db 模式,且 pglite 初始化完成时返回 true
+ */
+const isPgliteInited = (s: GlobalStore): boolean =>
+ isServerMode ||
+ (s.isStatusInit &&
+ s.status.isEnablePglite &&
+ s.initClientDBStage === DatabaseLoadingState.Ready) ||
+ false;
export const systemStatusSelectors = {
filePanelWidth,
diff --git a/src/store/global/store.ts b/src/store/global/store.ts
index 5bf1ac439f19..ed3f28e9389a 100644
--- a/src/store/global/store.ts
+++ b/src/store/global/store.ts
@@ -5,15 +5,17 @@ import { StateCreator } from 'zustand/vanilla';
import { createDevtools } from '../middleware/createDevtools';
import { type GlobalStoreAction, globalActionSlice } from './action';
+import { type GlobalClientDBAction, clientDBSlice } from './actions/clientDb';
import { type GlobalState, initialState } from './initialState';
// =============== 聚合 createStoreFn ============ //
-export type GlobalStore = GlobalState & GlobalStoreAction;
+export type GlobalStore = GlobalState & GlobalStoreAction & GlobalClientDBAction;
const createStore: StateCreator<GlobalStore, [['zustand/devtools', never]]> = (...parameters) => ({
...initialState,
...globalActionSlice(...parameters),
+ ...clientDBSlice(...parameters),
});
// =============== 实装 useStore ============ //
diff --git a/src/store/session/slices/sessionGroup/reducer.test.ts b/src/store/session/slices/sessionGroup/reducer.test.ts
index 17a947ca217a..52fe51b9e329 100644
--- a/src/store/session/slices/sessionGroup/reducer.test.ts
+++ b/src/store/session/slices/sessionGroup/reducer.test.ts
@@ -10,14 +10,14 @@ describe('sessionGroupsReducer', () => {
{
id: nanoid(),
name: 'Group 1',
- createdAt: Date.now(),
- updatedAt: Date.now(),
+ createdAt: new Date(),
+ updatedAt: new Date(),
},
{
id: nanoid(),
name: 'Group 2',
- createdAt: Date.now(),
- updatedAt: Date.now(),
+ createdAt: new Date(),
+ updatedAt: new Date(),
sort: 1,
},
];
@@ -26,8 +26,8 @@ describe('sessionGroupsReducer', () => {
const newItem: SessionGroupItem = {
id: nanoid(),
name: 'New Group',
- createdAt: Date.now(),
- updatedAt: Date.now(),
+ createdAt: new Date(),
+ updatedAt: new Date(),
};
const result = sessionGroupsReducer(initialState, {
diff --git a/src/types/clientDB.ts b/src/types/clientDB.ts
new file mode 100644
index 000000000000..20febdf542a7
--- /dev/null
+++ b/src/types/clientDB.ts
@@ -0,0 +1,29 @@
+// 定义加载状态类型
+export enum DatabaseLoadingState {
+ Error = 'error',
+ Finished = 'finished',
+ Idle = 'idle',
+ Initializing = 'initializing',
+ LoadingDependencies = 'loadingDependencies',
+ LoadingWasm = 'loadingWasm',
+ Migrating = 'migrating',
+ Ready = 'ready',
+}
+
+export const ClientDatabaseInitStages = [
+ DatabaseLoadingState.Idle,
+ DatabaseLoadingState.Initializing,
+ DatabaseLoadingState.LoadingDependencies,
+ DatabaseLoadingState.LoadingWasm,
+ DatabaseLoadingState.Migrating,
+ DatabaseLoadingState.Finished,
+];
+
+// 定义进度回调接口
+export interface ClientDBLoadingProgress {
+ costTime?: number;
+ phase: 'wasm' | 'dependencies';
+ progress: number;
+}
+
+export type OnStageChange = (state: DatabaseLoadingState) => void;
diff --git a/src/types/files/upload.ts b/src/types/files/upload.ts
index 83aa3d5be9ce..b94b69a5fdee 100644
--- a/src/types/files/upload.ts
+++ b/src/types/files/upload.ts
@@ -53,7 +53,6 @@ export const FileMetadataSchema = z.object({
export type FileMetadata = z.infer<typeof FileMetadataSchema>;
export const UploadFileSchema = z.object({
- data: z.instanceof(ArrayBuffer).optional(),
/**
* file type
* @example 'image/png'
@@ -77,7 +76,6 @@ export const UploadFileSchema = z.object({
* local mean save the raw file into data
* url mean upload the file to a cdn and then save the url
*/
- saveMode: z.enum(['local', 'url']),
/**
* file size
*/
@@ -89,3 +87,11 @@ export const UploadFileSchema = z.object({
});
export type UploadFileParams = z.infer<typeof UploadFileSchema>;
+
+export interface CheckFileHashResult {
+ fileType?: string;
+ isExist: boolean;
+ metadata?: unknown;
+ size?: number;
+ url?: string;
+}
diff --git a/src/types/global.d.ts b/src/types/global.d.ts
index 0d4762a75d2a..074ec166c3a0 100644
--- a/src/types/global.d.ts
+++ b/src/types/global.d.ts
@@ -12,3 +12,12 @@ declare module 'antd-style' {
declare module 'styled-components' {
export interface DefaultTheme extends AntdToken, LobeCustomToken {}
}
+
+declare global {
+ interface Window {
+ /**
+ * 客户端 db 模式下的用户 id
+ */
+ __lobeClientUserId?: string;
+ }
+}
diff --git a/src/types/meta.ts b/src/types/meta.ts
index 459aece85cdb..23ce2b941c2f 100644
--- a/src/types/meta.ts
+++ b/src/types/meta.ts
@@ -21,19 +21,10 @@ export const LobeMetaDataSchema = z.object({
export type MetaData = z.infer<typeof LobeMetaDataSchema>;
export interface BaseDataModel {
- /**
- * @deprecated
- */
- createAt?: number;
-
createdAt: number;
id: string;
meta: MetaData;
- /**
- * @deprecated
- */
- updateAt?: number;
updatedAt: number;
}
diff --git a/src/types/session/sessionGroup.ts b/src/types/session/sessionGroup.ts
index 85fb3675021b..1c8dbcda048a 100644
--- a/src/types/session/sessionGroup.ts
+++ b/src/types/session/sessionGroup.ts
@@ -8,11 +8,11 @@ export enum SessionDefaultGroup {
export type SessionGroupId = SessionDefaultGroup | string;
export interface SessionGroupItem {
- createdAt: number;
+ createdAt: Date;
id: string;
name: string;
- sort?: number;
- updatedAt: number;
+ sort?: number | null;
+ updatedAt: Date;
}
export type SessionGroups = SessionGroupItem[];