Skip to content

Commit

Permalink
Merge pull request TransformerOptimus#1356 from TransformerOptimus/fixes-for-main
Browse files Browse the repository at this point in the history

fixes for main
  • Loading branch information
rounak610 authored Nov 3, 2023
2 parents 42b22c6 + b9f7a60 commit 4afbd7c
Show file tree
Hide file tree
Showing 16 changed files with 162 additions and 93 deletions.
2 changes: 1 addition & 1 deletion docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ services:
- redis_data:/data

super__postgres:
image: "docker.io/library/postgres:latest"
image: "docker.io/library/postgres:15"
environment:
- POSTGRES_USER=superagi
- POSTGRES_PASSWORD=password
Expand Down
4 changes: 2 additions & 2 deletions gui/pages/Content/Models/AddModel.js
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import React, {useEffect, useState} from "react";
import ModelForm from "./ModelForm";

export default function AddModel({internalId, getModels, sendModelData}){
export default function AddModel({internalId, getModels, sendModelData, env}){

return(
<div id="add_model">
<div className="row">
<div className="col-3" />
<div className="col-6 col-6-scrollable">
<ModelForm internalId={internalId} getModels={getModels} sendModelData={sendModelData}/>
<ModelForm internalId={internalId} getModels={getModels} sendModelData={sendModelData} env={env}/>
</div>
<div className="col-3" />
</div>
Expand Down
65 changes: 55 additions & 10 deletions gui/pages/Content/Models/ModelForm.js
Original file line number Diff line number Diff line change
@@ -1,22 +1,25 @@
import React, {useEffect, useRef, useState} from "react";
import {removeTab, openNewTab, createInternalId, getUserClick} from "@/utils/utils";
import Image from "next/image";
import {fetchApiKey, storeModel, verifyEndPoint} from "@/pages/api/DashboardService";
import {fetchApiKey, storeModel, testModel, verifyEndPoint} from "@/pages/api/DashboardService";
import {BeatLoader, ClipLoader} from "react-spinners";
import {ToastContainer, toast} from 'react-toastify';

export default function ModelForm({internalId, getModels, sendModelData}){
const models = ['OpenAI', 'Replicate', 'Hugging Face', 'Google Palm'];
export default function ModelForm({internalId, getModels, sendModelData, env}){
const models = env === 'DEV' ? ['OpenAI', 'Replicate', 'Hugging Face', 'Google Palm', 'Local LLM'] : ['OpenAI', 'Replicate', 'Hugging Face', 'Google Palm'];
const [selectedModel, setSelectedModel] = useState('Select a Model');
const [modelName, setModelName] = useState('');
const [modelDescription, setModelDescription] = useState('');
const [modelTokenLimit, setModelTokenLimit] = useState(4096);
const [modelEndpoint, setModelEndpoint] = useState('');
const [modelDropdown, setModelDropdown] = useState(false);
const [modelVersion, setModelVersion] = useState('');
const [modelContextLength, setContextLength] = useState(4096);
const [tokenError, setTokenError] = useState(false);
const [lockAddition, setLockAddition] = useState(true);
const [isLoading, setIsLoading] = useState(false)
const [modelStatus, setModelStatus] = useState(null);
const [createClickable, setCreateClickable] = useState(true);
const modelRef = useRef(null);

useEffect(() => {
Expand Down Expand Up @@ -79,13 +82,31 @@ export default function ModelForm({internalId, getModels, sendModelData}){
})
}

// Runs a one-off health check against the backend's local-LLM test endpoint
// and records the outcome in modelStatus (true = reachable, false = failed).
// The "Test Model" button is locked (createClickable = false) while the
// request is in flight; the finally block guarantees it is re-enabled on
// every exit path instead of repeating the call in each branch.
const handleModelStatus = async () => {
  setCreateClickable(false);
  try {
    const response = await testModel();
    setModelStatus(response.status === 200);
  } catch(error) {
    // Log at error level (was console.log) so failures surface in the console.
    console.error("Error Message:: " + error);
    setModelStatus(false);
  } finally {
    setCreateClickable(true);
  }
}

// Tags the freshly stored model as a 'Model' entry and forwards it to the
// parent via sendModelData so the corresponding tab can be opened.
const handleModelSuccess = (model) => {
  Object.assign(model, { contentType: 'Model' });
  sendModelData(model);
}

const storeModelDetails = (modelProviderId) => {
storeModel(modelName,modelDescription, modelEndpoint, modelProviderId, modelTokenLimit, "Custom", modelVersion).then((response) =>{
storeModel(modelName,modelDescription, modelEndpoint, modelProviderId, modelTokenLimit, "Custom", modelVersion, modelContextLength).then((response) =>{
setIsLoading(false)
let data = response.data
if (data.error) {
Expand Down Expand Up @@ -153,18 +174,42 @@ export default function ModelForm({internalId, getModels, sendModelData}){
onChange={(event) => setModelVersion(event.target.value)}/>
</div>}

{(selectedModel === 'Local LLM') && <div className="mt_24">
<span>Model Context Length</span>
<input className="input_medium mt_8" type="number" placeholder="Enter Model Context Length" value={modelContextLength}
onChange={(event) => setContextLength(event.target.value)}/>
</div>}

<div className="mt_24">
<span>Token Limit</span>
<input className="input_medium mt_8" type="number" placeholder="Enter Model Token Limit" value={modelTokenLimit}
onChange={(event) => setModelTokenLimit(parseInt(event.target.value, 10))}/>
</div>

<div className="horizontal_container justify_end mt_24">
<button className="secondary_button mr_7"
onClick={() => removeTab(-5, "new model", "Add_Model", internalId)}>Cancel</button>
<button className='primary_button' onClick={handleAddModel} disabled={lockAddition || isLoading}>
{isLoading ? <><span>Adding Model &nbsp;</span><ClipLoader size={16} color={"#000000"} /></> : 'Add Model'}
</button>
{modelStatus===false && <div className="horizontal_container align_start error_box mt_24 gap_6">
<Image width={16} height={16} src="/images/icon_error.svg" alt="error-icon" />
<div className="vertical_containers">
<span className="text_12 color_white lh_16">Test model failed</span>
</div>
</div>}

{modelStatus===true && <div className="horizontal_container align_start success_box mt_24 gap_6">
<Image width={16} height={16} src="/images/icon_info.svg"/>
<div className="vertical_containers">
<span className="text_12 color_white lh_16">Test model successful</span>
</div>
</div>}

<div className="horizontal_container justify_space_between w_100 mt_24">
{selectedModel==='Local LLM' && <button className="secondary_button flex_none" disabled={!createClickable}
onClick={() => {handleModelStatus();}}>{createClickable ? 'Test Model' : 'Testing model...'}</button>}
<div className="horizontal_container justify_end">
<button className="secondary_button mr_7"
onClick={() => removeTab(-5, "new model", "Add_Model", internalId)}>Cancel</button>
<button className='primary_button' onClick={handleAddModel} disabled={lockAddition || isLoading || (selectedModel==='Local LLM' && !modelStatus)}>
{isLoading ? <><span>Adding Model &nbsp;</span><ClipLoader size={16} color={"#000000"} /></> : 'Add Model'}
</button>
</div>
</div>
<ToastContainer className="text_16"/>
</div>
Expand Down
2 changes: 1 addition & 1 deletion gui/pages/Dashboard/Content.js
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,7 @@ export default function Content({env, selectedView, selectedProjectId, organisat
organisationId={organisationId} sendKnowledgeData={addTab}
sendAgentData={addTab} selectedProjectId={selectedProjectId} editAgentId={tab.id}
fetchAgents={getAgentList} toolkits={toolkits} template={null} edit={true} agents={agents}/>}
{tab.contentType === 'Add_Model' && <AddModel internalId={tab.internalId} getModels={getModels} sendModelData={addTab}/>}
{tab.contentType === 'Add_Model' && <AddModel internalId={tab.internalId} getModels={getModels} sendModelData={addTab} env={env}/>}
{tab.contentType === 'Model' && <ModelDetails modelId={tab.id} modelName={tab.name} />}
</div>}
</div>
Expand Down
70 changes: 10 additions & 60 deletions gui/pages/_app.css
Original file line number Diff line number Diff line change
Expand Up @@ -231,18 +231,6 @@ input[type="range"]::-moz-range-track {
z-index: 10;
}

.dropdown_container_models {
flex-direction: column;
align-items: flex-start;
border-radius: 8px;
background: #2E293F;
box-shadow: -2px 2px 24px rgba(0, 0, 0, 0.4);
position: absolute;
width: fit-content;
height: fit-content;
padding: 8px;
}

.dropdown_container {
width: 150px;
height: auto;
Expand Down Expand Up @@ -783,7 +771,6 @@ p {
.mt_74{margin-top: 74px;}
.mt_80{margin-top: 80px;}
.mt_90{margin-top: 90px;}
.mt_130{margin-top: 130px;}

.mb_1{margin-bottom: 1px;}
.mb_2{margin-bottom: 2px;}
Expand Down Expand Up @@ -991,22 +978,6 @@ p {
line-height: normal;
}

.text_20 {
color: #FFF;
font-size: 20px;
font-style: normal;
font-weight: 400;
line-height: normal;
}

.text_20 {
color: #FFF;
font-size: 20px;
font-style: normal;
font-weight: 400;
line-height: normal;
}

.text_20_bold{
color: #FFF;
font-size: 20px;
Expand Down Expand Up @@ -1107,7 +1078,6 @@ p {
.w_73{width: 73%}
.w_97{width: 97%}
.w_100{width: 100%}
.w_99vw{width: 99vw}
.w_inherit{width: inherit}
.w_fit_content{width:fit-content}
.w_inherit{width: inherit}
Expand All @@ -1125,11 +1095,11 @@ p {
.h_80vh{height: 80vh}
.h_calc92{height: calc(100vh - 92px)}
.h_calc_add40{height: calc(80vh + 40px)}
.h_calc_sub_60{height: calc(92.5vh - 60px)}

.mxh_78vh{max-height: 78vh}

.flex_dir_col{flex-direction: column}
.flex_none{flex: none}

.justify_center{justify-content: center}
.justify_end{justify-content: flex-end}
Expand All @@ -1138,8 +1108,6 @@ p {

.display_flex{display: inline-flex}
.display_flex_container{display: flex}
.display_none{display: none}
.display_block{display: block}

.align_center{align-items: center}
.align_start{align-items: flex-start}
Expand Down Expand Up @@ -1178,8 +1146,6 @@ p {

.bt_white{border-top: 1px solid rgba(255, 255, 255, 0.08);}

.bt_white{border-top: 1px solid rgba(255, 255, 255, 0.08);}

.color_white{color:#FFFFFF}
.color_gray{color:#888888}

Expand All @@ -1188,7 +1154,7 @@ p {
.lh_18{line-height: 18px;}
.lh_24{line-height: 24px;}

.padding_0{padding: 0}
.padding_0{padding: 0;}
.padding_5{padding: 5px;}
.padding_6{padding: 6px;}
.padding_8{padding: 8px;}
Expand Down Expand Up @@ -1505,7 +1471,6 @@ tr{
.bg_none{background: none;}
.bg_primary{background: #2E293F;}
.bg_secondary{background: #272335;}
.bg_none{background: none}

.container {
height: 100%;
Expand Down Expand Up @@ -1871,6 +1836,13 @@ tr{
padding: 12px;
}

.success_box{
border-radius: 8px;
padding: 12px;
border-left: 4px solid rgba(255, 255, 255, 0.60);
background: rgba(255, 255, 255, 0.08);
}

.horizontal_line {
margin: 16px 0 16px -16px;
border: 1px solid #ffffff20;
Expand Down Expand Up @@ -1922,26 +1894,4 @@ tr{
.tooltip-class {
background-color: green;
border-radius: 6px;
}

.text_dropdown {
color: #FFFFFF;
font-family: Plus Jakarta Sans, sans-serif;
font-style: normal;
font-weight: 500;
line-height: normal;
}

.text_dropdown_18 {
font-size: 18px;
}

.vertical_divider {
background: transparent;
/*border-color: rgba(255, 255, 255, 0.08);*/
border: 1.2px solid rgba(255, 255, 255, 0.08);;
height: 20px;
width: 0;
}


}
9 changes: 6 additions & 3 deletions gui/pages/api/DashboardService.js
Original file line number Diff line number Diff line change
Expand Up @@ -358,8 +358,12 @@ export const verifyEndPoint = (model_api_key, end_point, model_provider) => {
});
}

export const storeModel = (model_name, description, end_point, model_provider_id, token_limit, type, version) => {
return api.post(`/models_controller/store_model`,{model_name, description, end_point, model_provider_id, token_limit, type, version});
export const storeModel = (model_name, description, end_point, model_provider_id, token_limit, type, version, context_length) => {
return api.post(`/models_controller/store_model`,{model_name, description, end_point, model_provider_id, token_limit, type, version, context_length});
}

// Pings the backend's local-LLM smoke-test endpoint and resolves with the
// raw HTTP response (200 when the model loads successfully on the server).
export const testModel = () => api.get(`/models_controller/test_local_llm`);

export const fetchModels = () => {
Expand Down Expand Up @@ -389,7 +393,6 @@ export const getToolLogs = (toolName) => {
export const publishTemplateToMarketplace = (agentData) => {
return api.post(`/agent_templates/publish_template`, agentData);
};

export const getKnowledgeMetrics = (knowledgeName) => {
return api.get(`analytics/knowledge/${knowledgeName}/usage`)
}
Expand Down
37 changes: 35 additions & 2 deletions superagi/controllers/models_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@
from superagi.helper.auth import check_auth, get_user_organisation
from superagi.helper.models_helper import ModelsHelper
from superagi.apm.call_log_helper import CallLogHelper
from superagi.lib.logger import logger
from superagi.models.models import Models
from superagi.models.models_config import ModelsConfig
from superagi.config.config import get_config
from superagi.controllers.types.models_types import ModelsTypes
from fastapi_sqlalchemy import db
import logging
from pydantic import BaseModel
from superagi.helper.llm_loader import LLMLoader

router = APIRouter()

Expand All @@ -26,6 +28,7 @@ class StoreModelRequest(BaseModel):
token_limit: int
type: str
version: str
context_length: int

class ModelName (BaseModel):
model: str
Expand Down Expand Up @@ -69,7 +72,9 @@ async def verify_end_point(model_api_key: str = None, end_point: str = None, mod
@router.post("/store_model", status_code=200)
async def store_model(request: StoreModelRequest, organisation=Depends(get_user_organisation)):
    """Persist a custom model definition for the caller's organisation.

    Args:
        request: Payload carrying the model name, description, endpoint,
            provider id, token limit, type, version and context_length.
        organisation: Organisation resolved from the authenticated user.

    Returns:
        The result of ``Models.store_model_details`` for the stored model.

    Raises:
        HTTPException: 500 on any storage failure (the cause is logged).
    """
    try:
        logger.info(request)
        return Models.store_model_details(
            db.session, organisation.id, request.model_name, request.description,
            request.end_point, request.model_provider_id, request.token_limit,
            request.type, request.version, request.context_length)
    except Exception as e:
        # Use the project logger (not the bare logging module) so this error
        # lands in the same sink as the rest of the router's messages.
        logger.error(f"Error storing the Model Details: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal Server Error")
Expand Down Expand Up @@ -164,4 +169,32 @@ def get_models_details(page: int = 0):
marketplace_models = Models.fetch_marketplace_list(page)
marketplace_models_with_install = Models.get_model_install_details(db.session, marketplace_models, organisation_id,
ModelsTypes.MARKETPLACE.value)
return marketplace_models_with_install
return marketplace_models_with_install

@router.get("/test_local_llm", status_code=200)
def test_local_llm():
    """Smoke-test the locally configured LLM.

    Loads the model and its JSON grammar via LLMLoader, runs a trivial chat
    completion, and returns a success string.

    Raises:
        HTTPException: 404 when the model or grammar fails to load, or when
            the completion itself errors (status code kept for backward
            compatibility with existing callers).
    """
    try:
        llm_loader = LLMLoader(context_length=4096)
        llm_model = llm_loader.model
        llm_grammar = llm_loader.grammar
        if llm_model is None:
            logger.error("Model not found.")
            raise HTTPException(status_code=404,
                                detail="Error while loading the model. Please check your model path and try again.")
        if llm_grammar is None:
            logger.error("Grammar not found.")
            raise HTTPException(status_code=404, detail="Grammar not found.")

        messages = [
            {"role": "system",
             "content": "You are an AI assistant. Give response in a proper JSON format"},
            {"role": "user",
             "content": "Hi!"}
        ]
        response = llm_model.create_chat_completion(messages=messages, grammar=llm_grammar)
        content = response["choices"][0]["message"]["content"]
        logger.info(content)
        return "Model loaded successfully."

    except HTTPException:
        # Re-raise as-is: the generic handler below used to swallow these and
        # overwrite specific details (e.g. "Grammar not found.").
        raise
    except Exception as e:
        # Was logger.info("Error: ", e) — exception passed as a stray positional
        # argument at the wrong level; interpolate it and log as an error.
        logger.error(f"Error: {e}")
        raise HTTPException(status_code=404,
                            detail="Error while loading the model. Please check your model path and try again.")
2 changes: 1 addition & 1 deletion superagi/helper/llm_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,4 +35,4 @@ def grammar(self):
"superagi/llms/grammar/json.gbnf")
except Exception as e:
logger.error(e)
return self._grammar
return self._grammar
Loading

0 comments on commit 4afbd7c

Please sign in to comment.