feat: Add Xpuls AI Integration + Demo Environment + Prompt Debugging (#11)

* Add Xpuls.ai integration
* Add demo environment
* Cleanup
* Make xpulsai tracing optional
* Bump version to 0.1.0

Commit e5e9eca (1 parent: 38d3b1c)

Showing 15 changed files with 345 additions and 42 deletions.
Diff — ignore rules (likely `.gitignore`; the filename is not shown in this extract):

@@ -1,3 +1,5 @@
.idea
venv
dist

*.env
Empty file.
@@ -0,0 +1,4 @@ | ||
from demo.openai_langchain import run_openai_agent | ||
|
||
res = run_openai_agent() | ||
print(str(res)) |
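For orientation: `demo/openai_langchain.py` reads `OPENAI_API_KEY` and `OPENAI_URL` at import time and writes the latter back into `os.environ`, so both must be set before the demo module is imported; the demo also enables xpuls tracing against `http://localhost:8000`, so a local xpuls server is presumably expected. A minimal, hypothetical driver (the environment variable names come from the demo file below; the values are placeholders and are not part of the commit):

# Hypothetical driver for the demo entry point; not part of this commit.
# Env var names are taken from demo/openai_langchain.py; values are placeholders.
import os

os.environ["OPENAI_API_KEY"] = "<azure-openai-key>"                # placeholder
os.environ["OPENAI_URL"] = "https://<resource>.openai.azure.com/"  # placeholder

# Import after the environment is set, because the demo module configures
# openai and auto-instruments LangChain at import time.
from demo.openai_langchain import run_openai_agent

print(str(run_openai_agent()))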
Empty file.
Diff — `demo/openai_langchain.py` (path taken from the import in the entry-point script above):

@@ -0,0 +1,65 @@
import logging
import os

import openai
from langchain.agents import initialize_agent, AgentType
from langchain.chat_models import AzureChatOpenAI
from langchain.memory import ConversationBufferMemory

from xpuls.mlmonitor.langchain.decorators.map_xpuls_project import MapXpulsProject
from xpuls.mlmonitor.langchain.decorators.telemetry_override_labels import TelemetryOverrideLabels
from xpuls.mlmonitor.langchain.instrument import LangchainTelemetry

logger = logging.getLogger(__name__)

openai.api_key = os.getenv("OPENAI_API_KEY")
openai.api_type = "azure"
openai.api_base = os.getenv("OPENAI_URL")
os.environ["OPENAI_API_BASE"] = os.getenv("OPENAI_URL")
os.environ["OPENAI_API_VERSION"] = "2023-03-15-preview"
openai.api_version = "2023-03-15-preview"

# Set this to enable Advanced prompt tracing with server
# os.environ["XPULSAI_TRACING_ENABLED"] = "false"
os.environ["XPULSAI_TRACING_ENABLED"] = "true"

default_labels = {"system": "openai-ln-test", "agent_name": "fallback_value"}

LangchainTelemetry(
    default_labels=default_labels,
    xpuls_host_url="http://localhost:8000"
).auto_instrument()

memory = ConversationBufferMemory(memory_key="chat_history")
chat_model = AzureChatOpenAI(
    deployment_name="gpt35turbo",
    model_name="gpt-35-turbo",
    temperature=0
)


@TelemetryOverrideLabels(agent_name="chat_agent_alpha")
@MapXpulsProject(project_id="default")  # Get Project ID from console
def run_openai_agent():
    agent = initialize_agent(llm=chat_model,
                             verbose=True,
                             tools=[],
                             agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION,
                             memory=memory,
                             # handle_parsing_errors="Check your output and make sure it conforms!",
                             return_intermediate_steps=False,
                             agent_executor_kwargs={"extra_prompt_messages": "test"})

    try:
        res = agent.run("You are to behave as a think tank to answer the asked question in most creative way,"
                        " ensure to NOT be abusive or racist, you should validate your response w.r.t to validity "
                        "in practical world before giving final answer" +
                        f"\nQuestion: How does nature work?, is balance of life true? \n")
    except ValueError as e:
        res = str(e)
        if not res.startswith("Could not parse LLM output: `"):
            raise e
        logger.error(f" Got ValueError: {e}")
        res = res.removeprefix("Could not parse LLM output: `").removesuffix("`")

    return res
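The two decorators above are the per-call hooks added by this integration: `TelemetryOverrideLabels` overrides label values for the wrapped call, and `MapXpulsProject` attaches the xpuls project. A hedged sketch of a second entry point that could be appended to the same demo module, reusing the pattern (`chat_agent_beta` and `demo-project` are made-up values; `chat_model` and `memory` refer to the objects defined above; `project_slug` is accepted per the decorator's constructor further down in this commit):

# Illustrative only; mirrors the decorator usage in the demo above with
# hypothetical label and project values, assuming it lives in the same module.
@TelemetryOverrideLabels(agent_name="chat_agent_beta")   # hypothetical per-call label
@MapXpulsProject(project_slug="demo-project")            # hypothetical slug instead of project_id
def run_openai_agent_beta(question: str):
    agent = initialize_agent(llm=chat_model,
                             tools=[],
                             agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION,
                             memory=memory)
    return agent.run(question)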
Diff — likely the demo's requirements file (filename not shown in this extract):

@@ -0,0 +1,2 @@
openai
langchain
Diff — likely the package requirements file (filename not shown; the add/remove markers were lost in extraction, so old and new entries appear together):

@@ -1,5 +1,4 @@
opentelemetry-instrumentation-requests
opentelemetry-api
opentelemetry-sdk
prometheus-client
pydantic
requests
urllib3
Diff — `xpuls/mlmonitor/langchain/decorators/map_xpuls_project.py` (path taken from the import in the demo):

@@ -0,0 +1,21 @@
import contextvars
from typing import Optional, Any, Dict


class MapXpulsProject:
    _context: contextvars.ContextVar[Optional[Dict[str, Any]]] = contextvars.ContextVar('telemetry_extra_labels_vars',
                                                                                         default=None)

    def __init__(self, project_id: Optional[str] = None, project_slug: Optional[str] = None):
        if project_id is None and project_slug is None:
            raise ValueError("Both `project_id` and `project_slug` cannot be null")
        self.project_id = project_id
        self.project_slug = project_slug

    def __call__(self, func):
        def wrapped_func(*args, **kwargs):
            self._context.set({'project_id': self.project_id, 'project_slug': self.project_slug})

            return func(*args, **kwargs)

        return wrapped_func
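To make the behaviour concrete: the decorator only stashes the project metadata in a `ContextVar` before invoking the wrapped function; whatever runs inside the call can read it back. A small sketch, with the read side shown inline purely for illustration (in practice the consumer is the patched LangChain callback code, which is not part of this hunk):

# Illustration only: shows that the project info is visible via the ContextVar
# for the duration of the decorated call. The real consumer is xpuls' patched
# LangChain code, not application code like this.
from xpuls.mlmonitor.langchain.decorators.map_xpuls_project import MapXpulsProject


@MapXpulsProject(project_id="default")  # same usage as in the demo
def handle_request():
    ctx = MapXpulsProject._context.get()
    return ctx  # -> {'project_id': 'default', 'project_slug': None}


print(handle_request())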
Diff — `xpuls/mlmonitor/langchain/instrument.py` (path taken from the demo's import). The extracted view interleaves old and new lines; the +/- markers below are reconstructed where the old/new pairing is unambiguous:

@@ -1,22 +1,29 @@
 from typing import Dict, Any
 from langsmith import Client

 from xpuls.mlmonitor.langchain.patches import patch_chain
 from xpuls.mlmonitor.langchain.profiling.prometheus import LangchainPrometheusMetrics
+from xpuls.mlmonitor.langchain.xpuls_client import XpulsAILangChainClient


 class LangchainTelemetry:
     def __init__(self, default_labels: Dict[str, Any],
-                 enable_prometheus=True,
-                 enable_otel_tracing=True,
-                 enable_otel_logging=False):
+                 xpuls_host_url: str = "http://localhost:8000",
+                 enable_prometheus: bool = True,
+                 enable_otel_tracing: bool = True,
+                 enable_otel_logging: bool = False):
         self.ln_metrics = LangchainPrometheusMetrics(default_labels)
+
+        self.xpuls_client = XpulsAILangChainClient(
+            api_url=xpuls_host_url
+        )
+
         self.default_labels = default_labels
         self.enable_prometheus = enable_prometheus
         self.enable_otel_tracing = enable_otel_tracing
         self.enable_otel_logging = enable_otel_logging

     def auto_instrument(self):
-        patch_chain(self.ln_metrics)
+        patch_chain(self.ln_metrics, self.xpuls_client)
         print("** ProfileML -> Langchain auto-instrumentation completed successfully **")
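The net effect of this change is that `LangchainTelemetry` now also constructs an `XpulsAILangChainClient` pointed at `xpuls_host_url` and hands it to `patch_chain`. The demo earlier in this commit exercises the new parameter; a minimal sketch of that call site (the localhost URL and labels are the demo's local values, not production settings):

# Mirrors the initialization in demo/openai_langchain.py; the host URL and
# labels are the demo's local defaults, shown only to illustrate the new argument.
from xpuls.mlmonitor.langchain.instrument import LangchainTelemetry

LangchainTelemetry(
    default_labels={"system": "openai-ln-test", "agent_name": "fallback_value"},
    xpuls_host_url="http://localhost:8000",  # base URL handed to XpulsAILangChainClient(api_url=...)
).auto_instrument()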