Skip to content

Commit

Permalink
Merge pull request #26 from ucbepic/shreyashankar/ollamastructured
Browse files Browse the repository at this point in the history
fix: add error messages if model doesn't support tool calling
  • Loading branch information
shreyashankar authored Sep 30, 2024
2 parents a8c7ac8 + c26994d commit 4cae2a4
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 3 deletions.
2 changes: 1 addition & 1 deletion docetl/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "0.1.0"
__version__ = "0.1.5"

from docetl.runner import DSLRunner
from docetl.builder import Optimizer
Expand Down
13 changes: 12 additions & 1 deletion docetl/operations/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import threading
from concurrent.futures import as_completed
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
import litellm

from dotenv import load_dotenv
from frozendict import frozendict
Expand Down Expand Up @@ -388,6 +389,11 @@ def call_llm(
Raises:
TimeoutError: If the call times out after retrying.
"""
if not litellm.supports_function_calling(model):
raise ValueError(
f"Model {model} does not support function calling (which we use for structured outputs). Please use a different model."
)

key = cache_key(model, op_type, messages, output_schema, scratchpad)

max_retries = max_retries_per_timeout
Expand Down Expand Up @@ -632,6 +638,11 @@ def call_llm_with_gleaning(
Returns:
Tuple[str, float]: A tuple containing the final LLM response and the total cost.
"""
if not litellm.supports_function_calling(model):
raise ValueError(
f"Model {model} does not support function calling (which we use for structured outputs). Please use a different model."
)

props = {key: convert_val(value) for key, value in output_schema.items()}

parameters = {"type": "object", "properties": props}
Expand Down Expand Up @@ -814,7 +825,7 @@ def parse_llm_response(
output_dict = json.loads(tool_call.function.arguments)
if "ollama" in response.model:
for key, value in output_dict.items():
if isinstance(value, str):
if not isinstance(value, str):
continue
try:
output_dict[key] = ast.literal_eval(value)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "docetl"
version = "0.1.4"
version = "0.1.5"
description = "ETL with LLM operations."
authors = ["Shreya Shankar <shreyashankar@berkeley.edu>"]
license = "MIT"
Expand Down

0 comments on commit 4cae2a4

Please sign in to comment.