Skip to content

Commit

Permalink
refactor: the creation of prompts that require instructions to be inserted
Browse files Browse the repository at this point in the history
  • Loading branch information
provos committed Oct 22, 2024
1 parent b1c5c44 commit df6aa44
Showing 1 changed file with 14 additions and 18 deletions.
32 changes: 14 additions & 18 deletions src/planai/llm_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,18 +24,6 @@
from .llm_interface import LLMInterface
from .task import Task, TaskWorker

PROMPT = dedent(
"""
Here is your input data:
{task}
Here are your instructions:
{instructions}
{format_instructions}
"""
).strip()

PROMPT_STRUCTURED_OUTPUT = dedent(
"""
Here is your input data:
Expand All @@ -46,6 +34,8 @@
"""
).strip()

PROMPT_FORMAT_INSTRUCTIONS = "\n\n{format_instructions}"


class LLMTaskWorker(TaskWorker):
model_config = ConfigDict(arbitrary_types_allowed=True)
Expand Down Expand Up @@ -110,9 +100,12 @@ def extra_validation_with_task(response: Task):

response = self.llm.generate_pydantic(
prompt_template=(
PROMPT
if not self.llm.support_structured_outputs
else PROMPT_STRUCTURED_OUTPUT
PROMPT_STRUCTURED_OUTPUT
+ (
PROMPT_FORMAT_INSTRUCTIONS
if not self.llm.support_structured_outputs
else ""
)
),
output_schema=self._output_type(),
system=self.system_prompt,
Expand All @@ -135,9 +128,12 @@ def get_full_prompt(self, task: Task) -> str:

return self.llm.generate_full_prompt(
prompt_template=(
PROMPT
if not self.llm.support_structured_outputs
else PROMPT_STRUCTURED_OUTPUT
PROMPT_STRUCTURED_OUTPUT
+ (
PROMPT_FORMAT_INSTRUCTIONS
if not self.llm.support_structured_outputs
else ""
)
),
system=self.system_prompt,
task=processed_task.model_dump_json(indent=2),
Expand Down

0 comments on commit df6aa44

Please sign in to comment.