ensure the final tool call result updates the UI
rgbkrk committed Mar 4, 2024
1 parent fc31e06 commit c493e12
Showing 1 changed file with 54 additions and 23 deletions.
77 changes: 54 additions & 23 deletions chatlab/views/tools.py
@@ -18,27 +18,6 @@
 from instructor.dsl.partialjson import JSONParser
 
 
-class ToolCalled(AutoUpdate):
-    """Once a tool has finished up, this is the view."""
-
-    id: str
-    name: str
-    arguments: str = ""
-    verbage: str = "Called"
-    result: str = ""
-
-    def render(self):
-        return ChatFunctionComponent(name=self.name, verbage=self.verbage, input=self.arguments, output=self.result)
-
-    # TODO: This is only here for legacy function calling
-    def get_function_called_message(self):
-        return function_result(self.name, self.result)
-
-    def get_tool_called_message(self):
-        # NOTE: OpenAI has mismatched types where it doesn't include the `name`
-        # xref: https://github.com/openai/openai-python/issues/1078
-        return tool_result(tool_call_id=self.id, content=self.result, name=self.name)
-
 
 class ToolArguments(AutoUpdate):
     id: str
@@ -130,11 +109,13 @@ def append_arguments(self, arguments: str):
 
     def apply_result(self, result: str):
         """Replaces the existing display with a new one that shows the result of the tool being called."""
-        return ToolCalled(
+        tc = ToolCalled(
             id=self.id, name=self.name, arguments=self.arguments, result=result, display_id=self.display_id
         )
+        tc.update()
+        return tc
 
-    async def call(self, function_registry: FunctionRegistry) -> ToolCalled:
+    async def call(self, function_registry: FunctionRegistry) -> 'ToolCalled':
         """Call the function and return a stack of messages for LLM and human consumption."""
         function_name = self.name
         function_args = self.arguments
@@ -188,3 +169,53 @@ async def call(self, function_registry: FunctionRegistry) -> ToolCalled:
         self.verbage = "Ran"
 
         return self.apply_result(repr_llm)
+
+
+class ToolCalled(ToolArguments):
+    """Once a tool has finished up, this is the view."""
+
+    id: str
+    name: str
+    arguments: str = ""
+    verbage: str = "Called"
+    result: str = ""
+
+    def render(self):
+        if self.custom_render is not None:
+            # We use the same definition as was in the original function
+            try:
+                parser = JSONParser()
+                possible_args = parser.parse(self.arguments)
+
+                Model = extract_model_from_function(self.name, self.custom_render)
+                # model = Model.model_validate(possible_args)
+                model = Model(**possible_args)
+
+                # Pluck the kwargs out from the crafted model, as we can't pass the pydantic model as the arguments
+                # However any "inner" models should retain their pydantic Model nature
+                kwargs = {k: getattr(model, k) for k in model.__dict__.keys()}
+
+            except FunctionArgumentError:
+                return None
+            except ValidationError:
+                return None
+
+            try:
+                return self.custom_render(**kwargs)
+            except Exception as e:
+                # Exception in userland code
+                # Would be preferable to bubble up, however
+                # it might be due to us passing a not-quite model
+                warnings.warn_explicit(f"Exception in userland code: {e}", UserWarning, "chatlab", 0)
+                raise
+
+        return ChatFunctionComponent(name=self.name, verbage=self.verbage, input=self.arguments, output=self.result)
+
+    # TODO: This is only here for legacy function calling
+    def get_function_called_message(self):
+        return function_result(self.name, self.result)
+
+    def get_tool_called_message(self):
+        # NOTE: OpenAI has mismatched types where it doesn't include the `name`
+        # xref: https://github.com/openai/openai-python/issues/1078
+        return tool_result(tool_call_id=self.id, content=self.result, name=self.name)
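
For context, here is a minimal usage sketch of the path this commit changes. It is not part of the commit: the example tool, its registration via FunctionRegistry.register, the import paths, and running inside an IPython display context are assumptions; only the ToolArguments fields, call(), and the apply_result() behavior come from the diff above.

import asyncio

from chatlab import FunctionRegistry
from chatlab.views.tools import ToolArguments


def what_time(tz: str = "UTC") -> str:
    """Hypothetical example tool; any function the registry accepts would do."""
    return "12:00"


async def run_example():
    registry = FunctionRegistry()
    registry.register(what_time)  # assumed registration call

    # In-progress view for a streamed tool call, using the fields shown in the diff
    view = ToolArguments(id="call_abc123", name="what_time", arguments='{"tz": "UTC"}')

    # With this commit, call() -> apply_result() builds the ToolCalled view *and*
    # invokes tc.update(), so the existing display (same display_id) is refreshed
    # with the final result instead of being left showing the in-progress state.
    finished = await view.call(registry)
    print(finished.result)


# Outside a notebook; in IPython/Jupyter you could simply `await run_example()`.
asyncio.run(run_example())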
