feat: streaming responses with function calls

Siddhant Rai
2025-03-05 09:02:55 +05:30
parent c6ce4d9374
commit f88c34a0be
6 changed files with 237 additions and 80 deletions


@@ -152,7 +152,15 @@ class GoogleLLM(BaseLLM):
             config=config,
         )
         for chunk in response:
-            if chunk.text is not None:
+            if hasattr(chunk, "candidates") and chunk.candidates:
+                for candidate in chunk.candidates:
+                    if candidate.content and candidate.content.parts:
+                        for part in candidate.content.parts:
+                            if part.function_call:
+                                yield part
+                            elif part.text:
+                                yield part.text
+            elif hasattr(chunk, "text"):
                 yield chunk.text

     def _supports_tools(self):
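
With this change, GoogleLLM's streaming generator yields two kinds of items: plain strings for text deltas, and google-genai Part objects whenever a chunk carries a function_call. A minimal consumer sketch follows; the function name and accumulation strategy are illustrative, not part of this commit:

# Sketch of a caller handling the mixed stream above (hypothetical names;
# the real dispatch logic lives elsewhere in the codebase).
def consume_google_stream(stream):
    text_parts = []
    tool_calls = []
    for item in stream:
        if isinstance(item, str):
            # Plain text delta: accumulate (or forward) it.
            text_parts.append(item)
        else:
            # A Part object with a function_call attached.
            tool_calls.append(item.function_call)
    return "".join(text_parts), tool_calls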


@@ -111,13 +111,24 @@ class OpenAILLM(BaseLLM):
         **kwargs,
     ):
         messages = self._clean_messages_openai(messages)
-        response = self.client.chat.completions.create(
-            model=model, messages=messages, stream=stream, **kwargs
-        )
+        if tools:
+            response = self.client.chat.completions.create(
+                model=model,
+                messages=messages,
+                stream=stream,
+                tools=tools,
+                **kwargs,
+            )
+        else:
+            response = self.client.chat.completions.create(
+                model=model, messages=messages, stream=stream, **kwargs
+            )
         for line in response:
             if line.choices[0].delta.content is not None:
                 yield line.choices[0].delta.content
+            else:
+                yield line.choices[0]

     def _supports_tools(self):
         return True
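
On the OpenAI side, the generator likewise yields string content deltas when present, and otherwise the raw Choice object, whose delta may carry partial tool calls. A sketch of reassembling those streamed tool calls (assumes the OpenAI Python SDK's streaming delta shape, where tool_calls arrive in indexed fragments; the helper name is illustrative):

# Sketch: rebuilding complete tool calls from the Choice objects the
# generator now yields when delta.content is None.
def consume_openai_stream(stream):
    text = []
    calls = {}  # tool_call index -> {"name": str, "arguments": str}
    for item in stream:
        if isinstance(item, str):
            text.append(item)
            continue
        # item is a Choice; partial tool calls live on its delta.
        for tc in item.delta.tool_calls or []:
            entry = calls.setdefault(tc.index, {"name": "", "arguments": ""})
            if tc.function.name:
                entry["name"] = tc.function.name
            if tc.function.arguments:
                # Argument JSON arrives in fragments; concatenate them.
                entry["arguments"] += tc.function.arguments
    return "".join(text), calls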