feat: streaming responses with function calls

This commit is contained in:
Siddhant Rai
2025-03-05 09:02:55 +05:30
parent c6ce4d9374
commit f88c34a0be
6 changed files with 237 additions and 80 deletions

View File

@@ -111,13 +111,24 @@ class OpenAILLM(BaseLLM):
**kwargs,
):
messages = self._clean_messages_openai(messages)
response = self.client.chat.completions.create(
model=model, messages=messages, stream=stream, **kwargs
)
if tools:
response = self.client.chat.completions.create(
model=model,
messages=messages,
stream=stream,
tools=tools,
**kwargs,
)
else:
response = self.client.chat.completions.create(
model=model, messages=messages, stream=stream, **kwargs
)
for line in response:
if line.choices[0].delta.content is not None:
yield line.choices[0].delta.content
else:
yield line.choices[0]
def _supports_tools(self):
return True