Mirror of https://github.com/arc53/DocsGPT.git (synced 2025-11-30 17:13:15 +00:00)
feat: streaming responses with function call
@@ -152,7 +152,15 @@ class GoogleLLM(BaseLLM):
             config=config,
         )
         for chunk in response:
-            if chunk.text is not None:
-                yield chunk.text
+            if hasattr(chunk, "candidates") and chunk.candidates:
+                for candidate in chunk.candidates:
+                    if candidate.content and candidate.content.parts:
+                        for part in candidate.content.parts:
+                            if part.function_call:
+                                yield part
+                            elif part.text:
+                                yield part.text
+            elif hasattr(chunk, "text"):
+                yield chunk.text

     def _supports_tools(self):
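The reworked GoogleLLM stream now yields two kinds of values: plain strings for ordinary text deltas, and whole parts whenever a chunk's candidate carries a function_call. A minimal consumer sketch under that assumption (drain_google_stream is a hypothetical helper, not part of DocsGPT):

def drain_google_stream(stream):
    """Collect a GoogleLLM-style stream into final text plus any function calls.

    Hypothetical helper: assumes the generator yields str for text deltas and
    a part object exposing .function_call.name / .function_call.args otherwise.
    """
    text_chunks, function_calls = [], []
    for item in stream:
        if isinstance(item, str):
            text_chunks.append(item)  # ordinary text delta
        else:
            fc = item.function_call   # part carrying a function call
            function_calls.append({"name": fc.name, "args": dict(fc.args or {})})
    return "".join(text_chunks), function_calls

Branching on isinstance(item, str) keeps text streaming cheap while still handing the caller everything it needs to execute the requested tool.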
@@ -111,13 +111,24 @@ class OpenAILLM(BaseLLM):
         **kwargs,
     ):
         messages = self._clean_messages_openai(messages)
-        response = self.client.chat.completions.create(
-            model=model, messages=messages, stream=stream, **kwargs
-        )
+        if tools:
+            response = self.client.chat.completions.create(
+                model=model,
+                messages=messages,
+                stream=stream,
+                tools=tools,
+                **kwargs,
+            )
+        else:
+            response = self.client.chat.completions.create(
+                model=model, messages=messages, stream=stream, **kwargs
+            )

         for line in response:
             if line.choices[0].delta.content is not None:
                 yield line.choices[0].delta.content
+            else:
+                yield line.choices[0]

     def _supports_tools(self):
         return True
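Likewise, the OpenAILLM stream yields strings for delta.content and the raw choices[0] object otherwise; with the Chat Completions streaming API that object typically carries delta.tool_calls fragments whose argument JSON arrives in pieces and must be concatenated per tool-call index. A minimal consumer sketch under that assumption (drain_openai_stream is a hypothetical helper, not part of DocsGPT):

def drain_openai_stream(stream):
    """Collect an OpenAILLM-style stream into final text plus tool-call drafts.

    Hypothetical helper: assumes str items are text deltas and non-str items
    are streaming choices whose delta.tool_calls fragments carry an index,
    an optional function name, and partial JSON argument strings.
    """
    text_parts = []
    tool_calls = {}  # index -> {"name": ..., "arguments": accumulated JSON string}
    for item in stream:
        if isinstance(item, str):
            text_parts.append(item)
            continue
        delta = getattr(item, "delta", None)
        for tc in getattr(delta, "tool_calls", None) or []:
            slot = tool_calls.setdefault(tc.index, {"name": "", "arguments": ""})
            if tc.function and tc.function.name:
                slot["name"] = tc.function.name
            if tc.function and tc.function.arguments:
                slot["arguments"] += tc.function.arguments
    return "".join(text_parts), tool_calls

Once the stream is exhausted, each accumulated "arguments" string can be parsed as JSON and dispatched to the matching tool.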