Fewer tokens, fewer issues

This commit is contained in:
Alex
2023-06-03 16:31:10 +01:00
parent 899777632b
commit 577d58c92b

View File

@@ -170,7 +170,7 @@ def complete_stream(question, docsearch, chat_history, api_key):
messages_combine.append({"role": "system", "content": i["response"]})
messages_combine.append({"role": "user", "content": question})
completion = openai.ChatCompletion.create(model="gpt-3.5-turbo",
messages=messages_combine, stream=True, max_tokens=1000, temperature=0)
messages=messages_combine, stream=True, max_tokens=500, temperature=0)
for line in completion:
if 'content' in line['choices'][0]['delta']: