From 577d58c92bf28755d7724c395177521400612298 Mon Sep 17 00:00:00 2001
From: Alex
Date: Sat, 3 Jun 2023 16:31:10 +0100
Subject: [PATCH] less token less issues

---
 application/app.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/application/app.py b/application/app.py
index baf62003..6e19f03b 100644
--- a/application/app.py
+++ b/application/app.py
@@ -170,7 +170,7 @@ def complete_stream(question, docsearch, chat_history, api_key):
                 messages_combine.append({"role": "system", "content": i["response"]})
     messages_combine.append({"role": "user", "content": question})
     completion = openai.ChatCompletion.create(model="gpt-3.5-turbo",
-                                              messages=messages_combine, stream=True, max_tokens=1000, temperature=0)
+                                              messages=messages_combine, stream=True, max_tokens=500, temperature=0)
     for line in completion:
         if 'content' in line['choices'][0]['delta']:
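
For context, below is a minimal standalone sketch of the streaming pattern the hunk above adjusts, assuming the legacy openai<1.0 Python SDK that ChatCompletion.create belongs to; the API key and messages here are illustrative placeholders, not values taken from application/app.py.

import openai

openai.api_key = "sk-..."  # placeholder key, illustrative only

# Illustrative conversation; in app.py this list is built from chat_history plus the new question.
messages_combine = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What does max_tokens limit?"},
]

# stream=True makes the call return an iterator of incremental chunks;
# max_tokens=500 caps the length of the generated completion (the value this patch lowers from 1000).
completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=messages_combine,
    stream=True,
    max_tokens=500,
    temperature=0,
)

for line in completion:
    # Only some chunks carry text in their delta; others are role or finish markers.
    if "content" in line["choices"][0]["delta"]:
        print(line["choices"][0]["delta"]["content"], end="", flush=True)

A smaller max_tokens bounds only the generated output, so it trades shorter answers for lower cost and less pressure on the model's total context window when long chat histories are packed into messages_combine, which appears to be the "less token less issues" intent in the subject line.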