From 9bbe7564a99c798a14fd9bb670e5eb123074502b Mon Sep 17 00:00:00 2001
From: Alex
Date: Wed, 13 Nov 2024 11:45:42 +0000
Subject: [PATCH] fix: llamacpp

---
 application/api/answer/routes.py     | 1 +
 application/retriever/classic_rag.py | 3 ---
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/application/api/answer/routes.py b/application/api/answer/routes.py
index 182cdf2b..f109db26 100644
--- a/application/api/answer/routes.py
+++ b/application/api/answer/routes.py
@@ -241,6 +241,7 @@ def complete_stream(
         yield f"data: {data}\n\n"
     except Exception as e:
         print("\033[91merr", str(e), file=sys.stderr)
+        traceback.print_exc()
         data = json.dumps(
             {
                 "type": "error",
diff --git a/application/retriever/classic_rag.py b/application/retriever/classic_rag.py
index 6a67cb38..42e318d2 100644
--- a/application/retriever/classic_rag.py
+++ b/application/retriever/classic_rag.py
@@ -45,7 +45,6 @@ class ClassicRAG(BaseRetriever):
             settings.VECTOR_STORE, self.vectorstore, settings.EMBEDDINGS_KEY
         )
         docs_temp = docsearch.search(self.question, k=self.chunks)
-        print(docs_temp)
         docs = [
             {
                 "title": i.metadata.get(
@@ -60,8 +59,6 @@ class ClassicRAG(BaseRetriever):
                 }
                 for i in docs_temp
             ]
-            if settings.LLM_NAME == "llama.cpp":
-                docs = [docs[0]]
 
         return docs
 