diff --git a/llm-server/routes/root_service.py b/llm-server/routes/root_service.py
index eae88e92d..29eec1f57 100644
--- a/llm-server/routes/root_service.py
+++ b/llm-server/routes/root_service.py
@@ -157,10 +157,7 @@ async def handle_request(
             "chatbot response",
             error=str(e),
             method="handle_request",
-            apis=apis,
             prev_conversations=prev_conversations,
-            context=context,
-            flows=flows,
         )
         return {"response": str(e), "error": "An error occured in handle request"}
diff --git a/llm-server/routes/workflow/utils/api_retrievers.py b/llm-server/routes/workflow/utils/api_retrievers.py
index d3da77346..c7e1c12b7 100644
--- a/llm-server/routes/workflow/utils/api_retrievers.py
+++ b/llm-server/routes/workflow/utils/api_retrievers.py
@@ -87,7 +87,7 @@ async def get_relevant_apis_summaries(text: str, bot_id: str) -> List[ApiOperati
     apis_retriever = apis.as_retriever(
         search_kwargs={
-            "k": 5,
+            "k": 3,
             "score_threshold": score_threshold,
             "filter": {"bot_id": bot_id},
         },
diff --git a/llm-server/routes/workflow/utils/process_conversation_step.py b/llm-server/routes/workflow/utils/process_conversation_step.py
index 48ad6cb96..b82779b9b 100644
--- a/llm-server/routes/workflow/utils/process_conversation_step.py
+++ b/llm-server/routes/workflow/utils/process_conversation_step.py
@@ -75,7 +75,7 @@ def process_conversation_step(
     {{
         "ids": ["list", "of", "operationIds", "for apis to be called"],
         "bot_message": "your response based on the instructions provided at the beginning",
-        "missing_information": "Optional Field; Incase of ambiguity where user input is not sufficient to make the api call, ask follow up questions. Followup question should only be asked once per user input"
+        "missing_information": "Optional Field; Incase of ambiguity where user input is not sufficient to make the api call, ask follow up questions."
     }}
     """
 )
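
For context on the api_retrievers.py hunk, here is a minimal, hypothetical sketch of a LangChain-style retriever configured with these search_kwargs after the change. The FAISS store, FakeEmbeddings, sample API summaries, bot id, and threshold value are illustration-only assumptions and are not taken from the repository, which builds its own vector store elsewhere.

# Minimal sketch, assuming a LangChain vector store; FAISS, FakeEmbeddings,
# the sample texts, the bot id, and the threshold value are all assumptions
# made purely for illustration.
from langchain.embeddings import FakeEmbeddings
from langchain.vectorstores import FAISS

score_threshold = 0.5  # assumed value; the real threshold is defined elsewhere

# Toy store holding two API summaries tagged with the owning bot's id.
apis = FAISS.from_texts(
    texts=["GET /users: list all users", "POST /users: create a new user"],
    embedding=FakeEmbeddings(size=32),
    metadatas=[{"bot_id": "bot-123"}, {"bot_id": "bot-123"}],
)

# After this diff the retriever returns at most 3 API summaries per query
# (previously 5), still scoped to the requesting bot via the metadata filter.
apis_retriever = apis.as_retriever(
    search_kwargs={
        "k": 3,
        "score_threshold": score_threshold,
        "filter": {"bot_id": "bot-123"},
    },
)

docs = apis_retriever.get_relevant_documents("create a new user")

Lowering k trims how many candidate operations are fed into the downstream planning prompt, which presumably keeps the prompt shorter at the cost of some recall.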