From 684b076dbf7a79a771e6d0d74d0261186fa3ab0d Mon Sep 17 00:00:00 2001
From: AI Christianson
Date: Sat, 28 Dec 2024 17:31:41 -0500
Subject: [PATCH] Adjustments to get smaller agent models working better.

---
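Notes:

The completion-flag handling in run_agent_with_retry() is easy to misread,
so here is a condensed, hypothetical sketch of the control flow after this
patch. It is illustrative only: the real loop streams chunks from the agent,
and the dict below stands in for the module-level _global_memory in
ra_aid/agent_utils.py.

    # Illustrative sketch -- not the actual implementation.
    _global_memory = {
        "plan_completed": True,
        "task_completed": False,
        "completion_message": "plan ready",
    }

    for chunk in ["chunk-1", "chunk-2"]:  # stand-in for agent.stream(...)
        print(f"agent output: {chunk}")
        if _global_memory["plan_completed"]:
            # Planning finished: clear all completion state so the next
            # agent run starts clean, then stop consuming the stream.
            _global_memory["plan_completed"] = False
            _global_memory["task_completed"] = False
            _global_memory["completion_message"] = ""
            break
        if _global_memory["task_completed"] or _global_memory["plan_completed"]:
            # A single task finished: same reset-and-exit pattern. (The
            # plan_completed re-check mirrors the hunk below; by this
            # point in an iteration it can only be False.)
            _global_memory["task_completed"] = False
            _global_memory["completion_message"] = ""
            break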
""" # New agentic chat prompt for interactive mode -CHAT_PROMPT = """ +CHAT_PROMPT = """Working Directory: {working_directory} +Current Date: {current_date} Agentic Chat Mode Instructions: Overview: