Integrate project info into chat prompt.
parent b631a4bf57
commit d01d2a4865

@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 - Improve prompts for better open model support.
 - Better handle 429 errors on openrouter.
+- Get project info programmatically to save tokens.
 
 ## [0.12.1] - 2025-01-08
 - Fix bug where directories are added as related files.

@@ -6,6 +6,7 @@ from rich.panel import Panel
 from rich.console import Console
 from langgraph.checkpoint.memory import MemorySaver
 from ra_aid.env import validate_environment
+from ra_aid.project_info import get_project_info, format_project_info, display_project_status
 from ra_aid.tools.memory import _global_memory
 from ra_aid.tools.human import ask_human
 from ra_aid import print_stage_header, print_error

@@ -206,6 +207,14 @@ def main():
 
         print_stage_header("Chat Mode")
 
+        # Get project info
+        try:
+            project_info = get_project_info(".", file_limit=2000)
+            formatted_project_info = format_project_info(project_info)
+        except Exception as e:
+            logger.warning(f"Failed to get project info: {e}")
+            formatted_project_info = ""
+
         # Get initial request from user
         initial_request = ask_human.invoke({"question": "What would you like help with?"})
 
@@ -243,7 +252,8 @@ def main():
             initial_request=initial_request,
             web_research_section=WEB_RESEARCH_PROMPT_SECTION_CHAT if web_research_enabled else "",
             working_directory=working_directory,
-            current_date=current_date
+            current_date=current_date,
+            project_info=formatted_project_info
         ), config)
         return
 
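Reviewer note: for readers tracing the new chat-mode flow, a minimal sketch of how the pieces above compose. The names `get_project_info`, `format_project_info`, and the placeholder set come from this diff; the reduced template and the `build_chat_prompt` helper are illustrative stand-ins, not code from the repository.

```python
# Illustrative sketch of the new chat-mode flow (not the actual ra_aid code):
# gather project info up front, fall back to an empty string on failure, and
# interpolate the result into the chat prompt template.
import logging

from ra_aid.project_info import get_project_info, format_project_info

logger = logging.getLogger(__name__)

# Reduced stand-in for CHAT_PROMPT; the real template has more placeholders.
TOY_CHAT_PROMPT = """Working Directory: {working_directory}
Current Date: {current_date}
Project Info:
{project_info}
"""


def build_chat_prompt(working_directory: str, current_date: str) -> str:
    try:
        project_info = get_project_info(".", file_limit=2000)
        formatted_project_info = format_project_info(project_info)
    except Exception as e:
        # Degrade gracefully: the prompt still formats with an empty section.
        logger.warning(f"Failed to get project info: {e}")
        formatted_project_info = ""
    return TOY_CHAT_PROMPT.format(
        working_directory=working_directory,
        current_date=current_date,
        project_info=formatted_project_info,
    )
```
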
@@ -10,6 +10,8 @@ import threading
 import time
 from typing import Optional
 
+from ra_aid.project_info import get_project_info, format_project_info, display_project_status
+
 from langgraph.prebuilt import create_react_agent
 from ra_aid.agents.ciayn_agent import CiaynAgent
 from ra_aid.project_info import get_project_info, format_project_info, display_project_status

@@ -42,6 +44,8 @@ from ra_aid.prompts import (
     WEB_RESEARCH_PROMPT_SECTION_PLANNING,
     HUMAN_PROMPT_SECTION_PLANNING,
     WEB_RESEARCH_PROMPT,
+    EXPERT_PROMPT_SECTION_CHAT,
+    CHAT_PROMPT,
 )
 from langgraph.checkpoint.memory import MemorySaver
 
@@ -185,7 +189,6 @@ def run_research_agent(
     try:
         project_info = get_project_info(".", file_limit=2000)
         formatted_project_info = format_project_info(project_info)
-        display_project_status(project_info)  # Add status display
     except Exception as e:
         logger.warning(f"Failed to get project info: {e}")
         formatted_project_info = ""

@@ -217,6 +220,9 @@ def run_research_agent(
     if console_message:
         console.print(Panel(Markdown(console_message), title="🔬 Looking into it..."))
 
+    if project_info:
+        display_project_status(project_info)
+
     # Run agent with retry logic if available
     if agent is not None:
         logger.debug("Research agent completed successfully")

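Reviewer note: the new `if project_info:` guard assumes `project_info` is bound even when `get_project_info` raises in the earlier try block; unless something else in `run_research_agent` assigns it first, the failure path could raise a NameError instead of skipping the status display. A hedged sketch of a defensive variant (illustrative only, not what this commit changes):

```python
# Illustrative defensive variant (an assumption about run_research_agent's
# shape, not the actual function body): bind project_info before the try so
# the later guard is safe even when get_project_info raises.
import logging

from ra_aid.project_info import get_project_info, format_project_info, display_project_status

logger = logging.getLogger(__name__)

project_info = None
formatted_project_info = ""
try:
    project_info = get_project_info(".", file_limit=2000)
    formatted_project_info = format_project_info(project_info)
except Exception as e:
    logger.warning(f"Failed to get project info: {e}")

if project_info:
    display_project_status(project_info)
```
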
@@ -105,16 +105,14 @@ def display_project_status(info: ProjectInfo) -> None:
         info: ProjectInfo object containing project state
     """
     # Create project status text
-    status = "New/Empty Project" if info.is_new else "Existing Project"
+    status = "**New/empty project**" if info.is_new else "**Existing project**"
 
     # Format file count (with truncation notice if needed)
     file_count = f"{len(info.files)} of {info.total_files}" if len(info.files) < info.total_files else str(info.total_files)
 
     # Build status text with markdown
     status_text = f"""
-# Project Status
-- **Status:** {status}
-- **Total Files:** {file_count}
+{status} with **{file_count} file(s)**
 """
     # Add truncation notice if list was truncated
     if len(info.files) < info.total_files:

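Reviewer note: the attribute accesses above (`info.is_new`, `info.files`, `info.total_files`) imply a shape roughly like the sketch below. The dataclass is an inferred stand-in for `ProjectInfo`, and the panel title is an assumption; the rendering line simply mirrors the one-line markdown the updated function builds.

```python
from dataclasses import dataclass, field
from typing import List

from rich.console import Console
from rich.markdown import Markdown
from rich.panel import Panel


@dataclass
class ProjectInfoSketch:
    """Inferred stand-in for ra_aid.project_info.ProjectInfo (fields guessed from usage)."""
    files: List[str] = field(default_factory=list)
    total_files: int = 0
    is_new: bool = True


def show_status(info: ProjectInfoSketch) -> None:
    # Mirrors the new one-line markdown produced by display_project_status.
    status = "**New/empty project**" if info.is_new else "**Existing project**"
    file_count = (
        f"{len(info.files)} of {info.total_files}"
        if len(info.files) < info.total_files
        else str(info.total_files)
    )
    Console().print(Panel(Markdown(f"{status} with **{file_count} file(s)**"), title="Project Status"))


show_status(ProjectInfoSketch(files=["README.md"], total_files=1, is_new=False))
```
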
@@ -627,6 +627,9 @@ NEVER ANNOUNCE WHAT YOU ARE DOING, JUST DO IT!
 # New agentic chat prompt for interactive mode
 CHAT_PROMPT = """Working Directory: {working_directory}
 Current Date: {current_date}
+Project Info:
+{project_info}
+
 Agentic Chat Mode Instructions:
 
 Overview:

@@ -1,10 +1,10 @@
 from ra_aid.tools import (
     ask_expert, ask_human, run_shell_command, run_programming_task,
-    emit_research_notes, emit_plan, emit_related_files, emit_task,
+    emit_research_notes, emit_plan, emit_related_files,
     emit_expert_context, emit_key_facts, delete_key_facts,
-    emit_key_snippets, delete_key_snippets, deregister_related_files, delete_tasks, read_file_tool,
+    emit_key_snippets, delete_key_snippets, deregister_related_files, read_file_tool,
     fuzzy_find_project_files, ripgrep_search, list_directory_tree,
-    swap_task_order, monorepo_detected, existing_project_detected, ui_detected,
+    monorepo_detected, ui_detected,
     task_completed, plan_implementation_completed, web_search_tavily
 )
 from ra_aid.tools.memory import one_shot_completed

@@ -52,7 +52,6 @@ RESEARCH_TOOLS = [
     emit_research_notes,
     one_shot_completed,
     monorepo_detected,
-    existing_project_detected,
     ui_detected
 ]
 