expert LLM configuration

This commit is contained in:
AI Christianson 2024-12-13 14:54:56 -05:00
parent 36e00d8227
commit 6c85d7fd6f
4 changed files with 59 additions and 38 deletions

View File

@@ -157,7 +157,7 @@ RA.Aid supports multiple AI providers and models. The default model is Anthropic
The programmer tool (aider) automatically selects its model based on your available API keys. It will use Claude models if ANTHROPIC_API_KEY is set, or fall back to OpenAI models if only OPENAI_API_KEY is available.
Note: The expert tool can be configured to use different providers (OpenAI, Anthropic, OpenRouter) using the --expert-provider flag along with the corresponding EXPERT_*_KEY environment variables. Each provider requires its own API key set through the appropriate environment variable.
Note: The expert tool can be configured to use different providers (OpenAI, Anthropic, OpenRouter) using the --expert-provider flag along with the corresponding EXPERT_*_API_KEY environment variables. Each provider requires its own API key set through the appropriate environment variable.
#### Environment Variables
@@ -169,10 +169,10 @@ RA.Aid supports multiple providers through environment variables:
- `OPENAI_API_BASE`: Required for OpenAI-compatible providers along with `OPENAI_API_KEY`
Expert Tool Environment Variables:
- `EXPERT_OPENAI_KEY`: API key for expert tool using OpenAI provider
- `EXPERT_ANTHROPIC_KEY`: API key for expert tool using Anthropic provider
- `EXPERT_OPENROUTER_KEY`: API key for expert tool using OpenRouter provider
- `EXPERT_OPENAI_BASE`: Base URL for expert tool using OpenAI-compatible provider
- `EXPERT_OPENAI_API_KEY`: API key for expert tool using OpenAI provider
- `EXPERT_ANTHROPIC_API_KEY`: API key for expert tool using Anthropic provider
- `EXPERT_OPENROUTER_API_KEY`: API key for expert tool using OpenRouter provider
- `EXPERT_OPENAI_API_BASE`: Base URL for expert tool using OpenAI-compatible provider
You can set these permanently in your shell's configuration file (e.g., `~/.bashrc` or `~/.zshrc`):
@@ -214,15 +214,21 @@ Note: The expert tool defaults to OpenAI's o1-preview model with the OpenAI prov
```
4. **Configuring Expert Provider**
The expert tool is used by the agent for complex logic and debugging tasks. It can be configured to use different providers (OpenAI, Anthropic, OpenRouter) using the --expert-provider flag along with the corresponding EXPERT_*_API_KEY environment variables.
```bash
# Use Anthropic for expert tool
ra-aid -m "Your task" --expert-provider anthropic
export EXPERT_ANTHROPIC_API_KEY=your_anthropic_api_key
ra-aid -m "Your task" --expert-provider anthropic --expert-model claude-3-5-sonnet-20241022
# Use OpenRouter for expert tool
ra-aid -m "Your task" --expert-provider openrouter
export OPENROUTER_API_KEY=your_openrouter_api_key
ra-aid -m "Your task" --expert-provider openrouter --expert-model mistralai/mistral-large-2411
# Use default OpenAI for expert tool
ra-aid -m "Your task" --expert-provider openai
export EXPERT_OPENAI_API_KEY=your_openai_api_key
ra-aid -m "Your task" --expert-provider openai --expert-model o1-preview
```
**Important Notes:**

View File

@@ -290,21 +290,31 @@ def validate_environment(args):
if not os.environ.get('OPENAI_API_BASE'):
missing.append('OPENAI_API_BASE environment variable is not set')
# Check expert provider keys
# Check expert provider keys with fallback to regular keys if providers match
if expert_provider == "anthropic":
if not os.environ.get('EXPERT_ANTHROPIC_KEY'):
missing.append('EXPERT_ANTHROPIC_KEY environment variable is not set')
expert_key_missing = not os.environ.get('EXPERT_ANTHROPIC_API_KEY')
fallback_available = expert_provider == provider and os.environ.get('ANTHROPIC_API_KEY')
if expert_key_missing and not fallback_available:
missing.append('EXPERT_ANTHROPIC_API_KEY environment variable is not set')
elif expert_provider == "openai":
if not os.environ.get('EXPERT_OPENAI_KEY'):
missing.append('EXPERT_OPENAI_KEY environment variable is not set')
expert_key_missing = not os.environ.get('EXPERT_OPENAI_API_KEY')
fallback_available = expert_provider == provider and os.environ.get('OPENAI_API_KEY')
if expert_key_missing and not fallback_available:
missing.append('EXPERT_OPENAI_API_KEY environment variable is not set')
elif expert_provider == "openrouter":
if not os.environ.get('EXPERT_OPENROUTER_KEY'):
missing.append('EXPERT_OPENROUTER_KEY environment variable is not set')
expert_key_missing = not os.environ.get('EXPERT_OPENROUTER_API_KEY')
fallback_available = expert_provider == provider and os.environ.get('OPENROUTER_API_KEY')
if expert_key_missing and not fallback_available:
missing.append('EXPERT_OPENROUTER_API_KEY environment variable is not set')
elif expert_provider == "openai-compatible":
if not os.environ.get('EXPERT_OPENAI_KEY'):
missing.append('EXPERT_OPENAI_KEY environment variable is not set')
if not os.environ.get('EXPERT_OPENAI_BASE'):
missing.append('EXPERT_OPENAI_BASE environment variable is not set')
expert_key_missing = not os.environ.get('EXPERT_OPENAI_API_KEY')
fallback_available = expert_provider == provider and os.environ.get('OPENAI_API_KEY')
if expert_key_missing and not fallback_available:
missing.append('EXPERT_OPENAI_API_KEY environment variable is not set')
expert_base_missing = not os.environ.get('EXPERT_OPENAI_API_BASE')
base_fallback_available = expert_provider == provider and os.environ.get('OPENAI_API_BASE')
if expert_base_missing and not base_fallback_available:
missing.append('EXPERT_OPENAI_API_BASE environment variable is not set')
if missing:
print_error("Missing required dependencies:")

View File

@@ -21,24 +21,24 @@ def initialize_llm(provider: str, model_name: str) -> BaseChatModel:
"""
if provider == "openai":
return ChatOpenAI(
openai_api_key=os.getenv("OPENAI_API_KEY"),
api_key=os.getenv("OPENAI_API_KEY"),
model=model_name
)
elif provider == "anthropic":
return ChatAnthropic(
anthropic_api_key=os.getenv("ANTHROPIC_API_KEY"),
model=model_name
api_key=os.getenv("ANTHROPIC_API_KEY"),
model_name=model_name
)
elif provider == "openrouter":
return ChatOpenAI(
openai_api_key=os.getenv("OPENROUTER_API_KEY"),
openai_api_base="https://openrouter.ai/api/v1",
api_key=os.getenv("OPENROUTER_API_KEY"),
base_url="https://openrouter.ai/api/v1",
model=model_name
)
elif provider == "openai-compatible":
return ChatOpenAI(
openai_api_key=os.getenv("OPENAI_API_KEY"),
openai_api_base=os.getenv("OPENAI_API_BASE"),
api_key=os.getenv("OPENAI_API_KEY"),
base_url=os.getenv("OPENAI_API_BASE"),
model=model_name
)
else:
@@ -63,24 +63,24 @@ def initialize_expert_llm(provider: str = "openai", model_name: str = "o1-previe
"""
if provider == "openai":
return ChatOpenAI(
openai_api_key=os.getenv("EXPERT_OPENAI_KEY"),
api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
model=model_name
)
elif provider == "anthropic":
return ChatAnthropic(
anthropic_api_key=os.getenv("EXPERT_ANTHROPIC_KEY"),
model=model_name
api_key=os.getenv("EXPERT_ANTHROPIC_API_KEY"),
model_name=model_name
)
elif provider == "openrouter":
return ChatOpenAI(
openai_api_key=os.getenv("EXPERT_OPENROUTER_KEY"),
openai_api_base="https://openrouter.ai/api/v1",
api_key=os.getenv("EXPERT_OPENROUTER_API_KEY"),
base_url="https://openrouter.ai/api/v1",
model=model_name
)
elif provider == "openai-compatible":
return ChatOpenAI(
openai_api_key=os.getenv("EXPERT_OPENAI_KEY"),
openai_api_base=os.getenv("EXPERT_OPENAI_BASE"),
api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
base_url=os.getenv("EXPERT_OPENAI_API_BASE"),
model=model_name
)
else:

View File

@@ -5,17 +5,22 @@ from rich.console import Console
from rich.panel import Panel
from rich.markdown import Markdown
from ..llm import initialize_expert_llm
from .memory import get_memory_value, get_related_files
from .memory import get_memory_value, get_related_files, _global_memory
console = Console()
_model = None
def get_model():
global _model
try:
if _model is None:
provider = get_memory_value('expert_provider') or 'openai'
model = get_memory_value('expert_model') or 'o1-preview'
provider = _global_memory['config']['expert_provider'] or 'openai'
model = _global_memory['config']['expert_model'] or 'o1-preview'
_model = initialize_expert_llm(provider, model)
except Exception as e:
_model = None
console.print(Panel(f"Failed to initialize expert model: {e}", title="Error", border_style="red"))
raise
return _model
# Keep track of context globally