support default temp on a per-model basis; show status panel
parent 264f5025ed
commit a1371fc7e0
@@ -7,6 +7,7 @@ from datetime import datetime
 from langgraph.checkpoint.memory import MemorySaver
 from rich.console import Console
 from rich.panel import Panel
+from rich.text import Text

 from ra_aid import print_error, print_stage_header
 from ra_aid.__version__ import __version__
@@ -282,25 +283,41 @@ def main():
     )  # Will exit if main env vars missing
     logger.debug("Environment validation successful")

     if expert_missing:
         console.print(
             Panel(
                 "[yellow]Expert tools disabled due to missing configuration:[/yellow]\n"
                 + "\n".join(f"- {m}" for m in expert_missing)
                 + "\nSet the required environment variables or args to enable expert mode.",
                 title="Expert Tools Disabled",
                 style="yellow",
             )
         )
+    # Validate model configuration early
+    from ra_aid.models_params import models_params
+    model_config = models_params.get(args.provider, {}).get(args.model or "", {})
+    supports_temperature = model_config.get("supports_temperature", args.provider in ["anthropic", "openai", "openrouter", "openai-compatible", "deepseek"])
+
+    if supports_temperature and args.temperature is None:
+        args.temperature = model_config.get("default_temperature")
+        if args.temperature is None:
+            print_error(f"Temperature must be provided for model {args.model} which supports temperature")
+            sys.exit(1)
+        logger.debug(f"Using default temperature {args.temperature} for model {args.model}")
+
+    # Display status line
+    status = Text()
+    status.append("🤖 ")
+    status.append(f"{args.provider}/{args.model}")
+    status.append(f" @ T{args.temperature or 'N/A'}")
+
+    if expert_enabled:
+        status.append(" | 🤔 ")
+        status.append(f"{args.expert_provider}/{args.expert_model}")
+    else:
+        status.append(" | 🤔 Expert: ")
+        status.append("Disabled", style="italic")
+
+    status.append(" | 🔍 Search: ")
+    status.append("Enabled" if web_research_enabled else "Disabled",
+                  style=None if web_research_enabled else "italic")

     if web_research_missing:
         console.print(
             Panel(
                 "[yellow]Web research disabled due to missing configuration:[/yellow]\n"
                 + "\n".join(f"- {m}" for m in web_research_missing)
                 + "\nSet the required environment variables to enable web research.",
                 title="Web Research Disabled",
                 style="yellow",
             )
         )
+    console.print(
+        Panel(
+            status,
+            title="Config",
+            style="bold blue",
+            padding=(0, 1)
+        )
+    )
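
The resolution order above is: an explicit --temperature argument wins, then the model's default_temperature from models_params, and a temperature-capable model with neither is a hard error. A minimal sketch of that lookup in isolation (resolve_temperature is a hypothetical helper written for illustration, not a function in this commit):

    from ra_aid.models_params import models_params

    def resolve_temperature(provider: str, model: str, cli_temperature=None):
        """Mirror the lookup in main(): CLI flag first, then per-model default."""
        config = models_params.get(provider, {}).get(model or "", {})
        supports = config.get(
            "supports_temperature",
            provider in ["anthropic", "openai", "openrouter", "openai-compatible", "deepseek"],
        )
        if not supports:
            return None  # e.g. the o1/o3 entries, which reject the parameter entirely
        if cli_temperature is not None:
            return cli_temperature
        default = config.get("default_temperature")
        if default is None:
            raise ValueError(f"Temperature must be provided for model {model}")
        return default

    # resolve_temperature("openai", "gpt-4o")   -> 0.7 (DEFAULT_TEMPERATURE)
    # resolve_temperature("openai", "o1-mini")  -> None (temperature unsupported)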
@@ -1,8 +1,11 @@
 """Module for efficient file listing using git."""

 import subprocess
+import os
 from pathlib import Path
 from typing import List, Optional, Tuple
+import tempfile
+import shutil


 class FileListerError(Exception):
@@ -70,7 +73,7 @@ def is_git_repo(directory: str) -> bool:


 def get_file_listing(
-    directory: str, limit: Optional[int] = None
+    directory: str, limit: Optional[int] = None, include_hidden: bool = False
 ) -> Tuple[List[str], int]:
     """
     Get a list of tracked files in a git repository.
@@ -82,6 +85,7 @@ def get_file_listing(
     Args:
         directory: Path to the git repository
         limit: Optional maximum number of files to return
+        include_hidden: Whether to include hidden files (starting with .) in the results

     Returns:
         Tuple[List[str], int]: Tuple containing:
@@ -95,42 +99,66 @@ def get_file_listing(
         FileListerError: For other unexpected errors
     """
     try:
-        # Check if directory is a git repo first
+        # Check if directory exists and is accessible
+        if not os.path.exists(directory):
+            raise DirectoryNotFoundError(f"Directory not found: {directory}")
+        if not os.path.isdir(directory):
+            raise DirectoryNotFoundError(f"Not a directory: {directory}")
+
+        # Check if it's a git repository
         if not is_git_repo(directory):
            return [], 0

-        # Run git ls-files
-        result = subprocess.run(
-            ["git", "ls-files"],
-            cwd=directory,
-            capture_output=True,
-            text=True,
-            check=True,
-        )
-
-        # Process the output
-        files = [line.strip() for line in result.stdout.splitlines() if line.strip()]
-
-        # Deduplicate and sort for consistency
-        files = list(dict.fromkeys(files))  # Remove duplicates while preserving order
-
-        # Sort for consistency
-        files.sort()
-
-        # Get total count before truncation
-        total_count = len(files)
-
-        # Truncate if limit specified
-        if limit is not None:
-            files = files[:limit]
-
-        return files, total_count
-
+        # Get list of files from git ls-files
+        try:
+            # Get both tracked and untracked files
+            tracked_files_process = subprocess.run(
+                ["git", "ls-files"],
+                cwd=directory,
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+            untracked_files_process = subprocess.run(
+                ["git", "ls-files", "--others", "--exclude-standard"],
+                cwd=directory,
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+        except subprocess.CalledProcessError as e:
+            raise GitCommandError(f"Git command failed: {e}")
+        except PermissionError as e:
+            raise DirectoryAccessError(f"Cannot access directory {directory}: {e}")
+
+        # Combine and process the files
+        all_files = []
+        for file in tracked_files_process.stdout.splitlines() + untracked_files_process.stdout.splitlines():
+            file = file.strip()
+            if not file:
+                continue
+            # Skip hidden files unless explicitly included
+            if not include_hidden and (file.startswith(".") or any(part.startswith(".") for part in file.split("/"))):
+                continue
+            # Skip .aider files
+            if ".aider" in file:
+                continue
+            all_files.append(file)
+
+        # Remove duplicates and sort
+        all_files = sorted(set(all_files))
+        total_count = len(all_files)
+
+        # Apply limit if specified
+        if limit is not None:
+            all_files = all_files[:limit]
+
+        return all_files, total_count
+
-    except subprocess.CalledProcessError as e:
-        raise GitCommandError(f"Git command failed: {e}")
+    except (DirectoryNotFoundError, DirectoryAccessError, GitCommandError) as e:
+        # Re-raise known exceptions
+        raise
     except PermissionError as e:
         raise DirectoryAccessError(f"Permission denied: {e}")
     except Exception as e:
-        if isinstance(e, FileListerError):
-            raise
-        raise FileListerError(f"Error listing files: {e}")
+        raise FileListerError(f"Unexpected error: {e}")
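
A quick usage sketch of the reworked function (assuming the module is importable as ra_aid.file_listing; the paths are illustrative):

    from ra_aid.file_listing import get_file_listing

    # Hidden files and anything containing ".aider" are skipped by default.
    files, total = get_file_listing(".", limit=10)
    print(f"showing {len(files)} of {total} files")

    # Opt in to dotfiles such as .gitignore.
    files, total = get_file_listing(".", include_hidden=True)

Note that total is computed before the limit is applied, so callers can render "N of M" style summaries.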
@@ -170,10 +170,10 @@ def create_llm_client(
     # Handle temperature settings
     if is_expert:
         temp_kwargs = {"temperature": 0} if supports_temperature else {}
-    elif temperature is not None and supports_temperature:
+    elif supports_temperature:
+        if temperature is None:
+            raise ValueError(f"Temperature must be provided for model {model_name} which supports temperature")
         temp_kwargs = {"temperature": temperature}
-    elif provider == "openai-compatible" and supports_temperature:
-        temp_kwargs = {"temperature": 0.3}
     else:
         temp_kwargs = {}
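
The branch outcomes, spelled out (a sketch of the behavior, not code from the commit):

    # is_expert=True,  supports_temperature=True                    -> {"temperature": 0}
    # is_expert=False, supports_temperature=True, temperature=0.7   -> {"temperature": 0.7}
    # is_expert=False, supports_temperature=True, temperature=None  -> ValueError
    # supports_temperature=False                                    -> {}

The old openai-compatible fallback (temperature 0.3) is dropped; under the new `elif supports_temperature:` it would have been unreachable anyway, and a missing temperature is now an explicit error rather than a silent default.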
@@ -3,28 +3,29 @@ List of model parameters
 """

 DEFAULT_TOKEN_LIMIT = 100000
+DEFAULT_TEMPERATURE = 0.7

 models_params = {
     "openai": {
-        "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True},
-        "gpt-3.5": {"token_limit": 4096, "supports_temperature": True},
-        "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True},
-        "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True},
-        "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True},
-        "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4": {"token_limit": 8192, "supports_temperature": True},
-        "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True},
-        "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True},
-        "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True},
-        "gpt-4o": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4o-2024-08-06": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4o-2024-05-13": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True},
+        "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4o": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4o-2024-08-06": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4o-2024-05-13": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
-        "o1-preview": {"token_limit": 128000, "supports_temperature": False},
         "o1-mini": {"token_limit": 128000, "supports_temperature": False},
+        "o1-preview": {"token_limit": 128000, "supports_temperature": False},
@@ -32,301 +33,343 @@ models_params = {
         "o3-mini": {"token_limit": 200000, "supports_temperature": False},
     },
     "azure_openai": {
-        "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True},
-        "gpt-3.5": {"token_limit": 4096, "supports_temperature": True},
-        "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True},
-        "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True},
-        "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True},
-        "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4": {"token_limit": 8192, "supports_temperature": True},
-        "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True},
-        "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True},
-        "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True},
-        "gpt-4o": {"token_limit": 128000, "supports_temperature": True},
-        "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True},
-        "chatgpt-4o-latest": {"token_limit": 128000, "supports_temperature": True},
+        "gpt-3.5-turbo-0125": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5-turbo": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5-turbo-1106": {"token_limit": 16385, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-3.5-turbo-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-0125-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-turbo-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-turbo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-turbo-2024-04-09": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-1106-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-vision-preview": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-0613": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-32k": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4-32k-0613": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4o": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gpt-4o-mini": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "chatgpt-4o-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "o1-preview": {"token_limit": 128000, "supports_temperature": False},
         "o1-mini": {"token_limit": 128000, "supports_temperature": False},
     },
     "google_genai": {
-        "gemini-pro": {"token_limit": 128000, "supports_temperature": True},
+        "gemini-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "gemini-1.5-flash-latest": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "gemini-1.5-pro-latest": {"token_limit": 128000, "supports_temperature": True},
-        "models/embedding-001": {"token_limit": 2048, "supports_temperature": True},
+        "gemini-1.5-pro-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "models/embedding-001": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
     "google_vertexai": {
-        "gemini-1.5-flash": {"token_limit": 128000, "supports_temperature": True},
-        "gemini-1.5-pro": {"token_limit": 128000, "supports_temperature": True},
-        "gemini-1.0-pro": {"token_limit": 128000, "supports_temperature": True},
+        "gemini-1.5-flash": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gemini-1.5-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gemini-1.0-pro": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
     "ollama": {
-        "command-r": {"token_limit": 12800, "supports_temperature": True},
-        "codellama": {"token_limit": 16000, "supports_temperature": True},
-        "dbrx": {"token_limit": 32768, "supports_temperature": True},
-        "deepseek-coder:33b": {"token_limit": 16000, "supports_temperature": True},
-        "falcon": {"token_limit": 2048, "supports_temperature": True},
-        "llama2": {"token_limit": 4096, "supports_temperature": True},
-        "llama2:7b": {"token_limit": 4096, "supports_temperature": True},
-        "llama2:13b": {"token_limit": 4096, "supports_temperature": True},
-        "llama2:70b": {"token_limit": 4096, "supports_temperature": True},
-        "llama3": {"token_limit": 8192, "supports_temperature": True},
-        "llama3:8b": {"token_limit": 8192, "supports_temperature": True},
-        "llama3:70b": {"token_limit": 8192, "supports_temperature": True},
-        "llama3.1": {"token_limit": 128000, "supports_temperature": True},
-        "llama3.1:8b": {"token_limit": 128000, "supports_temperature": True},
-        "llama3.1:70b": {"token_limit": 128000, "supports_temperature": True},
-        "lama3.1:405b": {"token_limit": 128000, "supports_temperature": True},
-        "llama3.2": {"token_limit": 128000, "supports_temperature": True},
-        "llama3.2:1b": {"token_limit": 128000, "supports_temperature": True},
-        "llama3.2:3b": {"token_limit": 128000, "supports_temperature": True},
-        "llama3.3:70b": {"token_limit": 128000, "supports_temperature": True},
-        "scrapegraph": {"token_limit": 8192, "supports_temperature": True},
-        "mistral-small": {"token_limit": 128000, "supports_temperature": True},
-        "mistral-openorca": {"token_limit": 32000, "supports_temperature": True},
-        "mistral-large": {"token_limit": 128000, "supports_temperature": True},
-        "grok-1": {"token_limit": 8192, "supports_temperature": True},
-        "llava": {"token_limit": 4096, "supports_temperature": True},
-        "mixtral:8x22b-instruct": {"token_limit": 65536, "supports_temperature": True},
-        "nomic-embed-text": {"token_limit": 8192, "supports_temperature": True},
-        "nous-hermes2:34b": {"token_limit": 4096, "supports_temperature": True},
-        "orca-mini": {"token_limit": 2048, "supports_temperature": True},
-        "phi3:3.8b": {"token_limit": 12800, "supports_temperature": True},
-        "phi3:14b": {"token_limit": 128000, "supports_temperature": True},
-        "qwen:0.5b": {"token_limit": 32000, "supports_temperature": True},
-        "qwen:1.8b": {"token_limit": 32000, "supports_temperature": True},
-        "qwen:4b": {"token_limit": 32000, "supports_temperature": True},
-        "qwen:14b": {"token_limit": 32000, "supports_temperature": True},
-        "qwen:32b": {"token_limit": 32000, "supports_temperature": True},
-        "qwen:72b": {"token_limit": 32000, "supports_temperature": True},
-        "qwen:110b": {"token_limit": 32000, "supports_temperature": True},
-        "stablelm-zephyr": {"token_limit": 8192, "supports_temperature": True},
-        "wizardlm2:8x22b": {"token_limit": 65536, "supports_temperature": True},
-        "mistral": {"token_limit": 128000, "supports_temperature": True},
-        "gemma2": {"token_limit": 128000, "supports_temperature": True},
-        "gemma2:9b": {"token_limit": 128000, "supports_temperature": True},
-        "gemma2:27b": {"token_limit": 128000, "supports_temperature": True},
+        "command-r": {"token_limit": 12800, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "codellama": {"token_limit": 16000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "dbrx": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "deepseek-coder:33b": {"token_limit": 16000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "falcon": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama2": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama2:7b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama2:13b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama2:70b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3:8b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3:70b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.1": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.1:8b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.1:70b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "lama3.1:405b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.2": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.2:1b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.2:3b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3.3:70b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "scrapegraph": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mistral-small": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mistral-openorca": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mistral-large": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "grok-1": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llava": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mixtral:8x22b-instruct": {"token_limit": 65536, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "nomic-embed-text": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "nous-hermes2:34b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "orca-mini": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "phi3:3.8b": {"token_limit": 12800, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "phi3:14b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:0.5b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:1.8b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:4b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:14b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:32b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:72b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "qwen:110b": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "stablelm-zephyr": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "wizardlm2:8x22b": {"token_limit": 65536, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mistral": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gemma2": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gemma2:9b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gemma2:27b": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         # embedding models
         "shaw/dmeta-embedding-zh-small-q4": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "shaw/dmeta-embedding-zh-q4": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "chevalblanc/acge_text_embedding": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "martcreation/dmeta-embedding-zh": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "snowflake-arctic-embed": {"token_limit": 8192, "supports_temperature": True},
-        "mxbai-embed-large": {"token_limit": 512, "supports_temperature": True},
+        "snowflake-arctic-embed": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mxbai-embed-large": {"token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
-    "oneapi": {"qwen-turbo": {"token_limit": 6000, "supports_temperature": True}},
+    "oneapi": {"qwen-turbo": {"token_limit": 6000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE}},
     "nvidia": {
-        "meta/llama3-70b-instruct": {"token_limit": 419, "supports_temperature": True},
-        "meta/llama3-8b-instruct": {"token_limit": 419, "supports_temperature": True},
-        "nemotron-4-340b-instruct": {"token_limit": 1024, "supports_temperature": True},
-        "databricks/dbrx-instruct": {"token_limit": 4096, "supports_temperature": True},
-        "google/codegemma-7b": {"token_limit": 8192, "supports_temperature": True},
-        "google/gemma-2b": {"token_limit": 2048, "supports_temperature": True},
-        "google/gemma-7b": {"token_limit": 8192, "supports_temperature": True},
-        "google/recurrentgemma-2b": {"token_limit": 2048, "supports_temperature": True},
-        "meta/codellama-70b": {"token_limit": 16384, "supports_temperature": True},
-        "meta/llama2-70b": {"token_limit": 4096, "supports_temperature": True},
+        "meta/llama3-70b-instruct": {"token_limit": 419, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "meta/llama3-8b-instruct": {"token_limit": 419, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "nemotron-4-340b-instruct": {"token_limit": 1024, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "databricks/dbrx-instruct": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "google/codegemma-7b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "google/gemma-2b": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "google/gemma-7b": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "google/recurrentgemma-2b": {"token_limit": 2048, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "meta/codellama-70b": {"token_limit": 16384, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "meta/llama2-70b": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "microsoft/phi-3-mini-128k-instruct": {
             "token_limit": 122880,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "mistralai/mistral-7b-instruct-v0.2": {
             "token_limit": 4096,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "mistralai/mistral-large": {"token_limit": 8192, "supports_temperature": True},
+        "mistralai/mistral-large": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "mistralai/mixtral-8x22b-instruct-v0.1": {
             "token_limit": 32768,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "mistralai/mixtral-8x7b-instruct-v0.1": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "snowflake/arctic": {"token_limit": 16384, "supports_temperature": True},
+        "snowflake/arctic": {"token_limit": 16384, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
     "groq": {
-        "llama3-8b-8192": {"token_limit": 8192, "supports_temperature": True},
-        "llama3-70b-8192": {"token_limit": 8192, "supports_temperature": True},
-        "mixtral-8x7b-32768": {"token_limit": 32768, "supports_temperature": True},
-        "gemma-7b-it": {"token_limit": 8192, "supports_temperature": True},
-        "claude-3-haiku-20240307'": {"token_limit": 8192, "supports_temperature": True},
+        "llama3-8b-8192": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "llama3-70b-8192": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "mixtral-8x7b-32768": {"token_limit": 32768, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "gemma-7b-it": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "claude-3-haiku-20240307'": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
     "toghetherai": {
         "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "mistralai/Mixtral-8x22B-Instruct-v0.1": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "stabilityai/stable-diffusion-xl-base-1.0": {
             "token_limit": 2048,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "NousResearch/Hermes-3-Llama-3.1-405B-Turbo": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "Gryphe/MythoMax-L2-13b-Lite": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "Salesforce/Llama-Rank-V1": {"token_limit": 8192, "supports_temperature": True},
+        "Salesforce/Llama-Rank-V1": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "meta-llama/Meta-Llama-Guard-3-8B": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta-llama/Llama-3-8b-chat-hf": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta-llama/Llama-3-70b-chat-hf": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "Qwen/Qwen2-72B-Instruct": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "google/gemma-2-27b-it": {"token_limit": 8192, "supports_temperature": True},
+        "google/gemma-2-27b-it": {"token_limit": 8192, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
     "anthropic": {
-        "claude_instant": {"token_limit": 100000, "supports_temperature": True},
-        "claude2": {"token_limit": 9000, "supports_temperature": True},
-        "claude2.1": {"token_limit": 200000, "supports_temperature": True},
-        "claude3": {"token_limit": 200000, "supports_temperature": True},
-        "claude3.5": {"token_limit": 200000, "supports_temperature": True},
-        "claude-3-opus-20240229": {"token_limit": 200000, "supports_temperature": True},
+        "claude_instant": {"token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "claude2": {"token_limit": 9000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "claude2.1": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "claude3": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "claude3.5": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "claude-3-opus-20240229": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "claude-3-sonnet-20240229": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "claude-3-haiku-20240307": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "claude-3-5-sonnet-20240620": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "claude-3-5-sonnet-20241022": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "claude-3-5-haiku-latest": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
     },
     "bedrock": {
         "anthropic.claude-3-haiku-20240307-v1:0": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "anthropic.claude-3-sonnet-20240229-v1:0": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "anthropic.claude-3-opus-20240229-v1:0": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "anthropic.claude-3-5-sonnet-20240620-v1:0": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "claude-3-5-haiku-latest": {
             "token_limit": 200000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "anthropic.claude-v2:1": {"token_limit": 200000, "supports_temperature": True},
-        "anthropic.claude-v2": {"token_limit": 100000, "supports_temperature": True},
+        "anthropic.claude-v2:1": {"token_limit": 200000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "anthropic.claude-v2": {"token_limit": 100000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "anthropic.claude-instant-v1": {
             "token_limit": 100000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta.llama3-8b-instruct-v1:0": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "meta.llama3-70b-instruct-v1:0": {
             "token_limit": 8192,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "meta.llama2-13b-chat-v1": {"token_limit": 4096, "supports_temperature": True},
-        "meta.llama2-70b-chat-v1": {"token_limit": 4096, "supports_temperature": True},
+        "meta.llama2-13b-chat-v1": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "meta.llama2-70b-chat-v1": {"token_limit": 4096, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "mistral.mistral-7b-instruct-v0:2": {
             "token_limit": 32768,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "mistral.mixtral-8x7b-instruct-v0:1": {
             "token_limit": 32768,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "mistral.mistral-large-2402-v1:0": {
             "token_limit": 32768,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "mistral.mistral-small-2402-v1:0": {
             "token_limit": 32768,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "amazon.titan-embed-text-v1": {
             "token_limit": 8000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
         "amazon.titan-embed-text-v2:0": {
             "token_limit": 8000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
-        "cohere.embed-english-v3": {"token_limit": 512, "supports_temperature": True},
+        "cohere.embed-english-v3": {"token_limit": 512, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
         "cohere.embed-multilingual-v3": {
             "token_limit": 512,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         },
     },
     "mistralai": {
-        "mistral-large-latest": {"token_limit": 128000, "supports_temperature": True},
-        "open-mistral-nemo": {"token_limit": 128000, "supports_temperature": True},
-        "codestral-latest": {"token_limit": 32000, "supports_temperature": True},
+        "mistral-large-latest": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "open-mistral-nemo": {"token_limit": 128000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
+        "codestral-latest": {"token_limit": 32000, "supports_temperature": True, "default_temperature": DEFAULT_TEMPERATURE},
     },
     "togetherai": {
         "Meta-Llama-3.1-70B-Instruct-Turbo": {
             "token_limit": 128000,
             "supports_temperature": True,
+            "default_temperature": DEFAULT_TEMPERATURE,
         }
     },
 }
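
Every temperature-capable entry now carries an explicit default, so a dictionary lookup plus .get() is all a caller needs. A short sketch against the table above (the model names shown are entries from this file):

    from ra_aid.models_params import models_params, DEFAULT_TEMPERATURE

    cfg = models_params["openai"]["gpt-4o"]
    assert cfg["default_temperature"] == DEFAULT_TEMPERATURE  # 0.7

    # Reasoning models advertise no default because they reject temperature.
    cfg = models_params["openai"]["o3-mini"]
    assert cfg["supports_temperature"] is False
    assert "default_temperature" not in cfg

    # Unknown provider/model pairs fall through to an empty config,
    # which is exactly what main() relies on before applying its fallback list.
    cfg = models_params.get("openai", {}).get("not-a-model", {})
    assert cfg == {}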
@@ -72,7 +72,7 @@ def get_read_only_tools(

 # Define constant tool groups
 READ_ONLY_TOOLS = get_read_only_tools()
-MODIFICATION_TOOLS = [run_programming_task, put_complete_file_contents]
+MODIFICATION_TOOLS = [run_programming_task]
 COMMON_TOOLS = get_read_only_tools()
 EXPERT_TOOLS = [emit_expert_context, ask_expert]
 RESEARCH_TOOLS = [
@@ -118,6 +118,8 @@ def run_programming_task(
     # Log the programming task
     log_work_event(f"Executed programming task: {_truncate_for_log(instructions)}")

+    print(repr(result))
+
     # Return structured output
     return {
         "output": truncate_output(result[0].decode()) if result[0] else "",
@@ -12,17 +12,16 @@ console = Console()

 @tool
 def put_complete_file_contents(
-    filepath: str, content: str, encoding: str = "utf-8", verbose: bool = True
+    filepath: str, complete_file_contents: str = "", encoding: str = "utf-8", verbose: bool = True
 ) -> Dict[str, any]:
     """Write the complete contents of a file, creating it if it doesn't exist.
     This tool is specifically for writing the entire contents of a file at once,
     not for appending or partial writes.

-    `filepath` and `content` must ALWAYS be provided.
-
     Args:
-        filepath: Path to the file to write
-        content: Complete string content to write to the file
+        filepath: (Required) Path to the file to write. Must be provided.
+        complete_file_contents: Complete string content to write to the file. Defaults to
+            an empty string, which will create an empty file.
         encoding: File encoding to use (default: utf-8)
         verbose: Whether to display a Rich panel with write statistics (default: True)
@@ -55,14 +54,18 @@ def put_complete_file_contents(
         logging.debug(f"Starting to write file: {filepath}")

         with open(filepath, "w", encoding=encoding) as f:
-            f.write(content)
-            result["bytes_written"] = len(content.encode(encoding))
+            logging.debug(f"Writing {len(complete_file_contents)} bytes to {filepath}")
+            f.write(complete_file_contents)
+            result["bytes_written"] = len(complete_file_contents.encode(encoding))

         elapsed = time.time() - start_time
         result["elapsed_time"] = elapsed
         result["success"] = True
         result["filepath"] = filepath
-        result["message"] = "Operation completed successfully"
+        result["message"] = (
+            f"Successfully {'initialized empty file' if not complete_file_contents else f'wrote {result['bytes_written']} bytes'} "
+            f"at {filepath} in {result['elapsed_time']:.3f}s"
+        )

         logging.debug(
             f"File write complete: {result['bytes_written']} bytes in {elapsed:.2f}s"
@@ -71,7 +74,7 @@ def put_complete_file_contents(
         if verbose:
             console.print(
                 Panel(
-                    f"Wrote {result['bytes_written']} bytes to {filepath} in {elapsed:.2f}s",
+                    f"{'Initialized empty file' if not complete_file_contents else f'Wrote {result['bytes_written']} bytes'} at {filepath} in {elapsed:.2f}s",
                     title="💾 File Write",
                     border_style="bright_green",
                 )
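
Because this is a LangChain @tool, it is invoked with a single argument dict, which is how the repository's tests call it below. Omitting complete_file_contents now simply creates an empty file. A sketch using a throwaway path (the path is illustrative):

    result = put_complete_file_contents(
        {"filepath": "/tmp/example.txt", "complete_file_contents": "hello\n"}
    )
    assert result["success"] is True
    assert result["bytes_written"] == 6  # "hello\n" is six bytes in UTF-8

    # Default: empty string creates (or truncates) the file to zero bytes.
    result = put_complete_file_contents({"filepath": "/tmp/empty.txt"})
    assert result["bytes_written"] == 0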
@@ -19,7 +19,7 @@ def test_basic_write_functionality(temp_test_dir):
     test_file = temp_test_dir / "test.txt"
     content = "Hello, World!\nTest content"

-    result = put_complete_file_contents({"filepath": str(test_file), "content": content})
+    result = put_complete_file_contents({"filepath": str(test_file), "complete_file_contents": content})

     # Verify file contents
     assert test_file.read_text() == content
@@ -38,7 +38,7 @@ def test_directory_creation(temp_test_dir):
     test_file = nested_dir / "test.txt"
     content = "Test content"

-    result = put_complete_file_contents({"filepath": str(test_file), "content": content})
+    result = put_complete_file_contents({"filepath": str(test_file), "complete_file_contents": content})

     assert test_file.exists()
     assert test_file.read_text() == content
@@ -52,14 +52,14 @@ def test_different_encodings(temp_test_dir):

     # Test UTF-8
     result_utf8 = put_complete_file_contents(
-        {"filepath": str(test_file), "content": content, "encoding": "utf-8"}
+        {"filepath": str(test_file), "complete_file_contents": content, "encoding": "utf-8"}
     )
     assert result_utf8["success"] is True
     assert test_file.read_text(encoding="utf-8") == content

     # Test UTF-16
     result_utf16 = put_complete_file_contents(
-        {"filepath": str(test_file), "content": content, "encoding": "utf-16"}
+        {"filepath": str(test_file), "complete_file_contents": content, "encoding": "utf-16"}
     )
     assert result_utf16["success"] is True
     assert test_file.read_text(encoding="utf-16") == content
@@ -72,7 +72,7 @@ def test_permission_error(mock_open_func, temp_test_dir):
     test_file = temp_test_dir / "noperm.txt"

     result = put_complete_file_contents(
-        {"filepath": str(test_file), "content": "test content"}
+        {"filepath": str(test_file), "complete_file_contents": "test content"}
     )

     assert result["success"] is False
@@ -87,7 +87,7 @@ def test_io_error(mock_open_func, temp_test_dir):
     test_file = temp_test_dir / "ioerror.txt"

     result = put_complete_file_contents(
-        {"filepath": str(test_file), "content": "test content"}
+        {"filepath": str(test_file), "complete_file_contents": "test content"}
     )

     assert result["success"] is False
@@ -99,12 +99,26 @@ def test_empty_content(temp_test_dir):
     """Test writing empty content to a file."""
     test_file = temp_test_dir / "empty.txt"

-    result = put_complete_file_contents({"filepath": str(test_file), "content": ""})
+    result = put_complete_file_contents({"filepath": str(test_file)})

     assert test_file.exists()
     assert test_file.read_text() == ""
     assert result["success"] is True
+    assert result["bytes_written"] == 0
+    assert "initialized empty file" in result["message"].lower()
+
+
+def test_write_empty_file_default(temp_test_dir):
+    """Test creating an empty file using default parameter."""
+    test_file = temp_test_dir / "empty_default.txt"
+
+    result = put_complete_file_contents({"filepath": str(test_file)})
+
+    assert test_file.exists()
+    assert test_file.read_text() == ""
+    assert result["success"] is True
+    assert result["bytes_written"] == 0
+    assert "initialized empty file" in result["message"].lower()


 def test_overwrite_existing_file(temp_test_dir):
@@ -117,7 +131,7 @@ def test_overwrite_existing_file(temp_test_dir):
     # Overwrite with new content
     new_content = "New content"
     result = put_complete_file_contents(
-        {"filepath": str(test_file), "content": new_content}
+        {"filepath": str(test_file), "complete_file_contents": new_content}
     )

     assert test_file.read_text() == new_content
@@ -130,7 +144,7 @@ def test_large_file_write(temp_test_dir):
     test_file = temp_test_dir / "large.txt"
     content = "Large content\n" * 1000  # Create substantial content

-    result = put_complete_file_contents({"filepath": str(test_file), "content": content})
+    result = put_complete_file_contents({"filepath": str(test_file), "complete_file_contents": content})

     assert test_file.exists()
     assert test_file.read_text() == content
@@ -144,7 +158,7 @@ def test_invalid_path_characters(temp_test_dir):
     invalid_path = temp_test_dir / "invalid\0file.txt"

     result = put_complete_file_contents(
-        {"filepath": str(invalid_path), "content": "test content"}
+        {"filepath": str(invalid_path), "complete_file_contents": "test content"}
     )

     assert result["success"] is False
@@ -162,7 +176,7 @@ def test_write_to_readonly_directory(temp_test_dir):

     try:
         result = put_complete_file_contents(
-            {"filepath": str(test_file), "content": "test content"}
+            {"filepath": str(test_file), "complete_file_contents": "test content"}
         )
         assert result["success"] is False
         assert "Permission" in result["message"]
@@ -56,6 +56,109 @@ def sample_git_repo(empty_git_repo):
     return empty_git_repo


+@pytest.fixture
+def git_repo_with_untracked(sample_git_repo):
+    """Create a git repository with both tracked and untracked files."""
+    # Create untracked files
+    untracked_files = [
+        "untracked.txt",
+        "src/untracked.py",
+        "docs/draft.md"
+    ]
+
+    for file_path in untracked_files:
+        full_path = sample_git_repo / file_path
+        full_path.parent.mkdir(parents=True, exist_ok=True)
+        full_path.write_text(f"Untracked content of {file_path}")
+
+    return sample_git_repo
+
+
+@pytest.fixture
+def git_repo_with_ignores(git_repo_with_untracked):
+    """Create a git repository with .gitignore rules."""
+    # Create .gitignore file
+    gitignore_content = """
+# Python
+__pycache__/
+*.pyc
+
+# Project specific
+*.log
+temp/
+ignored.txt
+docs/draft.md
+"""
+    gitignore_path = git_repo_with_untracked / ".gitignore"
+    gitignore_path.write_text(gitignore_content)
+
+    # Add and commit .gitignore first
+    subprocess.run(["git", "add", ".gitignore"], cwd=git_repo_with_untracked)
+    subprocess.run(
+        ["git", "commit", "-m", "Add .gitignore"],
+        cwd=git_repo_with_untracked,
+        env={
+            "GIT_AUTHOR_NAME": "Test",
+            "GIT_AUTHOR_EMAIL": "test@example.com",
+            "GIT_COMMITTER_NAME": "Test",
+            "GIT_COMMITTER_EMAIL": "test@example.com",
+        },
+    )
+
+    # Create some ignored files
+    ignored_files = [
+        "ignored.txt",
+        "temp/temp.txt",
+        "src/__pycache__/main.cpython-39.pyc"
+    ]
+
+    for file_path in ignored_files:
+        full_path = git_repo_with_untracked / file_path
+        full_path.parent.mkdir(parents=True, exist_ok=True)
+        full_path.write_text(f"Ignored content of {file_path}")
+
+    return git_repo_with_untracked
+
+
+@pytest.fixture
+def git_repo_with_aider_files(sample_git_repo):
+    """Create a git repository with .aider files that should be ignored."""
+    # Create .aider files
+    aider_files = [
+        ".aider.chat.history.md",
+        ".aider.input.history",
+        ".aider.tags.cache.v3/some_file",
+        "src/.aider.local.settings"
+    ]
+
+    # Create regular files
+    regular_files = [
+        "main.cpp",
+        "src/helper.cpp"
+    ]
+
+    # Create all files
+    for file_path in aider_files + regular_files:
+        full_path = sample_git_repo / file_path
+        full_path.parent.mkdir(parents=True, exist_ok=True)
+        full_path.write_text(f"Content of {file_path}")
+
+    # Add all files (both .aider and regular) to git
+    subprocess.run(["git", "add", "."], cwd=sample_git_repo)
+    subprocess.run(
+        ["git", "commit", "-m", "Add files including .aider"],
+        cwd=sample_git_repo,
+        env={
+            "GIT_AUTHOR_NAME": "Test",
+            "GIT_AUTHOR_EMAIL": "test@example.com",
+            "GIT_COMMITTER_NAME": "Test",
+            "GIT_COMMITTER_EMAIL": "test@example.com",
+        },
+    )
+
+    return sample_git_repo
+
+
 def test_is_git_repo(sample_git_repo, tmp_path_factory):
     """Test git repository detection."""
     # Create a new directory that is not a git repository
@@ -248,39 +351,199 @@ def mock_is_git_repo():
     yield mock


+@pytest.fixture
+def mock_os_path(monkeypatch):
+    """Mock os.path functions."""
+    def mock_exists(path):
+        return True
+
+    def mock_isdir(path):
+        return True
+
+    monkeypatch.setattr(os.path, 'exists', mock_exists)
+    monkeypatch.setattr(os.path, 'isdir', mock_isdir)
+    return monkeypatch
+
+
 @pytest.mark.parametrize("test_case", FILE_LISTING_TEST_CASES, ids=lambda x: x["name"])
-def test_get_file_listing(test_case, mock_subprocess, mock_is_git_repo):
+def test_get_file_listing(test_case, mock_subprocess, mock_is_git_repo, mock_os_path):
     """Test get_file_listing with various inputs."""
     mock_subprocess.return_value = create_mock_process(test_case["git_output"])
     files, total = get_file_listing(DUMMY_PATH, limit=test_case["limit"])

     assert files == test_case["expected_files"]
     assert total == test_case["expected_total"]


-def test_get_file_listing_non_git_repo(mock_is_git_repo):
+def test_get_file_listing_non_git_repo(mock_is_git_repo, mock_os_path):
     """Test get_file_listing with non-git repository."""
     mock_is_git_repo.return_value = False
     files, total = get_file_listing(DUMMY_PATH)
-    assert files == EMPTY_FILE_LIST
-    assert total == EMPTY_FILE_TOTAL
+    assert files == []
+    assert total == 0


-def test_get_file_listing_git_error(mock_subprocess, mock_is_git_repo):
+def test_get_file_listing_git_error(mock_subprocess, mock_is_git_repo, mock_os_path):
     """Test get_file_listing when git command fails."""
     mock_subprocess.side_effect = GitCommandError("Git command failed")
     with pytest.raises(GitCommandError):
         get_file_listing(DUMMY_PATH)


-def test_get_file_listing_permission_error(mock_subprocess, mock_is_git_repo):
+def test_get_file_listing_permission_error(mock_subprocess, mock_is_git_repo, mock_os_path):
     """Test get_file_listing with permission error."""
     mock_subprocess.side_effect = PermissionError("Permission denied")
     with pytest.raises(DirectoryAccessError):
         get_file_listing(DUMMY_PATH)


-def test_get_file_listing_unexpected_error(mock_subprocess, mock_is_git_repo):
+def test_get_file_listing_unexpected_error(mock_subprocess, mock_is_git_repo, mock_os_path):
     """Test get_file_listing with unexpected error."""
     mock_subprocess.side_effect = Exception("Unexpected error")
     with pytest.raises(FileListerError):
         get_file_listing(DUMMY_PATH)
+
+
+def test_get_file_listing_with_untracked(git_repo_with_untracked):
+    """Test that file listing includes both tracked and untracked files."""
+    files, count = get_file_listing(str(git_repo_with_untracked))
+
+    # Check tracked files are present
+    assert "README.md" in files
+    assert "src/main.py" in files
+
+    # Check untracked files are present
+    assert "untracked.txt" in files
+    assert "src/untracked.py" in files
+
+    # Verify count includes both tracked and untracked
+    expected_count = 8  # 5 tracked + 3 untracked (excluding .gitignore)
+    assert count == expected_count
+
+
+def test_get_file_listing_with_untracked_and_limit(git_repo_with_untracked):
+    """Test that file listing with limit works correctly with untracked files."""
+    limit = 3
+    files, count = get_file_listing(str(git_repo_with_untracked), limit=limit)
+
+    # Total count should still be full count
+    assert count == 8  # 5 tracked + 3 untracked (excluding .gitignore)
+
+    # Only limit number of files should be returned
+    assert len(files) == limit
+
+    # Files should be sorted, so we can check first 3
+    assert files == sorted(files)
+
+
+def test_get_file_listing_respects_gitignore(git_repo_with_ignores):
+    """Test that file listing respects .gitignore rules."""
+    # First test with hidden files excluded (default)
+    files, count = get_file_listing(str(git_repo_with_ignores))
+
+    # These files should be included (tracked or untracked but not ignored)
+    assert "README.md" in files
+    assert "src/main.py" in files
+    assert "untracked.txt" in files
+    assert "src/untracked.py" in files
+
+    # .gitignore should be excluded as it's hidden
+    assert ".gitignore" not in files
+
+    # These files should be excluded (ignored)
+    assert "ignored.txt" not in files
+    assert "temp/temp.txt" not in files
+    assert "src/__pycache__/main.cpython-39.pyc" not in files
+    assert "docs/draft.md" not in files  # Explicitly ignored in .gitignore
+
+    # Count should include non-ignored, non-hidden files
+    expected_count = 7  # 4 tracked + 2 untracked (excluding .gitignore)
+    assert count == expected_count
+
+    # Now test with hidden files included
+    files, count = get_file_listing(str(git_repo_with_ignores), include_hidden=True)
+
+    # .gitignore should now be included
+    assert ".gitignore" in files
+
+    # Count should include non-ignored files plus .gitignore
+    expected_count = 8  # 5 tracked + 2 untracked + .gitignore
+    assert count == expected_count
+
+
+def test_aider_files_excluded(git_repo_with_aider_files):
+    """Test that .aider files are excluded from the file listing."""
+    files, count = get_file_listing(str(git_repo_with_aider_files))
+
+    # Regular files should be included
+    assert "main.cpp" in files
+    assert "src/helper.cpp" in files
+
+    # .aider files should be excluded
+    assert ".aider.chat.history.md" not in files
+    assert ".aider.input.history" not in files
+    assert ".aider.tags.cache.v3/some_file" not in files
+    assert "src/.aider.local.settings" not in files
+
+    # Only the regular files should be counted
+    expected_count = 7  # 5 original files from sample_git_repo + 2 new regular files
+    assert count == expected_count
+    assert len(files) == expected_count
+
+
+def test_hidden_files_excluded_by_default(git_repo_with_aider_files):
+    """Test that hidden files are excluded by default."""
+    # Create some hidden files
+    hidden_files = [
+        ".config",
+        ".env",
+        "src/.local",
+        ".gitattributes"
+    ]
+
+    # Create regular files
+    regular_files = [
+        "main.cpp",
+        "src/helper.cpp"
+    ]
+
+    # Create all files
+    for file_path in hidden_files + regular_files:
+        full_path = git_repo_with_aider_files / file_path
+        full_path.parent.mkdir(parents=True, exist_ok=True)
+        full_path.write_text(f"Content of {file_path}")
+
+    # Add all files to git
+    subprocess.run(["git", "add", "."], cwd=git_repo_with_aider_files)
+    subprocess.run(
+        ["git", "commit", "-m", "Add files including hidden files"],
+        cwd=git_repo_with_aider_files,
+        env={
+            "GIT_AUTHOR_NAME": "Test",
+            "GIT_AUTHOR_EMAIL": "test@example.com",
+            "GIT_COMMITTER_NAME": "Test",
+            "GIT_COMMITTER_EMAIL": "test@example.com",
+        },
+    )
+
+    # Test default behavior (hidden files excluded)
+    files, count = get_file_listing(str(git_repo_with_aider_files))
+
+    # Regular files should be included
+    assert "main.cpp" in files
+    assert "src/helper.cpp" in files
+
+    # Hidden files should be excluded
+    for hidden_file in hidden_files:
+        assert hidden_file not in files
+
+    # Only regular files should be counted
+    assert count == 7  # 5 original files + 2 new regular files
+
+    # Test with include_hidden=True
+    files, count = get_file_listing(str(git_repo_with_aider_files), include_hidden=True)
+
+    # Both regular and hidden files should be included
+    assert "main.cpp" in files
+    assert "src/helper.cpp" in files
+    for hidden_file in hidden_files:
+        assert hidden_file in files
+
+    # All files should be counted
+    assert count == 11  # 5 original + 2 regular + 4 hidden