From f096a0611922430fc297118bc8da17003b48d702 Mon Sep 17 00:00:00 2001
From: AI Christianson
Date: Fri, 20 Dec 2024 13:41:27 -0500
Subject: [PATCH] Reduce model temperatures to 0.

---
 CHANGELOG.md  |  1 +
 ra_aid/llm.py | 24 ++++++++++++++++--------
 2 files changed, 17 insertions(+), 8 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 561bef3..987f281 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added ask_human tool to allow human operator to answer questions asked by the agent.
 - Handle keyboard interrupt (ctrl-c.)
 - Disable PAGERs for shell commands so agent can work autonomously.
+- Reduce model temperatures to 0.
 
 ## [0.6.4] - 2024-12-19
 
diff --git a/ra_aid/llm.py b/ra_aid/llm.py
index 0ad4358..dd1da0e 100644
--- a/ra_aid/llm.py
+++ b/ra_aid/llm.py
@@ -22,24 +22,28 @@ def initialize_llm(provider: str, model_name: str) -> BaseChatModel:
     if provider == "openai":
         return ChatOpenAI(
             api_key=os.getenv("OPENAI_API_KEY"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "anthropic":
         return ChatAnthropic(
             api_key=os.getenv("ANTHROPIC_API_KEY"),
-            model_name=model_name
+            model_name=model_name,
+            temperature=0
         )
     elif provider == "openrouter":
         return ChatOpenAI(
             api_key=os.getenv("OPENROUTER_API_KEY"),
             base_url="https://openrouter.ai/api/v1",
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "openai-compatible":
         return ChatOpenAI(
             api_key=os.getenv("OPENAI_API_KEY"),
             base_url=os.getenv("OPENAI_API_BASE"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     else:
         raise ValueError(f"Unsupported provider: {provider}")
@@ -64,25 +68,29 @@ def initialize_expert_llm(provider: str = "openai", model_name: str = "o1-previe
     if provider == "openai":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
-            model=model_name
+            model=model_name,
+            temperature=0
        )
     elif provider == "anthropic":
         return ChatAnthropic(
             api_key=os.getenv("EXPERT_ANTHROPIC_API_KEY"),
             model_name=model_name,
-            max_tokens=200000
+            max_tokens=200000,
+            temperature=0
         )
     elif provider == "openrouter":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENROUTER_API_KEY"),
             base_url="https://openrouter.ai/api/v1",
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "openai-compatible":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
             base_url=os.getenv("EXPERT_OPENAI_API_BASE"),
-            model=model_name
+            model=model_name,
+            temperature=0
        )
     else:
         raise ValueError(f"Unsupported provider: {provider}")