Reduce model temperatures to 0.

AI Christianson 2024-12-20 13:41:27 -05:00
parent 36b80038ca
commit f096a06119
2 changed files with 17 additions and 8 deletions

@@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added ask_human tool to allow human operator to answer questions asked by the agent.
 - Handle keyboard interrupt (ctrl-c.)
 - Disable PAGERs for shell commands so agent can work autonomously.
+- Reduce model temperatures to 0.
 ## [0.6.4] - 2024-12-19

@@ -22,24 +22,28 @@ def initialize_llm(provider: str, model_name: str) -> BaseChatModel:
     if provider == "openai":
         return ChatOpenAI(
             api_key=os.getenv("OPENAI_API_KEY"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "anthropic":
         return ChatAnthropic(
             api_key=os.getenv("ANTHROPIC_API_KEY"),
-            model_name=model_name
+            model_name=model_name,
+            temperature=0
         )
     elif provider == "openrouter":
         return ChatOpenAI(
             api_key=os.getenv("OPENROUTER_API_KEY"),
             base_url="https://openrouter.ai/api/v1",
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "openai-compatible":
         return ChatOpenAI(
             api_key=os.getenv("OPENAI_API_KEY"),
             base_url=os.getenv("OPENAI_API_BASE"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     else:
         raise ValueError(f"Unsupported provider: {provider}")
@@ -64,25 +68,29 @@ def initialize_expert_llm(provider: str = "openai", model_name: str = "o1-previe
     if provider == "openai":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "anthropic":
         return ChatAnthropic(
             api_key=os.getenv("EXPERT_ANTHROPIC_API_KEY"),
             model_name=model_name,
-            max_tokens=200000
+            max_tokens=200000,
+            temperature=0
         )
     elif provider == "openrouter":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENROUTER_API_KEY"),
             base_url="https://openrouter.ai/api/v1",
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "openai-compatible":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
             base_url=os.getenv("EXPERT_OPENAI_API_BASE"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     else:
         raise ValueError(f"Unsupported provider: {provider}")
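
For context, below is a minimal standalone sketch (not part of the commit) of the behavior this change targets, using the same langchain_openai ChatOpenAI client the diff configures; the model name and prompt are hypothetical and not taken from the commit.

import os

from langchain_openai import ChatOpenAI

# Hypothetical example mirroring the commit: construct the chat model with
# temperature=0 so responses use (near-)greedy decoding instead of sampling.
llm = ChatOpenAI(
    api_key=os.getenv("OPENAI_API_KEY"),
    model="gpt-4o",   # hypothetical model name, not taken from the commit
    temperature=0,    # the value this commit sets for every provider branch
)

# Repeated calls with the same prompt should now return essentially the same text.
prompt = "Name one prime number between 10 and 20."
first = llm.invoke(prompt)
second = llm.invoke(prompt)
print(first.content == second.content)  # usually True at temperature 0

Temperature 0 is not a hard determinism guarantee from any provider, but it removes sampling randomness, which keeps the agent's autonomous runs more reproducible.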