Reduce model temperatures to 0.
parent 36b80038ca
commit f096a06119

@@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added ask_human tool to allow human operator to answer questions asked by the agent.
 - Handle keyboard interrupt (ctrl-c.)
 - Disable PAGERs for shell commands so agent can work autonomously.
+- Reduce model temperatures to 0.
 
 ## [0.6.4] - 2024-12-19
 
@@ -22,24 +22,28 @@ def initialize_llm(provider: str, model_name: str) -> BaseChatModel:
     if provider == "openai":
         return ChatOpenAI(
             api_key=os.getenv("OPENAI_API_KEY"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "anthropic":
         return ChatAnthropic(
             api_key=os.getenv("ANTHROPIC_API_KEY"),
-            model_name=model_name
+            model_name=model_name,
+            temperature=0
         )
     elif provider == "openrouter":
         return ChatOpenAI(
             api_key=os.getenv("OPENROUTER_API_KEY"),
             base_url="https://openrouter.ai/api/v1",
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "openai-compatible":
         return ChatOpenAI(
             api_key=os.getenv("OPENAI_API_KEY"),
             base_url=os.getenv("OPENAI_API_BASE"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     else:
         raise ValueError(f"Unsupported provider: {provider}")

@@ -64,25 +68,29 @@ def initialize_expert_llm(provider: str = "openai", model_name: str = "o1-previe
     if provider == "openai":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "anthropic":
         return ChatAnthropic(
             api_key=os.getenv("EXPERT_ANTHROPIC_API_KEY"),
             model_name=model_name,
-            max_tokens=200000
+            max_tokens=200000,
+            temperature=0
         )
     elif provider == "openrouter":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENROUTER_API_KEY"),
             base_url="https://openrouter.ai/api/v1",
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     elif provider == "openai-compatible":
         return ChatOpenAI(
             api_key=os.getenv("EXPERT_OPENAI_API_KEY"),
             base_url=os.getenv("EXPERT_OPENAI_API_BASE"),
-            model=model_name
+            model=model_name,
+            temperature=0
         )
     else:
         raise ValueError(f"Unsupported provider: {provider}")
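
Below is a minimal, standalone sketch (not part of the commit) of the pattern this change applies: each provider client is now constructed with temperature=0 so completions are as deterministic as the backing API allows. The import path and model name here are illustrative assumptions rather than code taken from the repository.

# Standalone sketch of the temperature=0 pattern introduced by this commit.
# Assumes the langchain-openai package; the model name below is illustrative.
import os

from langchain_openai import ChatOpenAI


def make_deterministic_llm(model_name: str) -> ChatOpenAI:
    # temperature=0 minimizes sampling randomness, so repeated agent runs
    # with the same prompt produce near-identical output.
    return ChatOpenAI(
        api_key=os.getenv("OPENAI_API_KEY"),
        model=model_name,
        temperature=0,
    )


if __name__ == "__main__":
    llm = make_deterministic_llm("gpt-4o-mini")
    print(llm.invoke("Reply with the single word: ready").content)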