Use proper model config for research agent.
parent ae6052ed15
commit 85f56cb75c
@@ -227,7 +227,11 @@ def main():
     # Store config in global memory for access by is_informational_query
     _global_memory['config'] = config
 
+    # Store model configuration
+    _global_memory['config']['provider'] = args.provider
+    _global_memory['config']['model'] = args.model
+
     # Store expert provider and model in config
     _global_memory['config']['expert_provider'] = args.expert_provider
     _global_memory['config']['expert_model'] = args.expert_model
@@ -24,8 +24,9 @@ def request_research(query: str) -> Dict[str, Any]:
         - success: Whether completed or interrupted
         - reason: Reason for failure, if any
     """
-    # Initialize model
-    model = initialize_llm("anthropic", "claude-3-sonnet-20240229")
+    # Initialize model from config
+    config = _global_memory.get('config', {})
+    model = initialize_llm(config.get('provider', 'anthropic'), config.get('model', 'claude-3-5-sonnet-20241022'))
 
     try:
         # Run research agent
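For context, here is a minimal sketch of how the two hunks fit together: main() records the CLI-selected provider/model in a shared config dict, and request_research() reads it back instead of hard-coding the Anthropic model. The internals of initialize_llm are not shown in the diff, so the factory body below (and the langchain chat-model classes it uses) is an assumption for illustration only.

```python
# Sketch of the config flow introduced by this commit.
# Assumptions: _global_memory is a plain module-level dict shared between
# main() and request_research(); initialize_llm() is a thin factory that
# maps a provider name to a chat model (the exact classes are a guess).

from typing import Any, Dict

_global_memory: Dict[str, Any] = {}


def initialize_llm(provider: str, model_name: str):
    """Hypothetical factory: return a chat model for the given provider."""
    if provider == "anthropic":
        from langchain_anthropic import ChatAnthropic
        return ChatAnthropic(model=model_name)
    if provider == "openai":
        from langchain_openai import ChatOpenAI
        return ChatOpenAI(model=model_name)
    raise ValueError(f"Unknown provider: {provider}")


def main(args) -> None:
    # main() stores the CLI selection once in the shared config...
    _global_memory["config"] = {
        "provider": args.provider,
        "model": args.model,
        "expert_provider": args.expert_provider,
        "expert_model": args.expert_model,
    }


def request_research(query: str):
    # ...and the research agent reads it back, falling back to the same
    # defaults shown in the diff when no config has been stored.
    config = _global_memory.get("config", {})
    return initialize_llm(
        config.get("provider", "anthropic"),
        config.get("model", "claude-3-5-sonnet-20241022"),
    )


if __name__ == "__main__":
    # Illustrative values only; the real args come from the CLI parser.
    from argparse import Namespace
    main(Namespace(provider="openai", model="gpt-4o",
                   expert_provider="openai", expert_model="o1"))
    print(_global_memory["config"])
```

The net effect of the commit is that request_research() picks up whatever provider and model were passed on the command line (presumably via --provider/--model), rather than always constructing a hard-coded claude-3-sonnet-20240229 model.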