tokens for qwq

AI Christianson 2025-03-08 08:56:16 -05:00
parent b4d7cd30ce
commit c1b84cd645
1 changed file with 2 additions and 2 deletions


@@ -167,11 +167,11 @@ models_params = {
     },
     "openai-compatible": {
         "qwen-qwq-32b": {
-            "token_limit": 130000,
+            "token_limit": 131072,
             "supports_think_tag": True,
             "supports_temperature": True,
             "latency_coefficient": DEFAULT_BASE_LATENCY,
-            "max_tokens": 130000,
+            "max_tokens": 131072,
         }
     },
     "azure_openai": {