tokens for qwq
This commit is contained in:
parent b4d7cd30ce
commit c1b84cd645
@@ -167,11 +167,11 @@ models_params = {
     },
     "openai-compatible": {
         "qwen-qwq-32b": {
-            "token_limit": 130000,
+            "token_limit": 131072,
             "supports_think_tag": True,
             "supports_temperature": True,
             "latency_coefficient": DEFAULT_BASE_LATENCY,
-            "max_tokens": 130000,
+            "max_tokens": 131072,
         }
     },
     "azure_openai": {
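For context, the sketch below shows how a models_params entry like this one might be consumed. The provider key, model key, and field names are taken from the diff; the value of DEFAULT_BASE_LATENCY and the clamp_max_tokens helper are assumptions made only for illustration, not the repository's actual code.

# Minimal sketch, assuming the structure shown in the diff above.
# DEFAULT_BASE_LATENCY's value and clamp_max_tokens are hypothetical.

DEFAULT_BASE_LATENCY = 1.0  # assumed placeholder, not from the diff

models_params = {
    "openai-compatible": {
        "qwen-qwq-32b": {
            "token_limit": 131072,  # 2**17, the value this commit introduces
            "supports_think_tag": True,
            "supports_temperature": True,
            "latency_coefficient": DEFAULT_BASE_LATENCY,
            "max_tokens": 131072,
        }
    },
}

def clamp_max_tokens(provider: str, model: str, requested: int) -> int:
    """Hypothetical helper: cap a requested completion size at the model's configured limit."""
    return min(requested, models_params[provider][model]["max_tokens"])

# With the previous 130000 limit, 1072 tokens of the 2**17 context window
# went unused; after this change the full window is available.
print(clamp_max_tokens("openai-compatible", "qwen-qwq-32b", 200000))  # -> 131072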