
Commit fc06be6

support qwen3 models
1 parent 883b513 commit fc06be6

File tree

1 file changed: +32 -1 lines changed

request_llms/bridge_all.py

Lines changed: 32 additions & 1 deletion
@@ -869,7 +869,10 @@ def decode(self, *args, **kwargs):
         logger.error(trimmed_format_exc())

 # -=-=-=-=-=-=- Alibaba Cloud Bailian (Tongyi) - Online Models -=-=-=-=-=-=-
-qwen_models = ["qwen-max-latest", "qwen-max-2025-01-25","qwen-max","qwen-turbo","qwen-plus","dashscope-deepseek-r1","dashscope-deepseek-v3"]
+qwen_models = ["qwen-max-latest", "qwen-max-2025-01-25", "qwen-max", "qwen-turbo", "qwen-plus",
+               "dashscope-deepseek-r1", "dashscope-deepseek-v3",
+               "dashscope-qwen3-14b", "dashscope-qwen3-235b-a22b", "dashscope-qwen3-32b",
+               ]
 if any(item in qwen_models for item in AVAIL_LLM_MODELS):
     try:
         from .bridge_qwen import predict_no_ui_long_connection as qwen_noui
@@ -938,6 +941,34 @@ def decode(self, *args, **kwargs):
                 "max_token": 57344,
                 "tokenizer": tokenizer_gpt35,
                 "token_cnt": get_token_num_gpt35,
+            },
+            "dashscope-qwen3-14b": {
+                "fn_with_ui": qwen_ui,
+                "fn_without_ui": qwen_noui,
+                "enable_reasoning": True,
+                "can_multi_thread": True,
+                "endpoint": None,
+                "max_token": 129024,
+                "tokenizer": tokenizer_gpt35,
+                "token_cnt": get_token_num_gpt35,
+            },
+            "dashscope-qwen3-235b-a22b": {
+                "fn_with_ui": qwen_ui,
+                "fn_without_ui": qwen_noui,
+                "can_multi_thread": True,
+                "endpoint": None,
+                "max_token": 129024,
+                "tokenizer": tokenizer_gpt35,
+                "token_cnt": get_token_num_gpt35,
+            },
+            "dashscope-qwen3-32b": {
+                "fn_with_ui": qwen_ui,
+                "fn_without_ui": qwen_noui,
+                "can_multi_thread": True,
+                "endpoint": None,
+                "max_token": 129024,
+                "tokenizer": tokenizer_gpt35,
+                "token_cnt": get_token_num_gpt35,
             }
         })
     except:
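
Note on usage: the registration block above only runs when one of the names in qwen_models also appears in the user's AVAIL_LLM_MODELS. The sketch below shows how a user might opt in to the newly added Qwen3 entries; it assumes the project's usual AVAIL_LLM_MODELS list lives in config.py, and the "gpt-4o" placeholder is purely illustrative, not part of this commit.

# config.py (hypothetical excerpt, not part of this diff): enable the new Qwen3 models.
# Only names that also appear in qwen_models inside request_llms/bridge_all.py cause
# the dashscope registration block shown above to execute.
AVAIL_LLM_MODELS = [
    "gpt-4o",                      # illustrative existing entry
    "dashscope-qwen3-14b",         # reasoning-enabled Qwen3 entry added by this commit
    "dashscope-qwen3-235b-a22b",
    "dashscope-qwen3-32b",
]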
