Merge branch 'master' into frontier

binary-husky
2024-06-10 14:19:26 +00:00
3 changed files with 34 additions and 1 deletion

@@ -41,6 +41,7 @@ AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-p
# --- --- --- ---
# P.S. Other available models also include
# AVAIL_LLM_MODELS = [
# "glm-4-0520", "glm-4-air", "glm-4-airx", "glm-4-flash",
# "qianfan", "deepseekcoder",
# "spark", "sparkv2", "sparkv3", "sparkv3.5",
# "qwen-turbo", "qwen-plus", "qwen-max", "qwen-local",

@@ -306,6 +306,38 @@ model_info = {
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
"glm-4-0520": {
"fn_with_ui": zhipu_ui,
"fn_without_ui": zhipu_noui,
"endpoint": None,
"max_token": 10124 * 8,
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
"glm-4-air": {
"fn_with_ui": zhipu_ui,
"fn_without_ui": zhipu_noui,
"endpoint": None,
"max_token": 10124 * 8,
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
"glm-4-airx": {
"fn_with_ui": zhipu_ui,
"fn_without_ui": zhipu_noui,
"endpoint": None,
"max_token": 10124 * 8,
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
"glm-4-flash": {
"fn_with_ui": zhipu_ui,
"fn_without_ui": zhipu_noui,
"endpoint": None,
"max_token": 10124 * 8,
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
"glm-4v": {
"fn_with_ui": zhipu_ui,
"fn_without_ui": zhipu_noui,

@@ -965,7 +965,7 @@ async function GptAcademicJavaScriptInit(dark, prompt, live2d, layout, tts) {
if (getCookie("js_md_dropdown_cookie")) {
const cached_model = getCookie("js_md_dropdown_cookie");
var model_sel = await get_gradio_component("elem_model_sel");
-        // deterine whether the cached model is in the choices
+        // determine whether the cached model is in the choices
if (model_sel.props.choices.includes(cached_model)){
// change dropdown
gpt_academic_gradio_saveload("load", "elem_model_sel", "js_md_dropdown_cookie", null, "str");