handle local LLM dependency error properly

binary-husky
2023-08-07 02:11:48 +08:00
parent c17fc2a9b5
commit 184e417fec
2 changed files with 4 additions and 2 deletions

@@ -58,8 +58,8 @@ class GetONNXGLMHandle(LocalLLMHandle):
     def try_to_import_special_deps(self, **kwargs):
         # import something that will raise an error if the user has not installed requirement_*.txt
         # 🏃‍♂️🏃‍♂️🏃‍♂️ executed in the main process
-        # from modelscope import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
-        pass
+        import importlib
+        importlib.import_module('modelscope')
 # ------------------------------------------------------------------------------------------------------------------------
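For context, the change works because importlib.import_module raises ModuleNotFoundError (a subclass of ImportError) when the named package is absent, so the dependency check now actually fires instead of silently passing. Below is a minimal sketch of this probe pattern, runnable outside the repository; the check_optional_dep helper name and its error message are illustrative and not part of the commit.

import importlib

def check_optional_dep(module_name: str, requirements_hint: str) -> None:
    """Hypothetical helper (not in the commit): probe an optional dependency.

    importlib.import_module raises ModuleNotFoundError (a subclass of
    ImportError) if module_name is not installed, so calling this in the
    main process fails fast instead of crashing the model subprocess later.
    """
    try:
        importlib.import_module(module_name)
    except ImportError as exc:
        raise ImportError(
            f"Missing optional dependency '{module_name}'. "
            f"Install it via: pip install -r {requirements_hint}"
        ) from exc

# Example: mirrors the commit's probe for the modelscope package; the
# requirement_*.txt filename pattern comes from the code comment above.
check_optional_dep('modelscope', 'requirement_*.txt')

The design point is that the probe runs in the main process before any model subprocess is spawned, so a missing package surfaces as an immediate, catchable ImportError rather than a failure deep inside the worker.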