Handle local LLM dependency error properly
@@ -58,8 +58,8 @@ class GetONNXGLMHandle(LocalLLMHandle):
 
     def try_to_import_special_deps(self, **kwargs):
         # import something that will raise error if the user does not install requirement_*.txt
         # 🏃♂️🏃♂️🏃♂️ executed in the main process
         # from modelscope import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
-        pass
+        import importlib
+        importlib.import_module('modelscope')
 
 
 # ------------------------------------------------------------------------------------------------------------------------
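
The change replaces the no-op `pass` with a real `importlib.import_module('modelscope')` call, so a missing optional package fails immediately inside `try_to_import_special_deps` instead of surfacing later. As a minimal sketch of the same idea (not the project's actual code: `check_optional_dependency` is a hypothetical helper, and the printed hint text is illustrative), the probe-and-report pattern looks like this:

    import importlib

    def check_optional_dependency(module_name: str, requirement_hint: str) -> bool:
        # Try to import the optional package by name; a missing install raises ImportError.
        try:
            importlib.import_module(module_name)
            return True
        except ImportError:
            # Turn the hard failure into an actionable message for the user.
            print(f"Missing dependency '{module_name}'. Please install it, e.g. via {requirement_hint}")
            return False

    if __name__ == "__main__":
        # 'modelscope' is the package probed in the diff above; the hint mirrors the comment's requirement_*.txt wording.
        check_optional_dependency("modelscope", "pip install -r requirement_*.txt")

The design point is the same as in the diff: importing the module by name is the cheapest reliable check that the dependency is actually usable, and catching (or deliberately letting propagate) the resulting ImportError is what lets the caller report the problem properly.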