Spaces:
Sleeping
Sleeping
Tuchuanhuhuhu
committed on
Commit
·
8b610a5
1
Parent(s):
3cd91ed
bugfix: 选择LoRA模型时出错
Browse files — modules/models/models.py +2 -3
modules/models/models.py
CHANGED
@@ -228,7 +228,7 @@ class OpenAIClient(BaseLLMModel):
|
|
228 |
ret = super().set_key(new_access_key)
|
229 |
self._refresh_header()
|
230 |
return ret
|
231 |
-
|
232 |
# def auto_name_chat_history(self, user_question, chatbot, user_name):
|
233 |
# return super().auto_name_chat_history(user_question, chatbot, user_name)
|
234 |
|
@@ -583,8 +583,7 @@ def get_model(
|
|
583 |
logging.info(msg)
|
584 |
lora_selector_visibility = True
|
585 |
if os.path.isdir("lora"):
|
586 |
-
|
587 |
-
"lora", filetypes=[""])
|
588 |
lora_choices = ["No LoRA"] + lora_choices
|
589 |
elif model_type == ModelType.LLaMA and lora_model_path != "":
|
590 |
logging.info(f"正在加载LLaMA模型: {model_name} + {lora_model_path}")
|
|
|
228 |
ret = super().set_key(new_access_key)
|
229 |
self._refresh_header()
|
230 |
return ret
|
231 |
+
|
232 |
# def auto_name_chat_history(self, user_question, chatbot, user_name):
|
233 |
# return super().auto_name_chat_history(user_question, chatbot, user_name)
|
234 |
|
|
|
583 |
logging.info(msg)
|
584 |
lora_selector_visibility = True
|
585 |
if os.path.isdir("lora"):
|
586 |
+
get_file_names_by_pinyin("lora", filetypes=[""])
|
|
|
587 |
lora_choices = ["No LoRA"] + lora_choices
|
588 |
elif model_type == ModelType.LLaMA and lora_model_path != "":
|
589 |
logging.info(f"正在加载LLaMA模型: {model_name} + {lora_model_path}")
|