Upload 6 files
- mindsearch/agent/__init__.py +2 -2
- mindsearch/agent/models.py +19 -2
- mindsearch/app.py +5 -1
mindsearch/agent/__init__.py
CHANGED
@@ -17,7 +17,7 @@ from mindsearch.agent.mindsearch_prompt import (
 LLM = {}
 
 
-def init_agent(lang='cn', model_format='internlm_server'):
+def init_agent(lang='cn', model_format='internlm_server',search_engine='DuckDuckGoSearch'):
     llm = LLM.get(model_format, None)
     if llm is None:
         llm_cfg = getattr(llm_factory, model_format)
@@ -43,7 +43,7 @@ def init_agent(lang='cn', model_format='internlm_server'):
         searcher_cfg=dict(
             llm=llm,
             plugin_executor=ActionExecutor(
-                BingBrowser(searcher_type=
+                BingBrowser(searcher_type=search_engine,
                             topk=6,
                             api_key=os.environ.get('BING_API_KEY',
                                                    'YOUR BING API'))),
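For reference, the new search_engine argument is forwarded verbatim to BingBrowser(searcher_type=...), so callers of init_agent can switch searchers without editing the agent code. A minimal sketch of the call path, assuming the import shown in this repo; only 'DuckDuckGoSearch' appears in this diff, and any other searcher_type value would be an assumption about what lagent's BingBrowser accepts:

# Sketch only: 'DuckDuckGoSearch' is the default taken from this diff; other
# searcher_type strings depend on what lagent's BingBrowser actually supports.
from mindsearch.agent import init_agent

# Same behaviour as before this change: DuckDuckGo search, Chinese prompts.
agent = init_agent()

# Explicit selection; the string is passed straight through to
# BingBrowser(searcher_type=search_engine, ...).
agent = init_agent(lang='en',
                   model_format='internlm_server',
                   search_engine='DuckDuckGoSearch')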
mindsearch/agent/models.py
CHANGED
@@ -34,11 +34,11 @@ internlm_hf = dict(type=HFTransformerCasualLM,
                    max_new_tokens=8192,
                    repetition_penalty=1.02,
                    stop_words=['<|im_end|>'])
-
+# openai_api_base needs to fill in the complete chat api address, such as: https://api.openai.com/v1/chat/completions
 gpt4 = dict(type=GPTAPI,
             model_type='gpt-4-turbo',
             key=os.environ.get('OPENAI_API_KEY', 'YOUR OPENAI API KEY'),
-            openai_api_base=os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1'),
+            openai_api_base=os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1/chat/completions'),
             )
 
 url = 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'
@@ -58,3 +58,20 @@ qwen = dict(type=GPTAPI,
             max_new_tokens=4096,
             repetition_penalty=1.02,
             stop_words=['<|im_end|>'])
+
+internlm_silicon = dict(type=GPTAPI,
+                        model_type='internlm/internlm2_5-7b-chat',
+                        key=os.environ.get('SILICON_API_KEY', 'YOUR SILICON API KEY'),
+                        openai_api_base='https://api.siliconflow.cn/v1/chat/completions',
+                        meta_template=[
+                            dict(role='system', api_role='system'),
+                            dict(role='user', api_role='user'),
+                            dict(role='assistant', api_role='assistant'),
+                            dict(role='environment', api_role='system')
+                        ],
+                        top_p=0.8,
+                        top_k=1,
+                        temperature=0,
+                        max_new_tokens=8192,
+                        repetition_penalty=1.02,
+                        stop_words=['<|im_end|>'])
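The new internlm_silicon entry follows the same convention as the other configs in models.py: a plain dict whose type is the lagent client class (GPTAPI) and whose remaining keys become constructor arguments, with the SiliconFlow key read from SILICON_API_KEY at import time. A minimal sketch of how such a config might be materialised; the build_llm helper below is hypothetical and only mirrors what init_agent presumably does via getattr(llm_factory, model_format):

# Hypothetical helper, not part of the repo: assumes the config is consumed by
# popping 'type' and forwarding the remaining keys as keyword arguments.
from copy import deepcopy

# SILICON_API_KEY must be exported before this import, because models.py reads
# it at import time via os.environ.get('SILICON_API_KEY', ...).
from mindsearch.agent import models as llm_factory


def build_llm(model_format='internlm_silicon'):
    cfg = deepcopy(getattr(llm_factory, model_format))
    llm_cls = cfg.pop('type')    # GPTAPI for internlm_silicon
    return llm_cls(**cfg)        # remaining keys become constructor kwargs


llm = build_llm('internlm_silicon')  # OpenAI-compatible client for SiliconFlow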
mindsearch/app.py
CHANGED
@@ -23,6 +23,10 @@ def parse_arguments():
                         default='internlm_server',
                         type=str,
                         help='Model format')
+    parser.add_argument('--search_engine',
+                        default='DuckDuckGoSearch',
+                        type=str,
+                        help='Search engine')
     return parser.parse_args()
 
 
@@ -123,7 +127,7 @@ async def run(request: GenerationParams):
         await queue.wait_closed()
 
     inputs = request.inputs
-    agent = init_agent(lang=args.lang, model_format=args.model_format)
+    agent = init_agent(lang=args.lang, model_format=args.model_format,search_engine=args.search_engine)
     return EventSourceResponse(generate())
 
 
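End to end, the three files wire one new CLI flag (--search_engine) and one new model config (internlm_silicon) through to init_agent. A launch sketch using both; the python -m mindsearch.app entry point and the --lang flag are assumptions inferred from the existing argument parser and the args.lang usage visible in this diff, not something the diff itself shows:

# Launch sketch exercising the new options; entry point assumed, not verified
# against the full repo.
import subprocess
import sys

subprocess.run([
    sys.executable, '-m', 'mindsearch.app',
    '--lang', 'en',                          # consumed via args.lang in init_agent
    '--model_format', 'internlm_silicon',    # config added in models.py
    '--search_engine', 'DuckDuckGoSearch',   # flag added in app.py
], check=True)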