Upload folder using huggingface_hub

Files changed:
- README.md (+8, -8)
- conversation.py (+3, -3)
README.md (changed)
@@ -330,7 +330,7 @@ from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig
 from lmdeploy.vl import load_image
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 image = load_image('https://raw.githubusercontent.com/open-mmlab/mmdeploy/main/tests/data/tiger.jpeg')
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
@@ -352,7 +352,7 @@ from lmdeploy.vl import load_image
 from lmdeploy.vl.constants import IMAGE_TOKEN
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
 pipe = pipeline(model, chat_template_config=chat_template_config,
@@ -378,7 +378,7 @@ from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig
 from lmdeploy.vl import load_image
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
 pipe = pipeline(model, chat_template_config=chat_template_config,
@@ -402,7 +402,7 @@ from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig, Genera
 from lmdeploy.vl import load_image
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
 pipe = pipeline(model, chat_template_config=chat_template_config,
@@ -524,7 +524,7 @@ from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig
 from lmdeploy.vl import load_image
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 image = load_image('https://raw.githubusercontent.com/open-mmlab/mmdeploy/main/tests/data/tiger.jpeg')
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
@@ -546,7 +546,7 @@ from lmdeploy.vl import load_image
 from lmdeploy.vl.constants import IMAGE_TOKEN
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
 pipe = pipeline(model, chat_template_config=chat_template_config,
@@ -571,7 +571,7 @@ from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig
 from lmdeploy.vl import load_image
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
 pipe = pipeline(model, chat_template_config=chat_template_config,
@@ -595,7 +595,7 @@ from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig, Genera
 from lmdeploy.vl import load_image
 
 model = 'OpenGVLab/InternVL2-8B'
-system_prompt = '我是书生·万象,英文名是InternVL
+system_prompt = '我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。'
 chat_template_config = ChatTemplateConfig('internvl-internlm2')
 chat_template_config.meta_instruction = system_prompt
 pipe = pipeline(model, chat_template_config=chat_template_config,
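Every README.md hunk above is clipped at the `pipe = pipeline(model, chat_template_config=chat_template_config,` line. For orientation, here is a minimal sketch of how the updated `system_prompt` feeds into the lmdeploy VLM pipeline, following the standard `pipeline` / `ChatTemplateConfig` usage from the lmdeploy docs; the `TurbomindEngineConfig` settings and the query text are illustrative assumptions, not part of this diff.

```python
from lmdeploy import pipeline, TurbomindEngineConfig, ChatTemplateConfig
from lmdeploy.vl import load_image

model = 'OpenGVLab/InternVL2-8B'
# Updated system prompt introduced by this commit (roughly: "I am InternVL
# (书生·万象), a multimodal large language model jointly developed by Shanghai
# AI Laboratory and partner institutions ...").
system_prompt = ('我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。'
                 '人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。')

# The chat template carries the system prompt as its meta_instruction.
chat_template_config = ChatTemplateConfig('internvl-internlm2')
chat_template_config.meta_instruction = system_prompt

# Illustrative backend settings; session_len=8192 is an assumption, not from the diff.
pipe = pipeline(model,
                chat_template_config=chat_template_config,
                backend_config=TurbomindEngineConfig(session_len=8192))

# Single-image query; the image URL matches the one used in the README hunks.
image = load_image('https://raw.githubusercontent.com/open-mmlab/mmdeploy/main/tests/data/tiger.jpeg')
response = pipe(('describe this image', image))
print(response.text)
```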
conversation.py (changed)
@@ -336,7 +336,7 @@ register_conv_template(
 name='Hermes-2',
 system_template='<|im_start|>system\n{system_message}',
 # note: The new system prompt was not used here to avoid changes in benchmark performance.
-# system_message='我是书生·万象,英文名是InternVL
+# system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。',
 system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
 roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
 sep_style=SeparatorStyle.MPT,
@@ -357,7 +357,7 @@ register_conv_template(
 name='internlm2-chat',
 system_template='<|im_start|>system\n{system_message}',
 # note: The new system prompt was not used here to avoid changes in benchmark performance.
-# system_message='我是书生·万象,英文名是InternVL
+# system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。',
 system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
 roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
 sep_style=SeparatorStyle.MPT,
@@ -376,7 +376,7 @@ register_conv_template(
 name='phi3-chat',
 system_template='<|system|>\n{system_message}',
 # note: The new system prompt was not used here to avoid changes in benchmark performance.
-# system_message='我是书生·万象,英文名是InternVL
+# system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室及多家合作单位联合开发的多模态大语言模型。人工智能实验室致力于原始技术创新,开源开放,共享共创,推动科技进步和产业发展。',
 system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
 roles=('<|user|>\n', '<|assistant|>\n'),
 sep_style=SeparatorStyle.MPT,
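The changed lines above sit inside `register_conv_template(Conversation(...))` calls; the new, longer prompt is only added as a comment, and the active `system_message` is left untouched (per the note about benchmark performance). As a rough illustration of how one of these registered templates renders the `system_message` into a prompt, here is a sketch assuming the FastChat-style `get_conv_template` / `append_message` / `get_prompt` helpers that this `conversation.py` follows; the import path and the user message are placeholders, not part of this diff.

```python
# Hypothetical usage: assumes conversation.py from the model folder is on the
# import path and exposes the FastChat-style helpers referenced in the hunks.
from conversation import get_conv_template

conv = get_conv_template('internlm2-chat')      # template registered above
conv.append_message(conv.roles[0], 'Hello!')    # '<|im_start|>user\n' turn
conv.append_message(conv.roles[1], None)        # leave the assistant turn open
prompt = conv.get_prompt()

# The prompt begins with the system block built from system_template and the
# unchanged system_message, e.g.:
# <|im_start|>system
# 你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。
print(prompt)
```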