macadeliccc
committed on
Commit
•
4c6fcfd
1
Parent(s):
23fb710
Update README.md
Browse files
README.md
CHANGED
@@ -10,22 +10,10 @@ model = model.to(device='cuda')
|
|
10 |
tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5', trust_remote_code=True)
|
11 |
model.eval()
|
12 |
|
13 |
-
image = Image.open('xx.jpg').convert('RGB')
|
14 |
question = 'What is in the image?'
|
15 |
msgs = [{'role': 'user', 'content': question}]
|
16 |
|
17 |
-
res = model.chat(
|
18 |
-
image=image,
|
19 |
-
msgs=msgs,
|
20 |
-
tokenizer=tokenizer,
|
21 |
-
sampling=True, # if sampling=False, beam_search will be used by default
|
22 |
-
temperature=0.7,
|
23 |
-
# system_prompt='' # pass system_prompt if needed
|
24 |
-
)
|
25 |
-
print(res)
|
26 |
-
|
27 |
-
## if you want to use streaming, please make sure sampling=True and stream=True
|
28 |
-
## the model.chat will return a generator
|
29 |
res = model.chat(
|
30 |
image=image,
|
31 |
msgs=msgs,
|
|
|
10 |
tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5', trust_remote_code=True)
|
11 |
model.eval()
|
12 |
|
13 |
+
image = Image.open('xx.png').convert('RGB')
|
14 |
question = 'What is in the image?'
|
15 |
msgs = [{'role': 'user', 'content': question}]
|
16 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
res = model.chat(
|
18 |
image=image,
|
19 |
msgs=msgs,
|