fengguo21 committed on
Commit
e5548d9
1 Parent(s): a98ed14
Files changed (1) hide show
  1. app.py +36 -0
app.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from transformers import AutoProcessor, BlipForConditionalGeneration
from PIL import Image
import requests
import gradio as gr

# BLIP image-captioning checkpoint, loaded ONCE at startup so request
# handlers do not have to re-download the weights on every call.
CHECKPOINT = "Salesforce/blip-image-captioning-base"
processor = AutoProcessor.from_pretrained(CHECKPOINT)
model = BlipForConditionalGeneration.from_pretrained(CHECKPOINT)

# Startup smoke test: caption one sample COCO image so a broken model
# install fails fast instead of at the first user request.
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
# timeout= so a stalled download cannot hang app startup forever
sample_image = Image.open(requests.get(url, stream=True, timeout=30).raw).convert("RGB")
# Distinct name for the tensor batch — don't shadow the PIL image.
inputs = processor(sample_image, return_tensors="pt")
generated_ids = model.generate(**inputs)
generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
print(generated_text)
22
def launch(input):
    """Generate a caption for the image at URL *input*.

    Parameters:
        input: str — HTTP(S) URL of an image.

    Returns:
        str — the BLIP-generated caption, whitespace-stripped.
    """
    # Reuse the module-level `processor`/`model` loaded at import time
    # instead of re-downloading the checkpoint on every request, which the
    # original body did and which made each call take minutes.
    # timeout= so one bad URL cannot hang the request handler forever.
    pil_image = Image.open(requests.get(input, stream=True, timeout=30).raw).convert("RGB")
    inputs = processor(pil_image, return_tensors="pt")
    generated_ids = model.generate(**inputs)
    return processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
31
+
32
+
33
# Expose the captioner as a minimal text-in (image URL) → text-out web UI.
iface = gr.Interface(
    fn=launch,
    inputs="text",
    outputs="text",
)
iface.launch()
36
+