Spaces: Running on Zero
jadechoghari committed
Commit • e3feb3f
1 Parent(s): e362ceb
add fixes
Files changed:
- 8b23f327b90b6211049acd36e3f99975.jpg +0 -0
- __pycache__/builder.cpython-310.pyc +0 -0
- __pycache__/conversation.cpython-310.pyc +0 -0
- __pycache__/inference.cpython-310.pyc +0 -0
- __pycache__/mm_utils.cpython-310.pyc +0 -0
- __pycache__/model_UI.cpython-310.pyc +0 -0
- __pycache__/utils.cpython-310.pyc +0 -0
- app.py +25 -16
- eval.json +1 -1
- eval_output.jsonl/0_of_1.jsonl +1 -0
- gradio_web_server.log +0 -0
- serve_images/2024-10-20/8b23f327b90b6211049acd36e3f99975.jpg +0 -0
8b23f327b90b6211049acd36e3f99975.jpg
ADDED
__pycache__/builder.cpython-310.pyc
CHANGED
Binary files a/__pycache__/builder.cpython-310.pyc and b/__pycache__/builder.cpython-310.pyc differ
__pycache__/conversation.cpython-310.pyc
CHANGED
Binary files a/__pycache__/conversation.cpython-310.pyc and b/__pycache__/conversation.cpython-310.pyc differ
__pycache__/inference.cpython-310.pyc
CHANGED
Binary files a/__pycache__/inference.cpython-310.pyc and b/__pycache__/inference.cpython-310.pyc differ
__pycache__/mm_utils.cpython-310.pyc
CHANGED
Binary files a/__pycache__/mm_utils.cpython-310.pyc and b/__pycache__/mm_utils.cpython-310.pyc differ
__pycache__/model_UI.cpython-310.pyc
CHANGED
Binary files a/__pycache__/model_UI.cpython-310.pyc and b/__pycache__/model_UI.cpython-310.pyc differ
__pycache__/utils.cpython-310.pyc
CHANGED
Binary files a/__pycache__/utils.cpython-310.pyc and b/__pycache__/utils.cpython-310.pyc differ
app.py
CHANGED
@@ -216,13 +216,14 @@ def http_bot(state, model_selector, temperature, top_p, max_new_tokens, request:
 
     # Construct prompt
     prompt = state.get_prompt()
-
+    dir_path = "./"
     all_images = state.get_images(return_pil=True)
     all_image_hash = [hashlib.md5(image.tobytes()).hexdigest() for image in all_images]
     for image, hash in zip(all_images, all_image_hash):
         t = datetime.datetime.now()
-        dir_path = os.path.join(LOGDIR, "serve_images", f"{t.year}-{t.month:02d}-{t.day:02d}")
+        # dir_path = os.path.join(LOGDIR, "serve_images", f"{t.year}-{t.month:02d}-{t.day:02d}")
         # filename = os.path.join(LOGDIR, "serve_images", f"{t.year}-{t.month:02d}-{t.day:02d}", f"{hash}.jpg")
+        # filename = os.path.join(dir_path, f"{hash}.jpg")
         filename = os.path.join(dir_path, f"{hash}.jpg")
         if not os.path.isfile(filename):
             os.makedirs(os.path.dirname(filename), exist_ok=True)
@@ -251,7 +252,7 @@ def http_bot(state, model_selector, temperature, top_p, max_new_tokens, request:
             # headers=headers, json=pload, stream=True, timeout=10)
         stop = state.sep if state.sep_style in [SeparatorStyle.SINGLE, SeparatorStyle.MPT] else state.sep2
         #TODO: define inference and run function
-
+        extracted_texts = inference_and_run(
             image_path=filename, # double check this
             image_dir=dir_path,
             prompt=prompt,
@@ -264,19 +265,27 @@ def http_bot(state, model_selector, temperature, top_p, max_new_tokens, request:
         )
         response = extracted_texts
         logger.info(f"This is the respone {response}")
-
-
-
-
-
-
-
-
-
-
-
-
-
+        delay=0.01
+        # for chunk in response.iter_lines(decode_unicode=False, delimiter=b"\0"):
+        #     if chunk:
+        #         data = json.loads(chunk.decode())
+        #         if data["error_code"] == 0:
+        #             output = data["text"][len(prompt):].strip()
+        #             state.messages[-1][-1] = output + "▌"
+        #             yield (state, state.to_gradio_chatbot()) + (disable_btn,) * 5
+        #         else:
+        #             output = data["text"] + f" (error_code: {data['error_code']})"
+        #             state.messages[-1][-1] = output
+        #             yield (state, state.to_gradio_chatbot()) + (disable_btn, disable_btn, disable_btn, enable_btn, enable_btn)
+        #             return
+        #     time.sleep(0.03)
+        text = response[0]
+        output = ""  # Will hold the progressively built output
+        for i, char in enumerate(text):
+            output += char
+            state.messages[-1][-1] = output + "▌"  # Add cursor ▌ at the end
+            yield (state, state.to_gradio_chatbot()) + (disable_btn,) * 5
+            time.sleep(delay)  # Control typing speed with delay
     except requests.exceptions.RequestException as e:
         state.messages[-1][-1] = server_error_msg
         yield (state, state.to_gradio_chatbot()) + (disable_btn, disable_btn, disable_btn, enable_btn, enable_btn)
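For context on what the new lines above do: the model's full answer is replayed into the Gradio chatbot one character at a time with a trailing cursor, which is dropped once typing finishes. Below is a minimal, self-contained sketch of that typing-effect pattern; the function fake_typing and its arguments are illustrative placeholders, not code from this Space.

import time

def fake_typing(full_text, delay=0.01, cursor="▌"):
    # Yield progressively longer prefixes of full_text with a trailing cursor,
    # mirroring how app.py updates state.messages[-1][-1] for every character.
    output = ""
    for char in full_text:
        output += char
        yield output + cursor      # partial text shown while "typing"
        time.sleep(delay)          # controls the typing speed
    yield output                   # final yield without the cursor

if __name__ == "__main__":
    for partial in fake_typing("explain what you see", delay=0.0):
        pass
    print(partial)                 # prints the complete text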
eval.json
CHANGED
@@ -1 +1 @@
-[{"id": 0, "image": "8b23f327b90b6211049acd36e3f99975.jpg", "image_h": 433, "image_w": 400, "conversations": [{"from": "human", "value": "<image>\nA chat between a human and an AI that understands visuals. In images, [x, y] denotes points: top-left [0, 0], bottom-right [width-1, height-1]. Increasing x moves right; y moves down. Bounding box: [x1, y1, x2, y2]. Image size: 1000x1000. Follow instructions.<start_of_turn>user\n<image>\
+[{"id": 0, "image": "8b23f327b90b6211049acd36e3f99975.jpg", "image_h": 433, "image_w": 400, "conversations": [{"from": "human", "value": "<image>\nA chat between a human and an AI that understands visuals. In images, [x, y] denotes points: top-left [0, 0], bottom-right [width-1, height-1]. Increasing x moves right; y moves down. Bounding box: [x1, y1, x2, y2]. Image size: 1000x1000. Follow instructions.<start_of_turn>user\n<image>\nexplain what you see<end_of_turn>\n<start_of_turn>model\n"}]}]
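A small sketch of how an eval.json entry with this shape could be assembled with Python's standard json module; the helper make_eval_entry and its parameters are hypothetical, only the field names and the prompt template come from the entry above.

import json

def make_eval_entry(image_name, width, height, user_prompt, system_prompt):
    # Rebuild the "value" string the same way the entry above is laid out.
    conversation = (
        f"<image>\n{system_prompt}<start_of_turn>user\n<image>\n"
        f"{user_prompt}<end_of_turn>\n<start_of_turn>model\n"
    )
    return {
        "id": 0,
        "image": image_name,
        "image_h": height,
        "image_w": width,
        "conversations": [{"from": "human", "value": conversation}],
    }

entry = make_eval_entry(
    "8b23f327b90b6211049acd36e3f99975.jpg", 400, 433,
    "explain what you see",
    "A chat between a human and an AI that understands visuals. ...",
)
with open("eval.json", "w") as f:
    json.dump([entry], f)   # eval.json holds a list of entries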
eval_output.jsonl/0_of_1.jsonl
ADDED
@@ -0,0 +1 @@
+{"id": 0, "image_path": "8b23f327b90b6211049acd36e3f99975.jpg", "prompt": "A chat between a human and an AI that understands visuals. In images, [x, y] denotes points: top-left [0, 0], bottom-right [width-1, height-1]. Increasing x moves right; y moves down. Bounding box: [x1, y1, x2, y2]. Image size: 1000x1000. Follow instructions.<start_of_turn>user", "text": "A chat between a human and an unknown entity. \n\nThe conversation starts with a message from Jackyline Herrera saying, \"Ask Jackie to borrow her truck\". The reply is, \"Get gravel for bow, walk, 10, 1, 1, Shopping List\". \n\nThe next message is from Get Gravel for the truck, and the reply is, \"Buy mulch, #shoppinglist\". \n\nThe third message is from Buy mulch for the garden, and the reply is, \"Pick up succulents\". \n\nThe fourth message is from Pick up succulents for the garden, and the reply is, \"Buy soil for succulents\". \n\nThe fifth message is from Buy soil for succulents, and the reply is, \"Pick up soil for succulents\". \n\nThe sixth message is from Pick up succulents for the garden, and the reply is, \"Pick up soil for succulents\". \n\nThe seventh message is from Pick up succulents for the garden, and the reply is, \"Pick up soil for succulents\". \n\nThe eighth message is from Pick up succulents for the garden, and the reply is, \"Pick up soil for succulents\". \n\nThe ninth message is from Pick up succulents for the garden, and the reply is, \"Look up native vegetables along the fence\". \n\nThe tenth message is from Shopping List, and the reply is, \"Shopping List\". \n\nThe message at the bottom is from Shopping List, and the reply is, \"Look up native vegetables along the fence\". \n\nThe message at the very bottom is from Shopping List, and the reply is, \"Looking: Fran\".", "label": null}
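A minimal sketch of reading that shard back, assuming the standard JSON Lines convention of one object per line; the keys id, image_path, prompt, text, and label come from the record above.

import json

records = []
with open("eval_output.jsonl/0_of_1.jsonl") as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        records.append(json.loads(line))

for rec in records:
    # "text" holds the model's answer; "label" is null for this unlabeled sample
    print(rec["id"], rec["image_path"], rec["text"][:80])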
gradio_web_server.log
CHANGED
The diff for this file is too large to render.
serve_images/2024-10-20/8b23f327b90b6211049acd36e3f99975.jpg
ADDED