Spaces:
Paused
Paused
File size: 3,502 Bytes
a84a5a1 e5c9ee0 326115c a84a5a1 e5c9ee0 a84a5a1 c5fe4a2 a84a5a1 acec8bf 326115c c5fe4a2 3bd20e4 a84a5a1 3bd20e4 c5fe4a2 3bd20e4 c5fe4a2 a84a5a1 3bd20e4 a84a5a1 3bd20e4 a84a5a1 c5fe4a2 a84a5a1 ca06190 c5fe4a2 a84a5a1 c5fe4a2 ca06190 a84a5a1 c5fe4a2 a84a5a1 f623930 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 |
import argparse
import cv2
import gradio as gr
import json
import logging
import nh3
import numpy as np
import os
import re
import sys
import torch
import torch.nn.functional as F
from fastapi import FastAPI, File, UploadFile, Request
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from transformers import AutoTokenizer, BitsAndBytesConfig, CLIPImageProcessor
from typing import Callable
from model.LISA import LISAForCausalLM
from model.llava import conversation as conversation_lib
from model.llava.mm_utils import tokenizer_image_token
from model.segment_anything.utils.transforms import ResizeLongestSide
from utils import session_logger
from utils.utils import (DEFAULT_IM_END_TOKEN, DEFAULT_IM_START_TOKEN,
DEFAULT_IMAGE_TOKEN, IMAGE_TOKEN_INDEX)
# Route all log records through the per-session UUID logger (see utils.session_logger).
session_logger.change_logging(logging.DEBUG)
# Path the Gradio UI is mounted at (web root).
CUSTOM_GRADIO_PATH = "/"
app = FastAPI(title="lisa_app", version="1.0")
# Directory of static assets served under /static, taken from the environment.
# NOTE(review): os.getenv returns None when FASTAPI_STATIC is unset, and
# os.makedirs(None) raises TypeError at import time — confirm the variable is
# always set in deployment, or give it an explicit default.
FASTAPI_STATIC = os.getenv("FASTAPI_STATIC")
os.makedirs(FASTAPI_STATIC, exist_ok=True)
app.mount("/static", StaticFiles(directory=FASTAPI_STATIC), name="static")
templates = Jinja2Templates(directory="templates")
@app.get("/health")
@session_logger.set_uuid_logging
def health() -> str:
    """Liveness probe endpoint.

    Returns:
        A JSON-encoded string: '{"msg": "ok"}' on success, or
        '{"msg": "request failed"}' if anything unexpected goes wrong
        (the endpoint never raises to the client).
    """
    try:
        logging.info("health check")
        return json.dumps({"msg": "ok"})
    except Exception:
        # Broad catch is deliberate at this endpoint boundary; use
        # logging.exception so the full traceback is recorded, not just
        # the exception's repr.
        logging.exception("health check failed")
        return json.dumps({"msg": "request failed"})
@session_logger.set_uuid_logging
def get_cleaned_input(input_str):
    """Sanitize user-supplied text with nh3, keeping only basic formatting HTML.

    Allowed markup is restricted to a small allow-list of inline/list tags,
    a handful of attributes, and http/https/mailto URL schemes; everything
    else is stripped. Returns the cleaned string.
    """
    allowed_tags = {
        "a", "abbr", "acronym", "b", "blockquote", "code",
        "em", "i", "li", "ol", "strong", "ul",
    }
    allowed_attributes = {
        "a": {"href", "title"},
        "abbr": {"title"},
        "acronym": {"title"},
    }
    logging.info(f"start cleaning of input_str: {input_str}.")
    cleaned = nh3.clean(
        input_str,
        tags=allowed_tags,
        attributes=allowed_attributes,
        url_schemes={"http", "https", "mailto"},
        link_rel=None,
    )
    logging.info(f"cleaned input_str: {cleaned}.")
    return cleaned
@session_logger.set_uuid_logging
def get_inference_model_by_args(args_to_parse):
    """Return the inference callable that the Gradio interface will invoke.

    NOTE(review): this looks like a truncated stub — `args_to_parse` is only
    logged here (no model is loaded from it), so confirm whether model
    construction was meant to happen in this scope.
    """
    logging.info(f"args_to_parse:{args_to_parse}.")
    @session_logger.set_uuid_logging
    def inference(input_str, input_image):
        ## filter out special chars
        input_str = get_cleaned_input(input_str)
        logging.info(f"input_str type: {type(input_str)}, input_image type: {type(input_image)}.")
        logging.info(f"input_str: {input_str}.")
        # BUG: output_image and output_str are never assigned anywhere in this
        # closure — calling inference() raises NameError. The actual
        # model-inference body appears to be missing.
        return output_image, output_str
    return inference
@session_logger.set_uuid_logging
def get_gradio_interface(fn_inference: Callable, title=None, description=None,
                         article=None, examples=None):
    """Wrap *fn_inference* in a Gradio Interface (text + image in, image + text out).

    Fix: the original body referenced module-level names `title`,
    `description`, `article` and `examples` that are defined nowhere in the
    file, so every call raised NameError. They are now optional keyword
    parameters (default None, which Gradio accepts), keeping the existing
    single-argument call sites working.

    Args:
        fn_inference: callable taking (input_str, input_image) and returning
            (output_image, output_str).
        title/description/article: optional page text for the interface.
        examples: optional example inputs for the interface.

    Returns:
        A configured gr.Interface instance (not yet launched/mounted).
    """
    return gr.Interface(
        fn_inference,
        inputs=[
            gr.Textbox(lines=1, placeholder=None, label="Text Instruction"),
            gr.Image(type="filepath", label="Input Image")
        ],
        outputs=[
            gr.Image(type="pil", label="Segmentation Output"),
            gr.Textbox(lines=1, placeholder=None, label="Text Output"),
        ],
        title=title,
        description=description,
        article=article,
        examples=examples,
        allow_flagging="auto",
    )
# Application start-up: parse CLI args, build the inference callable, wrap it
# in a Gradio interface, and mount that UI onto the FastAPI app at "/".
# NOTE(review): `parse_args` is neither defined nor imported anywhere in this
# file — as shown, this line raises NameError at import time. Confirm whether
# it was meant to come from an argparse-based helper that got dropped.
args = parse_args(sys.argv[1:])
inference_fn = get_inference_model_by_args(args)
io = get_gradio_interface(inference_fn)
app = gr.mount_gradio_app(app, io, path=CUSTOM_GRADIO_PATH)
|