import os
import time
from typing import List

import replicate

os.environ["REPLICATE_API_TOKEN"] = "r8_0BaoQW0G8nWFXY8YWBCCUDurANxCtY72rarv9"

class BaseModelWorker:
    def __init__(
        self,
        model_name: str,
        i2s_model: bool,
        online_model: bool,
        model_path: str = None,
    ):
        self.model_name = model_name
        self.i2s_model = i2s_model
        self.online_model = online_model
        self.model_path = model_path
        self.model = None
        if self.online_model:
            # Online workers call a remote API, so model_path must hold the
            # remote model identifier (e.g. a Replicate model reference).
            assert self.model_path, f"Please give model_path of {model_name}"
            self.model = self.load_model()

    def check_online(self) -> bool:
        # A worker counts as online when it relies on a remote API and no
        # model instance has been loaded locally.
        return self.online_model and not self.model

    def load_model(self):
        # Subclasses that hold a model locally load and return it here.
        pass

    def inference(self, prompt):
        # Subclasses implement text-to-shape or image-to-shape inference here.
        pass

    def render(self, shape):
        # Subclasses implement rendering of the generated shape here.
        pass

class HuggingfaceApiWorker(BaseModelWorker):
    def __init__(
        self,
        model_name: str,
        i2s_model: bool,
        online_model: bool,
        model_api: str,
        model_path: str = None,
    ):
        super().__init__(
            model_name,
            i2s_model,
            online_model,
            model_path,
        )
        self.model_api = model_api

class PointE_Worker(BaseModelWorker):
    def __init__(
        self,
        model_name: str,
        i2s_model: bool,
        online_model: bool,
        model_api: str,
        model_path: str = None,
    ):
        super().__init__(model_name, i2s_model, online_model, model_path)
        self.model_api = model_api

class LGM_Worker(BaseModelWorker):
    def __init__(
        self,
        model_name: str,
        i2s_model: bool,
        online_model: bool,
        model_path: str = "camenduru/lgm-ply-to-glb:eb217314ab0d025370df16b8c9127f9ac1a0e4b3ffbff6b323d598d3c814d258",
    ):
        super().__init__(model_name, i2s_model, online_model, model_path)

    def inference(self, ply_file_url):
        # Convert a .ply file (given by URL) into a .glb file via the Replicate API.
        output = replicate.run(
            self.model_path,
            input={"ply_file_url": ply_file_url},
        )
        # => .glb file url: "https://replicate.delivery/pbxt/r4iOSfk7cv2wACJL539ACB4E...
        return output

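# Minimal usage sketch for LGM_Worker (not part of the original module). The
# model_name string, the flag values, and the caller-supplied .ply URL are
# illustrative assumptions; only the worker class and its default Replicate
# model_path come from the code above.
def _example_lgm_usage(ply_file_url: str) -> str:
    # Instantiate the worker with its default Replicate model reference and
    # run the .ply -> .glb conversion, returning the resulting .glb URL.
    worker = LGM_Worker(model_name="LGM", i2s_model=True, online_model=True)  # assumed values
    return worker.inference(ply_file_url)
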
if __name__ == "__main__":
    input = {
        "ply_file_url": "https://replicate.delivery/pbxt/UvKKgNj9mT7pIVHzwerhcjkp5cMH4FS5emPVghk2qyzMRwUSA/gradio_output.ply"
    }
    print("Start...")
    output = replicate.run(
        "camenduru/lgm-ply-to-glb:eb217314ab0d025370df16b8c9127f9ac1a0e4b3ffbff6b323d598d3c814d258",
        input=input,
    )
    print("output: ", output)
    # => "https://replicate.delivery/pbxt/r4iOSfk7cv2wACJL539ACB4E...