# NOTE(review): removed copy-paste artifacts that preceded the module
# (a "File size" header, a short commit hash, and bare line numbers 1-95
# from a web page render) — they were not valid Python and broke import.
import os
import time
from typing import List, Optional

import replicate

os.environ("REPLICATE_API_TOKEN", "r8_0BaoQW0G8nWFXY8YWBCCUDurANxCtY72rarv9")

class BaseModelWorker:
    """Base class for text/image-to-shape model workers.

    Subclasses override :meth:`load_model`, :meth:`inference`, and
    :meth:`render`; this base only stores configuration and, for online
    (hosted) models, eagerly triggers :meth:`load_model`.
    """

    def __init__(self,
                 model_name: str,
                 i2s_model: bool,
                 online_model: bool,
                 model_path: Optional[str] = None,
                 ):
        # model_name:   human-readable identifier of the model.
        # i2s_model:    True for image-to-shape models (vs. text-to-shape) —
        #               presumably; verify against callers. TODO confirm.
        # online_model: True when the model is served remotely (e.g. Replicate).
        # model_path:   local path or remote model identifier/version string.
        self.model_name = model_name
        self.i2s_model = i2s_model
        self.online_model = online_model
        self.model_path = model_path
        self.model = None

        if self.online_model:
            # BUG FIX: the original `assert not self.model_path, "Please give
            # model_path..."` contradicted its own message — online workers
            # need a model identifier (see LGM_Worker, which passes a pinned
            # Replicate version string). Also raise instead of assert, since
            # asserts are stripped under `python -O`.
            if not self.model_path:
                raise ValueError(f"Please give model_path of {self.model_name}")
            self.model = self.load_model()

    def check_online(self) -> bool:
        """Return True when this is an online model whose handle is not loaded."""
        return bool(self.online_model and not self.model)

    def load_model(self):
        """Load and return the model handle; overridden by subclasses."""
        pass

    def inference(self, prompt):
        """Run generation for `prompt`; overridden by subclasses."""
        pass

    def render(self, shape):
        """Render a generated shape; overridden by subclasses."""
        pass

class HuggingfaceApiWorker(BaseModelWorker):
    """Worker backed by a model reachable through a Huggingface API endpoint."""

    def __init__(
            self,
            model_name: str,
            i2s_model: bool,
            online_model: bool,
            model_api: str,
            model_path: str = None,
    ):
        # Shared bookkeeping lives in the base class; this subclass only
        # records the API endpoint it will call.
        super().__init__(model_name, i2s_model, online_model, model_path)
        self.model_api = model_api

class PointE_Worker(BaseModelWorker):
    """Worker for the Point-E model, addressed via a remote API endpoint."""

    def __init__(self,
                 model_name: str,
                 i2s_model: bool,
                 online_model: bool,
                 model_api: str,
                 model_path: str = None):
        # Defer common state to BaseModelWorker, then keep the endpoint.
        super().__init__(model_name, i2s_model, online_model, model_path)
        self.model_api = model_api


class LGM_Worker(BaseModelWorker):
    """Worker wrapping the LGM ply-to-glb converter hosted on Replicate.

    `model_path` defaults to a pinned Replicate model version string.
    """

    def __init__(self,
                 model_name: str,
                 i2s_model: bool,
                 online_model: bool,
                 model_path: str = "camenduru/lgm-ply-to-glb:eb217314ab0d025370df16b8c9127f9ac1a0e4b3ffbff6b323d598d3c814d258"):
        super().__init__(model_name, i2s_model, online_model, model_path)

    def inference(self, image):
        """Send `image` (a .ply file URL) to the remote model and return its output.

        The remote call returns the URL of the generated .glb file, e.g.
        "https://replicate.delivery/pbxt/r4iOSfk7cv2wACJL539ACB4E...
        """
        result = replicate.run(
            self.model_path,
            input={"ply_file_url": image},
        )
        return result


if __name__ == "__main__":
    # Smoke test: performs a real network call to Replicate and requires
    # REPLICATE_API_TOKEN to be set in the environment.
    # BUG FIX: renamed `input` -> `payload`; the original shadowed the
    # builtin `input()`.
    payload = {
        "ply_file_url": "https://replicate.delivery/pbxt/UvKKgNj9mT7pIVHzwerhcjkp5cMH4FS5emPVghk2qyzMRwUSA/gradio_output.ply"
    }
    print("Start...")
    output = replicate.run(
        "camenduru/lgm-ply-to-glb:eb217314ab0d025370df16b8c9127f9ac1a0e4b3ffbff6b323d598d3c814d258",
        input=payload,
    )
    print("output: ", output)
    # => "https://replicate.delivery/pbxt/r4iOSfk7cv2wACJL539ACB4E...