seawolf2357 committed (verified)
Commit 95635e6 · 1 Parent(s): 56fcdd0

Update app.py

Files changed (1): app.py (+24, -5)
app.py CHANGED
@@ -10,10 +10,10 @@ from PIL import Image
 import io
 import gradio as gr
 import threading
-from googletrans import Translator
+from huggingface_hub import InferenceClient
 
 # Logging setup
-logging.basicConfig(level=logging.DEBUG, format='%(asctime)s:%(levelname)s:%(name)s: %(message)s', handlers=[logging.StreamHandler()])
+logging.basicConfig(level=logging.DEBUG, format='%(asctime)s:%(levelname)s:%(name)s:%(message)s', handlers=[logging.StreamHandler()])
 
 # Discord intents setup
 intents = discord.Intents.default()
@@ -22,10 +22,12 @@ intents.messages = True
 intents.guilds = True
 intents.guild_messages = True
 
+# Cohere model setup
+hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus", token=os.getenv("HF_TOKEN"))
+
 # PaliGemma model setup (CPU mode)
 model = PaliGemmaForConditionalGeneration.from_pretrained("gokaygokay/sd3-long-captioner").to("cpu").eval()
 processor = PaliGemmaProcessor.from_pretrained("gokaygokay/sd3-long-captioner")
-translator = Translator()
 
 def modify_caption(caption: str) -> str:
     prefix_substrings = [
@@ -59,9 +61,26 @@ async def create_captions_rich(image: Image.Image) -> str:
     return modified_caption
 
 async def translate_to_korean(text: str) -> str:
+    messages = [
+        {"role": "system", "content": "Translate the following text from English to Korean."},
+        {"role": "user", "content": text}
+    ]
+
     loop = asyncio.get_event_loop()
-    translated = await loop.run_in_executor(None, lambda: translator.translate(text, dest='ko').text)
-    return translated
+    response = await loop.run_in_executor(
+        None,
+        lambda: hf_client.chat_completion(
+            messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85
+        )
+    )
+
+    full_response = []
+    for part in response:
+        if part.choices and part.choices[0].delta and part.choices[0].delta.content:
+            full_response.append(part.choices[0].delta.content)
+
+    full_response_text = ''.join(full_response)
+    return full_response_text.strip()
 
 # Gradio interface setup
 def create_captions_rich_sync(image):
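For reference, the translation path introduced by this commit can be exercised on its own. The sketch below is not part of the commit: it assumes huggingface_hub is installed and that HF_TOKEN is set in the environment with access to the hosted model; the model id, prompt, and sampling parameters are lifted directly from the diff above, while the translate_to_korean_sync name and the __main__ demo are illustrative only.

import os
from huggingface_hub import InferenceClient

# Same endpoint and token source as in app.py above.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus", token=os.getenv("HF_TOKEN"))

def translate_to_korean_sync(text: str) -> str:
    # Prompt and sampling parameters copied from the new translate_to_korean().
    messages = [
        {"role": "system", "content": "Translate the following text from English to Korean."},
        {"role": "user", "content": text}
    ]
    stream = hf_client.chat_completion(
        messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85
    )
    # With stream=True the client yields chunks; keep only the text deltas.
    parts = []
    for part in stream:
        if part.choices and part.choices[0].delta and part.choices[0].delta.content:
            parts.append(part.choices[0].delta.content)
    return ''.join(parts).strip()

if __name__ == "__main__":
    print(translate_to_korean_sync("A cat sitting on a windowsill at sunset."))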
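One design note on the new translate_to_korean(): run_in_executor only offloads the call that opens the streaming response, and the for-loop then iterates the stream on the event-loop thread, where each chunk read blocks. A hedged alternative, assuming the same hf_client, is to consume the whole stream inside the executor; the translate_to_korean_in_executor name and its _run helper below are hypothetical and not part of the commit.

import asyncio

async def translate_to_korean_in_executor(text: str) -> str:
    messages = [
        {"role": "system", "content": "Translate the following text from English to Korean."},
        {"role": "user", "content": text}
    ]

    def _run() -> str:
        # Open and fully consume the stream in the worker thread,
        # so no blocking reads happen on the event loop.
        stream = hf_client.chat_completion(
            messages, max_tokens=1000, stream=True, temperature=0.7, top_p=0.85
        )
        parts = []
        for part in stream:
            if part.choices and part.choices[0].delta and part.choices[0].delta.content:
                parts.append(part.choices[0].delta.content)
        return ''.join(parts).strip()

    loop = asyncio.get_event_loop()
    return await loop.run_in_executor(None, _run)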