Kims12 committed on
Commit 3b0e559 • 1 Parent(s): eb45f3d

Update app.py

Files changed (1)
  1. app.py +26 -6
app.py CHANGED
@@ -10,16 +10,31 @@ HARD_CODED_MODEL = "CohereForAI/c4ai-command-r-plus"
 def create_client(model_name):
     return InferenceClient(model_name, token=os.getenv("HF_TOKEN"))

-def call_api(content, system_message, max_tokens, temperature, top_p):
+def call_api(model_input, system_message, max_tokens, temperature, top_p):
     client = create_client(HARD_CODED_MODEL)
-    messages = [{"role": "system", "content": system_message}, {"role": "user", "content": content}]
+    messages = [
+        {"role": "system", "content": system_message},
+        {"role": "user", "content": model_input}
+    ]
     random_seed = random.randint(0, 1000000)
-    response = client.chat_completion(messages=messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p, seed=random_seed)
+    response = client.chat_completion(
+        messages=messages,
+        max_tokens=max_tokens,
+        temperature=temperature,
+        top_p=top_p,
+        seed=random_seed
+    )
     return response.choices[0].message.content

-def generate_text(user_message, system_message, max_tokens, temperature, top_p):
-    return call_api(user_message, system_message, max_tokens, temperature, top_p)
+# LLM call for positive-review analysis
+def analyze_positive_reviews(positive_reviews, positive_prompt, max_tokens, temperature, top_p):
+    return call_api(positive_reviews, positive_prompt, max_tokens, temperature, top_p)

+# LLM call for negative-review analysis
+def analyze_negative_reviews(negative_reviews, negative_prompt, max_tokens, temperature, top_p):
+    return call_api(negative_reviews, negative_prompt, max_tokens, temperature, top_p)
+
+# Process the Excel file and extract positive/negative reviews
 def process_excel(file):
     if file is not None:
         # Read the Excel file into a dataframe
@@ -86,8 +101,13 @@ with gr.Blocks() as demo:
                        inputs=upload_excel,
                        outputs=[user_message, input1])  # Put the analysis results straight into the positive-review and negative-review input boxes

-    generate_btn.click(fn=generate_text,
+    # Run the positive-review and negative-review analysis with the LLM
+    generate_btn.click(fn=analyze_positive_reviews,
                        inputs=[user_message, system_message, max_tokens, temperature, top_p],
                        outputs=[output1])

+    generate_btn.click(fn=analyze_negative_reviews,
+                       inputs=[input1, input2, max_tokens, temperature, top_p],
+                       outputs=[output2])
+
 demo.launch()
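
The core of this change is the expanded call_api() helper, which sends a system prompt plus the review text to the hard-coded CohereForAI/c4ai-command-r-plus model through huggingface_hub's InferenceClient.chat_completion and returns the first choice's message content. The following is a rough, standalone sketch of that call pattern only; the default sampling values, the example prompts, and the __main__ block are placeholders rather than values from app.py, and it assumes huggingface_hub is installed and HF_TOKEN is set.

import os
import random

from huggingface_hub import InferenceClient

HARD_CODED_MODEL = "CohereForAI/c4ai-command-r-plus"

def call_api(model_input, system_message, max_tokens=512, temperature=0.7, top_p=0.9):
    # Same shape as the diff: system prompt + user content, fresh random seed per call.
    client = InferenceClient(HARD_CODED_MODEL, token=os.getenv("HF_TOKEN"))
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": model_input},
    ]
    response = client.chat_completion(
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        seed=random.randint(0, 1000000),
    )
    return response.choices[0].message.content

if __name__ == "__main__":
    print(call_api("Fast delivery and neat packaging.",
                   "Summarize the key points of these positive reviews."))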
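
The second hunk relies on Gradio allowing more than one event listener on the same component: generate_btn.click() is called twice, so a single click now triggers both the positive-review and the negative-review analysis. Below is a minimal, self-contained sketch of that pattern; the component names and handler bodies are stand-ins, not the ones from app.py.

import gradio as gr

def analyze_positive(text):
    return f"positive summary of: {text}"

def analyze_negative(text):
    return f"negative summary of: {text}"

with gr.Blocks() as demo:
    pos_in = gr.Textbox(label="Positive reviews")
    neg_in = gr.Textbox(label="Negative reviews")
    pos_out = gr.Textbox(label="Positive analysis")
    neg_out = gr.Textbox(label="Negative analysis")
    generate_btn = gr.Button("Analyze")

    # Both listeners sit on the same button; one click runs both handlers.
    generate_btn.click(fn=analyze_positive, inputs=pos_in, outputs=pos_out)
    generate_btn.click(fn=analyze_negative, inputs=neg_in, outputs=neg_out)

demo.launch()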