leepokai committed
Commit 6f930be · verified · 1 Parent(s): db62c9a

Update app.py

Files changed (1)
  1. app.py +4 -23
app.py CHANGED
@@ -1,35 +1,16 @@
  import gradio as gr
- from huggingface_hub import InferenceClient
  import evaluate

  # Load the perplexity metric
  perplexity = evaluate.load("perplexity", module_type="metric")

- # Create the inference client
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
- def compute_perplexity(message):
-     # Prepare the message list; here it contains only the user message
-     messages = [{"role": "user", "content": message}]
-
-     # Run the chat-completion task through the client
-     response = client.chat_completion(
-         messages,
-         max_tokens=512,
-         stream=False,
-         temperature=0.7,
-         top_p=0.95
-     )
-
-     # Get the generated text
-     generated_text = response.choices[0].delta.content
-
-     # Compute the perplexity
-     perplexity_results = perplexity.compute(model_id='gpt2', add_start_token=False, predictions=[generated_text])
+ def compute_perplexity(text):
+     # Compute perplexity directly on the input text
+     perplexity_results = perplexity.compute(model_id='gpt2', add_start_token=False, predictions=[text])
      perplexity_value = perplexity_results['perplexity']

      # Return the perplexity result
-     return f"Perplexity of the response: {perplexity_value}"
+     return f"Perplexity of the input text: {perplexity_value}"

  # Set up the Gradio interface
  demo = gr.Interface(
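
The hunk stops at the opening of the gr.Interface(...) call, so the rest of the file is not shown. Below is a minimal sketch of how the simplified app.py might read after this commit; the Textbox component, the title, and the launch() call are assumptions, not part of the diff. Note also that recent versions of the evaluate perplexity metric return the keys 'perplexities' and 'mean_perplexity', so the sketch reads 'mean_perplexity' where the committed code reads 'perplexity'.

import gradio as gr
import evaluate

# Load the perplexity metric from the evaluate library
perplexity = evaluate.load("perplexity", module_type="metric")

def compute_perplexity(text):
    # Score the input text directly, using GPT-2 as the reference model
    results = perplexity.compute(
        model_id="gpt2",
        add_start_token=False,
        predictions=[text],
    )
    # Recent evaluate versions expose 'perplexities' and 'mean_perplexity';
    # the committed code indexes 'perplexity' instead.
    perplexity_value = results["mean_perplexity"]
    return f"Perplexity of the input text: {perplexity_value}"

# Hypothetical completion of the gr.Interface(...) call cut off by the hunk:
# one text box in, a plain text result out.
demo = gr.Interface(
    fn=compute_perplexity,
    inputs=gr.Textbox(lines=4, label="Text"),
    outputs="text",
    title="GPT-2 Perplexity",
)

if __name__ == "__main__":
    demo.launch()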