from huggingface_hub import InferenceClient
import gradio as gr

# Hosted chat model used to answer grammar- and IELTS-related questions.
model_name = "Qwen/Qwen2.5-72B-Instruct"
client = InferenceClient(model_name)

def llm_inference(user_sample):
    # Query the chat completion endpoint with a low temperature and top_p
    # so the answers stay focused and consistent.
    output = client.chat.completions.create(
        messages=[
            {"role": "system",
             "content": "You are a university English grammar teacher.\n"
             "Answer questions about English grammar, IELTS (International English Language Testing System), "
             "and top-rated universities in English-speaking countries, especially where English is an official language.\n"
             "If a question is not connected with these topics (grammar, IELTS and top-rated English-speaking universities), "
             "state that you cannot provide an answer.\n"
             "Answer formally at B2-C1 level (upper-intermediate to advanced English) without colloquial phrases or slang.\n"
             "Your answer must be no more than 50 words."},
            {"role": "user",
             "content": f"Answer the question based on these topics: English grammar, IELTS and top-rated universities. {user_sample}"},
        ],
        stream=False,
        max_tokens=128,
        temperature=0.5,
        top_p=0.1,
    )
    return output.choices[0].message.content
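
# A minimal sketch for checking the inference call locally, assuming a valid
# Hugging Face token is available to InferenceClient (e.g. via the HF_TOKEN
# environment variable); uncomment to run it once before launching the interface.
# print(llm_inference("What is the difference between 'fewer' and 'less'?"))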
# Simple Gradio front end: a text box for the question, a plain-text output
# for the model's answer, and a custom background image.
interface = gr.Interface(
    fn=llm_inference,
    inputs=gr.Textbox(lines=2, placeholder="Write your question here..."),
    outputs="text",
    css=".gradio-container {background-image: url('https://i.pinimg.com/originals/9b/6a/a8/9b6aa8867dbe29f2d475b7a550e06490.jpg')}",
    title="ASK A QUESTION BASED ON ENGLISH GRAMMAR, IELTS OR TOP-RATED UNIVERSITIES",
)

interface.launch(debug=True)
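
# Assumed local run (a sketch, not part of the original Space configuration):
#   pip install gradio huggingface_hub
#   HF_TOKEN=<your token> python app.py
# On a Hugging Face Space, app.py is executed automatically and launch()
# serves the interface.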