HarnithaS committed
Commit ca93b8e · 1 Parent(s): 291463c

initial version 1

Files changed (2)
  1. app.py +54 -24
  2. requirements.txt +2 -1
app.py CHANGED
@@ -1,12 +1,10 @@
 import gradio as gr
+import subprocess
 from huggingface_hub import InferenceClient
 
-"""
-For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-"""
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
-
+question = "Fibonacci series."
+prompt = f"You are an expert in coding. Your task is to explain the error and give a hint to understand the question: {question}. Do not give the complete answer. Do not give the implementation."
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -35,29 +33,61 @@ def respond(
         top_p=top_p,
     ):
         token = message.choices[0].delta.content
-
         response += token
         yield response
 
-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
-demo = gr.ChatInterface(
-    respond,
-    additional_inputs=[
-        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(
-            minimum=0.1,
-            maximum=1.0,
-            value=0.95,
-            step=0.05,
-            label="Top-p (nucleus sampling)",
-        ),
-    ],
-)
+def run_python_code(code):
+    try:
+        result = subprocess.run(['python3', '-c', code], capture_output=True, text=True)
+        output = result.stdout if result.stdout else result.stderr
+        return output
+    except Exception as e:
+        return str(e)
+
+def AI_analyse(output):
+    try:
+        system_message = prompt
+        max_tokens = 512
+        temperature = 0.7
+        top_p = 0.95
+        message = prompt + " Please analyse the following code:\n" + output
+        response = respond(message, [], system_message, max_tokens, temperature, top_p)
+        for word in response:
+            res = str(word)
+        return res
+    except Exception as e:
+        return str(e)
+
+with gr.Blocks() as demo:
+    gr.Markdown("# Code Wiz")
+
+    with gr.Row():
+        with gr.Column():
+            #question = gr.Markdown("### Question: Write a program to print Fibonacci series.")
+            gr.Textbox(label="Question: Write a program to print Fibonacci series.", lines=1, interactive=False)
+    with gr.Row():
+        with gr.Column():
+            code = gr.Code(label="Python Code", language="python", lines=5, elem_id="box")
+            run_button = gr.Button("Run")
+    with gr.Row():
+        with gr.Column():
+            output = gr.Textbox(label="Output", lines=3, max_lines=20, interactive=False, elem_id="box")
+
+    with gr.Row():
+        with gr.Column():
+            analyse_button = gr.Button("Analyse")
+            ai_suggestion = gr.Textbox(label="AI Suggest", lines=7, placeholder="AI suggestions will be displayed here", interactive=False, elem_id="box")
+
+
+    run_button.click(fn=run_python_code, inputs=code, outputs=output)
+    analyse_button.click(fn=AI_analyse, inputs=output, outputs=ai_suggestion)
 
+# Add custom CSS
+demo.css = """
+#box {
+    overflow-y: scroll;
+}
+"""
 
 if __name__ == "__main__":
     demo.launch()
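
For reference, the sketch below exercises the same subprocess.run(['python3', '-c', ...], capture_output=True, text=True) pattern that the new run_python_code helper is built on. It is a minimal illustration of the stdout/stderr fallback, not code from this commit, and the expected outputs in the comments are assumptions about ordinary CPython behaviour.

import subprocess

# Successful snippets put their result on stdout.
ok = subprocess.run(['python3', '-c', 'print(1 + 1)'], capture_output=True, text=True)
print(ok.stdout)    # expected: "2"

# Failing snippets leave stdout empty and put the traceback on stderr,
# which is the branch run_python_code falls back to and the Output box displays.
bad = subprocess.run(['python3', '-c', 'print(missing_name)'], capture_output=True, text=True)
print(bad.stderr)   # expected: a NameError traceback

Because stderr is returned whenever stdout is empty, a syntax or runtime error in the submitted snippet surfaces directly in the Output textbox that AI_analyse later reads.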
requirements.txt CHANGED
@@ -1 +1,2 @@
-huggingface_hub==0.22.2
+huggingface_hub==0.22.2
+subprocess
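
One caveat for local installs: subprocess ships with the Python standard library, so pip will generally not find a package by that name, and the third-party imports in app.py are gradio and huggingface_hub. A minimal requirements sketch under that assumption (not what this commit records) would be:

huggingface_hub==0.22.2
gradio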