import gradio as gr
from huggingface_hub import InferenceClient
from transformers import AutoTokenizer
import torch

# Initialize model and tokenizer
model_name = "erikbeltran/pydiff"
client = InferenceClient(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)


def format_diff_response(response):
    """Format the response to look like a diff output."""
    lines = response.split('\n')
    formatted = []
    for line in lines:
        if line.startswith('+'):
            # Render added lines in green for the HTML output
            formatted.append(f'<span style="color: green;">{line}</span>')
        elif line.startswith('-'):
            # Render removed lines in red for the HTML output
            formatted.append(f'<span style="color: red;">{line}</span>')
        else:
            formatted.append(line)
    return '<br>'.join(formatted)

def respond(request, file_content, system_message, max_tokens, temperature, top_p):
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": f"""{request}

{file_content}
"""},
    ]

    response = ""

    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = message.choices[0].delta.content
        # Guard against chunks with no content (e.g., the final stream message)
        if token:
            response += token
        # Format as diff and yield
        yield format_diff_response(response)


# Create the Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# Code Review Assistant")

    with gr.Row():
        with gr.Column():
            request_input = gr.Textbox(
                label="Request",
                placeholder="Enter your request (e.g., 'fix the function', 'add error handling')",
                lines=3,
            )
            file_input = gr.Code(
                label="File Content",
                language="python",
                lines=10,
            )
        with gr.Column():
            output = gr.HTML(label="Diff Output")

    with gr.Accordion("Advanced Settings", open=False):
        system_msg = gr.Textbox(
            value="You are a code review assistant. Analyze the code and provide suggestions in diff format. Use '+' for additions and '-' for deletions.",
            label="System Message",
        )
        max_tokens = gr.Slider(
            minimum=1,
            maximum=2048,
            value=512,
            step=1,
            label="Max Tokens",
        )
        temperature = gr.Slider(
            minimum=0.1,
            maximum=4.0,
            value=0.7,
            step=0.1,
            label="Temperature",
        )
        top_p = gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p",
        )

    submit_btn = gr.Button("Submit")

    submit_btn.click(
        fn=respond,
        inputs=[
            request_input,
            file_input,
            system_msg,
            max_tokens,
            temperature,
            top_p,
        ],
        outputs=output,
    )

if __name__ == "__main__":
    demo.launch()