import gradio as gr
import requests

# LM Studio REST API base URL
BASE_URL = "http://localhost:1234/api/v0"

# Function to handle chat completions
def chat_with_lmstudio(messages):
    url = f"{BASE_URL}/chat/completions"
    payload = {
        "model": "granite-3.0-2b-instruct",  # Replace with the model you have loaded
        "messages": messages,
        "temperature": 0.7,
        "max_tokens": 1024,
        "stream": False
    }
    response = requests.post(url, json=payload)
    response.raise_for_status()
    response_data = response.json()
    return response_data['choices'][0]['message']['content']

# Function to handle text completions
def get_text_completion(prompt):
    url = f"{BASE_URL}/completions"
    payload = {
        "model": "granite-3.0-2b-instruct",  # Replace with the model you have loaded
        "prompt": prompt,
        "temperature": 0.7,
        "max_tokens": 100,
        "stream": False
    }
    response = requests.post(url, json=payload)
    response.raise_for_status()
    response_data = response.json()
    return response_data['choices'][0]['text']

# Function to handle text embeddings
def get_text_embedding(text):
    url = f"{BASE_URL}/embeddings"
    payload = {
        "model": "text-embedding-nomic-embed-text-v1.5",  # Replace with your embedding model
        "input": text
    }
    response = requests.post(url, json=payload)
    response.raise_for_status()
    response_data = response.json()
    return response_data['data'][0]['embedding']

# Gradio interface for chat
def gradio_chat_interface():
    def chat_fn(user_input, history):
        # With type='messages', history is already a list of
        # {"role": ..., "content": ...} dicts, which matches the format
        # LM Studio's chat/completions endpoint expects
        messages = [{"role": m["role"], "content": m["content"]} for m in history]
        messages.append({"role": "user", "content": user_input})
        # Return the assistant's reply; gr.ChatInterface appends it
        # to the chat history automatically
        return chat_with_lmstudio(messages)

    gr.ChatInterface(chat_fn, type='messages').launch(share=True)

# Gradio interface for text completion
def gradio_text_completion():
    gr.Interface(
        fn=get_text_completion,
        inputs="text",
        outputs="text",
        title="Text Completion with LM Studio"
    ).launch(share=True)

# Gradio interface for text embedding
def gradio_text_embedding():
    gr.Interface(
        fn=get_text_embedding,
        inputs="text",
        outputs="text",
        title="Text Embedding with LM Studio"
    ).launch(share=True)

# Main menu to choose the interface; each button launches the
# corresponding Gradio app as a separate server instance
def main():
    with gr.Blocks() as demo:
        gr.Markdown("""
        # LM Studio API Interface
        Choose which function you want to use with LM Studio:
        """)
        with gr.Row():
            gr.Button("Chat with Model").click(gradio_chat_interface)
            gr.Button("Text Completion").click(gradio_text_completion)
            gr.Button("Text Embedding").click(gradio_text_embedding)
    demo.launch(share=True)

if __name__ == "__main__":
    main()
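
# A minimal sanity check, assuming LM Studio is running on localhost:1234 with the
# models named above already loaded. Calling the helper functions directly (without
# Gradio) is a quick way to confirm the REST endpoints respond before wiring up the UI:
#
#   print(chat_with_lmstudio([{"role": "user", "content": "Hello!"}]))
#   print(get_text_completion("Once upon a time"))
#   print(len(get_text_embedding("hello world")))  # length of the embedding vector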