# Setup script for Claude in VS Code on a Hugging Face Space
echo "Setting up Python environment for working with Claude..."

# Create a virtual environment
python -m venv ~/claude-env

# Activate the virtual environment
source ~/claude-env/bin/activate

# Install required packages
pip install -U huggingface_hub gradio transformers datasets sentence-transformers faiss-cpu torch langchain
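# Optional sanity check (not part of the original setup): confirm the key
# packages import cleanly before creating the project files.
python -c "import gradio, sentence_transformers, faiss, torch; print('Dependencies OK')"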
# Create initial files
mkdir -p ~/hf_implementation
cd ~/hf_implementation

# Create a simple Gradio app
cat > app.py << 'EOL'
import gradio as gr
import os


def process_file(file):
    """Process an uploaded file."""
    filename = os.path.basename(file.name)
    return f"File {filename} would be processed using HF models."


def query_index(query):
    """Query the RAG index."""
    return f"Query: {query}\nResponse: This is a placeholder. The real implementation will use sentence-transformers and FAISS."
# Create the Gradio interface
with gr.Blocks(title="RAG Document Processor") as demo:
    gr.Markdown("# RAG Document Processing System")

    with gr.Tab("Upload & Process"):
        file_input = gr.File(label="Upload Document")
        process_button = gr.Button("Process Document")
        output = gr.Textbox(label="Processing Result")
        process_button.click(process_file, inputs=file_input, outputs=output)

    with gr.Tab("Query Documents"):
        query_input = gr.Textbox(label="Enter your query")
        query_button = gr.Button("Search")
        response = gr.Textbox(label="Response")
        query_button.click(query_index, inputs=query_input, outputs=response)

# Launch the app
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
EOL
# Create a sample implementation file
cat > hf_embeddings.py << 'EOL'
"""
Embeddings module using sentence-transformers.
"""
from sentence_transformers import SentenceTransformer
import numpy as np


class HFEmbeddings:
    def __init__(self, model_name="sentence-transformers/all-MiniLM-L6-v2"):
        """Initialize the embedding model.

        Args:
            model_name: Name of the sentence-transformers model to use
        """
        self.model = SentenceTransformer(model_name)

    def embed_texts(self, texts):
        """Generate embeddings for a list of texts.

        Args:
            texts: List of strings to embed

        Returns:
            List of embedding vectors
        """
        return self.model.encode(texts)

    def embed_query(self, query):
        """Generate embedding for a query string.

        Args:
            query: Query string

        Returns:
            Embedding vector
        """
        return self.model.encode(query)
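# Example usage (illustrative sketch, not part of the module API): pair the
# embeddings with a FAISS index for similarity search. Assumes faiss-cpu,
# installed by the setup script above.
if __name__ == "__main__":
    import faiss

    embedder = HFEmbeddings()
    docs = ["Hugging Face hosts models.", "FAISS performs vector search."]
    vectors = embedder.embed_texts(docs)  # float32 array, shape (n_docs, dim)

    index = faiss.IndexFlatL2(vectors.shape[1])  # exact L2 index over the embedding dim
    index.add(vectors)

    query_vec = embedder.embed_query("Who hosts models?").reshape(1, -1)
    distances, ids = index.search(query_vec, 1)  # top-1 nearest neighbour
    print(f"Best match: {docs[ids[0][0]]} (L2 distance {distances[0][0]:.4f})")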
EOL

# Create a README for the implementation
cat > README.md << 'EOL'
# Hugging Face RAG Implementation

This directory contains the Hugging Face native implementation of the RAG system.

## Files

- `app.py` - Gradio interface for the RAG system
- `hf_embeddings.py` - Embedding generation with sentence-transformers

## Running the Application

```bash
python app.py
```

## Implementation Plan

See `CLAUDE_HF.md` in the main directory for the complete implementation plan.
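
## Example: Generating Embeddings

An illustrative snippet (not part of the app itself; it assumes the packages installed by the setup script):

```python
from hf_embeddings import HFEmbeddings

embedder = HFEmbeddings()
vectors = embedder.embed_texts(["hello world", "retrieval-augmented generation"])
print(vectors.shape)  # (2, 384) for all-MiniLM-L6-v2
```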
EOL

echo "Setup complete!"
echo "To use the environment:"
echo "1. Run 'source ~/claude-env/bin/activate'"
echo "2. Navigate to '~/hf_implementation'"
echo "3. Run 'python app.py' to start the Gradio interface"