# NOTE(review): the three lines that were here ("Spaces:" / "Sleeping" /
# "Sleeping") are HuggingFace Spaces page-status text captured by a scrape,
# not program code — converted to this comment so the module parses.
import os

# Third-party: .env loading and LlamaIndex components.
from dotenv import load_dotenv
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface import HuggingFaceInferenceAPI

# Load environment variables (e.g. HF_TOKEN) from a local .env file
# so os.getenv can see them before any model client is configured.
load_dotenv()
# Configure the global LlamaIndex settings.
def initialize_llama_settings() -> None:
    """Configure LlamaIndex's global ``Settings`` for LLM and embeddings.

    Side effects:
        - ``Settings.llm`` is set to a HuggingFace Inference API client for
          ``google/gemma-1.1-7b-it``, authenticated via the ``HF_TOKEN``
          environment variable (expected to be loaded from .env).
        - ``Settings.embed_model`` is set to the local
          ``BAAI/bge-small-en-v1.5`` sentence-embedding model.

    NOTE(review): if HF_TOKEN is unset, os.getenv returns None and requests
    to the Inference API will fail at call time — confirm this is acceptable
    or add an explicit check upstream.
    """
    Settings.llm = HuggingFaceInferenceAPI(
        model_name="google/gemma-1.1-7b-it",
        tokenizer_name="google/gemma-1.1-7b-it",
        context_window=3000,
        token=os.getenv("HF_TOKEN"),
        max_new_tokens=512,
        # Low temperature for near-deterministic, factual answers.
        generate_kwargs={"temperature": 0.1},
    )
    Settings.embed_model = HuggingFaceEmbedding(
        model_name="BAAI/bge-small-en-v1.5"
    )
# Ensure data directory and persistent storage directory exist.
def setup_directories(data_dir: str = "data", persist_dir: str = "./db") -> tuple[str, str]:
    """Create the data and persistence directories if they do not exist.

    Args:
        data_dir: Directory holding input documents (default ``"data"``).
        persist_dir: Directory for the persisted index/DB (default ``"./db"``).

    Returns:
        The ``(data_dir, persist_dir)`` pair, unchanged, for caller convenience.

    Idempotent: ``exist_ok=True`` means re-running never raises if the
    directories already exist.
    """
    os.makedirs(data_dir, exist_ok=True)
    os.makedirs(persist_dir, exist_ok=True)
    return data_dir, persist_dir