updated
- app.py +5 -0
- llama_models.py +2 -2
app.py
CHANGED
@@ -8,6 +8,10 @@ import os
 # Load environment variables from .env file
 load_dotenv()
 
+# Ensure API key is loaded correctly
+api_key = os.getenv("HUGGINGFACE_API_KEY")
+print(f"Hugging Face API Key: {api_key}")
+
 async def process_csv(file):
     print("Reading CSV file...")
     df = pd.read_csv(file, header=None)  # Read the CSV file without a header
@@ -35,6 +39,7 @@ async def process_csv(file):
     df['predictions'] = results
     df.columns = df.columns.astype(str)  # Convert column names to strings to avoid warnings
     print("Results assigned to DataFrame successfully.")
+    print(df.head())  # Print first few rows of the DataFrame to verify
     return df
 
 st.title("Finance Model Deployment")
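Note (not part of the diff): the added debug line prints the raw HUGGINGFACE_API_KEY to the logs. A safer variant would only confirm that the key is present. Below is a minimal sketch, assuming the same dotenv setup app.py already uses; the error message wording is an assumption for illustration.

import os
from dotenv import load_dotenv

load_dotenv()  # Load variables from the .env file
api_key = os.getenv("HUGGINGFACE_API_KEY")
if not api_key:
    # Fail fast instead of sending unauthenticated requests later (hypothetical message)
    raise RuntimeError("HUGGINGFACE_API_KEY is not set; check your .env file.")
print(f"Hugging Face API key loaded ({len(api_key)} characters).")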
llama_models.py
CHANGED
@@ -1,5 +1,5 @@
 import os
-from transformers import AutoTokenizer,
+from transformers import AutoTokenizer, AutoModelForSeq2SeqLM  # Ensure correct model class
 import aiohttp
 
 HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
@@ -11,7 +11,7 @@ def load_model(model_name):
     if not tokenizer or not model:
         print("Loading model and tokenizer...")
         tokenizer = AutoTokenizer.from_pretrained(model_name)
-        model =
+        model = AutoModelForSeq2SeqLM.from_pretrained(model_name)  # Ensure correct model class
         print("Model and tokenizer loaded successfully.")
         return tokenizer, model
 
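Note (not part of the diff): switching to AutoModelForSeq2SeqLM means load_model now expects a text-to-text (encoder-decoder) checkpoint. A minimal sketch of using the loaded tokenizer/model pair for a single prediction is below; the checkpoint name "google/flan-t5-small" and the prompt are assumptions for illustration, since the model name passed to load_model is defined elsewhere in the repo.

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "google/flan-t5-small"  # assumed seq2seq checkpoint for illustration
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Tokenize a prompt and generate a short text-to-text prediction
inputs = tokenizer("Classify the sentiment: revenue grew 12% this quarter.", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))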