Spaces:
Sleeping
Sleeping
Create recommendations.py
Browse files- recommendations.py +67 -0
recommendations.py
ADDED
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# recommendations.py
|
2 |
+
import openai
|
3 |
+
from typing import List, Tuple
|
4 |
+
|
5 |
+
from utils import get_embedding
|
6 |
+
from pinecone import Pinecone
|
7 |
+
|
8 |
+
# Function to recommend products
def recommend_products(query: str, openai_api_key: str, pinecone_api_key: str, pinecone_env: str, top_k: int = 10) -> List[Tuple[str, str]]:
    """Recommend products for a user query via vector similarity search.

    The query is embedded with OpenAI (through ``get_embedding``) and matched
    against the "product-recommendations" Pinecone index.

    Args:
        query: Free-text user query.
        openai_api_key: OpenAI API key used to embed the query.
        pinecone_api_key: Pinecone API key.
        pinecone_env: Pinecone environment. NOTE(review): not referenced in
            the body — presumably kept for caller compatibility; confirm.
        top_k: Maximum number of recommendations to return. Default is 10.

    Returns:
        List of ``(image_url, "product_name (Score: ...)")`` tuples, or an
        empty list if embedding or the index query fails.
    """
    embedding = get_embedding(query, openai_api_key)
    if not embedding:
        # Embedding failed (helper returned a falsy value) — nothing to search with.
        return []

    try:
        # Connect to Pinecone and open the target index.
        client = Pinecone(api_key=pinecone_api_key)
        index = client.Index("product-recommendations")

        response = index.query(vector=embedding, top_k=top_k, include_metadata=True)

        # Build (image_url, labelled-name) pairs; any malformed match falls
        # through to the best-effort handler below, as before.
        picks = []
        for match in response['matches']:
            meta = match['metadata']
            picks.append((meta['image_url'], f"{meta['product_name']} (Score: {match['score']})"))
        return picks
    except Exception as e:
        # Best-effort: report and degrade to "no recommendations".
        print(f"Error querying Pinecone: {e}")
        return []
40 |
+
# Function to generate contextual message
def generate_contextual_message(query: str, recommendations: List[Tuple[str, str]], openai_api_key: str, system_prompt: str) -> str:
    """Generate a contextual message for a query and its recommendations.

    Args:
        query: User query.
        recommendations: ``(image_url, product_label)`` tuples as produced by
            ``recommend_products``; only the labels are used here.
        openai_api_key: OpenAI API key.
        system_prompt: Extra instructions appended to the *user* prompt.
            NOTE(review): despite its name this is NOT sent as the system
            message — that is hard-coded below; confirm this is intended.

    Returns:
        The model's reply, or a fixed failure string on any error.
    """
    openai.api_key = openai_api_key

    labels = [label for _, label in recommendations]
    user_prompt = (
        f"User query: {query}\n"
        f"Recommended products: {', '.join(labels)}\n"
        f"{system_prompt}"
    )

    try:
        # NOTE(review): legacy pre-1.0 openai SDK interface; removed in
        # openai>=1.0 — confirm the pinned SDK version before upgrading.
        completion = openai.ChatCompletion.create(
            model="gpt-4",  # or use "gpt-3.5-turbo" if preferred
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": user_prompt},
            ],
        )
        return completion['choices'][0]['message']['content']
    except Exception as e:
        print(f"Error generating contextual message: {e}")
        return "Failed to generate contextual message."