Spaces:
Sleeping
Sleeping
File size: 6,316 Bytes
5366a00 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 |
import ast
import os
import pickle
import random
from datetime import datetime, timedelta
import gradio as gr
import pandas as pd
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.messages import HumanMessage, SystemMessage
from pytrends.request import TrendReq
from mlxtend.preprocessing import TransactionEncoder
def convert_keywords_to_list(keywords_str):
    """Parse a stringified Python literal (e.g. "['a', 'b']") back into a value.

    Returns an empty list when the string is not a valid Python literal.
    """
    try:
        parsed = ast.literal_eval(keywords_str)
    except (SyntaxError, ValueError):
        return []
    return parsed
def convert_scores_to_list(scores_float):
    """Parse a stringified list of scores (e.g. "[1.0, 2.5]") back into a value.

    Returns an empty list when the string is not a valid Python literal.
    """
    try:
        parsed = ast.literal_eval(scores_float)
    except (SyntaxError, ValueError):
        return []
    return parsed
# --- Module-level data loading & feature preparation ---

# Video metadata; 'keywords' and 'trend_scores' are stored as stringified
# Python lists in the CSV and must be parsed back into real lists.
video_df = pd.read_csv('video_df_complete.csv')
video_df['keywords'] = video_df['keywords'].apply(convert_keywords_to_list)
video_df['trend_scores'] = video_df['trend_scores'].apply(convert_scores_to_list)
# Average trend score per video (0 when no scores were collected).
video_df['total_score'] = video_df['trend_scores'].apply(
    lambda x: sum(x) / len(x) if len(x) > 0 else 0
)

# One-hot encode each video's keyword list; the encoded matrix plus the
# trend score and engagement rate form the regression feature frame.
transactions = video_df['keywords'].tolist()
te = TransactionEncoder()
te_ary = te.fit(transactions).transform(transactions)
df = pd.DataFrame(te_ary, columns=te.columns_)
merged_df = pd.concat([df, video_df['total_score'], video_df['engagement_rate']], axis=1)

# Association rules mined offline; antecedents/consequents are serialized
# frozenset reprs, which ast.literal_eval cannot parse.
# SECURITY NOTE: eval() executes arbitrary code — only load a trusted,
# locally generated association_rules.csv.
rules = pd.read_csv('association_rules.csv')
rules['antecedents'] = rules['antecedents'].apply(lambda x: list(eval(x)))
rules['consequents'] = rules['consequents'].apply(lambda x: list(eval(x)))

# SECURITY NOTE: pickle.load executes arbitrary code on untrusted data —
# only unpickle the locally trained model file.
model_filename = 'regression_model_final.pkl'
with open(model_filename, 'rb') as file:
    model = pickle.load(file)

# Gemini chat model used to phrase the final title.
llm = ChatGoogleGenerativeAI(model="gemini-1.5-pro", convert_system_message_to_human=True)
def custom_predict(keywords, total_score):
    """
    Predict the engagement rate for a keyword set and trend score.

    Builds a single feature row mirroring the one-hot keyword columns of the
    module-level ``merged_df`` and feeds it to the loaded regression model.

    Args:
        keywords: A list of keywords.
        total_score: The total trend score.

    Returns:
        The predicted engagement rate.
    """
    # Start from an all-zero feature row with the same columns as merged_df.
    features = {col: 0 for col in merged_df.columns}
    for kw in keywords:
        if kw in features:
            features[kw] = 1  # one-hot flag for keywords the model knows
    features['total_score'] = total_score
    # The target column is not a model input; drop it before predicting.
    frame = pd.DataFrame([features]).drop(columns=['engagement_rate'])
    return model.predict(frame)[0][0]
def generate_keyword_scores(keywords, engagement_rate=4.5):
    """
    Generate synthetic trend scores, one per keyword.

    Used as a fallback when Google Trends is unavailable. Scores are drawn
    uniformly from a band around ``engagement_rate * 10``, capped at 100.

    Args:
        keywords: Keywords to score (only the count matters).
        engagement_rate: Baseline engagement rate the band is centred on
            (default 4.5, matching the original hard-coded value).

    Returns:
        A list of scores rounded to 2 decimals, same length as ``keywords``.
    """
    # Scale the rate to a 0-100 trend-like score, clamped at 100.
    scaled_rate = min(100, engagement_rate * 10)
    return [
        round(random.uniform(scaled_rate * 0.7, min(100, scaled_rate * 1.2)), 2)
        for _ in keywords
    ]
def get_google_trends_score(keywords, end_date, days_back=7):
    """
    Fetch Google Trends scores for the given keywords over a time window.

    Queries YouTube search trends in Indonesia (geo='ID') for the
    ``days_back`` days ending at ``end_date``. On any failure (empty
    keyword list, network error, rate limiting) it falls back to randomly
    generated scores so the recommendation flow keeps working.

    Parameters:
        keywords (list): Keywords to analyse (must be non-empty).
        end_date (datetime): End date of the trend window.
        days_back (int): Days before end_date to include (default: 7).

    Returns:
        pd.DataFrame: One column per keyword containing trend scores.
    """
    try:
        if not keywords:
            raise ValueError("Daftar kata kunci tidak boleh kosong.")
        pytrends = TrendReq()
        start_date = end_date - timedelta(days=days_back)
        timeframe = f"{start_date.strftime('%Y-%m-%d')} {end_date.strftime('%Y-%m-%d')}"
        pytrends.build_payload(keywords, timeframe=timeframe, geo='ID', gprop='youtube')
        trends_df = pytrends.interest_over_time()
        # 'isPartial' is metadata, not a keyword score column.
        if 'isPartial' in trends_df.columns:
            trends_df = trends_df.drop(columns=['isPartial'])
        return trends_df
    except Exception:
        # Best-effort fallback: one synthetic row keyed by keyword, so
        # callers that look up columns by keyword name still find scores.
        # (The previous fallback produced a single unnamed column, making
        # every per-keyword lookup silently score 0.)
        return pd.DataFrame([generate_keyword_scores(keywords)], columns=keywords)
def generate_title(keyword, category):
    """
    Generate a YouTube video title for a keyword via the LLM.

    Only the 'Gaming' category is supported. Expands the seed keyword into
    a recommended keyword set, then asks the Gemini model to phrase a
    single catchy title that uses all of them.

    Args:
        keyword: Seed keyword entered by the user.
        category: Video category selected in the UI.

    Returns:
        The generated title, or a status message when unsupported /
        no recommendation is found.
    """
    # Guard clauses: unsupported category, then empty recommendation.
    if category != 'Gaming':
        return "Category belum supported."
    recommendation = recommend_keyword(keyword)
    if not recommendation:
        return "No recommendations found."
    messages = [
        SystemMessage(
            content=f"Kamu adalah seorang penulis judul video youtube"
            f"Kamu akan diberikan beberapa buah keyword yang wajib digunakan untuk judul"
            f"Buat judul yang semenarik mungkin untuk memberikan viewer rasa suka"
            f"Cukup keluarkan satu judul saja dalam satu kalimat"
            f"Jangan gunnakan formatting seperti '\n' atau hal lainnya. Gunakan saja raw string"
            f"Boleh pake emoji"
        ),
        HumanMessage(
            content=f"keyword yang digunakan adalah sebagai berikut: {recommendation}"
            f"Total jumlah keyword adalah: {len(recommendation)}"
            f"Video memiliki kategori: {category}"
        ),
    ]
    response = llm(messages)
    return response.content
def recommend_keyword(keyword):
    """
    Recommend the best-performing keyword combination for a seed keyword.

    Finds association rules mentioning the keyword, keeps the five rules
    with the highest lift, scores each candidate keyword set with Google
    Trends, and returns the set whose predicted engagement rate is highest.

    Args:
        keyword: Seed keyword to expand.

    Returns:
        A list of keywords (seed first), or [] when no rule matches.
    """
    matches = rules[
        rules['antecedents'].astype(str).str.contains(keyword)
        | rules['consequents'].astype(str).str.contains(keyword)
    ]
    top_rules = matches.sort_values(by='lift', ascending=False).head(5)
    # Candidate sets: the seed keyword plus each top rule's consequents.
    # (The original also extracted the first antecedent but never used it.)
    recommendation = [
        [keyword] + list(row['consequents']) for _, row in top_rules.iterrows()
    ]
    if not recommendation:
        return []
    # Single timestamp hoisted out of the loop so every candidate is scored
    # over the same trend window.
    now = datetime.now()
    engages = []
    for rec in recommendation:
        trends_df = get_google_trends_score(rec, now)
        # Mean trend score per keyword; 0 when Trends returned no column.
        scores = [
            round(trends_df[kw].mean(), 2) if kw in trends_df.columns else 0
            for kw in rec
        ]
        avg_score = sum(scores) / len(scores)
        engages.append(custom_predict(rec, avg_score))
    # Return the candidate with the highest predicted engagement rate.
    return recommendation[engages.index(max(engages))]
# Categories offered in the UI dropdown, taken from the video metadata.
# NOTE(review): 'catergory' looks like a typo for 'category' — it presumably
# matches the actual CSV column header; confirm before renaming.
distinct_categories = video_df['catergory'].unique()
# Gradio UI: keyword textbox + category dropdown in, generated title out.
iface = gr.Interface(
    fn=generate_title,
    inputs=[
        gr.Textbox(label="Enter a keyword"),
        gr.Dropdown(label="Select a category", choices=list(distinct_categories))
    ],
    outputs=gr.Textbox(label="Recommendations"),
    title="Title Recommendation",
    description="Do'akan saya langgeng sm Ei"
)
iface.launch()
|