import spaces  # Hugging Face Spaces SDK; kept as the first import

import os

os.environ["COMMANDLINE_ARGS"] = "--no-gradio-queue"

import logging
import re
from urllib.parse import urlparse

import emoji
import gradio as gr
import pandas as pd
import requests
import unicodedata as uni
from dotenv import load_dotenv
from langchain.chains import RetrievalQA
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import DataFrameLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_openai import ChatOpenAI

# from tokopedia import request_product_id, request_product_review

load_dotenv()

shop_id = ""
item_id = ""
item = {}
LIMIT = 1000  # Limit to 1000 reviews so that processing does not take too long

# Configure logging
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=[logging.StreamHandler()],
)
logger = logging.getLogger(__name__)


def request_product_id(shop_domain, product_key):
    ENDPOINT = "https://gql.tokopedia.com/graphql/PDPGetLayoutQuery"
    payload = {
        "operationName": "PDPGetLayoutQuery",
        "variables": {
            "shopDomain": f"{shop_domain}",
            "productKey": f"{product_key}",
            "apiVersion": 1,
        },
        "query": """fragment ProductVariant on pdpDataProductVariant {
  errorCode
  parentID
  defaultChild
  children {
    productID
  }
  __typename
}

query PDPGetLayoutQuery($shopDomain: String, $productKey: String, $layoutID: String, $apiVersion: Float, $userLocation: pdpUserLocation, $extParam: String, $tokonow: pdpTokoNow, $deviceID: String) {
  pdpGetLayout(shopDomain: $shopDomain, productKey: $productKey, layoutID: $layoutID, apiVersion: $apiVersion, userLocation: $userLocation, extParam: $extParam, tokonow: $tokonow, deviceID: $deviceID) {
    requestID
    name
    pdpSession
    basicInfo {
      id: productID
    }
    components {
      name
      type
      position
      data {
        ...ProductVariant
        __typename
      }
      __typename
    }
    __typename
  }
}
""",
    }
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36",
        "Referer": "https://www.tokopedia.com",
        "X-TKPD-AKAMAI": "pdpGetLayout",
    }
    try:
        response = requests.post(ENDPOINT, json=payload, headers=headers, timeout=30)
        response.raise_for_status()  # Raise an exception for non-2xx status codes
        logger.info(f"Request successful. Status code: {response.status_code}")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed: {e}")
    else:
        return response
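
# Illustrative sketch only (comments, nothing executed here): how
# request_product_id is expected to be called. "some-shop" and "some-product"
# are made-up placeholders, not a real Tokopedia listing. The function
# returns None when the request fails, so callers should guard against that:
#
#   resp = request_product_id("some-shop", "some-product")
#   if resp is not None:
#       product_id = resp.json()["data"]["pdpGetLayout"]["basicInfo"]["id"]
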
def request_product_review(product_id, page=1, limit=20):
    ENDPOINT = "https://gql.tokopedia.com/graphql/productReviewList"
    payload = {
        "operationName": "productReviewList",
        "variables": {
            "productID": f"{product_id}",
            "page": page,
            "limit": limit,
            "sortBy": "",
            "filterBy": "",
        },
        "query": """query productReviewList($productID: String!, $page: Int!, $limit: Int!, $sortBy: String, $filterBy: String) {
  productrevGetProductReviewList(productID: $productID, page: $page, limit: $limit, sortBy: $sortBy, filterBy: $filterBy) {
    productID
    list {
      id: feedbackID
      variantName
      message
      productRating
      reviewCreateTime
      reviewCreateTimestamp
      isReportable
      isAnonymous
      reviewResponse {
        message
        createTime
        __typename
      }
      user {
        userID
        fullName
        image
        url
        __typename
      }
      likeDislike {
        totalLike
        likeStatus
        __typename
      }
      stats {
        key
        formatted
        count
        __typename
      }
      badRatingReasonFmt
      __typename
    }
    shop {
      shopID
      name
      url
      image
      __typename
    }
    hasNext
    totalReviews
    __typename
  }
}
""",
    }
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36",
        "Referer": "https://www.tokopedia.com",
        "X-TKPD-AKAMAI": "productReviewList",
    }
    try:
        response = requests.post(ENDPOINT, json=payload, headers=headers, timeout=30)
        response.raise_for_status()  # Raise an exception for non-2xx status codes
        logger.info(f"Request successful. Status code: {response.status_code}")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed: {e}")
    else:
        return response


def scrape(product_id, max_reviews=LIMIT):
    all_reviews = []
    page = 1
    has_next = True
    logger.info("Extracting product reviews...")
    # Page through the review endpoint until there are no more pages
    # or the review cap is reached
    while has_next and len(all_reviews) <= max_reviews:
        response = request_product_review(product_id, page=page)
        data = response.json()["data"]["productrevGetProductReviewList"]
        reviews = data["list"]
        all_reviews.extend(reviews)
        has_next = data["hasNext"]
        page += 1
    reviews_df = pd.json_normalize(all_reviews)
    reviews_df.rename(columns={"message": "comment"}, inplace=True)
    reviews_df = reviews_df[["comment"]]
    logger.info(reviews_df.head())
    return reviews_df


def get_product_id(URL):
    parsed_url = urlparse(URL)
    *_, SHOP, PRODUCT_KEY = parsed_url.path.split("/")
    product_id = request_product_id(SHOP, PRODUCT_KEY).json()["data"]["pdpGetLayout"][
        "basicInfo"
    ]["id"]
    logger.info(product_id)
    return product_id


# Clean
def clean(df):
    df = df.dropna().copy().reset_index(drop=True)  # drop reviews with missing comments
    df = df[df["comment"] != ""].reset_index(drop=True)  # remove empty reviews
    df["comment"] = df["comment"].apply(clean_text)  # clean the review text
    df = df[df["comment"] != ""].reset_index(drop=True)  # remove reviews emptied by cleaning
    logger.info("cleaned")
    return df


def clean_text(text):
    text = uni.normalize("NFKD", text)  # normalise unicode characters
    text = emoji.replace_emoji(text, "")  # remove emoji
    text = re.sub(r"(\w)\1{2,}", r"\1", text)  # collapse characters repeated 3+ times
    text = re.sub(r"[ ]+", " ", text).strip()  # remove extra spaces
    return text
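
# Rough illustration of the cleaning above (invented example value):
#   clean_text("Baguuuus 👍👍")  ->  "Bagus"
# i.e. NFKD-normalised, emoji stripped, characters repeated three or more
# times collapsed to one, and extra spaces removed.
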
# LLM
OpenAIModel = "gpt-3.5-turbo"
llm = ChatOpenAI(model=OpenAIModel, temperature=0.1)

# Embeddings
embeddings = HuggingFaceEmbeddings(model_name="LazarusNLP/all-indobert-base-v2")

cache_URL = ""
db = None
qa = None
cache = {}  # maps a product URL to its (docs, FAISS index) pair


@spaces.GPU
async def generate(URL, query):
    global cache_URL, db, qa, cache
    if URL == "" or query == "":
        return "Input kosong"  # "Empty input"
    try:
        product_id = get_product_id(URL)
        if URL not in cache:
            # Get reviews
            try:
                reviews = scrape(product_id)
                # Clean reviews
                cleaned_reviews = clean(reviews)
                # Load data
                loader = DataFrameLoader(cleaned_reviews, page_content_column="comment")
                documents = loader.load()
            except Exception as e:
                return "Error getting reviews: " + str(e)
            else:
                # Split text
                text_splitter = RecursiveCharacterTextSplitter(
                    chunk_size=1000, chunk_overlap=50
                )
                docs = text_splitter.split_documents(documents)
                # Vector store
                db = FAISS.from_documents(docs, embeddings)
                # Store in cache
                cache[URL] = (docs, db)
        # Retrieve from cache
        docs, db = cache[URL]
        # Chain to answer questions
        qa = RetrievalQA.from_chain_type(llm=llm, retriever=db.as_retriever())
        res = await qa.ainvoke(query)
        # Return the answer text
        return res["result"]
    except Exception as e:
        logger.error(e)
        return "Gagal mendapatkan review dari URL"  # "Failed to fetch reviews from the URL"


# Gradio UI (labels are in Indonesian: "URL Produk" = product URL,
# "Kueri" = query, "Jawaban" = answer)
product_box = gr.Textbox(label="URL Produk", placeholder="URL produk dari Tokopedia")
query_box = gr.Textbox(
    lines=2,
    label="Kueri",
    placeholder="Contoh: Apa yang orang katakan tentang kualitas produknya?, Bagaimana pendapat orang yang kurang puas dengan produknya?",
)

gr.Interface(
    fn=generate,
    inputs=[product_box, query_box],
    outputs=[gr.Textbox(label="Jawaban")],
    title="RingkasUlas",
    description="Bot percakapan yang bisa meringkas ulasan-ulasan produk di Tokopedia Indonesia (https://tokopedia.com/). Harap bersabar, bot ini dapat memakan waktu agak lama saat mengambil ulasan dari Tokopedia dan menyiapkan jawabannya.",
    allow_flagging="never",
).launch(debug=True)
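
# Assumed setup, not pinned anywhere in this file: ChatOpenAI reads its key
# from the environment, so the .env file loaded by load_dotenv() at the top
# is expected to provide an OPENAI_API_KEY entry. HuggingFaceEmbeddings
# additionally requires the sentence-transformers package, and
# FAISS.from_documents needs faiss-cpu (or faiss-gpu) installed.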