import pandas as pd
import urllib3
import json
from bs4 import BeautifulSoup
import numpy as np
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import Future
from traitlets import List  # legacy import; shadowed by typing.List below

from reddit.reddit_info import subreddit_name_l, subreddit_sort_l, subreddit_t_l
import itertools
import random
from pathlib import Path

from tqdm import tqdm

from datetime import datetime, timezone
from typing import List, Optional
from requests.utils import requote_uri
from random_word import RandomWords
from wonderwords import RandomSentence


def get_subreddit_url(subreddit, sort_by: str = "hot", sort_time: str = "all",
                      limit: int = 100, query: Optional[str] = None):
    """Build a reddit JSON listing (or search) URL for a subreddit.

    Parameters
    ----------
    subreddit : subreddit name, without the leading ``r/``.
    sort_by   : listing sort ("hot", "new", "top", ...); ignored when ``query`` is set.
    sort_time : time window for the sort ("all", "day", ...); ignored when ``query`` is set.
    limit     : maximum number of posts to request.
    query     : optional search string; when given, a search URL is returned instead.
    """
    if not query:
        return (f'https://www.reddit.com/r/{subreddit}/{sort_by}/.json'
                f'?raw_json=1&t={sort_time}&limit={limit}')
    # BUG FIX: the search branch previously hard-coded limit={100} in the
    # f-string and silently ignored the ``limit`` argument.
    return (f'https://www.reddit.com/r/{subreddit}/search/.json'
            f'?raw_json=1&q={query}&limit={limit}')


subreddit_url = get_subreddit_url("CityPorn", "hot", "all", 100)  # , query="6 years ago")
subreddit_url = requote_uri(subreddit_url)
subreddit_url

# Reddit rejects requests carrying the default urllib3 user agent, so set one.
user_agent = {'user-agent': 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) ..'}
pool_manager = urllib3.PoolManager(headers=user_agent)
res = []


def fetch_subreddit_image_entries(subreddit_url: str, pool_manager):
    """Fetch one subreddit listing page and extract its image posts.

    Returns a list of dicts with keys: image_url, title, image_id, url,
    subreddit, width, height, created_utc.  All errors (network, JSON,
    malformed posts) are swallowed and yield an empty result — callers treat
    this as best-effort scraping.
    """
    result = []
    try:
        response = pool_manager.request('GET', subreddit_url)
        subreddit_data = json.loads(response.data)

        if "data" not in subreddit_data:
            return []
        if "children" not in subreddit_data["data"]:
            return []

        for content in subreddit_data['data']['children']:
            try:
                data = content['data']
                if data.get('post_hint', 'none') != 'image' or 'preview' not in data:
                    continue
                # Skip videos before doing any extraction work.
                if data['is_video']:
                    continue
                created_utc = datetime.fromtimestamp(data["created_utc"], timezone.utc)
                source_d = data['preview']['images'][0]['source']
                result.append({
                    "image_url": source_d['url'],
                    "title": data['title'],
                    "image_id": data['id'],
                    "url": data['url'],
                    "subreddit": data['subreddit'],
                    "width": source_d["width"],
                    "height": source_d["height"],
                    "created_utc": created_utc,
                })
            except Exception:
                # Best-effort: one malformed post must not abort the page.
                continue
        return result
    except Exception:
        # Best-effort: treat request/JSON failures as "no entries".
        return []

# fetch_subreddit_image_entries(subreddit_url, pool_manager)


def fetch_multiple_subreddit_image_entries(subreddit_urls: List[str],
                                           thread_pool_size: int = 5,
                                           urllib_pool_size: int = 5):
    """Fetch several subreddit listings concurrently; de-duplicate by image_id.

    Returns the merged entry list with one entry per image_id (last wins).
    """
    # BUG FIX: the shared pool previously had no user-agent headers, so Reddit
    # could reject these requests even though the single-URL path sets one.
    pool_manager = urllib3.PoolManager(maxsize=urllib_pool_size, headers=user_agent)

    entries: List[dict] = []
    # BUG FIX: the executor was never shut down; the context manager joins the
    # worker threads deterministically once all futures have resolved.
    with ThreadPoolExecutor(thread_pool_size) as thread_pool:
        futures = [thread_pool.submit(fetch_subreddit_image_entries, url, pool_manager)
                   for url in subreddit_urls]
        for fut in futures:
            entries.extend(fut.result())

    return list({entry["image_id"]: entry for entry in entries}.values())


def get_random_subreddit_urls(num_urls: int = 20):
    """Return ``num_urls`` listing URLs over random (subreddit, sort, time) combos."""
    subr_l = list(itertools.product(subreddit_name_l, subreddit_sort_l, subreddit_t_l))
    return [get_subreddit_url(*xs, 100) for xs in random.sample(subr_l, k=num_urls)]


def get_random_subreddit_query_urls(num_urls: int = 20, query_type: str = "chronology"):
    '''
    Return ``num_urls`` search URLs over random subreddits.

    query_type:
        chronology     -- queries like "3 months ago"
        random_word    -- a single random word per subreddit
        random_phrase  -- a random sentence per subreddit
    '''
    timeline = random.choices(["days", "months", "years"], k=num_urls)
    timevalue = random.choices(range(1, 12), k=num_urls)
    subr = random.sample(subreddit_name_l, k=num_urls)

    if query_type == "chronology":
        # BUG FIX: itertools.product(subr, timeline, timevalue) produced
        # num_urls**3 URLs instead of num_urls; pair the sampled values 1:1.
        return [get_subreddit_url(subreddit=sr, query=f"{tv} {tl} ago")
                for sr, tl, tv in zip(subr, timeline, timevalue)]
    elif query_type == "random_word":
        r = RandomWords()
        return [get_subreddit_url(subreddit=sr, query=f"{r.get_random_word()}") for sr in subr]
    elif query_type == "random_phrase":
        s = RandomSentence()
        return [get_subreddit_url(subreddit=sr, query=f"{s.sentence()}") for sr in subr]
    else:
        return []

# get_random_subreddit_urls()
# get_random_subreddit_query_urls(query_type="random_phrase")

# subreddit_url = get_random_subreddit_query_urls(num_urls=5)[2]
# subreddit_url = get_subreddit_url("CityPorn", "hot", "all", 100, query="11 years ago")
# fetch_subreddit_image_entries(subreddit_url, pool_manager)

# res = fetch_multiple_subreddit_image_entries(get_random_subreddit_urls(num_urls=100))
# Fetch policies: each entry maps a policy name to a zero-arg callable that
# returns a list of fetched image entries.
policies = [
    # BUG FIX: name was misspelled "subredit_sort".
    ("subreddit_sort",
     lambda: fetch_multiple_subreddit_image_entries(
         get_random_subreddit_urls(num_urls=100))),
    # BUG FIX: the three query policies previously returned raw URL lists
    # instead of fetched entries; wrap them like the first policy so every
    # callable has the same contract.
    ("subreddit_chrono",
     lambda: fetch_multiple_subreddit_image_entries(
         get_random_subreddit_query_urls(num_urls=5, query_type="chronology"))),
    ("subreddit_random_word",
     lambda: fetch_multiple_subreddit_image_entries(
         get_random_subreddit_query_urls(num_urls=5, query_type="random_word"))),
    ("subreddit_random_phrase",
     lambda: fetch_multiple_subreddit_image_entries(
         get_random_subreddit_query_urls(num_urls=5, query_type="random_phrase"))),
]

# BUG FIX: [[]] * n aliases ONE list object n times; use comprehensions so
# each policy gets its own independent list.
policy_count = [[] for _ in policies]
policy_hit = [[] for _ in policies]

dfname = "reddit.csv"
otime = 0

tarr = []  # per-iteration elapsed seconds (sliding window of 25)
karr = []  # per-iteration new-row counts (sliding window of 25)

total_updates = 0

# Load the set of already-seen image ids ONCE.  The previous version re-read
# the entire CSV on every iteration (O(n^2) total I/O) just to rebuild this
# set and to count rows for the progress description.
if Path(dfname).exists():
    seen_ids = set(pd.read_csv(dfname)["image_id"])
else:
    seen_ids = set()
total_rows = len(seen_ids)

with tqdm(total=10000) as pbar:
    for _ in range(10000):
        # ~40% listing crawl, ~60% random-phrase search crawl.
        if random.random() > 0.6:
            res = fetch_multiple_subreddit_image_entries(
                get_random_subreddit_urls(num_urls=100))
        else:
            res = fetch_multiple_subreddit_image_entries(
                get_random_subreddit_query_urls(num_urls=5, query_type="random_phrase"))

        new_rows = [x for x in res if x["image_id"] not in seen_ids]
        if new_rows:
            write_header = not Path(dfname).exists()  # header only on first write
            # BUG FIX: to_csv's header flag is a bool; was header=None.
            pd.DataFrame(new_rows).to_csv(dfname, mode="a",
                                          header=write_header, index=False)
            seen_ids.update(x["image_id"] for x in new_rows)
            total_rows += len(new_rows)
        # BUG FIX: karr was only appended when the fetch succeeded, so an
        # empty first fetch made karr[-1] raise IndexError and left tarr and
        # karr out of step; now both windows advance every iteration.
        karr.append(len(new_rows))

        ntime = pbar.format_dict['elapsed']
        tarr.append(ntime - otime)
        otime = ntime
        tarr = tarr[-25:]
        karr = karr[-25:]
        # BUG FIX: guard against ZeroDivisionError when the elapsed-time
        # window sums to zero (sub-resolution first tick).
        rate = sum(karr) / sum(tarr) if sum(tarr) > 0 else 0.0
        pbar.update(1)
        total_updates = total_updates + karr[-1]
        pbar.set_description_str(
            f"count:{total_rows}, fetch rate:{rate:.3f}, "
            f"last_update:{karr[-1]}, total_updates:{total_updates}")