{ "cells": [
 { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "# # exploratory: inspect the processed sales data with pandas\n", "# import pandas as pd\n", "# # read the data\n", "# x = pd.read_json(\"processed/sales/final5.jsonl\", lines=True)\n", "# # x\n", "# x[\"Region Type\"].unique()\n", "# x[\"Home Type\"].unique()\n", "# x[\"Bedroom Count\"].unique()" ] },
 { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [], "source": [ "from datasets import load_dataset\n", "from os import path" ] },
 { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [
 { "name": "stdout", "output_type": "stream", "text": [ "home_values_forecasts\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 9.97MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 24.7MB/s]\n", "Downloading data: 100%|██████████| 14.1M/14.1M [00:00<00:00, 21.5MB/s]\n", "Generating train split: 100%|██████████| 31854/31854 [00:01<00:00, 26905.24 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 32/32 [00:00<00:00, 813.13ba/s]\n" ] },
 { "name": "stdout", "output_type": "stream", "text": [ "new_construction\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 16.8MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 28.7MB/s]\n", "Downloading data: 100%|██████████| 10.9M/10.9M [00:00<00:00, 21.7MB/s]\n", "Generating train split: 100%|██████████| 49487/49487 [00:01<00:00, 38197.59 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 50/50 [00:00<00:00, 1691.95ba/s]\n" ] },
 { "name": "stdout", "output_type": "stream", "text": [ "for_sale_listings\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 2.19MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 19.1MB/s]\n", "Downloading data: 100%|██████████| 180M/180M [00:04<00:00, 37.8MB/s]\n", "Generating train split: 100%|██████████| 578653/578653 [00:18<00:00, 31984.31 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 579/579 [00:00<00:00, 1326.61ba/s]\n" ] },
 { "name": "stdout", "output_type": "stream", "text": [ "rentals\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 6.31MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 15.0MB/s]\n", "Downloading data: 100%|██████████| 447M/447M [00:13<00:00, 32.0MB/s]\n", "Generating train split: 100%|██████████| 1258740/1258740 [00:31<00:00, 40439.23 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 1259/1259 [00:00<00:00, 1671.78ba/s]\n" ] },
 { "name": "stdout", "output_type": "stream", "text": [ "sales\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 16.1MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 14.9MB/s]\n", "Downloading data: 100%|██████████| 139M/139M [00:04<00:00, 34.1MB/s]\n", "Generating train split: 100%|██████████| 255024/255024 [00:10<00:00, 24278.38 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 256/256 [00:00<00:00, 862.81ba/s]\n" ] },
 { "name": "stdout", "output_type": "stream", "text": [ "home_values\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 11.3MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 12.2MB/s]\n", "Downloading data: 100%|██████████| 41.1M/41.1M [00:01<00:00, 34.2MB/s]\n", "Generating train split: 100%|██████████| 117912/117912 [00:03<00:00, 34804.14 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 118/118 [00:00<00:00, 1397.82ba/s]\n" ] },
 { "name": "stdout", "output_type": "stream", "text": [ "days_on_market\n" ] },
 { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 6.99MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 8.94MB/s]\n", "Downloading data: 100%|██████████| 229M/229M [00:06<00:00, 36.6MB/s]\n", "Generating train split: 100%|██████████| 586714/586714 [00:18<00:00, 31198.29 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 587/587 [00:00<00:00, 1241.06ba/s]\n" ] }
 ], "source": [
 "# download every config of the misikoff/zillow dataset and export each train split to Parquet\n",
 "# note: path.join(\"parquet_files\", ...) assumes the parquet_files/ directory already exists\n",
 "dataset_dict = {}\n",
 "\n",
 "configs = [\n",
 "    \"days_on_market\",\n",
 "    \"for_sale_listings\",\n",
 "    \"home_values\",\n",
 "    \"home_values_forecasts\",\n",
 "    \"new_construction\",\n",
 "    \"rentals\",\n",
 "    \"sales\",\n",
 "]\n",
 "for config in configs:\n",
 "    print(config)\n",
 "    dataset_dict[config] = load_dataset(\n",
 "        \"misikoff/zillow\",\n",
 "        config,\n",
 "        trust_remote_code=True,\n",
 "        download_mode=\"force_redownload\",\n",
 "        cache_dir=\"./cache\",\n",
 "    )\n",
 "    filename = path.join(\"parquet_files\", config + \".parquet\")\n",
 "    dataset_dict[config][\"train\"].to_parquet(filename)" ] },
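 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
 "# Added sanity check (sketch, not executed in the original run): read one exported\n",
 "# Parquet file back with pandas and confirm it round-trips. Assumes the loop above\n",
 "# completed and wrote parquet_files/sales.parquet, and that pandas and pyarrow are installed.\n",
 "import pandas as pd\n",
 "\n",
 "sales_df = pd.read_parquet(path.join(\"parquet_files\", \"sales.parquet\"))\n",
 "print(sales_df.shape)  # should match the generated sales train split (255024 rows)\n",
 "sales_df.head()" ] },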
"Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 11.3MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 12.2MB/s]\n", "Downloading data: 100%|██████████| 41.1M/41.1M [00:01<00:00, 34.2MB/s]\n", "Generating train split: 100%|██████████| 117912/117912 [00:03<00:00, 34804.14 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 118/118 [00:00<00:00, 1397.82ba/s]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "days_on_market\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "Downloading builder script: 100%|██████████| 26.9k/26.9k [00:00<00:00, 6.99MB/s]\n", "Downloading readme: 100%|██████████| 24.0k/24.0k [00:00<00:00, 8.94MB/s]\n", "Downloading data: 100%|██████████| 229M/229M [00:06<00:00, 36.6MB/s] \n", "Generating train split: 100%|██████████| 586714/586714 [00:18<00:00, 31198.29 examples/s]\n", "Creating parquet from Arrow format: 100%|██████████| 587/587 [00:00<00:00, 1241.06ba/s]\n" ] } ], "source": [ "dataset_dict = {}\n", "\n", "configs = [\n", " \"days_on_market\",\n", " \"for_sale_listings\",\n", " \"home_values\",\n", " \"home_values_forecasts\",\n", " \"new_construction\",\n", " \"rentals\",\n", " \"sales\",\n", "]\n", "for config in configs:\n", " print(config)\n", " dataset_dict[config] = load_dataset(\n", " \"misikoff/zillow\",\n", " config,\n", " trust_remote_code=True,\n", " download_mode=\"force_redownload\",\n", " cache_dir=\"./cache\",\n", " )\n", " filename = path.join(\"parquet_files\", config + \".parquet\")\n", " dataset_dict[config][\"train\"].to_parquet(filename)" ] }, { "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [], "source": [ "# import pyarrow as pa\n", "\n", "\n", "# df = pd.read_feather(\n", "# \"~/desktop/cache/misikoff___zillow/sales/1.1.0/c70d9545e9cef7612b795e19b5393a565f297e17856ab372df6f4026ecc498ae/zillow-train.arrow\"\n", "# )\n", "# df" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Creating parquet from Arrow format: 100%|██████████| 256/256 [00:00<00:00, 738.39ba/s]\n" ] }, { "data": { "text/plain": [ "27088039" ] }, "execution_count": 20, "metadata": {}, "output_type": "execute_result" } ], "source": [ "dataset_dict[config][\"train\"].to_parquet(\"test-sales.parquet\")" ] }, { "cell_type": "code", "execution_count": 32, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{'Region ID': '102001',\n", " 'Size Rank': 0,\n", " 'Region': 'United States',\n", " 'Region Type': 0,\n", " 'State': None,\n", " 'Home Type': 0,\n", " 'Date': datetime.datetime(2008, 2, 2, 0, 0),\n", " 'Mean Sale to List Ratio (Smoothed)': None,\n", " 'Median Sale to List Ratio': None,\n", " 'Median Sale Price': 172000.0,\n", " 'Median Sale Price (Smoothed) (Seasonally Adjusted)': None,\n", " 'Median Sale Price (Smoothed)': None,\n", " 'Median Sale to List Ratio (Smoothed)': None,\n", " '% Sold Below List': None,\n", " '% Sold Below List (Smoothed)': None,\n", " '% Sold Above List': None,\n", " '% Sold Above List (Smoothed)': None,\n", " 'Mean Sale to List Ratio': None}" ] }, "execution_count": 32, "metadata": {}, "output_type": "execute_result" } ], "source": [ "gen = iter(dataset_dict[config][\"train\"])\n", "next(gen)" ] } ], "metadata": { "kernelspec": { "display_name": "sta663", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", 
"nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.2" } }, "nbformat": 4, "nbformat_minor": 2 }