{ "cells": [ { "cell_type": "code", "execution_count": 86, "id": "7821c501-8c5d-4af6-81cd-caa6ad0bd58c", "metadata": {}, "outputs": [], "source": [ "from datasets import load_dataset, DatasetDict\n", "from datasets import concatenate_datasets\n", "from IPython.display import HTML\n", "\n", "from tqdm import tqdm\n", "import re \n", "import numpy as np\n", "from markdownify import markdownify as md" ] }, { "cell_type": "code", "execution_count": 80, "id": "dc821970-efdb-407f-bd79-59da09323280", "metadata": { "scrolled": true, "tags": [] }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Found cached dataset parquet (/home/leandro/.cache/huggingface/datasets/HuggingFaceH4___parquet/HuggingFaceH4--stack-exchange-preferences-1d2bff9ecb5ffe2a/0.0.0/2a3b91fbd88a2c90d1dbbb32b460cf621d31bd5b05b934492fdef7d8d6f236ec)\n" ] }, { "data": { "text/plain": [ "Dataset({\n", " features: ['qid', 'question', 'answers', 'date', 'metadata'],\n", " num_rows: 10807695\n", "})" ] }, "execution_count": 80, "metadata": {}, "output_type": "execute_result" } ], "source": [ "ds = load_dataset(\"HuggingFaceH4/stack-exchange-preferences\", split=\"train\", num_proc=16)\n", "ds" ] }, { "cell_type": "code", "execution_count": 81, "id": "0d8d8729-6d6b-4791-a24a-cb112c399bd0", "metadata": {}, "outputs": [ { "data": { "text/html": [ "
I have been wanting to learn about 3D printing a long time so I really want this site to succeed but I have no previous experience with the subject.
\n", "\n", "I was wondering how can I help the site at this early stage. I thought about asking about how to get started with 3D printing but SE explicitly discourages \"easy\" questions in the private beta.
\n", "\n", "What can newbies like me do for the site at this stage besides voting questions and answers?
\n" ], "text/plain": [ "Looking at the library code, seems all events are renamed removing 'fileupload' ... so 'fileuploaddone' becomes just 'done'. It is valid for all other callbacks.\\nlook at this section:
\\n\\n // Other callbacks:\\n // Callback for the submit event of each file upload:\\n // submit: function (e, data) {}, // .bind('fileuploadsubmit', func);\\n // Callback for the start of each file upload request:\\n // send: function (e, data) {}, // .bind('fileuploadsend', func);\\n // Callback for successful uploads:\\n // done: function (e, data) {}, // .bind('fileuploaddone', func);\\n // Callback for failed (abort or error) uploads:\\n // fail: function (e, data) {}, // .bind('fileuploadfail', func);\\n // Callback for completed (success, abort or error) requests:\\n // always: function (e, data) {}, // .bind('fileuploadalways', func);\\n // Callback for upload progress events:\\n // progress: function (e, data) {}, // .bind('fileuploadprogress', func);\\n // Callback for global upload progress events:\\n // progressall: function (e, data) {}, // .bind('fileuploadprogressall', func);\\n // Callback for uploads start, equivalent to the global ajaxStart event:\\n // start: function (e) {}, // .bind('fileuploadstart', func);\\n // Callback for uploads stop, equivalent to the global ajaxStop event:\\n // stop: function (e) {}, // .bind('fileuploadstop', func);\\n // Callback for change events of the fileInput(s):\\n // change: function (e, data) {}, // .bind('fileuploadchange', func);\\n // Callback for paste events to the pasteZone(s):\\n // paste: function (e, data) {}, // .bind('fileuploadpaste', func);\\n // Callback for drop events of the dropZone(s):\\n // drop: function (e, data) {}, // .bind('fileuploaddrop', func);\\n // Callback for dragover events of the dropZone(s):\\n // dragover: function (e) {}, // .bind('fileuploaddragover', func);\\n
\\n\\nIf you have some doubts about what's happening, just look at the code inside. This library is not compressed so it is easy to see. for example
\\n\\n// start: function (e) {}, // .bind('fileuploadstart', func);\\n
\\n\\nstart
callback is implemented. fileuploadstart
is not.
Check if the server-side uploading script returns a JSON reply - in my case it didn\\'t work when the reply was empty, but file was uploaded successfully.
\\n\\nSo, below is working for me with jQuery 1.9.1 and the newest version of the \"jQuery File Upload Plugin\" - 5.21.3
\\n\\n$(\"#fileupload\").bind(\"fileuploaddone\", function (e, data) {\\n console.log(\"fileuploaddone event fired\");\\n});\\n
\\n'}],\n",
" 'date': '2012/10/15',\n",
" 'metadata': ['https://Stackoverflow.com/questions/12891264',\n",
" 'https://Stackoverflow.com',\n",
" 'https://Stackoverflow.com/users/767244/'],\n",
" 'response_j': \"Looking at the library code, seems all events are renamed removing 'fileupload' ... so 'fileuploaddone' becomes just 'done'. It is valid for all other callbacks.\\nlook at this section:\\n\\n```\\n // Other callbacks:\\n // Callback for the submit event of each file upload:\\n // submit: function (e, data) {}, // .bind('fileuploadsubmit', func);\\n // Callback for the start of each file upload request:\\n // send: function (e, data) {}, // .bind('fileuploadsend', func);\\n // Callback for successful uploads:\\n // done: function (e, data) {}, // .bind('fileuploaddone', func);\\n // Callback for failed (abort or error) uploads:\\n // fail: function (e, data) {}, // .bind('fileuploadfail', func);\\n // Callback for completed (success, abort or error) requests:\\n // always: function (e, data) {}, // .bind('fileuploadalways', func);\\n // Callback for upload progress events:\\n // progress: function (e, data) {}, // .bind('fileuploadprogress', func);\\n // Callback for global upload progress events:\\n // progressall: function (e, data) {}, // .bind('fileuploadprogressall', func);\\n // Callback for uploads start, equivalent to the global ajaxStart event:\\n // start: function (e) {}, // .bind('fileuploadstart', func);\\n // Callback for uploads stop, equivalent to the global ajaxStop event:\\n // stop: function (e) {}, // .bind('fileuploadstop', func);\\n // Callback for change events of the fileInput(s):\\n // change: function (e, data) {}, // .bind('fileuploadchange', func);\\n // Callback for paste events to the pasteZone(s):\\n // paste: function (e, data) {}, // .bind('fileuploadpaste', func);\\n // Callback for drop events of the dropZone(s):\\n // drop: function (e, data) {}, // .bind('fileuploaddrop', func);\\n // Callback for dragover events of the dropZone(s):\\n // dragover: function (e) {}, // .bind('fileuploaddragover', func);\\n\\n```\\n\\nIf you have some doubts about what's happening, just look at the code inside. This library is not compressed so it is easy to see. for example\\n\\n```\\n// start: function (e) {}, // .bind('fileuploadstart', func);\\n\\n```\\n\\n`start` callback is implemented. `fileuploadstart` is not.\",\n",
" 'response_k': 'Check if the server-side uploading script returns a JSON reply - in my case it didn\\'t work when the reply was empty, but file was uploaded successfully.\\n\\nSo, below is working for me with jQuery 1.9.1 and the newest version of the \"jQuery File Upload Plugin\" - 5.21.3\\n\\n```\\n$(\"#fileupload\").bind(\"fileuploaddone\", function (e, data) {\\n console.log(\"fileuploaddone event fired\");\\n});\\n\\n```'}"
]
},
"execution_count": 93,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"ds_result[\"finetune\"][0]"
]
},
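  {
   "cell_type": "markdown",
   "id": "c3e4f5a6-2b7c-4d8e-af90-1b2c3d4e5f6a",
   "metadata": {},
   "source": [
    "A quick spot check (illustrative, not part of the original pipeline): the preferred and rejected responses in a pair should essentially never coincide."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d4f5a6b7-3c8d-4e9f-b0a1-2c3d4e5f6a7b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Spot check on a small sample: count pairs whose two responses are identical.\n",
    "sample = ds_result[\"finetune\"].select(range(1000))\n",
    "print(sum(ex[\"response_j\"] == ex[\"response_k\"] for ex in sample), \"identical pairs in sample\")"
   ]
  },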
{
"cell_type": "code",
"execution_count": 94,
"id": "2c96653b-7a5a-4cae-a327-b6aa77aa5850",
"metadata": {},
"outputs": [],
"source": [
"ds_result = ds_result.remove_columns([\"answers\"])"
]
},
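  {
   "cell_type": "markdown",
   "id": "e5a6b7c8-4d9e-4fa0-b1b2-3d4e5f6a7b8c",
   "metadata": {},
   "source": [
    "With `answers` dropped, every split should now expose only the paired columns; a quick check (illustrative):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f6b7c8d9-5eaf-4b01-8c23-4e5f6a7b8c9d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Confirm the 'answers' column is gone from every split.\n",
    "for key in ds_result:\n",
    "    assert \"answers\" not in ds_result[key].column_names"
   ]
  },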
{
"cell_type": "code",
"execution_count": 95,
"id": "15c2e5ee-7c7d-4e98-9e63-e5d37a9354aa",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"DatasetDict({\n",
" finetune: Dataset({\n",
" features: ['qid', 'question', 'date', 'metadata', 'response_j', 'response_k'],\n",
" num_rows: 7440923\n",
" })\n",
" reward: Dataset({\n",
" features: ['qid', 'question', 'date', 'metadata', 'response_j', 'response_k'],\n",
" num_rows: 7441998\n",
" })\n",
" rl: Dataset({\n",
" features: ['qid', 'question', 'date', 'metadata', 'response_j', 'response_k'],\n",
" num_rows: 7435908\n",
" })\n",
" evaluation: Dataset({\n",
" features: ['qid', 'question', 'date', 'metadata', 'response_j', 'response_k'],\n",
" num_rows: 4483004\n",
" })\n",
"})"
]
},
"execution_count": 95,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"ds_result"
]
},
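  {
   "cell_type": "markdown",
   "id": "a7c8d9e0-6f0a-4c12-8d3e-5f6a7b8c9d0e",
   "metadata": {},
   "source": [
    "The four splits are intended to be disjoint at the question level; a spot check on `qid` overlap between two of them (illustrative, and memory-hungry at this scale):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b8d9e0f1-7a1b-4d23-9e4f-6a7b8c9d0e1f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Count question ids shared between the finetune and reward splits.\n",
    "finetune_qids = set(ds_result[\"finetune\"].unique(\"qid\"))\n",
    "reward_qids = set(ds_result[\"reward\"].unique(\"qid\"))\n",
    "print(f\"shared qids: {len(finetune_qids & reward_qids)}\")"
   ]
  },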
{
"cell_type": "code",
"execution_count": 96,
"id": "4d42b35c-5252-4b49-ba4b-20818bc9e086",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"finetune\n",
"reward\n",
"rl\n",
"evaluation\n"
]
}
],
"source": [
"for key in ds_result:\n",
" print(key)"
]
},
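  {
   "cell_type": "markdown",
   "id": "c9e0f1a2-8b2c-4e34-af50-7b8c9d0e1f2a",
   "metadata": {},
   "source": [
    "The same loop can also report split sizes (illustrative):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d0f1a2b3-9c3d-4f45-b061-8c9d0e1f2a3b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Print each split together with its row count.\n",
    "for key in ds_result:\n",
    "    print(key, ds_result[key].num_rows)"
   ]
  },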
{
"cell_type": "code",
"execution_count": 100,
"id": "e32c11d7-a88e-4d92-9dfc-92b2a67c5455",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import time\n",
"from multiprocessing import Pool\n",
"from tqdm import tqdm\n",
"\n",
"from huggingface_hub import Repository\n",
"\n",
"\n",
"def save_shard(shard_tuple):\n",
" \"\"\"Save shard\"\"\"\n",
" filename, shard = shard_tuple\n",
" # use to_json instead to save as json file\n",
" shard.to_parquet(filename)\n",
"\n",
"\n",
"def save_manual_shards(ds, user=\"lvwerra\", remote_dataset_repo=\"stack-exchange-paired\", subfolder=\"train\"):\n",
" \"\"\"Save sharded data\n",
" Args:\n",
" ds (Dataset): dataset to be saved\n",
" user (str): user name\n",
" remote_dataset_repo (str): remote dataset repository\n",
" out_path (str): path to save the shards\"\"\"\n",
" # this will create a folder OUT_PATH that is a clone of REMOTE_DATASET_REPO\n",
" # you can save the shards inside it and do git add/commit/push to push data to the hub\n",
" out_path = remote_dataset_repo\n",
" # if out path doesnt already exist\n",
" if not os.path.exists(out_path):\n",
" repo = Repository(\n",
" local_dir=out_path,\n",
" clone_from=user + \"/\" + remote_dataset_repo,\n",
" repo_type=\"dataset\",\n",
" private=False,\n",
" use_auth_token=True,\n",
" git_user=user,\n",
" )\n",
"\n",
" # files will be numerous we save them in a folder called data inside out_path\n",
" if not os.path.exists(out_path):\n",
" os.mkdir(out_path + \"/data\")\n",
" os.mkdir(out_path + f\"/data/{subfolder}\")\n",
" \n",
" SHARD_SIZE = 1000 << 20\n",
" if ds._indices is not None:\n",
" dataset_nbytes = ds.data.nbytes * len(ds._indices) / len(ds.data)\n",
" else:\n",
" dataset_nbytes = ds.data.nbytes\n",
" num_shards = int(dataset_nbytes / SHARD_SIZE) + 1\n",
" print(f\"Number of shards: {num_shards}\")\n",
"\n",
" print(\"sharding the dataset\")\n",
" t_start = time.time()\n",
" shards = (\n",
" ds.shard(num_shards=num_shards, index=i, contiguous=True)\n",
" for i in range(num_shards)\n",
" )\n",
" # use f\"{OUT_PATH}/data/train-{index:05d}-of-{num_shards:05d}.json\" instead for json files\n",
" filenames = (\n",
" f\"{out_path}/data/{subfolder}/train-{index:05d}-of-{num_shards:05d}.parquet\"\n",
" for index in range(num_shards)\n",
" )\n",
"\n",
" with Pool(16) as p:\n",
" list(\n",
" tqdm(\n",
" p.imap_unordered(save_shard, zip(filenames, shards), chunksize=4),\n",
" total=num_shards,\n",
" )\n",
" )\n",
" print(f\"Time to save dataset: {time.time()-t_start:.2f}\")\n",
" # to push dataset to hub do: git add/commit/push inside OUT_PATH"
]
},
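  {
   "cell_type": "markdown",
   "id": "e1a2b3c4-0d4e-4056-8172-9d0e1f2a3b4c",
   "metadata": {},
   "source": [
    "`SHARD_SIZE = 1000 << 20` is 1000 MiB, so the function targets roughly 1 GiB parquet files: `num_shards` is the estimated Arrow byte size divided by that, plus one. For example (illustrative):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f2b3c4d5-1e5f-4167-9283-0e1f2a3b4c5d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reproduce the shard count computed above for one split.\n",
    "SHARD_SIZE = 1000 << 20  # 1000 * 2**20 bytes = 1000 MiB\n",
    "nbytes = ds_result[\"finetune\"].data.nbytes\n",
    "print(f\"{nbytes / 2**30:.1f} GiB -> {int(nbytes / SHARD_SIZE) + 1} shards\")"
   ]
  },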
{
"cell_type": "code",
"execution_count": 101,
"id": "a90664eb-5c54-4fae-9a8a-d509bb2abdfe",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Number of shards: 20\n",
"sharding the dataset\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 20/20 [00:28<00:00, 1.43s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Time to save dataset: 29.15\n",
"Number of shards: 20\n",
"sharding the dataset\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 20/20 [00:22<00:00, 1.15s/it]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Time to save dataset: 23.42\n",
"Number of shards: 20\n",
"sharding the dataset\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 20/20 [00:10<00:00, 1.83it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Time to save dataset: 11.36\n",
"Number of shards: 12\n",
"sharding the dataset\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 12/12 [00:10<00:00, 1.12it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Time to save dataset: 11.13\n"
]
}
],
"source": [
"for key in ds_result:\n",
" save_manual_shards(ds_result[key], subfolder=key)"
]
},
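  {
   "cell_type": "markdown",
   "id": "a3c4d5e6-2f60-4278-8394-1f2a3b4c5d6e",
   "metadata": {},
   "source": [
    "As the comment at the end of `save_manual_shards` notes, the shards still need to be committed and pushed from inside the clone. A sketch using the same `Repository` API, assuming the local clone created above and an authenticated user:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b4d5e6f7-3a71-4389-94a5-2a3b4c5d6e7f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Commit and push the saved parquet shards from the local clone.\n",
    "repo = Repository(local_dir=\"stack-exchange-paired\", repo_type=\"dataset\")\n",
    "repo.git_add(auto_lfs_track=True)  # stage everything, tracking large files with LFS\n",
    "repo.git_commit(\"Add manually sharded parquet files\")\n",
    "repo.git_push()"
   ]
  },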
{
"cell_type": "code",
"execution_count": null,
"id": "d62f5a7f-2a23-4e0d-9e49-b29f88ea8c13",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}