|
from datasets import Dataset |
|
from pathlib import Path |
|
import json |
|
import logging |
|
from typing import Dict, List |
|
import requests |
|
from PIL import Image |
|
from io import BytesIO |
|
|
|
# Configure root logging once at import time; INFO level so pipeline
# progress messages below are visible when run as a script.
logging.basicConfig(level=logging.INFO)

# Module-level logger, named after the module per stdlib convention.
logger = logging.getLogger(__name__)
|
|
|
class DatasetProcessor:
    """Validate scraped entries and package them as a HuggingFace dataset.

    Reads raw entries from a JSON file, drops every entry whose image URL
    cannot be fetched and decoded, and builds a ``Dataset`` from the rest.
    """

    def __init__(self, raw_data_path: str = 'dataset/raw_data.json'):
        # Path to the JSON file produced by the scraping step.
        self.raw_data_path = Path(raw_data_path)

    def load_raw_data(self) -> List[Dict]:
        """Load scraped entries from the JSON file at ``self.raw_data_path``.

        Returns:
            The decoded JSON payload — a list of entry dicts with (at least)
            ``image_url``, ``category`` and ``metadata`` keys, per
            ``process_data``'s usage. TODO confirm against the scraper.
        """
        # Explicit encoding: never depend on the platform default.
        with open(self.raw_data_path, encoding='utf-8') as f:
            return json.load(f)

    def validate_image(self, url: str, timeout: float = 10.0) -> bool:
        """Return True if ``url`` serves an image that PIL can decode.

        Args:
            url: Image URL to fetch.
            timeout: Seconds to wait for the HTTP request. New parameter
                with a default, so existing callers are unaffected.
        """
        try:
            # Bound the request so one unresponsive host cannot hang the
            # whole pipeline (requests has NO default timeout).
            response = requests.get(url, timeout=timeout)
            # A 404 HTML page must not count as a valid image.
            response.raise_for_status()
            # verify() confirms the payload is a decodable image without
            # loading full pixel data.
            Image.open(BytesIO(response.content)).verify()
            return True
        except (requests.RequestException, OSError) as e:
            # UnidentifiedImageError subclasses OSError, so this covers both
            # network failures and undecodable payloads — unlike the old
            # bare ``except``, it no longer swallows programming errors.
            logger.warning("Invalid image %s: %s", url, e)
            return False

    def process_data(self) -> Dataset:
        """Process raw data into a HuggingFace dataset, skipping bad images.

        Returns:
            A ``Dataset`` with ``image_url``, ``category`` and ``metadata``
            columns, containing only entries whose image URL validated.
        """
        raw_data = self.load_raw_data()

        processed_data: Dict[str, List] = {
            'image_url': [],
            'category': [],
            'metadata': []
        }

        for entry in raw_data:
            if self.validate_image(entry['image_url']):
                processed_data['image_url'].append(entry['image_url'])
                processed_data['category'].append(entry['category'])
                processed_data['metadata'].append(entry['metadata'])

        dataset = Dataset.from_dict(processed_data)
        # Lazy %-args: formatting is skipped when INFO logging is disabled.
        logger.info("Created dataset with %d entries", len(dataset))

        return dataset

    def save_to_hub(self, dataset: Dataset, repo_id: str):
        """Push ``dataset`` to the HuggingFace Hub under ``repo_id``."""
        dataset.push_to_hub(repo_id)
        logger.info("Pushed dataset to %s", repo_id)
|
|
|
if __name__ == "__main__":
    # Script entry point: run the full load → validate → build pipeline once.
    dataset = DatasetProcessor().process_data()
|
|
|
|
|
|