|
from pathlib import Path

import spacy
from datasets import Dataset, DatasetDict
from spacy.tokens import DocBin
|
|
|
|
|
def convert_spacy_docs_to_hf_entry(doc) -> dict:
    """Serialize a single spaCy ``Doc`` into a plain dict entry for a HF dataset.

    Delegates to ``Doc.to_json()``, which yields a JSON-compatible mapping
    (text, tokens, annotations) that ``datasets.Dataset.from_list`` accepts.
    """
    entry = doc.to_json()
    return entry
|
|
|
# Directory holding the serialized DocBin files (<split>.spacy).
# NOTE(review): hard-coded to a local machine path — consider making this a
# CLI argument or environment variable before sharing the script.
data_dir = Path("/Users/au561649/Github/DaCy/training/dane_plus")

# Blank Danish pipeline: only needed to supply a vocab for deserialization.
nlp = spacy.blank("da")

# Load each split, convert every Doc to a JSON-style dict, and build one
# HF Dataset per split. A loop replaces the previous copy-pasted
# train/dev/test triplication; insertion order keeps train/dev/test order.
datasets_by_split: dict[str, Dataset] = {}
for split in ("train", "dev", "test"):
    docs = DocBin().from_disk(data_dir / f"{split}.spacy").get_docs(nlp.vocab)
    datasets_by_split[split] = Dataset.from_list(
        [convert_spacy_docs_to_hf_entry(doc) for doc in docs]
    )

dataset_dict = DatasetDict(datasets_by_split)

# Upload all three splits to the Hugging Face Hub under the "dane_plus" repo.
dataset_dict.push_to_hub("dane_plus")
|
|