Datasets:

Languages:
Tagalog
ArXiv:
License:
holylovenia committed on
Commit
001fc91
·
verified ·
1 Parent(s): 27d1f01

Upload baybayin.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. baybayin.py +185 -0
baybayin.py ADDED
@@ -0,0 +1,185 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # coding=utf-8
2
+ # Copyright 2022 The HuggingFace Datasets Authors and the current dataset script contributor.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from pathlib import Path
17
+ from typing import Dict, List, Tuple
18
+
19
+ import datasets
20
+ from scipy.io import loadmat
21
+
22
+ from seacrowd.utils.configs import SEACrowdConfig
23
+ from seacrowd.utils.constants import (SCHEMA_TO_FEATURES, TASK_TO_SCHEMA,
24
+ Licenses, Tasks)
25
+
26
# Bibliographic citation for the source paper (Pino et al., 2021, PeerJ Computer Science).
_CITATION = """\
@article{Pino2021,
    title = {Optical character recognition system for Baybayin scripts using support vector machine},
    volume = {7},
    ISSN = {2376-5992},
    url = {http://dx.doi.org/10.7717/peerj-cs.360},
    DOI = {10.7717/peerj-cs.360},
    journal = {PeerJ Computer Science},
    publisher = {PeerJ},
    author = {Pino, Rodney and Mendoza, Renier and Sambayan, Rachelle},
    year = {2021},
    month = feb,
    pages = {e360}
}
"""

_DATASETNAME = "baybayin"

# NOTE(review): fixed grammar in the final sentence ("This is a local dataset");
# the factual content is unchanged.
_DESCRIPTION = """\
The Baybayin dataset contains binary images of Baybayin characters, Latin
characters, and 4 character symbols of Baybayin diacritics in MATLAB format. It
consisted of 17000 images for Baybayin (1000 per character), 18200 images for
Latin (700 per character), and 2000 images for Baybayin diacritics (500 per
symbol). Each character image is strictly center-fitted with a size 56x56
pixels. This dataset was initially used to discriminate Latin script from
Baybayin script in character recognition.

This is a local dataset, please download the dataset from the `_HOMEPAGE` URL.
"""

_HOMEPAGE = "https://www.kaggle.com/datasets/rodneypino/baybayin-and-latin-binary-images-in-mat-format"

# ISO 639-3 language code for Tagalog.
_LANGUAGES = ["tgl"]
# Each subset gets one source config and one seacrowd config (see BUILDER_CONFIGS).
_SUBSETS = ["baybayin", "latin", "diacritic"]

_LICENSE = Licenses.CC_BY_4_0.value

_LOCAL = True  # Kaggle dataset: users must register and download it manually.

_URLS = {}  # intentionally empty — no direct download URLs for a local dataset

_SUPPORTED_TASKS = [Tasks.OPTICAL_CHARACTER_RECOGNITION]
_SEACROWD_SCHEMA = f"seacrowd_{TASK_TO_SCHEMA[_SUPPORTED_TASKS[0]].lower()}"  # imtext

_SOURCE_VERSION = "4.0.0"

_SEACROWD_VERSION = "2024.06.20"


class BaybayinDataset(datasets.GeneratorBasedBuilder):
    """Binary images of Baybayin and Latin characters, and 4 character symbols of Baybayin diacritics.

    Each subset (baybayin, latin, diacritic) ships as a single MATLAB ``.mat``
    file whose top-level struct maps character names to arrays of flattened
    56x56 binary images.
    """

    SOURCE_VERSION = datasets.Version(_SOURCE_VERSION)
    SEACROWD_VERSION = datasets.Version(_SEACROWD_VERSION)

    # One source config and one seacrowd config per subset.
    BUILDER_CONFIGS = []
    for subset in _SUBSETS:
        BUILDER_CONFIGS += [
            SEACrowdConfig(
                name=f"{_DATASETNAME}_{subset}_source",
                version=SOURCE_VERSION,
                description=f"{_DATASETNAME} {subset} source schema",
                schema="source",
                subset_id=subset,
            ),
            SEACrowdConfig(
                name=f"{_DATASETNAME}_{subset}_{_SEACROWD_SCHEMA}",
                version=SEACROWD_VERSION,
                description=f"{_DATASETNAME} {subset} SEACrowd schema",
                schema=_SEACROWD_SCHEMA,
                subset_id=subset,
            ),
        ]

    DEFAULT_CONFIG_NAME = f"{_DATASETNAME}_{_SUBSETS[0]}_source"

    # Relative location of each subset's .mat file inside the manually
    # downloaded Kaggle archive.
    _SUBSET_PATHS = {
        "baybayin": "Baybayin/Baybayin.mat",
        "latin": "Latin/Latin.mat",
        "diacritic": "Baybayin Diacritics/Baybayin_Diacritics.mat",
    }

    def _info(self) -> datasets.DatasetInfo:
        """Return dataset metadata with schema-dependent features."""
        if self.config.schema == "source":
            # Raw 56x56 image array plus its character label.
            features = datasets.Features(
                {
                    "image": datasets.Array2D(shape=(56, 56), dtype="uint8"),
                    "character": datasets.Value("string"),
                }
            )
        elif self.config.schema == _SEACROWD_SCHEMA:
            features = SCHEMA_TO_FEATURES[TASK_TO_SCHEMA[_SUPPORTED_TASKS[0]]]  # image_text_features()

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
        """Returns SplitGenerators.

        Raises:
            ValueError: if `data_dir` was not supplied — this is a local
                dataset, so nothing can be downloaded automatically.
        """
        if self.config.data_dir is None:
            # Fixed wording: the dataset consists of .mat files, not a .pdf.
            raise ValueError("This is a local dataset. Please pass the `data_dir` kwarg (where the .mat files are located) to load_dataset.")

        data_dir = Path(self.config.data_dir)
        mat_file = data_dir / self._SUBSET_PATHS[self.config.subset_id]
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "mat_file": mat_file,
                },
            )
        ]

    def _generate_examples(self, mat_file: Path) -> Tuple[int, Dict]:
        """Yields examples as (key, example) tuples.

        Args:
            mat_file: path to the subset's MATLAB file; its stem is also the
                name of the struct stored inside the file.
        """
        raw_data = loadmat(str(mat_file))
        # The .mat file stores one struct named after the file's stem;
        # [0, 0] unwraps scipy's 1x1 struct-array wrapper.
        contained_data = raw_data[str(mat_file.stem)][0, 0]

        # Struct field names are the character labels.
        characters = list(contained_data.dtype.fields.keys())
        data = {char: contained_data[char] for char in characters}

        if self.config.schema == "source":
            key = 0
            for char, char_data in data.items():
                for i in range(char_data.shape[0]):
                    # Each row is a flattened 56x56 binary image.
                    image = char_data[i].reshape((56, 56))
                    yield key, {
                        "image": image,
                        "character": char,
                    }
                    key += 1

        elif self.config.schema == _SEACROWD_SCHEMA:
            # PIL is only required for this schema, which materializes the
            # images as .png files; the source schema above works without it.
            try:
                from PIL import Image
            except ImportError as err:
                raise ImportError("You need to install PIL (`pip install pillow`) to store the image from MATLAB structs to .png files.") from err

            key = 0
            for char, char_data in data.items():
                # prepare path for saving images
                image_dir = mat_file.parent / char
                image_dir.mkdir(exist_ok=True)

                image_paths = []
                for i in range(char_data.shape[0]):
                    # Scale the binary {0, 1} image to {0, 255} for .png output.
                    image = (char_data[i].reshape((56, 56)) * 255).astype("uint8")
                    image_path = str(image_dir / f"{char}_{i}.png")

                    # save image
                    Image.fromarray(image).save(image_path)
                    image_paths.append(image_path)

                # One example per character, listing all of its image files.
                yield key, {"id": str(key), "image_paths": image_paths, "texts": char, "metadata": None}
                key += 1