from typing import Optional, Union

import datasets

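# Each `from .x import __file__ as _` line marks module x as a local
# dependency of this script, so the HuggingFace `datasets` loader downloads
# those module files together with this one when the script is fetched from
# the Hub.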
from .api import __file__ as _
from .artifact import __file__ as _
from .augmentors import __file__ as _
from .benchmark import __file__ as _
from .blocks import __file__ as _
from .card import __file__ as _
from .catalog import __file__ as _
from .collections import __file__ as _
from .collections_operators import __file__ as _
from .dataclass import __file__ as _
from .dataset_utils import __file__ as _
from .dataset_utils import get_dataset_artifact
from .deprecation_utils import __file__ as _
from .dialog_operators import __file__ as _
from .dict_utils import __file__ as _
from .error_utils import __file__ as _
from .eval_utils import __file__ as _
from .file_utils import __file__ as _
from .formats import __file__ as _
from .fusion import __file__ as _
from .generator_utils import __file__ as _
from .hf_utils import __file__ as _
from .hf_utils import verify_versions_compatibility
from .image_operators import __file__ as _
from .inference import __file__ as _
from .instructions import __file__ as _
from .llm_as_judge import __file__ as _
from .llm_as_judge_chat_templates import __file__ as _
from .llm_as_judge_constants import __file__ as _
from .llm_as_judge_from_template import __file__ as _
from .llm_as_judge_operators import __file__ as _
from .llm_as_judge_utils import __file__ as _
from .loaders import __file__ as _
from .logging_utils import __file__ as _
from .logging_utils import get_logger
from .metric import __file__ as _
from .metric_utils import __file__ as _
from .metrics import __file__ as _
from .normalizers import __file__ as _
from .operator import __file__ as _
from .operators import __file__ as _
from .parsing_utils import __file__ as _
from .processors import __file__ as _
from .random_utils import __file__ as _
from .recipe import __file__ as _
from .register import __file__ as _
from .schema import __file__ as _
from .schema import loads_instance
from .serializers import __file__ as _
from .settings_utils import __file__ as _
from .settings_utils import get_constants
from .span_lableing_operators import __file__ as _  # spelling matches the module filename
from .split_utils import __file__ as _
from .splitters import __file__ as _
from .standard import __file__ as _
from .stream import __file__ as _
from .stream_operators import __file__ as _
from .string_operators import __file__ as _
from .struct_data_operators import __file__ as _
from .system_prompts import __file__ as _
from .task import __file__ as _
from .templates import __file__ as _
from .text_utils import __file__ as _
from .type_utils import __file__ as _
from .types import __file__ as _
from .utils import __file__ as _
from .utils import is_package_installed
from .validate import __file__ as _
from .version import __file__ as _

logger = get_logger()
constants = get_constants()


class Dataset(datasets.GeneratorBasedBuilder):
    VERSION = constants.version

    @property
    def generators(self):
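        # Lazily resolve the unitxt recipe named by this builder config. If a
        # local unitxt installation exists, prefer it (after verifying that
        # its version is compatible with this script's version); otherwise
        # fall back to the copy of unitxt bundled with this dataset script.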
        if not hasattr(self, "_generators") or self._generators is None:
            if is_package_installed("unitxt"):
                verify_versions_compatibility("dataset", self.VERSION)

                from unitxt.dataset_utils import (
                    get_dataset_artifact as get_dataset_artifact_installed,
                )

                logger.info("Loading with installed unitxt library...")
                dataset = get_dataset_artifact_installed(self.config.name)
            else:
                logger.info("Loading with huggingface unitxt copy...")
                dataset = get_dataset_artifact(self.config.name)

            self._generators = dataset()

        return self._generators

    def _info(self):
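        # Features are determined at generation time by the unitxt recipe, so
        # no static schema is declared here.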
        return datasets.DatasetInfo()

    def _split_generators(self, _):
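        # Emit one SplitGenerator per stream produced by the recipe. The
        # download manager argument is ignored: unitxt loaders fetch their
        # own data.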
        return [
            datasets.SplitGenerator(name=name, gen_kwargs={"split_name": name})
            for name in self.generators.keys()
        ]

    def _generate_examples(self, split_name):
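        # enumerate() provides the unique per-example keys that the datasets
        # library requires.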
        generator = self.generators[split_name]
        yield from enumerate(generator)

    def _download_and_prepare(
        self, dl_manager, verification_mode, **prepare_splits_kwargs
    ):
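        # Split sizes and checksums are not declared ahead of time, so the
        # requested verification_mode is overridden with "no_checks".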
        return super()._download_and_prepare(
            dl_manager, "no_checks", **prepare_splits_kwargs
        )

    def as_dataset(
        self,
        split: Optional[datasets.Split] = None,
        run_post_process=True,
        verification_mode: Optional[Union[datasets.VerificationMode, str]] = None,
        in_memory=False,
    ) -> Union[datasets.Dataset, datasets.DatasetDict]:
        """Return a Dataset for the specified split.

        Args:
            split (`datasets.Split`, *optional*):
                Which subset of the data to return. If `None`, all splits are
                returned as a `datasets.DatasetDict`.
            run_post_process (`bool`, defaults to `True`):
                Whether to run post-processing dataset transforms and/or add
                indexes.
            verification_mode ([`VerificationMode`] or `str`, defaults to `BASIC_CHECKS`):
                Verification mode determining the checks to run on the
                downloaded/processed dataset information (checksums/size/splits/...).
            in_memory (`bool`, defaults to `False`):
                Whether to copy the data in-memory.

        Returns:
            `datasets.Dataset` or `datasets.DatasetDict`: the requested
            split, or a mapping of all splits if `split` is `None`.

        :Example:

        .. code-block:: python

            from datasets import load_dataset_builder
            builder = load_dataset_builder('rotten_tomatoes')
            builder.download_and_prepare()
            ds = builder.as_dataset(split='train')
            print(ds)
            # prints:
            # Dataset({
            #     features: ['text', 'label'],
            #     num_rows: 8530
            # })
        """
        return (
            super()
            .as_dataset(split, run_post_process, verification_mode, in_memory)
            .with_transform(loads_instance)
        )
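

# A minimal usage sketch (the recipe string below is illustrative; actual
# card and template names depend on what is available in the unitxt catalog):
#
#     from datasets import load_dataset
#
#     ds = load_dataset(
#         "unitxt/data",
#         "card=cards.wnli,template=templates.classification.multi_class.relation.default",
#         trust_remote_code=True,
#     )
#     print(ds["train"][0])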