File size: 24,108 Bytes
0e3c8e6 4d7f032 c6d1c21 058c80a 5ba849c e80560b 058c80a c6d1c21 9bd3d0e fe70438 ba3eb02 a180fb2 058c80a eac4eaf fe70438 cc5f321 ba3eb02 a56f87d 058c80a c6d1c21 fe70438 ba0637a eac4eaf e80560b fe70438 ba0637a c6d1c21 ba0637a a180fb2 ba0637a fe70438 a180fb2 fe70438 a180fb2 fe70438 a180fb2 ba0637a fe70438 e80560b fe70438 e80560b fe70438 214d47a fe70438 214d47a c6d1c21 eac4eaf 0e3c8e6 e80560b 0e3c8e6 214d47a 0e3c8e6 214d47a 0e3c8e6 e80560b 0e3c8e6 e80560b f6ebc4f e80560b 0e3c8e6 eac4eaf c6d1c21 058c80a 9bd3d0e 01f9603 9bd3d0e 01f9603 4a664e8 5ba849c 0a1b314 c6d1c21 058c80a c6d1c21 0e3c8e6 214d47a 0e3c8e6 e80560b 058c80a e80560b 058c80a 0e3c8e6 c3f3d16 c6d1c21 eac4eaf ba3eb02 c6d1c21 ba3eb02 c6d1c21 214d47a c6d1c21 7391dc4 228b86b c6d1c21 c3f3d16 c6d1c21 214d47a c6d1c21 e80560b c6d1c21 214d47a ba3eb02 214d47a c6d1c21 a180fb2 c6d1c21 a180fb2 c6d1c21 a180fb2 c6d1c21 a180fb2 f6ebc4f e80560b a180fb2 c6d1c21 a180fb2 214d47a c6d1c21 a180fb2 228b86b 5ba849c a180fb2 01f9603 4a664e8 c6d1c21 0a1b314 eac4eaf 5ba849c c6d1c21 eac4eaf c3f3d16 287304a c3f3d16 0a1b314 eac4eaf 058c80a eac4eaf e80560b eac4eaf 0a1b314 b462f85 eac4eaf c3f3d16 f6ebc4f d389578 f6ebc4f eac4eaf 058c80a ba3eb02 058c80a 0a1b314 fe70438 0a1b314 fe70438 0a1b314 fe70438 0a1b314 fe70438 0a1b314 fe70438 0a1b314 c6d1c21 5ba849c 287304a cc5f321 287304a c6d1c21 fe70438 a56f87d c6d1c21 a180fb2 c6d1c21 fe70438 c6d1c21 fe70438 a56f87d fe70438 c6d1c21 fe70438 f6ebc4f d08fbc6 f6ebc4f 058c80a 5ba849c 058c80a e80560b f6ebc4f 5ba849c a180fb2 5ba849c f6ebc4f fe70438 5ba849c f6ebc4f 5ba849c f6ebc4f a180fb2 fe70438 a180fb2 fe70438 a180fb2 fe70438 c6d1c21 fe70438 c6d1c21 eac4eaf 228b86b fe70438 eac4eaf 66c1161 228b86b f0b2749 66c1161 e80560b c6d1c21 eac4eaf c6d1c21 e80560b ba3eb02 c6d1c21 ba3eb02 c6d1c21 e80560b c6d1c21 ba3eb02 0a1b314 fe70438 0a1b314 fe70438 0a1b314 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 
528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 |
import difflib
import importlib.util
import inspect
import json
import os
import pkgutil
import re
import warnings
from abc import abstractmethod
from typing import Any, Dict, List, Optional, Tuple, Union, final

from .dataclass import (
    AbstractField,
    Dataclass,
    Field,
    InternalField,
    NonPositionalField,
    fields,
)
from .error_utils import Documentation, UnitxtError, UnitxtWarning
from .logging_utils import get_logger
from .parsing_utils import (
    separate_inside_and_outside_square_brackets,
)
from .settings_utils import get_constants, get_settings
from .text_utils import camel_to_snake_case, is_camel_case
from .type_utils import isoftype, issubtype
from .utils import (
    artifacts_json_cache,
    json_dump,
    save_to_file,
    shallow_copy,
)
# Module-level singletons shared by everything in this file:
# the unitxt logger, user-configurable settings, and package constants.
logger = get_logger()
settings = get_settings()
constants = get_constants()
def is_name_legal_for_catalog(name):
    """Return a truthy match object when ``name`` is a legal catalog name.

    Legal names consist of word characters plus the catalog hierarchy
    separator; returns None (falsy) otherwise.
    """
    legal_name_pattern = r"^[\w" + constants.catalog_hierarchy_sep + "]+$"
    return re.match(legal_name_pattern, name)
def verify_legal_catalog_name(name):
    """Assert that ``name`` is a legal catalog name.

    Raises:
        AssertionError: with an explanatory message when the name is illegal.
    """
    # Fix: the message previously misspelled "Artifact" as "Artifict".
    assert is_name_legal_for_catalog(
        name
    ), f'Artifact name ("{name}") should be alphanumeric. Use "." for nesting (e.g. myfolder.my_artifact)'
class Catalogs:
    """Process-wide singleton holding the ordered list of registered catalogs.

    Iteration yields catalogs most-recently-registered first, skipping
    non-local catalogs when ``settings.use_only_local_catalogs`` is set.
    """

    def __new__(cls):
        # Classic singleton: the first call creates the shared instance
        # together with its (initially empty) catalog list.
        if not hasattr(cls, "instance"):
            cls.instance = super().__new__(cls)
            cls.instance.catalogs = []
        return cls.instance

    def __iter__(self):
        # Reset the cursor so that a fresh for-loop starts from the beginning.
        self._index = 0
        return self

    def __next__(self):
        while self._index < len(self.catalogs):
            candidate = self.catalogs[self._index]
            self._index += 1
            skip_remote = settings.use_only_local_catalogs and not candidate.is_local
            if not skip_remote:
                return candidate
        raise StopIteration

    def register(self, catalog):
        assert isinstance(
            catalog, AbstractCatalog
        ), "catalog must be an instance of AbstractCatalog"
        assert hasattr(catalog, "__contains__"), "catalog must have __contains__ method"
        assert hasattr(catalog, "__getitem__"), "catalog must have __getitem__ method"
        # Newly registered catalogs take precedence: prepend to the list.
        self.catalogs = [catalog] + self.catalogs

    def unregister(self, catalog):
        assert isinstance(
            catalog, AbstractCatalog
        ), "catalog must be an instance of Catalog"
        assert hasattr(catalog, "__contains__"), "catalog must have __contains__ method"
        assert hasattr(catalog, "__getitem__"), "catalog must have __getitem__ method"
        self.catalogs.remove(catalog)

    def reset(self):
        # Forget all registered catalogs.
        self.catalogs = []
def map_values_in_place(object, mapper):
    """Apply ``mapper`` to the value(s) held by ``object``, mutating containers.

    Dicts have each value replaced in place; lists have each element replaced
    in place; any other object is simply passed through ``mapper``.
    Returns the (possibly mutated) object.
    """
    if isinstance(object, dict):
        for key in object:
            object[key] = mapper(object[key])
        return object
    if isinstance(object, list):
        for position in range(len(object)):
            object[position] = mapper(object[position])
        return object
    return mapper(object)
def get_closest_artifact_type(type):
    """Return the registered artifact type name closest to ``type``, or None."""
    registered_types = list(Artifact._class_register.keys())
    candidates = difflib.get_close_matches(type, registered_types)
    # get_close_matches sorts best-first; take the top suggestion if any.
    return candidates[0] if candidates else None
class UnrecognizedArtifactTypeError(ValueError):
    """Raised when an artifact dict's '__type__' names no registered class."""

    def __init__(self, type) -> None:
        # Reconstruct the probable camel-case class name from the snake_case key.
        maybe_class = "".join(word.capitalize() for word in type.split("_"))
        # Fix: the message previously read "Make sure a the class defined this
        # type", which was ungrammatical.
        message = f"'{type}' is not a recognized artifact 'type'. Make sure the class defining this type (probably called '{maybe_class}' or similar) is defined and/or imported anywhere in the code executed."
        closest_artifact_type = get_closest_artifact_type(type)
        if closest_artifact_type is not None:
            message += "\n\n" f"Did you mean '{closest_artifact_type}'?"
        super().__init__(message)
class MissingArtifactTypeError(ValueError):
    """Raised when an artifact dictionary lacks the mandatory '__type__' key."""

    def __init__(self, dic) -> None:
        super().__init__(
            f"Missing '__type__' parameter. Expected 'type' in artifact dict, got {dic}"
        )
class Artifact(Dataclass):
    """Base class for every object that can be stored in / loaded from a Unitxt catalog.

    Each subclass is automatically registered (under its snake_case name) in
    ``_class_register`` via ``__init_subclass__``; that registry is what lets
    ``from_dict``/``load`` reconstruct the right class from a serialized dict
    carrying a ``__type__`` key.
    """

    # Registry of all known artifact types: snake_case class name -> class object.
    _class_register = {}

    # Snake-case type tag written into every serialized artifact dict;
    # assigned in __post_init__, never passed by callers (init=False).
    __type__: str = Field(default=None, final=True, init=False)
    # Optional free-text description of the artifact.
    __description__: str = NonPositionalField(
        default=None, required=False, also_positional=False
    )
    # Optional string-to-string tags attached to the artifact.
    __tags__: Dict[str, str] = NonPositionalField(
        default_factory=dict, required=False, also_positional=False
    )
    # Catalog identifier; set by Artifact.load / ArtifactLink.load, not by users.
    __id__: str = InternalField(default=None, required=False, also_positional=False)

    # if not None, the artifact is deprecated, and once instantiated, that msg
    # is logged as a warning
    __deprecated_msg__: str = NonPositionalField(
        default=None, required=False, also_positional=False
    )

    # Data classifications (e.g. ["public"]) this artifact may process;
    # None means no policy applies. Enforced by verify_instance().
    data_classification_policy: List[str] = NonPositionalField(
        default=None, required=False, also_positional=False
    )

    @classmethod
    def is_artifact_dict(cls, d):
        """Return True if ``d`` is a dict carrying the '__type__' marker."""
        return isinstance(d, dict) and "__type__" in d

    @classmethod
    def verify_artifact_dict(cls, d):
        """Validate that ``d`` is a well-formed artifact dict with a registered type.

        Raises:
            ValueError: if ``d`` is not a dict.
            MissingArtifactTypeError: if '__type__' is absent.
            UnrecognizedArtifactTypeError: if the type key is not registered.
        """
        if not isinstance(d, dict):
            raise ValueError(
                f"Artifact dict <{d}> must be of type 'dict', got '{type(d)}'."
            )
        if "__type__" not in d:
            raise MissingArtifactTypeError(d)
        if not cls.is_registered_type(d["__type__"]):
            raise UnrecognizedArtifactTypeError(d["__type__"])

    @classmethod
    def get_artifact_type(cls):
        """Return this class's snake_case type key."""
        return camel_to_snake_case(cls.__name__)

    @classmethod
    def register_class(cls, artifact_class):
        """Register ``artifact_class`` under its snake_case name; return that key.

        Re-registering the same class is a no-op; registering a different class
        under an already-used key fails the uniqueness assertion.
        """
        assert issubclass(
            artifact_class, Artifact
        ), f"Artifact class must be a subclass of Artifact, got '{artifact_class}'"
        assert is_camel_case(
            artifact_class.__name__
        ), f"Artifact class name must be legal camel case, got '{artifact_class.__name__}'"
        snake_case_key = camel_to_snake_case(artifact_class.__name__)
        if cls.is_registered_type(snake_case_key):
            # Compare string representations so a re-imported identical class is
            # accepted, while a genuinely different class with the same name is not.
            assert (
                str(cls._class_register[snake_case_key]) == str(artifact_class)
            ), f"Artifact class name must be unique, '{snake_case_key}' already exists for {cls._class_register[snake_case_key]}. Cannot be overridden by {artifact_class}."
            return snake_case_key
        cls._class_register[snake_case_key] = artifact_class
        return snake_case_key

    def __init_subclass__(cls, **kwargs):
        # Auto-register every subclass at class-definition time.
        super().__init_subclass__(**kwargs)
        cls.register_class(cls)

    @classmethod
    def is_artifact_file(cls, path):
        """Return True if ``path`` is an existing file containing an artifact dict."""
        if not os.path.exists(path) or not os.path.isfile(path):
            return False
        with open(path) as f:
            d = json.load(f)
        return cls.is_artifact_dict(d)

    @classmethod
    def is_registered_type(cls, type: str):
        """Return True if the snake_case ``type`` key is registered."""
        return type in cls._class_register

    @classmethod
    def is_registered_class_name(cls, class_name: str):
        """Return True if camel-case ``class_name`` maps to a registered type."""
        snake_case_key = camel_to_snake_case(class_name)
        return cls.is_registered_type(snake_case_key)

    @classmethod
    def is_registered_class(cls, clz: object):
        """Return True if ``clz`` itself is one of the registered classes."""
        return clz in set(cls._class_register.values())

    @classmethod
    def _recursive_load(cls, obj):
        """Depth-first reconstruction of artifacts from nested dicts/lists.

        Children are instantiated before their parent so that the parent's
        constructor receives fully-built Artifact objects.
        """
        if isinstance(obj, dict):
            new_d = {}
            for key, value in obj.items():
                new_d[key] = cls._recursive_load(value)
            obj = new_d
        elif isinstance(obj, list):
            obj = [cls._recursive_load(value) for value in obj]
        else:
            pass
        if cls.is_artifact_dict(obj):
            cls.verify_artifact_dict(obj)
            # pop removes '__type__' so only constructor kwargs remain.
            artifact_class = cls._class_register[obj.pop("__type__")]
            obj = artifact_class.process_data_after_load(obj)
            return artifact_class(**obj)
        return obj

    @classmethod
    def from_dict(cls, d, overwrite_args=None):
        """Build an artifact from dict ``d``, optionally overriding top-level keys."""
        if overwrite_args is not None:
            d = {**d, **overwrite_args}
        cls.verify_artifact_dict(d)
        return cls._recursive_load(d)

    @classmethod
    def load(cls, path, artifact_identifier=None, overwrite_args=None):
        """Load an artifact from the JSON file at ``path``.

        Follows ArtifactLink files transparently; tags the result with
        ``artifact_identifier`` as its catalog id (may be None).
        """
        d = artifacts_json_cache(path)
        if "artifact_linked_to" in d and d["artifact_linked_to"] is not None:
            # d stands for an ArtifactLink
            artifact_link = ArtifactLink.from_dict(d)
            return artifact_link.load(overwrite_args)
        new_artifact = cls.from_dict(d, overwrite_args=overwrite_args)
        new_artifact.__id__ = artifact_identifier
        return new_artifact

    def get_pretty_print_name(self):
        """Return the catalog id if known, otherwise the class name."""
        if self.__id__ is not None:
            return self.__id__
        return self.__class__.__name__

    def prepare(self):
        """Post-construction hook; the base implementation only emits the
        deprecation warning when __deprecated_msg__ is set."""
        if self.__deprecated_msg__:
            warnings.warn(self.__deprecated_msg__, DeprecationWarning, stacklevel=2)

    def verify(self):
        """Hook for subclasses to validate their fields; no-op by default."""
        pass

    @final
    def __pre_init__(self, **kwargs):
        # Keep a raw (de-artifacted) copy of the constructor arguments;
        # _to_raw_dict serializes from this, not from current attribute values.
        self._init_dict = get_raw(kwargs)

    @final
    def verify_data_classification_policy(self):
        """Ensure data_classification_policy is None or a list of strings."""
        if self.data_classification_policy is not None:
            if not isinstance(self.data_classification_policy, list) or not all(
                isinstance(data_classification, str)
                for data_classification in self.data_classification_policy
            ):
                raise ValueError(
                    f"The 'data_classification_policy' of {self.get_pretty_print_name()} "
                    f"must be either None - in case when no policy applies - or a list of "
                    f"strings, for example: ['public']. However, '{self.data_classification_policy}' "
                    f"of type {type(self.data_classification_policy)} was provided instead."
                )

    @final
    def __post_init__(self):
        self.__type__ = self.register_class(self.__class__)

        # Fields typed as Artifact (or containers of Artifact) may have been given
        # as catalog-name strings; fetch the real artifacts for those.
        for field in fields(self):
            if issubtype(
                field.type, Union[Artifact, List[Artifact], Dict[str, Artifact]]
            ):
                value = getattr(self, field.name)
                value = map_values_in_place(value, maybe_recover_artifact)
                setattr(self, field.name, value)

        self.verify_data_classification_policy()

        if not settings.skip_artifacts_prepare_and_verify:
            self.prepare()
            self.verify()

    def _to_raw_dict(self):
        # Serialize from the saved constructor arguments (see __pre_init__),
        # with the registered type tag prepended.
        return {
            "__type__": self.__type__,
            **self.process_data_before_dump(self._init_dict),
        }

    def __deepcopy__(self, memo):
        """Deep copy by round-tripping through the dict representation.

        Note: the copy is rebuilt via Artifact.from_dict, so it re-runs the
        normal construction hooks.
        """
        if id(self) in memo:
            return memo[id(self)]

        new_obj = Artifact.from_dict(self.to_dict())
        memo[id(self)] = new_obj
        return new_obj

    def process_data_before_dump(self, data):
        """Hook to transform the raw dict before serialization; identity by default."""
        return data

    @classmethod
    def process_data_after_load(cls, data):
        """Hook to transform a loaded dict before instantiation; identity by default."""
        return data

    def to_json(self):
        # to_dict is provided by the Dataclass base.
        data = self.to_dict()
        return json_dump(data)

    def serialize(self):
        # A catalog-known artifact serializes to its id; otherwise to full JSON.
        if self.__id__ is not None:
            return self.__id__
        return self.to_json()

    def save(self, path):
        """Write this artifact as JSON to ``path``."""
        save_to_file(path, self.to_json())

    def verify_instance(
        self, instance: Dict[str, Any], name: Optional[str] = None
    ) -> Dict[str, Any]:
        """Checks if data classifications of an artifact and instance are compatible.

        Raises an error if an artifact's data classification policy does not include that of
        processed data. The purpose is to ensure that any sensitive data is handled in a
        proper way (for example when sending it to some external services).

        Args:
            instance (Dict[str, Any]): data which should contain its allowed data
                classification policies under key 'data_classification_policy'.
            name (Optional[str]): name of artifact which should be used to retrieve
                data classification from env. If not specified, then either __id__ or
                __class__.__name__, are used instead, respectively.

        Returns:
            Dict[str, Any]: unchanged instance.

        Examples:
            instance = {"x": "some_text", "data_classification_policy": ["pii"]}

            # Will raise an error as "pii" is not included policy
            metric = Accuracy(data_classification_policy=["public"])
            metric.verify_instance(instance)

            # Will not raise an error
            template = SpanLabelingTemplate(data_classification_policy=["pii", "propriety"])
            template.verify_instance(instance)

            # Will not raise an error since the policy was specified in environment variable:
            UNITXT_DATA_CLASSIFICATION_POLICY = json.dumps({"metrics.accuracy": ["pii"]})
            metric = fetch_artifact("metrics.accuracy")
            metric.verify_instance(instance)
        """
        name = name or self.get_pretty_print_name()
        # The environment-variable policy (if any) takes precedence over the
        # artifact's own attribute.
        data_classification_policy = get_artifacts_data_classification(name)
        if not data_classification_policy:
            data_classification_policy = self.data_classification_policy

        if not data_classification_policy:
            return instance

        if not isoftype(instance, Dict[str, Any]):
            raise ValueError(
                f"The instance passed to inference engine is not a dictionary. Instance:\n{instance}"
            )

        instance_data_classification = instance.get("data_classification_policy")
        if not instance_data_classification:
            # Missing classification on the data is only warned about, not fatal.
            UnitxtWarning(
                f"The data does not provide information if it can be used by "
                f"'{name}' with the following data classification policy "
                f"'{data_classification_policy}'. This may lead to sending of undesired "
                f"data to external service. Set the 'data_classification_policy' "
                f"of the data to ensure a proper handling of sensitive information.",
                Documentation.DATA_CLASSIFICATION_POLICY,
            )
            return instance

        # At least one of the instance's classifications must be allowed.
        if not any(
            data_classification in data_classification_policy
            for data_classification in instance_data_classification
        ):
            raise UnitxtError(
                f"The instance '{instance} 'has the following data classification policy "
                f"'{instance_data_classification}', however, the artifact '{name}' "
                f"is only configured to support the data with classification "
                f"'{data_classification_policy}'. To enable this either change "
                f"the 'data_classification_policy' attribute of the artifact, "
                f"or modify the environment variable "
                f"'UNITXT_DATA_CLASSIFICATION_POLICY' accordingly.",
                Documentation.DATA_CLASSIFICATION_POLICY,
            )

        return instance
class ArtifactLink(Artifact):
    """An artifact that only points at another catalog entry, by its catalog id."""

    # the artifact linked to, expressed by its catalog id
    artifact_linked_to: str = Field(default=None, required=True)

    @classmethod
    def from_dict(cls, d: dict):
        """Build an ArtifactLink from its dict representation.

        Args:
            d (dict): must contain a non-None string field 'artifact_linked_to';
                may also carry '__deprecated_msg__'.

        Raises:
            AssertionError: when d is not a dict, or 'artifact_linked_to' is
                missing, None, or not a string.
        """
        assert isinstance(d, dict), f"argument must be a dictionary, got: d = {d}."
        assert (
            "artifact_linked_to" in d and d["artifact_linked_to"] is not None
        ), f"A non-none field named 'artifact_linked_to' is expected in input argument d, but got: {d}."
        artifact_linked_to = d["artifact_linked_to"]
        # artifact_linked_to is a name of catalog entry
        assert isinstance(
            artifact_linked_to, str
        ), f"'artifact_linked_to' should be a string expressing a name of a catalog entry. Got {artifact_linked_to}."
        # Idiom fix: dict.get replaces the manual membership check (same None default).
        msg = d.get("__deprecated_msg__")
        return ArtifactLink(
            artifact_linked_to=artifact_linked_to, __deprecated_msg__=msg
        )

    def load(self, overwrite_args: dict) -> Artifact:
        """Resolve this link (possibly through further links) to a concrete Artifact.

        Args:
            overwrite_args (dict): top-level fields to override in the final artifact.

        Raises:
            UnitxtArtifactNotFoundError: when no registered catalog contains the target.
        """
        # identify the catalog for the artifact_linked_to
        assert (
            self.artifact_linked_to is not None
        ), "'artifact_linked_to' must be non-None in order to load it from the catalog. Currently, it is None."
        assert isinstance(
            self.artifact_linked_to, str
        ), f"'artifact_linked_to' should be a string (expressing a name of a catalog entry). Currently, its type is: {type(self.artifact_linked_to)}."

        needed_catalog = None
        catalogs = list(Catalogs())
        for catalog in catalogs:
            if self.artifact_linked_to in catalog:
                needed_catalog = catalog
                # Consistency fix: stop at the first (most recently registered)
                # matching catalog, exactly as get_catalog_name_and_args does.
                # Previously the loop kept scanning and ended up using the LAST match.
                break

        if needed_catalog is None:
            raise UnitxtArtifactNotFoundError(self.artifact_linked_to, catalogs)

        path = needed_catalog.path(self.artifact_linked_to)
        d = artifacts_json_cache(path)
        # if needed, follow, in a recursive manner, over multiple links,
        # passing through instantiating of the ArtifactLink-s on the way, triggering
        # deprecation warning as needed.
        if "artifact_linked_to" in d and d["artifact_linked_to"] is not None:
            # d stands for an ArtifactLink
            artifact_link = ArtifactLink.from_dict(d)
            return artifact_link.load(overwrite_args)
        new_artifact = Artifact.from_dict(d, overwrite_args=overwrite_args)
        new_artifact.__id__ = self.artifact_linked_to
        return new_artifact
def get_raw(obj):
    """Recursively convert ``obj`` to its raw (serialization-ready) form.

    Artifacts become their raw dicts; containers (including named tuples) are
    rebuilt with raw elements; anything else is shallow-copied.
    """
    if isinstance(obj, Artifact):
        return obj._to_raw_dict()

    is_named_tuple = isinstance(obj, tuple) and hasattr(obj, "_fields")
    if is_named_tuple:
        return type(obj)(*(get_raw(member) for member in obj))

    if isinstance(obj, (list, tuple)):
        return type(obj)(get_raw(member) for member in obj)

    if isinstance(obj, dict):
        return type(obj)((get_raw(k), get_raw(v)) for k, v in obj.items())

    return shallow_copy(obj)
class ArtifactList(list, Artifact):
    """A list of artifacts that is itself an Artifact."""

    def prepare(self):
        # Prepare every contained artifact, in list order.
        for member in self:
            member.prepare()
class AbstractCatalog(Artifact):
    """Interface every catalog implementation must provide."""

    # Whether the catalog resides locally; concrete subclasses must define it.
    is_local: bool = AbstractField()

    @abstractmethod
    def __contains__(self, name: str) -> bool:
        """Return True if an artifact named ``name`` exists in this catalog."""
        pass

    @abstractmethod
    def __getitem__(self, name) -> Artifact:
        """Return the artifact stored under ``name``."""
        pass

    @abstractmethod
    def get_with_overwrite(self, name, overwrite_args) -> Artifact:
        """Return the artifact under ``name`` with ``overwrite_args`` applied."""
        pass
class UnitxtArtifactNotFoundError(UnitxtError):
    """Raised when an artifact name cannot be resolved in any known catalog."""

    def __init__(self, name, catalogs):
        self.name = name
        self.catalogs = catalogs
        msg = f"Artifact {self.name} does not exist, in Unitxt catalogs: {self.catalogs}."
        # Point at the local-only setting when it may be the reason for the miss.
        if settings.use_only_local_catalogs:
            msg += f"\nNotice that unitxt.settings.use_only_local_catalogs is set to True, if you want to use remote catalogs set this settings or the environment variable {settings.use_only_local_catalogs_key}."
        super().__init__(msg)
def fetch_artifact(artifact_rep) -> Tuple[Artifact, Union[AbstractCatalog, None]]:
    """Loads an artifact from one of possible representations.

    (1) If artifact representation is already an Artifact object, return it.
    (2) If artifact representation is a string location of a local file, load the Artifact from the local file.
    (3) If artifact representation is a string name in the catalog, load the Artifact from the catalog.
    (4) If artifact representation is a json string, create a dictionary representation from the string and build an Artifact object from it.
    (5) Otherwise, check that the artifact representation is a dictionary and build an Artifact object from it.

    Returns:
        Tuple[Artifact, Union[AbstractCatalog, None]]: the artifact, and the
        catalog it was fetched from (None when not fetched from a catalog).
    """
    if isinstance(artifact_rep, Artifact):
        # Links are resolved to the artifact they point at.
        if isinstance(artifact_rep, ArtifactLink):
            return fetch_artifact(artifact_rep.artifact_linked_to)
        return artifact_rep, None

    # If local file
    if isinstance(artifact_rep, str) and Artifact.is_artifact_file(artifact_rep):
        artifact_to_return = Artifact.load(artifact_rep)
        # Bug fix: the link check must inspect the LOADED artifact, not
        # artifact_rep, which is a str path here and can never be an
        # ArtifactLink (the branch was dead). The old branch also assigned the
        # (artifact, catalog) tuple into artifact_to_return and then returned
        # it wrapped in another tuple.
        if isinstance(artifact_to_return, ArtifactLink):
            return fetch_artifact(artifact_to_return.artifact_linked_to)

        return artifact_to_return, None

    # if artifact is a name of a catalog entry
    if isinstance(artifact_rep, str):
        name, _ = separate_inside_and_outside_square_brackets(artifact_rep)
        if is_name_legal_for_catalog(name):
            catalog, artifact_rep, args = get_catalog_name_and_args(name=artifact_rep)
            artifact_to_return = catalog.get_with_overwrite(
                artifact_rep, overwrite_args=args
            )
            return artifact_to_return, catalog

    # If Json string, first load into dictionary
    if isinstance(artifact_rep, str):
        artifact_rep = json.loads(artifact_rep)

    # Load from dictionary (fails if not valid dictionary)
    return Artifact.from_dict(artifact_rep), None
def get_catalog_name_and_args(
    name: str, catalogs: Optional[List[AbstractCatalog]] = None
):
    """Resolve ``name`` (possibly carrying [overwrites]) against the catalogs.

    Returns the first catalog containing the bare name, together with the bare
    name and the parsed overwrite arguments.

    Raises:
        UnitxtArtifactNotFoundError: when no catalog contains the name.
    """
    name, args = separate_inside_and_outside_square_brackets(name)
    if catalogs is None:
        catalogs = list(Catalogs())
    matching_catalog = next((c for c in catalogs if name in c), None)
    if matching_catalog is None:
        raise UnitxtArtifactNotFoundError(name, catalogs)
    return matching_catalog, name, args
def verbosed_fetch_artifact(identifier):
    """Fetch an artifact by identifier, logging which catalog served it."""
    fetched_artifact, source_catalog = fetch_artifact(identifier)
    logger.debug(f"Artifact {identifier} is fetched from {source_catalog}")
    return fetched_artifact
def reset_artifacts_json_cache():
    """Clear the process-wide cache of artifact JSON files loaded from disk."""
    artifacts_json_cache.cache_clear()
def maybe_recover_artifact(artifact):
    """Fetch the artifact a string identifier names; pass anything else through."""
    if not isinstance(artifact, str):
        return artifact
    return verbosed_fetch_artifact(artifact)
def register_all_artifacts(path):
    """Import every module found under ``path`` so that each Artifact subclass
    defined there registers itself (via Artifact.__init_subclass__).
    """
    for finder, module_name, _is_pkg in pkgutil.walk_packages(path):
        logger.info(__name__)
        if module_name == __name__:
            continue
        logger.info(f"Loading {module_name}")
        # Import the module. finder.find_module(...).load_module(...) was
        # deprecated since Python 3.4 and removed in 3.12; use the spec-based
        # import protocol instead.
        spec = finder.find_spec(module_name)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        # Iterate over every object in the module
        for _name, obj in inspect.getmembers(module):
            # Log each class that subclasses Artifact (but is not Artifact
            # itself); importing the module above already registered it.
            if inspect.isclass(obj) and issubclass(obj, Artifact) and obj is not Artifact:
                logger.info(obj)
def get_artifacts_data_classification(artifact: str) -> Optional[List[str]]:
    """Loads given artifact's data classification policy from an environment variable.

    Args:
        artifact (str): Name of the artifact which the data classification policy
            should be retrieved for. For example "metrics.accuracy".

    Returns:
        Optional[List[str]] - Data classification policies for the specified artifact
        if they were found, or None otherwise.
    """
    raw_policy = settings.data_classification_policy
    if raw_policy is None:
        return None

    error_msg = (
        f"If specified, the value of 'UNITXT_DATA_CLASSIFICATION_POLICY' "
        f"should be a valid json dictionary. Got '{raw_policy}' "
        f"instead."
    )

    try:
        policy_mapping = json.loads(raw_policy)
    except json.decoder.JSONDecodeError as e:
        raise RuntimeError(error_msg) from e

    if not isinstance(policy_mapping, dict):
        raise RuntimeError(error_msg)

    # Validate the whole mapping: each key a string, each value a list of strings.
    for artifact_name, artifact_data_classifications in policy_mapping.items():
        is_well_formed = (
            isinstance(artifact_name, str)
            and isinstance(artifact_data_classifications, list)
            and all(
                isinstance(artifact_data_classification, str)
                for artifact_data_classification in artifact_data_classifications
            )
        )
        if not is_well_formed:
            raise UnitxtError(
                "'UNITXT_DATA_CLASSIFICATION_POLICY' should be of type "
                "'Dict[str, List[str]]', where a artifact's name is a key, and a "
                "value is a list of data classifications used by that artifact.",
                Documentation.DATA_CLASSIFICATION_POLICY,
            )

    # dict.get already yields None for an absent artifact, matching the old
    # explicit membership check followed by .get.
    return policy_mapping.get(artifact)
|