Dataset schema:
- inputs: string, lengths 312 to 52k
- targets: string, lengths 1 to 3.1k
- block_type: string, 11 classes
- scenario: string, 7 classes
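Each row below pairs one fill-in-the-middle prompt (the inputs column) with its metadata. As a reading aid, a hypothetical row literal shaped to this schema, with values mirroring the first row below (the elided prefix/suffix are placeholders, not real content):

```python
row = {
    "inputs": "<filename>tanuki_py/src/tanuki/validator.py"
              "<fim_prefix>...<fim_suffix>...<fim_middle>",  # "..." elided here
    "targets": "null",                # expected completion for the row below
    "block_type": "BLOCK_COMMENT",    # one of the 11 block_type classes
    "scenario": "complete_current_header_empty_completion",  # one of 7 scenarios
}
```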
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base t<fim_suffix>ypes.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
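The validator above dispatches on typing.get_origin / typing.get_args to walk generic annotations recursively. A minimal self-contained sketch of that dispatch, reduced to Union, list, dict, and base types (the real method also covers tuples, sets, datetimes, pydantic models, and dataclasses; the standalone function name here is illustrative):

```python
from typing import Any, Dict, List, Union, get_args, get_origin

def check_type(value: Any, expected: Any) -> bool:
    """Recursively validate a deserialized JSON value against a type hint."""
    if expected is Any:
        return True
    origin = get_origin(expected) or expected
    args = get_args(expected)
    if origin is Union:                              # Union[X, Y], Optional[X]
        return any(check_type(value, a) for a in args)
    if origin is list:                               # List[X]
        item_type = args[0] if args else Any
        return isinstance(value, list) and all(check_type(v, item_type) for v in value)
    if origin is dict:                               # Dict[K, V]
        if not isinstance(value, dict):
            return False
        key_t, val_t = args if len(args) == 2 else (Any, Any)
        return all(check_type(k, key_t) and check_type(v, val_t)
                   for k, v in value.items())
    if expected is None or expected is type(None):   # None used as a hint
        return value is None
    return isinstance(value, expected)

assert check_type([1, 2, 3], List[int])
assert not check_type([1, "two"], List[int])
assert check_type({"a": 1}, Dict[str, int])
assert check_type(None, Union[int, None])
```

Two spots in the flattened instantiate code above read like slips rather than intent: the float branch returns int(float(data)), apparently copied from the int branch, where float(data) seems meant; and the deque branch guards with issubclass(origin, set) where deque was presumably intended.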
targets: null
block_type: BLOCK_COMMENT
scenario: complete_current_header_empty_completion
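The inputs strings follow the marker layout visible above. A sketch of how such a string could be assembled, assuming plain concatenation (the dataset's actual generation code is not shown here):

```python
def build_fim_input(filename: str, prefix: str, suffix: str) -> str:
    """Assemble one fill-in-the-middle prompt: the model must produce the
    text that belongs at <fim_middle>, between prefix and suffix."""
    return f"<filename>{filename}<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>"
```

In the first row the split lands mid-docstring (`...Validate base t` / `ypes...`) with nothing between the two halves, consistent with its null target and the complete_current_header_empty_completion scenario.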
<filename>tanuki_py/src/tanuki/function_modeler.py<fim_prefix>import ast import datetime import io import json from typing import List, Tuple, Dict, Union import logging from tanuki.constants import EXAMPLE_ELEMENT_LIMIT, PATCHES, SYMBOLIC_ALIGNMENTS, POSITIVE_EMBEDDABLE_ALIGNMENTS, \ NEGATIVE_EMBEDDABLE_ALIGNMENTS, OPENAI_PROVIDER from tanuki.models.function_type import FunctionType from tanuki.language_models.llm_configs import DEFAULT_TEACHER_MODELS, DEFAULT_EMBEDDING_MODELS, DEFAULT_STUDENT_MODELS from tanuki.language_models.llm_configs.abc_base_config import BaseModelConfig from tanuki.language_models.llm_finetune_api_abc import LLM_Finetune_API from tanuki.models.finetune_job import FinetuneJob from tanuki.models.function_description import FunctionDescription from tanuki.models.function_example import FunctionExample from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.utils import approximate_token_count, prepare_object_for_saving, encode_int, decode_int import copy from tanuki.models.function_config import FunctionConfig from tanuki.models.api_manager import APIManager class FunctionModeler(object): """ This class manages the registered function models and their datasets comprised of symbolic and embeddable alignments, and symbolic and embeddable patches """ def __init__(self, data_worker: DatasetWorker, api_provider: APIManager, environment_id=0, ) -> None: self.function_configs = {} self.data_worker = data_worker self.distillation_token_limit = 3000 # the token limit for finetuning self.symbolic_align_buffer = {} self.embeddable_align_buffer = {} self._get_datasets() self.environment_id = environment_id self.check_finetune_blacklist = [] self.execute_finetune_blacklist = [] self.store_data_blacklist = [] self.api_provider = api_provider self.teacher_models_override = {} self.student_model_override = {} self.startup_logging_checker = {} def _get_dataset_info(self, dataset_type, func_hash, type="length"): """ Get the dataset size for a function hash """ return self.data_worker.load_dataset(dataset_type, func_hash, return_type=type) def _configure_function_models(self, teacher_models: List[Union[str, BaseModelConfig]], student_model: str, func_hash: str, task_type: str): """ Configure the function models """ if teacher_models: self._configure_teacher_models(teacher_models, func_hash, task_type) if student_model: self._configure_student_model(student_model, func_hash, task_type) if teacher_models and not student_model: for model_config in self.teacher_models_override[func_hash]: # ban all non-openai models from finetuning if teacher is not openai and student is not specified because it doesnt make sense if model_config.provider != OPENAI_PROVIDER and func_hash not in self.check_finetune_blacklist: self.check_finetune_blacklist.append(func_hash) if model_config.provider != OPENAI_PROVIDER and func_hash not in self.execute_finetune_blacklist: self.execute_finetune_blacklist.append(func_hash) def _configure_teacher_models(self, teacher_models: List[Union[str, BaseModelConfig]], func_hash: str, task_type: str): """ Add custom teacher models to the function config First this is added to the teacher_models_override dict, which is used to override the teacher models Args: teacher_models: A list of teacher models to use for the function hash func_hash: The function hash to add the teacher models to """ if func_hash not in self.teacher_models_override: self.teacher_models_override[func_hash] = [] if task_type == FunctionType.EMBEDDABLE: preconfigured_models = 
DEFAULT_EMBEDDING_MODELS elif task_type == FunctionType.SYMBOLIC: preconfigured_models = DEFAULT_TEACHER_MODELS for model in teacher_models: if isinstance(model, str): if model not in preconfigured_models: raise Exception(f"Teacher model {model} not supported by default. Please include it in the list in extended config format") model_config = preconfigured_models[model] elif isinstance(model, BaseModelConfig): model_config = model self.teacher_models_override[func_hash].append(model_config) def _configure_student_model(self, student_model: str, func_hash: str, task_type: str): """ Add custom student models to the function config First this is added to the teacher_models_override dict, which is used to override the teacher models Args: teacher_models: A list of teacher models to use for the function hash func_hash: The function hash to add the teacher models to """ if task_type == FunctionType.EMBEDDABLE: logging.info("Embeddable function type does not support student models") preconfigured_models = DEFAULT_STUDENT_MODELS if student_model not in preconfigured_models: raise Exception(f"Student model {student_model} is currently not supported.") model_config = preconfigured_models[student_model] self.student_model_override[func_hash] = model_config def _get_datasets(self): """ Get the existing datasets from the data worker """ self.dataset_sizes = self.data_worker.load_existing_datasets() def save_embeddable_align_statements(self, function_hash: str, args, kwargs, positive_pairs: List[Tuple[List, Dict]], negative_pairs: List[Tuple[List, Dict]]): """ Save the contrastive align statements for the embeddable function. Do not save if the function hash is in the store data blacklist Args: function_hash: A unique hash for the function args: The arguments of the function kwargs: The keyword arguments of the function positive_pairs: A list of the other function invocations that are should have equivalent embeddings negative_pairs: A list of the other function invocations that are should have different embeddings """ # prepare args and kwargs for saving copy_args = copy.deepcopy(args) copy_kwargs = copy.deepcopy(kwargs) parsed_args = prepare_object_for_saving(copy_args) parsed_kwargs = prepare_object_for_saving(copy_kwargs) # prepare positive pairs for saving parsed_positive_pairs = [] for pair in positive_pairs: copy_pair = copy.deepcopy(pair) parsed_pair = prepare_object_for_saving(copy_pair) parsed_positive_pairs.append(parsed_pair) # prepare negative pairs for saving parsed_negative_pairs = [] for pair in negative_pairs: copy_pair = copy.deepcopy(pair) parsed_pair = prepare_object_for_saving(copy_pair) parsed_negative_pairs.append(parsed_pair) # save the contrastive pairs for pair in parsed_positive_pairs: self._save_contrastive_alignment_pair(function_hash, parsed_args, parsed_kwargs, pair, positive=True) for pair in parsed_negative_pairs: self._save_contrastive_alignment_pair(function_hash, parsed_args, parsed_kwargs, pair, positive=False) def _save_contrastive_alignment_pair(self, function_hash: str, args, kwargs, pair, positive=True): """ Save a contrastive pair """ example = FunctionExample(args, kwargs, pair) if function_hash not in self.store_data_blacklist: successfully_saved, new_datapoint = self.data_worker.log_embeddable_align(function_hash, example, positive) else: successfully_saved = False new_datapoint = True if successfully_saved: if positive: if function_hash in self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS]: self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS][function_hash] += 
1 else: self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS][function_hash] = 1 if not positive: if function_hash in self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS]: self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS][function_hash] += 1 else: self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS][function_hash] = 1 if new_datapoint: # update align buffer if function_hash not in self.embeddable_align_buffer: self.embeddable_align_buffer[function_hash] = bytearray() self.embeddable_align_buffer[function_hash].extend(str(example.__dict__).encode('utf-8') + b'\r\n') def save_symbolic_align_statements(self, function_hash, args, kwargs, output): """ Save the align statements and add to the align buffer Do not save if the function hash is in the store data blacklist Then just add the datapoints to the align buffer """ # prepare output for saving and later parsing # make a deepcopy of the output to avoid changing the original object copy_output = copy.deepcopy(output) parsed_output = prepare_object_for_saving(copy_output) # prepare args and kwargs for saving copy_args = copy.deepcopy(args) copy_kwargs = copy.deepcopy(kwargs) parsed_args = prepare_object_for_saving(copy_args) parsed_kwargs = prepare_object_for_saving(copy_kwargs) example = FunctionExample(parsed_args, parsed_kwargs, parsed_output) if function_hash not in self.store_data_blacklist: successfully_saved, new_datapoint = self.data_worker.log_symbolic_align(function_hash, example) else: successfully_saved = False new_datapoint = True if successfully_saved: if function_hash in self.dataset_sizes[SYMBOLIC_ALIGNMENTS]: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] += 1 else: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = 1 if new_datapoint: # update align buffer if function_hash not in self.symbolic_align_buffer: self.symbolic_align_buffer[function_hash] = bytearray() self.symbolic_align_buffer[function_hash].extend(str(example.__dict__).encode('utf-8') + b'\r\n') def save_symbolic_datapoint(self, func_hash, example): """ Save datapoint to the training data """ written_datapoints = self.data_worker.log_symbolic_patch(func_hash, example) for func_hash, datapoints in written_datapoints.items(): if func_hash in self.dataset_sizes[PATCHES]: # if the dataset size is -1, it means we havent read in the dataset size yet if self.dataset_sizes[PATCHES][func_hash] == -1: self.dataset_sizes[PATCHES][func_hash] = self._get_dataset_info(PATCHES, func_hash, type="length") else: self.dataset_sizes[PATCHES][func_hash] += datapoints else: self.dataset_sizes[PATCHES][func_hash] = datapoints return len(written_datapoints) > 0 def get_symbolic_alignments(self, func_hash, max=20): """ Get all symbolic aligns for a function hash """ if func_hash not in self.symbolic_align_buffer: return [] buffer = self.symbolic_align_buffer[func_hash] return self._get_examples_from_alignment_buffer(buffer, max) def get_embeddable_alignments(self, func_hash, max=20): """ Get all embeddable aligns for a function hash """ if func_hash not in self.embeddable_align_buffer: return [] buffer = self.embeddable_align_buffer[func_hash] return self._get_examples_from_alignment_buffer(buffer, max) def _get_examples_from_alignment_buffer(self, buffer, max=20): """ Get examples from a buffer """ split_buffer = bytes(buffer).split(b"\n") # byte array of stringed python dicts into dict objects example_set = set() for example in split_buffer: if example == b"": continue example_set.add(example) # easy and straightforward way to get nr of words (not perfect but doesnt need to be) 
# Can do the proper way of tokenizing later, it might be slower and we dont need 100% accuracy example_element_limit = EXAMPLE_ELEMENT_LIMIT examples = [] for example_bytes in split_buffer: if example_bytes in example_set: nr_of_elements = approximate_token_count(example_bytes) example_element_limit -= nr_of_elements if example_element_limit < 0: break example = example_bytes.decode('utf-8') # json load the example try: example = json.loads(example) except: example = ast.literal_eval(example) examples.append(example) example_set.remove(example_bytes) return list(examples)[:max] def load_symbolic_align_statements(self, function_hash): """ Load all align statements First check the data storage blacklist, if the func hash is in the blacklist, then set the dataset size to 0 and the align buffer to empty bytearray """ if function_hash in self.store_data_blacklist: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = 0 self.symbolic_align_buffer[function_hash] = bytearray() elif function_hash not in self.symbolic_align_buffer: dataset_size, align_dataset = self._get_dataset_info(SYMBOLIC_ALIGNMENTS, function_hash, type="both") if align_dataset: self.symbolic_align_buffer[function_hash] = bytearray(align_dataset) self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = dataset_size def postprocess_symbolic_datapoint(self, func_hash, function_description, example, repaired=True): """ Postprocess the datapoint First check if the datapoint should be added to the training data Add the datapoint if it should be added Then check if the function should be finetuned and execute finetuning if it should """ try: if func_hash not in self.store_data_blacklist: added = self.save_symbolic_datapoint(func_hash, example) if added: self._update_datapoint_config(repaired, func_hash) except Exception as e: print(e) print("Could not add datapoint to training data") if func_hash not in self.execute_finetune_blacklist: self.check_for_finetuning(function_description, func_hash) def load_function_config(self, func_hash, function_description): """ Load the config file for a function hash """ config, default = self.data_worker.load_function_config(func_hash) if func_hash in self.student_model_override and config.distilled_model.model_name == "": config.distilled_model = self.student_model_override[func_hash] if default and func_hash not in self.check_finetune_blacklist: finetuned, finetune_config = self._check_for_finetunes(function_description, config.distilled_model) if finetuned: config = finetune_config # update teachers if not default if func_hash in self.teacher_models_override: config.teacher_models = self.teacher_models_override[func_hash] self.function_configs[func_hash] = config return config def _check_for_finetunes(self, function_description: FunctionDescription, model_config : BaseModelConfig) -> Tuple[bool, Dict]: # hash the function_hash into 16 characters (to embed it into the name of OpenAI finetunes, for later retrieval) logging.info(f"Checking for finetunes for {function_description.name} using {model_config.provider}") finetune_hash = function_description.__hash__(purpose="finetune") + encode_int(self.environment_id) # List 10 fine-tuning jobs finetunes: List[FinetuneJob] = self.api_provider[model_config.provider].list_finetuned(model_config, limit=1000) # Check if the function_hash is in the fine-tuning jobs # the finetunes are in chronological order starting from newest # So this gets the latest finetune for finetune in finetunes: # check if the finetune hash is in the fine-tuned model name if 
finetune.status == "succeeded" and finetune_hash in finetune.fine_tuned_model.model_name: try: config = self._construct_config_from_finetune(finetune_hash, finetune) # save the config self.data_worker.update_function_config(function_description.__hash__(), config) logging.info(f"Found finetuned model for {function_description.name} [{config.distilled_model.model_name}]") return True, config except: logging.info(f"Found finetuned model for {function_description.name} [{finetune.fine_tuned_model.model_name}] but could not load it") return False, {} logging.info(f"No finetuned model found for {function_description.name}") return False, {} def _construct_config_from_finetune(self, finetune_hash: str, finetune: FinetuneJob): """ Construct a valid function config from a finetune job Args: finetune_hash: The hash of the function finetune: The finetune job Returns: config: The function config """ model = finetune.fine_tuned_model # get the ending location of finetune hash in the model name finetune_hash_end = model.model_name.find(finetune_hash) + len(finetune_hash) # get the next character after the finetune hash next_char = model.model_name[finetune_hash_end] # get the number of training runs nr_of_training_runs = decode_int(next_char) + 1 nr_of_training_points = (2 ** (nr_of_training_runs - 1)) * 200 config = { "distilled_model": model, "current_model_stats": { "trained_on_datapoints": nr_of_training_points, "running_faults": []}, "last_training_run": {"trained_on_datapoints": nr_of_training_points}, "current_training_run": {}, "teacher_models": [], # default teacher models, will be overwritten if needed "nr_of_training_runs": nr_of_training_runs} config = FunctionConfig().load_from_dict(config) return config def get_models(self, function_description): "<fim_suffix>"" Return the current model from the config file """ func_hash = function_description.__hash__() if func_hash in self.function_configs: func_config = self.function_configs[func_hash] else: func_config = self.load_function_config(func_hash, function_description) return func_config.distilled_model, func_config.teacher_models def _update_datapoint_config(self, repaired, func_hash): """ Update the config to reflect the new datapoint in the training data First adds 1 to the current datapoints Then updates running faults depending if priority is True or not and takes last 100 Then checks the revert condition, i.e if last 10 datapoints are 50% faulty Finally updates the config file Args: priority (bool): whether the datapoint was fixed by the teacher model/should be added to the training data """ try: if repaired: self.function_configs[func_hash].current_model_stats["running_faults"].append(1) else: self.function_configs[func_hash].current_model_stats["running_faults"].append(0) # take the last 100 datapoints self.function_configs[func_hash].current_model_stats["running_faults"] = \ self.function_configs[func_hash].current_model_stats["running_faults"][-100:] # check if the last 10 datapoints are 50% faulty, this is the switch condition if sum(self.function_configs[func_hash].current_model_stats["running_faults"][-10:]) / 10 > 0.5: self.function_configs[func_hash].distilled_model.model_name = "" self.function_configs[func_hash].current_model_stats["trained_on_datapoints"] = 0 self.function_configs[func_hash].current_model_stats["running_faults"] = [] self._update_config_file(func_hash) except Exception as e: print(e) print("Could not update config file") pass def _update_config_file(self, func_hash): 
self.data_worker.update_function_config(func_hash, self.function_configs[func_hash]) def check_for_finetuning(self, function_description, func_hash): """ Check for finetuning status If already finetuning, check for finetuning status If not finetuning, check for finetuning condition and execute finetuning if condition is met """ try: # check if already finetuning if "job_id" in self.function_configs[func_hash].current_training_run: # check for job status self._check_finetuning_status(func_hash, function_description) else: # check for finetuning condition if self._check_finetuning_condition(func_hash, function_description): self._execute_finetuning(function_description, func_hash) except Exception as e: print(e) print("Error checking for finetuning") def _check_finetuning_condition(self, func_hash, function_description): """ Check if the finetuning condition is met Currently finetuning condition is dependent on the number of symbolic datapoints since last finetuning """ if func_hash not in self.function_configs: return False training_threshold = (2 ** self.function_configs[func_hash].nr_of_training_runs) * 200 align_dataset_size = self.dataset_sizes[SYMBOLIC_ALIGNMENTS][func_hash] if func_hash in self.dataset_sizes[ SYMBOLIC_ALIGNMENTS] else 0 patch_dataset_size = self.dataset_sizes[PATCHES][func_hash] if func_hash in self.dataset_sizes[PATCHES] else 0 if patch_dataset_size == -1: # if havent read in the patch dataset size, read it in patch_dataset_size = self._get_dataset_info(PATCHES, func_hash, type="length") self.dataset_sizes[PATCHES][func_hash] = patch_dataset_size if func_hash not in self.startup_logging_checker: logging.info(f"Function {function_description.name} [{align_dataset_size} aligns | {patch_dataset_size} runs] will be finetuned from"\ f" {self.function_configs[func_hash].teacher_models[0].model_name} using {self.function_configs[func_hash].distilled_model.provider} in "\ f"{training_threshold-(patch_dataset_size + align_dataset_size)} runs") self.startup_logging_checker[func_hash] = True return (patch_dataset_size + align_dataset_size) > training_threshold def _execute_finetuning(self, function_description, func_hash): """ Execute the finetuning First create the OpenAI compatible dataset with jsonL file and upload it Then submit the OpenAI finetuning job Finally update the config file to reflect the new finetuning job as current """ # get function description function_string = str(function_description.__dict__.__repr__() + "\n") # get the align dataset align_dataset = self._get_dataset_info(SYMBOLIC_ALIGNMENTS, func_hash, type="dataset") if not align_dataset: align_dataset = "" else: align_dataset = align_dataset.decode('utf-8') # get the patch dataset patch_dataset = self._get_dataset_info(PATCHES, func_hash, type="dataset") if not patch_dataset: patch_dataset = "" else: patch_dataset = patch_dataset.decode('utf-8') if align_dataset == "" and patch_dataset == "": return dataset = align_dataset + patch_dataset dataset.replace("\\n", "[SEP_TOKEN]") dataset = dataset.split("\n") dataset = [x.replace("[SEP_TOKEN]", "\\n") for x in dataset if x != ""] # read in the dataset file dataset = [ast.literal_eval(x) for x in dataset] # # create the openai dataset instruction = "You are given below a function description and input data. The function description of what the function must carry out can be found in the Function section, with input and output type hints. The input data can be found in Input section. 
Using the function description, apply the function to the Input and return a valid output type, that is acceptable by the output_class_definition and output_class_hint. Return None if you can't apply the function to the input or if the output is optional and the correct output is None.\nINCREDIBLY IMPORTANT: Only output a JSON-compatible string in the correct response format." finetuning_dataset = [{"messages": [ { "role": "system", "content": f"You are a skillful and accurate language model, who applies a described function on input data. Make sure the function is applied accurately and correctly and the outputs follow the output type hints and are valid outputs given the output types." }, {"role": "user", "content": f"{instruction}\nFunction: {function_string}---\nInputs:\nArgs: {x['args']}\nKwargs: {x['kwargs']}\nOutput:"}, {"role": "assistant", "content": str(x['output']) if x['output'] is not None else "None"}]} for x in dataset] # Create an in-memory text stream temp_file = io.BytesIO() # Write data to the stream for idx, item in enumerate(finetuning_dataset): temp_file.write(json.dumps(item).encode('utf-8')) if idx != len(finetuning_dataset) - 1: temp_file.write("\n".encode('utf-8')) # Reset the stream position to the beginning temp_file.seek(0) # create the finetune hash finetune_hash = function_description.__hash__(purpose="finetune") nr_of_training_runs = self.function_configs[func_hash].nr_of_training_runs finetune_hash += encode_int(self.environment_id) finetune_hash += encode_int(nr_of_training_runs) # here can be sure that datasets were read in as that is checked in the finetune_check align_dataset_size = self.dataset_sizes[SYMBOLIC_ALIGNMENTS][func_hash] if func_hash in self.dataset_sizes[ SYMBOLIC_ALIGNMENTS] else 0 patch_dataset_size = self.dataset_sizes[PATCHES][func_hash] if func_hash in self.dataset_sizes[PATCHES] else 0 total_dataset_size = align_dataset_size + patch_dataset_size # Use the stream as a file try: finetune_provider = self.function_configs[func_hash].distilled_model.provider logging.info(f"Starting finetuning for {function_description.name} using {finetune_provider} for {self.function_configs[func_hash].distilled_model.base_model_for_sft}") finetuning_response: FinetuneJob = self.api_provider[finetune_provider].finetune(file=temp_file, suffix=finetune_hash, model_config = self.function_configs[func_hash].distilled_model,) except Exception as e: logging.info(f"Could not start finetuning for {function_description.name} using {finetune_provider}. 
Error: {e}") return self.function_configs[func_hash].current_training_run = {"job_id": finetuning_response.id, "trained_on_datapoints": total_dataset_size, "last_checked": datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S")} # update the config json file try: self._update_config_file(func_hash) except Exception as e: print(e) print("Could not update config file to register a finetuning run") def _check_finetuning_status(self, func_hash, function_description): """ Check the status of the current finetuning job If the job is finished, update the config file to reflect the new model """ job_id = self.function_configs[func_hash].current_training_run["job_id"] last_checked = self.function_configs[func_hash].current_training_run["last_checked"] # check if last checked was more than 30 mins ago if (datetime.datetime.now() - datetime.datetime.strptime(last_checked, "%Y-%m-%d %H:%M:%S")).total_seconds() > 1800: finetune_provider = self.function_configs[func_hash].distilled_model.provider response = self.api_provider[finetune_provider].get_finetuned(job_id, model_config = self.function_configs[func_hash].distilled_model) self.function_configs[func_hash].current_training_run["last_checked"] = datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S") if response.status == "succeeded" or response.status == "failed": self._update_finetune_config(response, func_hash, function_description) else: self._update_config_file(func_hash) def _update_finetune_config(self, response: FinetuneJob, func_hash, function_description): """ Update the config file to reflect the new model and switch the current model to the finetuned model """ self.function_configs[func_hash].update_with_finetuned_response(response) logging.info(f"Finetuning for {function_description.name} using {self.function_configs[func_hash].distilled_model.provider} finished with status: {response.status}."\ f" The id of the finetuned model is {response.fine_tuned_model.model_name}") try: self._update_config_file(func_hash) except Exception as e: logging.info(f"Could not update the function configuration file with the finetuned model for {function_description.name}. Error: {e}") pass <fim_middle>
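The finetuning schedule in the row above reduces to two closed-form counts; restated as runnable arithmetic (the function names are mine):

```python
def training_threshold(nr_of_training_runs: int) -> int:
    """Datapoints (aligns + patches) needed before the next finetuning run,
    per _check_finetuning_condition: the bar doubles with every run."""
    return (2 ** nr_of_training_runs) * 200

def trained_on_datapoints(nr_of_training_runs: int) -> int:
    """Datapoint count reconstructed from a finetune's encoded run counter,
    per _construct_config_from_finetune."""
    return (2 ** (nr_of_training_runs - 1)) * 200

assert [training_threshold(n) for n in (0, 1, 2, 3)] == [200, 400, 800, 1600]
assert trained_on_datapoints(3) == 800
```

One more likely slip in _execute_finetuning above: the result of dataset.replace("\\n", "[SEP_TOKEN]") is discarded, since str.replace returns a new string, so the [SEP_TOKEN] round-trip meant to protect escaped newlines is a no-op as written.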
targets: null
block_type: BLOCK_COMMENT
scenario: complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/trackers/filesystem_buffered_logger.py<fim_prefix>import os from enum import Enum from typing import Literal, Union, Optional, Dict from appdirs import user_data_dir from tanuki.constants import * from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.persistence.filter.filesystem_bloom import BloomFilterFileSystemDriver from tanuki.trackers.abc_buffered_logger import ABCBufferedLogger class FilesystemBufferedLogger(ABCBufferedLogger): """ A class that handles the reading and writing of patch invocations and align statements. It includes the logic for a bloom filter, to ensure that we only store unique invocations. """ def __init__(self, name, level=15): self.log_directory = self._get_log_directory() super().__init__(name, level) def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: "<fim_suffix>"" Get an instance of the bloom filter persistence provider. Typically this will be a file system provider. :return: A persistence provider """ return BloomFilterFileSystemDriver(log_directory=self.log_directory) def get_patch_location_for_function(self, func_hash, extension: Union[ ALIGN_FILE_EXTENSION_TYPE, PATCH_FILE_EXTENSION_TYPE] = "") -> str: """ Get the local location of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ return os.path.join(self.log_directory, func_hash + extension) def ensure_persistence_location_exists(self) -> None: """ Ensure that the location on the filesystem we will be writing to actually exists. If not, create it. """ log_directory = self.log_directory # Create the folder if it doesn't exist if not os.path.exists(log_directory): os.makedirs(log_directory) def does_object_exist(self, path: str) -> bool: """ Check to see if a path exists on the filesystem. :param path: :return: """ return os.path.exists(path) def _get_log_directory(self) -> str: """ Find a location on the filesystem to write our logs to. 
:return: """ filename = "functions" # If explicitly defined env_dir = os.getenv(ENVVAR) if env_dir and os.path.isdir(env_dir): return os.path.join(env_dir, filename) # If installed as a library library_dir = os.path.join(user_data_dir(LIB_NAME), filename) if os.path.isdir(library_dir) or not os.path.exists(library_dir): return library_dir # If installed in a project that contains a git repo - place it in the same folder as the git repo current_dir = os.getcwd() while current_dir != os.path.root: if ".git" in os.listdir(current_dir): return os.path.join(current_dir, filename) current_dir = os.path.dirname(current_dir) return os.path.join(os.getcwd(), filename) def load_dataset(self, dataset_type, func_hash, return_type="both") -> Optional[int]: """ Get the size of the dataset for a function hash """ log_directory = self._get_log_directory() dataset_type_map = {"alignments": ALIGN_FILE_EXTENSION, "positive": POSITIVE_FILE_EXTENSION, "negative": NEGATIVE_FILE_EXTENSION, "patches": PATCH_FILE_EXTENSION} log_file_path = os.path.join(log_directory, func_hash + dataset_type_map[dataset_type]) if not os.path.exists(log_file_path): if return_type == "both": return 0, None elif return_type == "dataset": return None elif return_type == "length": return 0 try: with open(log_file_path, "rb") as f: dataset = f.read() dataset_string = repr(dataset) dataset_length = dataset_string.count("\\n") - dataset_string.count("\\\\n") if return_type == "both": return dataset_length, dataset elif return_type == "dataset": return dataset elif return_type == "length": return dataset_length except Exception as e: if return_type == "both": return 0, None elif return_type == "dataset": return None elif return_type == "length": return 0 def load_existing_datasets(self) -> Dict[str, Dict[str, str]]: log_directory = self.log_directory dataset_lengths = { SYMBOLIC_ALIGNMENTS: {}, POSITIVE_EMBEDDABLE_ALIGNMENTS: {}, NEGATIVE_EMBEDDABLE_ALIGNMENTS: {}, PATCHES: {}, } try: if not os.path.exists(log_directory): os.makedirs(log_directory) # get all the files in the log directory files = os.listdir(log_directory) # discard all .json files files = [x for x in files if ".json" not in x] except Exception as e: return dataset_lengths for file in files: if ALIGN_FILE_EXTENSION not in file \ and PATCH_FILE_EXTENSION not in file \ and POSITIVE_FILE_EXTENSION not in file \ and NEGATIVE_FILE_EXTENSION not in file: continue elif ALIGN_FILE_EXTENSION in file: dataset_type = SYMBOLIC_ALIGNMENTS elif POSITIVE_FILE_EXTENSION in file: dataset_type = POSITIVE_EMBEDDABLE_ALIGNMENTS elif NEGATIVE_FILE_EXTENSION in file: dataset_type = NEGATIVE_EMBEDDABLE_ALIGNMENTS else: dataset_type = PATCHES func_hash = file.replace(ALIGN_FILE_EXTENSION, "").replace(PATCH_FILE_EXTENSION, "") dataset_lengths[dataset_type][func_hash] = -1 return dataset_lengths def write(self, path: str, data: str, mode: Literal["w", "a", "a+b"] = "w") -> None: """ Write data to a file """ with open(path, mode) as f: f.write(data) def read(self, path: str) -> str: """ Read data from a file """ with open(path, "r") as f: return f.read() def get_hash_from_path(self, path) -> str: """ Given a path with a hash, return only the hash :param path: The path to the file :return: The hash """ return path.replace(PATCH_FILE_EXTENSION, ""). \ replace(self.log_directory, ""). \ lstrip("/"). \ lstrip("\\") <fim_middle>
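load_dataset above counts records without decoding the blob, by comparing escape sequences in its repr(). A standalone sketch of that trick:

```python
def count_datapoints(raw: bytes) -> int:
    """Count newline-delimited records the way load_dataset does.

    In repr(raw), a real newline renders as backslash-n, while a literal
    backslash-n stored inside a record renders as double-backslash-n;
    subtracting the second count cancels those false positives.
    """
    s = repr(raw)
    return s.count("\\n") - s.count("\\\\n")

assert count_datapoints(b"{'x': 1}\n{'y': 2}\n") == 2   # two records
assert count_datapoints(b"{'x': 'a\\nb'}\n") == 1       # escaped \n ignored
```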
targets: null
block_type: BLOCK_COMMENT
scenario: complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/function_modeler.py<fim_prefix>import ast import datetime import io import json from typing import List, Tuple, Dict, Union import logging from tanuki.constants import EXAMPLE_ELEMENT_LIMIT, PATCHES, SYMBOLIC_ALIGNMENTS, POSITIVE_EMBEDDABLE_ALIGNMENTS, \ NEGATIVE_EMBEDDABLE_ALIGNMENTS, OPENAI_PROVIDER from tanuki.models.function_type import FunctionType from tanuki.language_models.llm_configs import DEFAULT_TEACHER_MODELS, DEFAULT_EMBEDDING_MODELS, DEFAULT_STUDENT_MODELS from tanuki.language_models.llm_configs.abc_base_config import BaseModelConfig from tanuki.language_models.llm_finetune_api_abc import LLM_Finetune_API from tanuki.models.finetune_job import FinetuneJob from tanuki.models.function_description import FunctionDescription from tanuki.models.function_example import FunctionExample from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.utils import approximate_token_count, prepare_object_for_saving, encode_int, decode_int import copy from tanuki.models.function_config import FunctionConfig from tanuki.models.api_manager import APIManager class FunctionModeler(object): """ This class manages the registered function models and their datasets comprised of symbolic and embeddable alignments, and symbolic and embeddable patches """ def __init__(self, data_worker: DatasetWorker, api_provider: APIManager, environment_id=0, ) -> None: self.function_configs = {} self.data_worker = data_worker self.distillation_token_limit = 3000 # the token limit for finetuning self.symbolic_align_buffer = {} self.embeddable_align_buffer = {} self._get_datasets() self.environment_id = environment_id self.check_finetune_blacklist = [] self.execute_finetune_blacklist = [] self.store_data_blacklist = [] self.api_provider = api_provider self.teacher_models_override = {} self.student_model_override = {} self.startup_logging_checker = {} def _get_dataset_info(self, dataset_type, func_hash, type="length"): """ Get the dataset size for a function hash """ return self.data_worker.load_dataset(dataset_type, func_hash, return_type=type) def _configure_function_models(self, teacher_models: List[Union[str, BaseModelConfig]], student_model: str, func_hash: str, task_type: str): """ Configure the function models """ if teacher_models: self._configure_teacher_models(teacher_models, func_hash, task_type) if student_model: self._configure_student_model(student_model, func_hash, task_type) if teacher_models and not student_model: for model_config in self.teacher_models_override[func_hash]: # ban all non-openai models from finetuning if teacher is not openai and student is not specified because it doesnt make sense if model_config.provider != OPENAI_PROVIDER and func_hash not in self.check_finetune_blacklist: self.check_finetune_blacklist.append(func_hash) if model_config.provider != OPENAI_PROVIDER and func_hash not in self.execute_finetune_blacklist: self.execute_finetune_blacklist.append(func_hash) def _configure_teacher_models(self, teacher_models: List[Union[str, BaseModelConfig]], func_hash: str, task_type: str): """ Add custom teacher models to the function config First this is added to the teacher_models_override dict, which is used to override the teacher models Args: teacher_models: A list of teacher models to use for the function hash func_hash: The function hash to add the teacher models to """ if func_hash not in self.teacher_models_override: self.teacher_models_override[func_hash] = [] if task_type == FunctionType.EMBEDDABLE: preconfigured_models = 
DEFAULT_EMBEDDING_MODELS elif task_type == FunctionType.SYMBOLIC: preconfigured_models = DEFAULT_TEACHER_MODELS for model in teacher_models: if isinstance(model, str): if model not in preconfigured_models: raise Exception(f"Teacher model {model} not supported by default. Please include it in the list in extended config format") model_config = preconfigured_models[model] elif isinstance(model, BaseModelConfig): model_config = model self.teacher_models_override[func_hash].append(model_config) def _configure_student_model(self, student_model: str, func_hash: str, task_type: str): """ Add custom student models to the function config First this is added to the teacher_models_override dict, which is used to override the teacher models Args: teacher_models: A list of teacher models to use for the function hash func_hash: The function hash to add the teacher models to """ if task_type == FunctionType.EMBEDDABLE: logging.info("Embeddable function type does not support student models") preconfigured_models = DEFAULT_STUDENT_MODELS if student_model not in preconfigured_models: raise Exception(f"Student model {student_model} is currently not supported.") model_config = preconfigured_models[student_model] self.student_model_override[func_hash] = model_config def _get_datasets(self): """ Get the existing datasets from the data worker """ self.dataset_sizes = self.data_worker.load_existing_datasets() def save_embeddable_align_statements(self, function_hash: str, args, kwargs, positive_pairs: List[Tuple[List, Dict]], negative_pairs: List[Tuple[List, Dict]]): """ Save the contrastive align statements for the embeddable function. Do not save if the function hash is in the store data blacklist Args: function_hash: A unique hash for the function args: The arguments of the function kwargs: The keyword arguments of the function positive_pairs: A list of the other function invocations that are should have equivalent embeddings negative_pairs: A list of the other function invocations that are should have different embeddings """ # prepare args and kwargs for saving copy_args = copy.deepcopy(args) copy_kwargs = copy.deepcopy(kwargs) parsed_args = prepare_object_for_saving(copy_args) parsed_kwargs = prepare_object_for_saving(copy_kwargs) # prepare positive pairs for saving parsed_positive_pairs = [] for pair in positive_pairs: copy_pair = copy.deepcopy(pair) parsed_pair = prepare_object_for_saving(copy_pair) parsed_positive_pairs.append(parsed_pair) # prepare negative pairs for saving parsed_negative_pairs = [] for pair in negative_pairs: copy_pair = copy.deepcopy(pair) parsed_pair = prepare_object_for_saving(copy_pair) parsed_negative_pairs.append(parsed_pair) # save the contrastive pairs for pair in parsed_positive_pairs: self._save_contrastive_alignment_pair(function_hash, parsed_args, parsed_kwargs, pair, positive=True) for pair in parsed_negative_pairs: self._save_contrastive_alignment_pair(function_hash, parsed_args, parsed_kwargs, pair, positive=False) def _save_contrastive_alignment_pair(self, function_hash: str, args, kwargs, pair, positive=True): """ Save a contrastive pair """ example = FunctionExample(args, kwargs, pair) if function_hash not in self.store_data_blacklist: successfully_saved, new_datapoint = self.data_worker.log_embeddable_align(function_hash, example, positive) else: successfully_saved = False new_datapoint = True if successfully_saved: if positive: if function_hash in self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS]: self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS][function_hash] += 
1 else: self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS][function_hash] = 1 if not positive: if function_hash in self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS]: self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS][function_hash] += 1 else: self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS][function_hash] = 1 if new_datapoint: # update align buffer if function_hash not in self.embeddable_align_buffer: self.embeddable_align_buffer[function_hash] = bytearray() self.embeddable_align_buffer[function_hash].extend(str(example.__dict__).encode('utf-8') + b'\r\n') def save_symbolic_align_statements(self, function_hash, args, kwargs, output): """ Save the align statements and add to the align buffer Do not save if the function hash is in the store data blacklist Then just add the datapoints to the align buffer """ # prepare output for saving and later parsing # make a deepcopy of the output to avoid changing the original object copy_output = copy.deepcopy(output) parsed_output = prepare_object_for_saving(copy_output) # prepare args and kwargs for saving copy_args = copy.deepcopy(args) copy_kwargs = copy.deepcopy(kwargs) parsed_args = prepare_object_for_saving(copy_args) parsed_kwargs = prepare_object_for_saving(copy_kwargs) example = FunctionExample(parsed_args, parsed_kwargs, parsed_output) if function_hash not in self.store_data_blacklist: successfully_saved, new_datapoint = self.data_worker.log_symbolic_align(function_hash, example) else: successfully_saved = False new_datapoint = True if successfully_saved: if function_hash in self.dataset_sizes[SYMBOLIC_ALIGNMENTS]: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] += 1 else: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = 1 if new_datapoint: # update align buffer if function_hash not in self.symbolic_align_buffer: self.symbolic_align_buffer[function_hash] = bytearray() self.symbolic_align_buffer[function_hash].extend(str(example.__dict__).encode('utf-8') + b'\r\n') def save_symbolic_datapoint(self, func_hash, example): """ Save datapoint to the training data """ written_datapoints = self.data_worker.log_symbolic_patch(func_hash, example) for func_hash, datapoints in written_datapoints.items(): if func_hash in self.dataset_sizes[PATCHES]: # if the dataset size is -1, it means we havent read in the dataset size yet if self.dataset_sizes[PATCHES][func_hash] == -1: self.dataset_sizes[PATCHES][func_hash] = self._get_dataset_info(PATCHES, func_hash, type="length") else: self.dataset_sizes[PATCHES][func_hash] += datapoints else: self.dataset_sizes[PATCHES][func_hash] = datapoints return len(written_datapoints) > 0 def get_symbolic_alignments(self, func_hash, max=20): """ Get all symbolic aligns for a function hash """ if func_hash not in self.symbolic_align_buffer: return [] buffer = self.symbolic_align_buffer[func_hash] return self._get_examples_from_alignment_buffer(buffer, max) def get_embeddable_alignments(self, func_hash, max=20): """ Get all embeddable aligns for a function hash """ if func_hash not in self.embeddable_align_buffer: return [] buffer = self.embeddable_align_buffer[func_hash] return self._get_examples_from_alignment_buffer(buffer, max) def _get_examples_from_alignment_buffer(self, buffer, max=20): """ Get examples from a buffer """ split_buffer = bytes(buffer).split(b"\n") # byte array of stringed python dicts into dict objects example_set = set() for example in split_buffer: if example == b"": continue example_set.add(example) # easy and straightforward way to get nr of words (not perfect but doesnt need to be) 
# Can do the proper way of tokenizing later, it might be slower and we dont need 100% accuracy example_element_limit = EXAMPLE_ELEMENT_LIMIT examples = [] for example_bytes in split_buffer: if example_bytes in example_set: nr_of_elements = approximate_token_count(example_bytes) example_element_limit -= nr_of_elements if example_element_limit < 0: break example = example_bytes.decode('utf-8') # json load the example try: example = json.loads(example) except: example = ast.literal_eval(example) examples.append(example) example_set.remove(example_bytes) return list(examples)[:max] def load_symbolic_align_statements(self, function_hash): """ Load all align statements First check the data storage blacklist, if the func hash is in the blacklist, then set the dataset size to 0 and the align buffer to empty bytearray """ if function_hash in self.store_data_blacklist: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = 0 self.symbolic_align_buffer[function_hash] = bytearray() elif function_hash not in self.symbolic_align_buffer: dataset_size, align_dataset = self._get_dataset_info(SYMBOLIC_ALIGNMENTS, function_hash, type="both") if align_dataset: self.symbolic_align_buffer[function_hash] = bytearray(align_dataset) self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = dataset_size def postprocess_symbolic_datapoint(self, func_hash, function_description, example, repaired=True): """ Postprocess the datapoint First check if the datapoint should be added to the training data Add the datapoint if it should be added Then check if the function should be finetuned and execute finetuning if it should """ try: if func_hash not in self.store_data_blacklist: added = self.save_symbolic_datapoint(func_hash, example) if added: self._update_datapoint_config(repaired, func_hash) except Exception as e: print(e) print("Could not add datapoint to training data") if func_hash not in self.execute_finetune_blacklist: self.check_for_finetuning(function_description, func_hash) def load_function_config(self, func_hash, function_description): "<fim_suffix>"" Load the config file for a function hash """ config, default = self.data_worker.load_function_config(func_hash) if func_hash in self.student_model_override and config.distilled_model.model_name == "": config.distilled_model = self.student_model_override[func_hash] if default and func_hash not in self.check_finetune_blacklist: finetuned, finetune_config = self._check_for_finetunes(function_description, config.distilled_model) if finetuned: config = finetune_config # update teachers if not default if func_hash in self.teacher_models_override: config.teacher_models = self.teacher_models_override[func_hash] self.function_configs[func_hash] = config return config def _check_for_finetunes(self, function_description: FunctionDescription, model_config : BaseModelConfig) -> Tuple[bool, Dict]: # hash the function_hash into 16 characters (to embed it into the name of OpenAI finetunes, for later retrieval) logging.info(f"Checking for finetunes for {function_description.name} using {model_config.provider}") finetune_hash = function_description.__hash__(purpose="finetune") + encode_int(self.environment_id) # List 10 fine-tuning jobs finetunes: List[FinetuneJob] = self.api_provider[model_config.provider].list_finetuned(model_config, limit=1000) # Check if the function_hash is in the fine-tuning jobs # the finetunes are in chronological order starting from newest # So this gets the latest finetune for finetune in finetunes: # check if the finetune hash is in the fine-tuned model 
name if finetune.status == "succeeded" and finetune_hash in finetune.fine_tuned_model.model_name: try: config = self._construct_config_from_finetune(finetune_hash, finetune) # save the config self.data_worker.update_function_config(function_description.__hash__(), config) logging.info(f"Found finetuned model for {function_description.name} [{config.distilled_model.model_name}]") return True, config except: logging.info(f"Found finetuned model for {function_description.name} [{finetune.fine_tuned_model.model_name}] but could not load it") return False, {} logging.info(f"No finetuned model found for {function_description.name}") return False, {} def _construct_config_from_finetune(self, finetune_hash: str, finetune: FinetuneJob): """ Construct a valid function config from a finetune job Args: finetune_hash: The hash of the function finetune: The finetune job Returns: config: The function config """ model = finetune.fine_tuned_model # get the ending location of finetune hash in the model name finetune_hash_end = model.model_name.find(finetune_hash) + len(finetune_hash) # get the next character after the finetune hash next_char = model.model_name[finetune_hash_end] # get the number of training runs nr_of_training_runs = decode_int(next_char) + 1 nr_of_training_points = (2 ** (nr_of_training_runs - 1)) * 200 config = { "distilled_model": model, "current_model_stats": { "trained_on_datapoints": nr_of_training_points, "running_faults": []}, "last_training_run": {"trained_on_datapoints": nr_of_training_points}, "current_training_run": {}, "teacher_models": [], # default teacher models, will be overwritten if needed "nr_of_training_runs": nr_of_training_runs} config = FunctionConfig().load_from_dict(config) return config def get_models(self, function_description): """ Return the current model from the config file """ func_hash = function_description.__hash__() if func_hash in self.function_configs: func_config = self.function_configs[func_hash] else: func_config = self.load_function_config(func_hash, function_description) return func_config.distilled_model, func_config.teacher_models def _update_datapoint_config(self, repaired, func_hash): """ Update the config to reflect the new datapoint in the training data First adds 1 to the current datapoints Then updates running faults depending if priority is True or not and takes last 100 Then checks the revert condition, i.e if last 10 datapoints are 50% faulty Finally updates the config file Args: priority (bool): whether the datapoint was fixed by the teacher model/should be added to the training data """ try: if repaired: self.function_configs[func_hash].current_model_stats["running_faults"].append(1) else: self.function_configs[func_hash].current_model_stats["running_faults"].append(0) # take the last 100 datapoints self.function_configs[func_hash].current_model_stats["running_faults"] = \ self.function_configs[func_hash].current_model_stats["running_faults"][-100:] # check if the last 10 datapoints are 50% faulty, this is the switch condition if sum(self.function_configs[func_hash].current_model_stats["running_faults"][-10:]) / 10 > 0.5: self.function_configs[func_hash].distilled_model.model_name = "" self.function_configs[func_hash].current_model_stats["trained_on_datapoints"] = 0 self.function_configs[func_hash].current_model_stats["running_faults"] = [] self._update_config_file(func_hash) except Exception as e: print(e) print("Could not update config file") pass def _update_config_file(self, func_hash): 
self.data_worker.update_function_config(func_hash, self.function_configs[func_hash]) def check_for_finetuning(self, function_description, func_hash): """ Check for finetuning status If already finetuning, check for finetuning status If not finetuning, check for finetuning condition and execute finetuning if condition is met """ try: # check if already finetuning if "job_id" in self.function_configs[func_hash].current_training_run: # check for job status self._check_finetuning_status(func_hash, function_description) else: # check for finetuning condition if self._check_finetuning_condition(func_hash, function_description): self._execute_finetuning(function_description, func_hash) except Exception as e: print(e) print("Error checking for finetuning") def _check_finetuning_condition(self, func_hash, function_description): """ Check if the finetuning condition is met Currently finetuning condition is dependent on the number of symbolic datapoints since last finetuning """ if func_hash not in self.function_configs: return False training_threshold = (2 ** self.function_configs[func_hash].nr_of_training_runs) * 200 align_dataset_size = self.dataset_sizes[SYMBOLIC_ALIGNMENTS][func_hash] if func_hash in self.dataset_sizes[ SYMBOLIC_ALIGNMENTS] else 0 patch_dataset_size = self.dataset_sizes[PATCHES][func_hash] if func_hash in self.dataset_sizes[PATCHES] else 0 if patch_dataset_size == -1: # if havent read in the patch dataset size, read it in patch_dataset_size = self._get_dataset_info(PATCHES, func_hash, type="length") self.dataset_sizes[PATCHES][func_hash] = patch_dataset_size if func_hash not in self.startup_logging_checker: logging.info(f"Function {function_description.name} [{align_dataset_size} aligns | {patch_dataset_size} runs] will be finetuned from"\ f" {self.function_configs[func_hash].teacher_models[0].model_name} using {self.function_configs[func_hash].distilled_model.provider} in "\ f"{training_threshold-(patch_dataset_size + align_dataset_size)} runs") self.startup_logging_checker[func_hash] = True return (patch_dataset_size + align_dataset_size) > training_threshold def _execute_finetuning(self, function_description, func_hash): """ Execute the finetuning First create the OpenAI compatible dataset with jsonL file and upload it Then submit the OpenAI finetuning job Finally update the config file to reflect the new finetuning job as current """ # get function description function_string = str(function_description.__dict__.__repr__() + "\n") # get the align dataset align_dataset = self._get_dataset_info(SYMBOLIC_ALIGNMENTS, func_hash, type="dataset") if not align_dataset: align_dataset = "" else: align_dataset = align_dataset.decode('utf-8') # get the patch dataset patch_dataset = self._get_dataset_info(PATCHES, func_hash, type="dataset") if not patch_dataset: patch_dataset = "" else: patch_dataset = patch_dataset.decode('utf-8') if align_dataset == "" and patch_dataset == "": return dataset = align_dataset + patch_dataset dataset.replace("\\n", "[SEP_TOKEN]") dataset = dataset.split("\n") dataset = [x.replace("[SEP_TOKEN]", "\\n") for x in dataset if x != ""] # read in the dataset file dataset = [ast.literal_eval(x) for x in dataset] # # create the openai dataset instruction = "You are given below a function description and input data. The function description of what the function must carry out can be found in the Function section, with input and output type hints. The input data can be found in Input section. 
Using the function description, apply the function to the Input and return a valid output type, that is acceptable by the output_class_definition and output_class_hint. Return None if you can't apply the function to the input or if the output is optional and the correct output is None.\nINCREDIBLY IMPORTANT: Only output a JSON-compatible string in the correct response format." finetuning_dataset = [{"messages": [ { "role": "system", "content": f"You are a skillful and accurate language model, who applies a described function on input data. Make sure the function is applied accurately and correctly and the outputs follow the output type hints and are valid outputs given the output types." }, {"role": "user", "content": f"{instruction}\nFunction: {function_string}---\nInputs:\nArgs: {x['args']}\nKwargs: {x['kwargs']}\nOutput:"}, {"role": "assistant", "content": str(x['output']) if x['output'] is not None else "None"}]} for x in dataset] # Create an in-memory text stream temp_file = io.BytesIO() # Write data to the stream for idx, item in enumerate(finetuning_dataset): temp_file.write(json.dumps(item).encode('utf-8')) if idx != len(finetuning_dataset) - 1: temp_file.write("\n".encode('utf-8')) # Reset the stream position to the beginning temp_file.seek(0) # create the finetune hash finetune_hash = function_description.__hash__(purpose="finetune") nr_of_training_runs = self.function_configs[func_hash].nr_of_training_runs finetune_hash += encode_int(self.environment_id) finetune_hash += encode_int(nr_of_training_runs) # here can be sure that datasets were read in as that is checked in the finetune_check align_dataset_size = self.dataset_sizes[SYMBOLIC_ALIGNMENTS][func_hash] if func_hash in self.dataset_sizes[ SYMBOLIC_ALIGNMENTS] else 0 patch_dataset_size = self.dataset_sizes[PATCHES][func_hash] if func_hash in self.dataset_sizes[PATCHES] else 0 total_dataset_size = align_dataset_size + patch_dataset_size # Use the stream as a file try: finetune_provider = self.function_configs[func_hash].distilled_model.provider logging.info(f"Starting finetuning for {function_description.name} using {finetune_provider} for {self.function_configs[func_hash].distilled_model.base_model_for_sft}") finetuning_response: FinetuneJob = self.api_provider[finetune_provider].finetune(file=temp_file, suffix=finetune_hash, model_config = self.function_configs[func_hash].distilled_model,) except Exception as e: logging.info(f"Could not start finetuning for {function_description.name} using {finetune_provider}. 
Error: {e}") return self.function_configs[func_hash].current_training_run = {"job_id": finetuning_response.id, "trained_on_datapoints": total_dataset_size, "last_checked": datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S")} # update the config json file try: self._update_config_file(func_hash) except Exception as e: print(e) print("Could not update config file to register a finetuning run") def _check_finetuning_status(self, func_hash, function_description): """ Check the status of the current finetuning job If the job is finished, update the config file to reflect the new model """ job_id = self.function_configs[func_hash].current_training_run["job_id"] last_checked = self.function_configs[func_hash].current_training_run["last_checked"] # check if last checked was more than 30 mins ago if (datetime.datetime.now() - datetime.datetime.strptime(last_checked, "%Y-%m-%d %H:%M:%S")).total_seconds() > 1800: finetune_provider = self.function_configs[func_hash].distilled_model.provider response = self.api_provider[finetune_provider].get_finetuned(job_id, model_config = self.function_configs[func_hash].distilled_model) self.function_configs[func_hash].current_training_run["last_checked"] = datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S") if response.status == "succeeded" or response.status == "failed": self._update_finetune_config(response, func_hash, function_description) else: self._update_config_file(func_hash) def _update_finetune_config(self, response: FinetuneJob, func_hash, function_description): """ Update the config file to reflect the new model and switch the current model to the finetuned model """ self.function_configs[func_hash].update_with_finetuned_response(response) logging.info(f"Finetuning for {function_description.name} using {self.function_configs[func_hash].distilled_model.provider} finished with status: {response.status}."\ f" The id of the finetuned model is {response.fine_tuned_model.model_name}") try: self._update_config_file(func_hash) except Exception as e: logging.info(f"Could not update the function configuration file with the finetuned model for {function_description.name}. Error: {e}") pass <fim_middle>
null
BLOCK_COMMENT
complete_current_header_empty_completion
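The function_modeler row above encodes two scheduling rules: a finetuning run is triggered once the combined align and patch dataset outgrows a threshold that doubles after every run, and the distilled model is reverted when more than half of the last ten datapoints needed repair. Below is a minimal standalone sketch of both rules; the function names are hypothetical illustrations, not tanuki's actual API.

# Finetuning trigger: the data requirement doubles after every
# completed run (200, 400, 800, ...), matching
# (2 ** nr_of_training_runs) * 200 in _check_finetuning_condition.
def should_finetune(align_size: int, patch_size: int, nr_of_training_runs: int) -> bool:
    training_threshold = (2 ** nr_of_training_runs) * 200
    return (align_size + patch_size) > training_threshold

# Revert condition from _update_datapoint_config: running_faults holds
# 1 for repaired datapoints and 0 for clean ones; if more than half of
# the last ten are repaired, the distilled model name is cleared.
def should_revert(running_faults: list) -> bool:
    return sum(running_faults[-10:]) / 10 > 0.5

assert should_finetune(150, 100, 0)      # 250 > 200: trigger first run
assert not should_finetune(150, 100, 1)  # 250 <= 400: wait for more data
assert should_revert([0] * 4 + [1] * 6)  # 6 of last 10 faulty: revert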
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if<fim_suffix> self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
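The check_type implementation captured in the row above dispatches almost entirely on typing.get_origin and typing.get_args. A short illustration of what those two stdlib primitives return for the annotation shapes the validator handles, which is worth keeping in mind when reading its branches:

from typing import Dict, List, Literal, Optional, get_args, get_origin

# Parametrized containers expose a runtime origin plus type arguments.
print(get_origin(List[int]), get_args(List[int]))            # <class 'list'> (<class 'int'>,)
print(get_origin(Dict[str, int]), get_args(Dict[str, int]))  # <class 'dict'> (<class 'str'>, <class 'int'>)

# Optional[X] is sugar for Union[X, None], so the Union branch of
# check_type also covers optional model fields.
print(get_origin(Optional[str]), get_args(Optional[str]))    # typing.Union (<class 'str'>, <class 'NoneType'>)

# Literal keeps its allowed values as args, which is why the Literal
# branch can simply test `value in args`.
print(get_origin(Literal["a", "b"]), get_args(Literal["a", "b"]))  # typing.Literal ('a', 'b')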
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr<fim_suffix>(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
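The instantiate method in the row above rebuilds typed objects from JSON-compatible data; its dataclass branch filters the incoming dict down to declared fields before calling the constructor. A simplified, self-contained sketch of just that branch follows; it coerces only plain base types, where the real method recurses into nested generics, and the Point class is an illustrative stand-in.

import dataclasses
from dataclasses import dataclass
from typing import Any, get_type_hints

@dataclass
class Point:
    x: int
    y: int

def instantiate_dataclass(data: dict, target_type: type) -> Any:
    field_names = {f.name for f in dataclasses.fields(target_type)}
    hints = get_type_hints(target_type)
    kwargs = {}
    for key, value in data.items():
        if key not in field_names:
            continue  # silently drop keys that are not declared fields
        hint = hints.get(key, Any)
        # Coerce only base types here; nested generics would recurse.
        kwargs[key] = hint(value) if hint in (int, float, str, bool) else value
    return target_type(**kwargs)

print(instantiate_dataclass({"x": "1", "y": 2, "extra": 0}, Point))  # Point(x=1, y=2)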
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinsta<fim_suffix>nce(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass # Handle the analogous case where the string represents a float if target_type is float: try: return float(data) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception # (skip the isinstance check for Any, which isinstance() cannot handle) _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) # keep it subscriptable to match the item_type[0] accesses below instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
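The row above exercises the Validator's structural type checking. A minimal usage sketch of that API, assuming the Validator class shown in the row is importable as written; the Person dataclass and the example payloads are hypothetical illustrations, not part of the dataset:

from dataclasses import dataclass
from typing import Dict, List

# Assumed import, matching the filename in the row above:
# from tanuki.validator import Validator

@dataclass
class Person:
    name: str
    age: int
    tags: List[str]

validator = Validator()

# Base and generic types are checked structurally.
assert validator.check_type(3, int)
assert validator.check_type({"a": [1, 2]}, Dict[str, List[int]])
assert not validator.check_type("3", int)

# validate_output parses the string as JSON first, then type-checks the result;
# a dict that round-trips through the dataclass constructor passes.
assert validator.validate_output('{"name": "Ada", "age": 36, "tags": ["math"]}', Person)
assert not validator.validate_output('not json', Person)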
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass # Handle the analogous case where the string represents a float if target_type is float: try: return float(data) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_t<fim_suffix>ype): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception # (skip the isinstance check for Any, which isinstance() cannot handle) _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) # keep it subscriptable to match the item_type[0] accesses below instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
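This row targets Validator.instantiate, which rebuilds typed Python objects from JSON-compatible data. A small sketch of that behavior, again assuming the Validator class from the row above is in scope; Point is a hypothetical type for illustration:

import json
from dataclasses import dataclass
from typing import List

@dataclass
class Point:
    x: int
    y: int

validator = Validator()

# A dict becomes a dataclass instance: fields are filtered to the dataclass's
# own fields and each value is instantiated against its type hint.
point = validator.instantiate(json.loads('{"x": 3, "y": 4}'), Point)
print(point)   # Point(x=3, y=4)

# Generic containers are rebuilt recursively, coercing items where needed.
nums = validator.instantiate(["1", "2"], List[int])
print(nums)    # [1, 2] -- each string is coerced via int("1")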
<filename>tanuki_py/src/tanuki/trackers/abc_buffered_logger.py<fim_prefix>import json from abc import abstractmethod from typing import Dict, Any, Literal from tanuki.bloom_filter import BloomFilter from tanuki.constants import EXPECTED_ITEMS, FALSE_POSITIVE_RATE, ALIGN_FILE_EXTENSION, \ POSITIVE_FILE_EXTENSION, NEGATIVE_FILE_EXTENSION, PATCH_FILE_EXTENSION from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.models.function_config import FunctionConfig # PATCH_FILE_EXTENSION_TYPE = Literal[".patches"] # ALIGN_FILE_EXTENSION_TYPE = Literal[".alignments"] # POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".positive_embedding"] # NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".negative_embedding"] # # PATCH_FILE_EXTENSION: PATCH_FILE_EXTENSION_TYPE = ".patches" # ALIGN_FILE_EXTENSION: ALIGN_FILE_EXTENSION_TYPE = ".alignments" # POSITIVE_EMBEDDING_FILE_EXTENSION: POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_positives" # NEGATIVE_EMBEDDING_FILE_EXTENSION: NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_negatives" # # EXPECTED_ITEMS = 10000 # FALSE_POSITIVE_RATE = 0.01 # LIB_NAME = "tanuki" # ENVVAR = "TANUKI_LOG_DIR" class ABCBufferedLogger(DatasetWorker): def __init__(self, name, level=15): self.buffers = {} self.mapped_files = {} self.miss_count = 0 self.hit_count = 0 self.flush_limit = {} self.buffer_rolling_size = {} self.write_count = 0 self.write_limit = 1000 # Save the Bloom filter every 1000 writes super().__init__(name, level) self.bloom_filter = self.create_bloom_filter() self.load_bloom_filter() self.default_function_config = FunctionConfig() @abstractmethod def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: """ Get an instance of the bloom filter persistence provider. This exposes some persistent file storage, that must support reading and writing raw byte streams. :return: """ pass @abstractmethod def load_existing_datasets(self) -> Dict[str, Dict[str, Any]]: """ Get the lengths of all datasets backing the registered functions, including aligns. :return: """ pass @abstractmethod def ensure_persistence_location_exists(self): """ Ensure that the place we will be writing to actually exists. If not, create it. """ pass @abstractmethod def get_patch_location_for_function(self, func_hash, extension="") -> str: """ Get the address of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ pass @abstractmethod def write(self, path, data, mode="a") -> None: pass @abstractmethod def read(self, path) -> str: pass @abstractmethod def get_hash_from_path(self, path) -> str: pass @abstractmethod def does_object_exist(self, path) -> bool: pass def create_bloom_filter(self): bloom_filter_persistence = self.get_bloom_filter_persistence() bloom_filter = BloomFilter( bloom_filter_persistence, expected_number_of_elements=EXPECTED_ITEMS, false_positive_probability=FALSE_POSITIVE_RATE) return bloom_filter def load_bloom_filter(self): try: self.bloom_filter.load() except FileNotFoundError: self.debug("No Bloom filter found. 
Creating a new one.") def write_symbolic_align_call(self, func_hash, example) -> bool: log_file_path = self.get_patch_location_for_function(func_hash, extension=ALIGN_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def write_embeddable_align_call(self, func_hash, example, positive=True) -> bool: if positive: log_file_path = self.get_patch_location_for_function(func_hash, extension=POSITIVE_FILE_EXTENSION) else: log_file_path = self.get_patch_location_for_function(func_hash, extension=NEGATIVE_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def log_embeddable_align(self, func_hash, example, positive=True, **kws): """ Log a contrastive function invocation Args: func_hash: A string representation of the function signature and input parameters example: The example object positive: Whether the example is positive or negative **kws: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_embeddable_align_call(func_hash, example, positive) return successfully_saved, new_datapoint def log_symbolic_align(self, func_hash, *args, **kws): """ Log an align function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param args: Example objects :param kws: :return: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint example = args[0] # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_symbolic_align_call(func_hash, example) return successfully_saved, new_datapoint def log_symbolic_patch(self, func_hash, example): """ Log a patched function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param example: :return: """ if not isinstance(func_hash, str): func_hash = str(func_hash) example_data = str(example.__dict__).encode('utf-8') + b'\n' bloom_filter_representation = func_hash + '_' + example_data.decode('utf-8') # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): self.hit_count += 1 return {} self.miss_count += 1 # Add to Bloom Filter self.bloom_filter.add(bloom_filter_representation) try: self.ensure_persistence_location_exists() except Exception as e: return {} log_file_path = self.get_patch_location_for_function(func_hash, extension=PATCH_FILE_EXTENSION) if log_file_path not in self.bu<fim_suffix>ffers: 
self.buffers[log_file_path] = bytearray() if log_file_path not in self.flush_limit: self.flush_limit[log_file_path] = 1 self.buffers[log_file_path].extend(example_data) self.write_count += 1 if log_file_path not in self.buffer_rolling_size: self.buffer_rolling_size[log_file_path] = 1 else: self.buffer_rolling_size[log_file_path] += 1 if self.write_count >= self.write_limit: written_datapoints = self.flush() self.save_bloom_filter() self.write_count = 0 # Reset counter return written_datapoints if len(self.buffers[log_file_path]) >= min(self.flush_limit[log_file_path], 4096): # Flush after reaching 4KB written_datapoints = {} try: self.write(log_file_path, self.buffers[log_file_path], mode="a+b") # update buffers written_datapoints[func_hash] = self.buffer_rolling_size[log_file_path] self.buffers[log_file_path].clear() self.buffer_rolling_size[log_file_path] = 0 self.flush_limit[log_file_path] = 2 * self.flush_limit[log_file_path] self.save_bloom_filter() except Exception as e: pass return written_datapoints return {} def save_bloom_filter(self): try: self.bloom_filter.save() except Exception as e: self.warning("Could not save Bloom filter: {}".format(e)) def flush(self): # get log directory written_datapoints = {} for log_file_path, buffer in self.buffers.items(): if len(buffer) > 0: try: self.write(log_file_path, buffer, mode="a+b") written_datapoints[self.get_hash_from_path(log_file_path)] = self.buffer_rolling_size[log_file_path] self.buffer_rolling_size[log_file_path] = 0 buffer.clear() except Exception as e: pass return written_datapoints def load_function_config(self, func_hash): """ Get the config file for the function. Uses the message and log directory Config file has to be in .json """ default = False try: # try to get the config from the disk. If inaccessible, create a new default one self.ensure_persistence_location_exists() log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" if not self.does_object_exist(config_path): function_config = self.default_function_config default = True func_config_dict = function_config.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) else: function_config = FunctionConfig().load_from_dict(self.read_json(config_path)) except Exception as e: function_config = self.default_function_config default = True return function_config, default def update_function_config(self, func_hash, config_to_be_saved): """ Save the config file """ log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" try: func_config_dict = config_to_be_saved.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) except Exception as e: pass def write_json(self, path, data): self.write(path, json.dumps(data)) def read_json(self, path): return json.loads(self.read(path)) <fim_middle>
null
IF
complete_current_header_empty_completion
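The buffered-logger row above centres on one pattern: hash each datapoint into a Bloom filter so duplicates are skipped cheaply, and buffer writes until a size threshold is reached (the real implementation doubles its per-file flush limit after each flush). A standalone, simplified sketch of that idea, using a plain set in place of the Bloom filter (exact rather than probabilistic) and a fixed threshold; the class and names are hypothetical:

class BufferedDedupWriter:
    """Skip duplicate records and batch writes, mirroring log_symbolic_patch above."""

    def __init__(self, flush_threshold: int = 4096):
        self.seen = set()      # stand-in for the Bloom filter
        self.buffers = {}      # path -> bytearray
        self.flush_threshold = flush_threshold

    def log(self, path: str, record: str) -> bool:
        key = path + "_" + record
        if key in self.seen:   # Bloom-filter lookup in the real implementation
            return False
        self.seen.add(key)
        buf = self.buffers.setdefault(path, bytearray())
        buf.extend(record.encode("utf-8") + b"\n")
        if len(buf) >= self.flush_threshold:
            self._flush(path)
        return True

    def _flush(self, path: str) -> None:
        with open(path, "ab") as f:
            f.write(self.buffers[path])
        self.buffers[path].clear()

w = BufferedDedupWriter(flush_threshold=16)
w.log("patches.log", "example-1")   # buffered, returns True
w.log("patches.log", "example-1")   # duplicate: skipped, returns False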
<filename>tanuki_py/src/tanuki/language_models/language_model_manager.py<fim_prefix>import json from typing import Any, Dict from tanuki.function_modeler import FunctionModeler from tanuki.language_models.llm_api_abc import LLM_API from tanuki.models.function_description import FunctionDescription from tanuki.models.function_example import FunctionExample from tanuki.models.language_model_output import LanguageModelOutput from tanuki.utils import approximate_token_count from tanuki.validator import Validator from tanuki.models.api_manager import APIManager from tanuki.language_models.llm_configs.abc_base_config import BaseModelConfig import logging class LanguageModelManager(object): """ The LanguageModelManager is responsible for managing the language models and their outputs operationally, this includes: - Generating outputs from the language models - Repairing outputs from the language models - Saving outputs from the language models - Finetuning the language models from the saved outputs """ def __init__(self, function_modeler: FunctionModeler, api_provider: APIManager, generation_token_limit=512,) -> None: self.api_provider = api_provider self.function_modeler = function_modeler self.default_generation_length = generation_token_limit self.initialized_functions = {} self.token_counts = {} def __call__(self, args, function_description: FunctionDescription, kwargs, validator: Validator, generation_parameters: dict) -> Any: # add the generation length if not there if "max_new_tokens" not in generation_parameters: generation_parameters["max_new_tokens"] = self.default_generation_length output = self.generate(args, kwargs, function_description, generation_parameters) # start parsing the object, very hacky way for the time being choice_parsed = self._parse_choice(output) valid = validator.check_type(choice_parsed, function_description.output_type_hint) if not valid: choice, choice_parsed, successful_repair = self.repair_output(args, kwargs, function_description, output.generated_response, validator, generation_parameters) if not successful_repair: raise TypeError( f"Output type was not valid. 
Expected an object of type {function_description.output_type_hint}, got '{output.generated_response}'") output.generated_response = choice output.distilled_model = False datapoint = FunctionExample(args, kwargs, output.generated_response) if output.suitable_for_finetuning and not output.distilled_model: self.function_modeler.postprocess_symbolic_datapoint(function_description.__hash__(), function_description, datapoint, repaired=not valid) instantiated = validator.instantiate(choice_parsed, function_description.output_type_hint) return instantiated def _parse_choice(self, output): try: # json load choice_parsed = json.loads(output.generated_response) except: # if it fails, it's not a json object, try eval try: choice_parsed = eval(output.generated_response) except: choice_parsed = output.generated_response return choice_parsed def generate(self, args, kwargs, function_description, llm_parameters={}): """ The main generation function, given the args, kwargs, function description and model type, generate a response and check if the datapoint can be saved to the finetune dataset """ func_hash = function_description.__hash__() prompt, model, save_to_finetune, is_distilled_model = self.get_generation_case(args, kwargs, function_description, llm_parameters, func_hash) # loggings current_function_setup = self.initialized_functions.get(func_hash, None) # getting the current function setup - model and align statements if current_function_setup: generator_model = current_function_setup["model"] if is_distilled_model: logging.info(f"Generating function outputs for {function_description.name} with a finetuned model: {model.model_name}.") self.initialized_functions[func_hash]["model"] = model.model_name elif generator_model == "": logging.info(f"Found {len(current_function_setup['examples'])} align statements for {function_description.name}. 
Generating function outputs with {model.model_name}.") self.initialized_functions[func_hash]["model"] = model.model_name elif generator_model != model.model_name: logging.info(f"Switching output generation from {generator_model} to {model.model_name} for function {function_description.name}.") self.initialized_functions[func_hash]["model"] = model.model_name choice = self._synthesise_answer(prompt, model, llm_parameters) output = LanguageModelOutput(choice, save_to_finetune, is_distilled_model) return output def _synthesise_answer(self, prompt, model, llm_parameters): """ Synthesise an answer given the prompt, model, model_type and llm_parameters Args: prompt (str): The prompt to send to the model model (BaseModelConfig): The model to use for generation llm_parameters (dict): The parameters to use for generation return: choice (str): The generated response """ system_message = model.system_message return self.api_provider[model.provider].generate(model, system_message, prompt, **llm_parameters) def get_generation_case(self, args, kwargs, function_description, llm_parameters, func_hash): """ Get the generation case with the correct prompt and model First get the current model, then if distilled model, do zero-shot prompt and return False as suitable_for_finetune If not distilled model, check if suitable for finetuning, create the prompt and return the correct model given the token count """ f = str(function_description.__dict__.__repr__()) distilled_model, teacher_models = self.function_modeler.get_models(function_description) is_distilled_model = distilled_model.model_name != "" suitable_for_distillation, input_prompt_token_count = self.suitable_for_finetuning_token_check(args, kwargs, f, distilled_model) if func_hash not in self.initialized_fun<fim_suffix>ctions: # initialise the initialized_functions dict self.initialized_functions[func_hash] = {"model": "", "examples": []} # no examples needed, using a finetuned model. Dont save to finetune dataset if is_distilled_model and suitable_for_distillation: prompt = self.construct_prompt(f, args, kwargs, [], distilled_model) return prompt, distilled_model, suitable_for_distillation, True else: aligns = self.function_modeler.get_symbolic_alignments(function_description.__hash__(), max=16) examples = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput: {align['output']}" for align in aligns] # update the examples in the initialized_functions dict self.initialized_functions[func_hash]["examples"] = examples examples_token_count = sum([approximate_token_count(example) for example in examples]) generation_tokens = llm_parameters.get("max_new_tokens", self.default_generation_length) model = self.choose_model_from_tokens(teacher_models, examples_token_count + input_prompt_token_count + generation_tokens, len(examples)) if model: examples_with_parsing_tokens = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput:{model.parsing_helper_tokens['start_token']}{align['output']}{model.parsing_helper_tokens['end_token']}" for align in aligns] prompt = self.construct_prompt(f, args, kwargs, examples_with_parsing_tokens, model) return prompt, model, suitable_for_distillation, False else: raise ValueError( "The input content and align statements combined are too long, please shorten it. 
The maximum currently allowed token limit is 32000") def suitable_for_finetuning_token_check(self, args, kwargs, f, distilled_model: BaseModelConfig): """ Check if the inputs are suitable for finetuning, i.e are below the finetuning token count """ # check if finetunable finetuning_prompt = f"Function: {f}\n---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:" input_prompt_token_count = approximate_token_count(finetuning_prompt) if distilled_model.system_message_token_count < 0: distilled_model.system_message_token_count = approximate_token_count(distilled_model.system_message) if distilled_model.instruction_token_count < 0: distilled_model.instruction_token_count = approximate_token_count(distilled_model.instructions) suitable_for_finetune = input_prompt_token_count + distilled_model.instruction_token_count + distilled_model.system_message_token_count < distilled_model.context_length return suitable_for_finetune, input_prompt_token_count def construct_prompt(self, f, args, kwargs, examples, model): """ Construct a prompt given the model, function description, args, kwargs and examples Args: model (BaseModelConfig): The model to use for generation f (str): The function description args (tuple): The args of the function kwargs (tuple): The kwargs of the function examples (list): The examples of the function Returns: content (str): The prompt to send to the model """ if examples: final_examples = "\n".join( [f"{align}" for align in examples]) example_input = f"Examples:{final_examples}\n" else: example_input = "" instruction_prompt = model.instructions content = f"{instruction_prompt}\nFunction: {f}\n{example_input}---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:" return content def repair_generate(self, args, kwargs, f, failed_outputs_list, aligns, models, llm_parameters): """ Repair the output given the input, function description, failed outputs list, examples and models """ # get the token counts examples = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput: {align['output']}" for align in aligns] examples_token_count = sum([approximate_token_count(example) for example in examples]) failed_examples_token_count = sum([approximate_token_count(failed_output[0]) + approximate_token_count(failed_output[1]) for failed_output in failed_outputs_list]) input_prompt_token_count = approximate_token_count(f"Function: {f}\n---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:") generation_tokens = llm_parameters.get("max_new_tokens", self.default_generation_length) model = self.choose_model_from_tokens(models, examples_token_count+input_prompt_token_count+generation_tokens+failed_examples_token_count, len(examples)) if model: prompt = self.generate_repair_prompt(args, kwargs, f, failed_outputs_list, examples, model) logging.info(f"Previous output failed type validation, attempting to repair with {model.model_name}") choice = self._synthesise_answer(prompt, model, llm_parameters) return choice else: return None def generate_repair_prompt(self, args, kwargs, f, failed_outputs_list, examples, model): """ Generate a repair prompt given the args, kwargs, function description, failed outputs list and examples """ if examples: final_examples = "\n".join( [f"{model.parsing_helper_tokens['start_token']}{align}{model.parsing_helper_tokens['end_token']}" for align in examples]) successful_examples = f"Examples:{final_examples}\n" else: successful_examples = "" failed_examples = "" for failed_output in failed_outputs_list: failed_examples += f"Output: {failed_output[0]}\nError: 
{failed_output[1]}\n\n" end_token_addition = "" if model.parsing_helper_tokens["end_token"]: end_token_addition = f"Make sure to add the {model.parsing_helper_tokens['end_token']} token at the end of the output." prompt = f"{model.repair_instruction}{end_token_addition}\nFUNCTION DESCRIPTION: {f}\n{successful_examples}---{model.parsing_helper_tokens['start_token']}Inputs:\nArgs: {args}\nKwargs: {kwargs}\nFAILED EXAMPLES: {failed_examples}Correct output:" return prompt def choose_model_from_tokens(self, models, input_token_count, nr_of_examples=0): """ Choose a model from the models given the token count and number of examples Args: models (list): The models to choose from input_token_count (int): The token count of the input nr_of_examples (int): The number of examples Returns: model (BaseModelConfig): The chosen model """ for model in models: # check if input token count is less than the context length # If the model config has custom messages, then use those, otherwise use the default ones if model.system_message_token_count < 0: model.system_message_token_count = approximate_token_count(model.system_message) if model.instruction_token_count < 0: model.instruction_token_count = approximate_token_count(model.instructions) if model.parsing_helper_tokens["start_token"]: input_token_count += 2*nr_of_examples if model.parsing_helper_tokens["end_token"]: input_token_count += 2*nr_of_examples total_token_count = input_token_count + model.instruction_token_count + model.system_message_token_count if total_token_count < model.context_length: return model return None def repair_output(self, args: tuple, kwargs: dict, function_description: FunctionDescription, choice, validator: Validator, generation_parameters: dict) -> tuple: """ Repair an output, that failed type validation by generating a new output using the teacher model and the error Args: args (tuple): The args of the function kwargs (dict): The kwargs of the function function_description (FunctionDescription): The function description choice: The output that failed type validation, type is arbitrary validator (Validator): The validator object Returns: choice (str): The choice that was generated by the language model choice_parsed: The parsed choice, type is arbitrary valid (bool): Whether the output was correctly repaired was valid """ # get the teacher models teacher_models = self.function_modeler.get_models(function_description)[1] valid = False retry_index = 5 f = str(function_description.__dict__.__repr__() + "\n") error = f"Output type was not valid. 
Expected an valid object of type {function_description.output_type_hint}, got '{choice}'" # instantiate the failed outputs list failed_outputs_list = [(choice, error)] while retry_index > 0 and not valid: # get the alignments aligns = self.function_modeler.get_symbolic_alignments(function_description.__hash__(), max=5) # Generate the reparied LLM output choice = self.repair_generate(args, kwargs, f, failed_outputs_list, aligns, teacher_models, generation_parameters) if not choice: # if no choice then the input was too long for the model # no specific error but the retry index goes down retry_index -= 1 continue # start parsing the object try: # json load choice_parsed = json.loads(choice) except: # if it fails, it's not a json object, try eval try: choice_parsed = eval(choice) except: choice_parsed = choice valid = validator.check_type(choice_parsed, function_description.output_type_hint) if not valid: # if it's not valid, add it to the failed outputs list error = f"Output type was not valid. Expected an object of type {function_description.output_type_hint}, got '{choice}'" failed_outputs_list.append((choice, error)) retry_index -= 1 if valid: logging.info(f"Successfully repaired output.") return choice, choice_parsed, valid <fim_middle>
null
IF
complete_current_header_empty_completion
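The language-model-manager row above implements a generate-validate-repair loop: generate an output, parse it, type-check it, and on failure feed the accumulated (output, error) pairs back into a repair prompt for a bounded number of retries. A condensed, self-contained sketch of that control flow; generate_fn and is_valid are hypothetical stand-ins for the LLM call and the type check:

import json
from typing import Any, Callable, List, Tuple

def generate_with_repair(generate_fn: Callable[[List[Tuple[str, str]]], str],
                         is_valid: Callable[[Any], bool],
                         max_retries: int = 5) -> Any:
    """Retry generation, feeding failed outputs and errors back into the prompt."""
    failed: List[Tuple[str, str]] = []   # (output, error) pairs, as in repair_output
    raw = generate_fn(failed)
    for _ in range(max_retries):
        try:
            parsed = json.loads(raw)
        except json.JSONDecodeError:
            parsed = raw                 # the real code also falls back to eval() here
        if is_valid(parsed):
            return parsed
        failed.append((raw, "output failed type validation"))
        raw = generate_fn(failed)
    raise TypeError("could not repair output within retry budget")

# Usage with trivial stand-ins: first output fails, second parses and validates.
outputs = iter(['oops', '{"a": 1}'])
result = generate_with_repair(lambda failed: next(outputs),
                              lambda p: isinstance(p, dict))
print(result)   # {'a': 1}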
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or<fim_suffix> target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
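Each row above is a fill-in-the-middle example over the same check_type routine, whose core mechanism is splitting a typing annotation into its runtime origin and its subscripted arguments, then recursing on container items. A minimal standalone sketch of that mechanism follows; shallow_check and the sample annotations are illustrative names written for this note, not part of the dataset rows.

from typing import Any, Dict, List, Union, get_args, get_origin

def shallow_check(value: Any, annotation: Any) -> bool:
    """Recursively check a value against a typing annotation."""
    if annotation is Any:
        return True
    if annotation is None:
        return value is None
    origin = get_origin(annotation) or annotation  # e.g. List[int] -> list
    args = get_args(annotation)                    # e.g. List[int] -> (int,)
    if origin is Union:                            # Optional[X] is Union[X, None]
        return any(shallow_check(value, arm) for arm in args)
    if origin in (list, set):
        item_type = args[0] if args else Any
        return isinstance(value, origin) and all(
            shallow_check(item, item_type) for item in value)
    if origin is dict:
        key_type, val_type = args if len(args) == 2 else (Any, Any)
        return isinstance(value, dict) and all(
            shallow_check(k, key_type) and shallow_check(v, val_type)
            for k, v in value.items())
    return isinstance(value, origin)

assert shallow_check([1, 2, 3], List[int])
assert not shallow_check([1, "two"], List[int])
assert shallow_check({"a": 1}, Dict[str, int])

Unlike the full implementation captured in the rows, this sketch skips tuples, pydantic models, dataclasses, and the collections.abc fallbacks; it only demonstrates the origin/args decomposition the larger routine is built on.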
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(or<fim_suffix>igin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
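The suffix side of these rows also carries the instantiate routine, whose dataclass branch filters a JSON-style dict down to the declared fields and recurses on each annotated field type. The sketch below reproduces just that idea under stated assumptions: Point, Polygon, and from_json_dict are hypothetical names invented for this illustration, not taken from the dataset.

import dataclasses
import json
from dataclasses import dataclass
from typing import List, get_type_hints

@dataclass
class Point:
    x: int
    y: int

@dataclass
class Polygon:
    name: str
    points: List[Point]

def from_json_dict(data: dict, cls):
    """Rebuild a (possibly nested) dataclass from a JSON-style dict."""
    hints = get_type_hints(cls)
    kwargs = {}
    for field in dataclasses.fields(cls):
        raw = data[field.name]
        hint = hints[field.name]
        if dataclasses.is_dataclass(hint):
            # Nested dataclass: recurse on the sub-dict.
            kwargs[field.name] = from_json_dict(raw, hint)
        elif getattr(hint, "__origin__", None) is list and dataclasses.is_dataclass(hint.__args__[0]):
            # List of dataclasses: rebuild each element.
            kwargs[field.name] = [from_json_dict(item, hint.__args__[0]) for item in raw]
        else:
            kwargs[field.name] = raw
    return cls(**kwargs)

payload = json.loads('{"name": "tri", "points": [{"x": 0, "y": 0}, {"x": 1, "y": 2}]}')
poly = from_json_dict(payload, Polygon)
assert poly.points[1] == Point(1, 2)

The design point the rows encode is the same: JSON can only deliver dicts and lists, so typed containers have to be rebuilt bottom-up from the annotations.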
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin <fim_suffix>== list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
IF
complete_current_header_empty_completion
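One detail visible verbatim in every row is the pydantic version shim inside instantiate: model_validate is tried first, and parse_obj is used as the fallback when it raises AttributeError. Below is a self-contained sketch of that pattern; User and build_model are illustrative names for this note, not from the dataset.

from pydantic import BaseModel

class User(BaseModel):
    name: str
    age: int

def build_model(model_cls, data: dict):
    try:
        return model_cls.model_validate(data)  # pydantic >= 2 entry point
    except AttributeError:
        return model_cls.parse_obj(data)       # pydantic < 2 fallback

user = build_model(User, {"name": "Ada", "age": 36})
assert user.age == 36

Catching AttributeError rather than pinning a version keeps a single code path working across both major pydantic releases, which is the trade-off the dataset's source file makes.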
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if<fim_suffix> origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
targets: null
block_type: IF
scenario: complete_current_header_empty_completion
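Note: the Validator rows above all exercise the same dispatch idiom — resolve an annotation with typing.get_origin/get_args, then branch on the origin. A minimal, runnable sketch of that idiom (deliberately simplified; not the library's implementation):

from typing import Any, List, Union, get_args, get_origin

def check_type(value: Any, annotation: Any) -> bool:
    if annotation is Any:
        return True  # bare Any accepts everything
    origin = get_origin(annotation) or annotation
    args = get_args(annotation)
    if origin is Union:
        # Optional[X] is Union[X, None]; any matching member suffices
        return any(check_type(value, arg) for arg in args)
    if origin is list:
        item_type = args[0] if args else Any
        return isinstance(value, list) and all(check_type(v, item_type) for v in value)
    if origin is dict:
        key_t, val_t = args if len(args) == 2 else (Any, Any)
        return isinstance(value, dict) and all(
            check_type(k, key_t) and check_type(v, val_t) for k, v in value.items())
    return isinstance(value, origin)

assert check_type([1, 2], List[int])
assert not check_type([1, "a"], List[int])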
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(dat<fim_suffix>a) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
targets: null
block_type: STATEMENT
scenario: complete_current_header_empty_completion
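The <fim_prefix>/<fim_suffix>/<fim_middle> sentinels in each inputs cell mark a fill-in-the-middle task: everything up to <fim_suffix> is the code before the cursor, the span after it is the code following the cursor, and the model is expected to produce the missing middle. A hypothetical helper showing how such a prompt is framed (the sentinel strings come from the rows above; the function name and tooling are illustrative only, not part of the dataset pipeline):

def build_fim_prompt(filename: str, prefix: str, suffix: str) -> str:
    # The completion model generates the span that belongs at <fim_middle>.
    return f"<filename>{filename}<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>"

prompt = build_fim_prompt(
    "tanuki_py/src/tanuki/validator.py",
    "def is_base_type(self, _type: Any) -> bool:\n    return _type in {int, ",
    "str, bool, None}",
)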
<filename>tanuki_py/src/tanuki/persistence/filter/filesystem_bloom.py<fim_prefix>import os

from bitarray._bitarray import bitarray

from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence


class BloomFilterFileSystemDriver(IBloomFilterPersistence):
    """
    This is a Filesystem implementation of a Bloom Filter persistence layer.
    """
    def __init__(self, log_directory: str):
        self.log_directory = log_directory

    def save(self, bit_array: bitarray) -> None:
        """
        Write a bloom filter array of bits to the local filesystem.
        :param bit_array: A bloom filter which tracks unique function invocations
        """
        bloom_filter_path = os.path.join(self.log_directory, 'bloom_filter_state.bin')

        # Append 0 bits to make the length a multiple of 8
        while len(bit_array) % 8 != 0:
            bit_array.append(0)

        with open(bloom_filter_path, 'wb') as f:
            f.write(bit_array.tobytes())

    def load(self) -> bitarray:
        """
        Load a bloom filter from the local filesystem.
        :return: A bloom filter object containing the state of unique function invocations
        """
        bloom_filter_path =<fim_suffix> os.path.join(self.log_directory, 'bloom_filter_state.bin')
        with open(bloom_filter_path, 'rb') as f:
            bit_array = bitarray()
            bit_array.frombytes(f.read())

        while len(bit_array) % 8 != 0:
            bit_array.append(0)

        return bit_array<fim_middle>
targets: null
block_type: STATEMENT
scenario: complete_current_header_empty_completion
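The BloomFilterFileSystemDriver row above pads the bit array to a byte boundary before serialising it. A small round-trip check of that behaviour (requires the bitarray package; this is a usage sketch, not dataset tooling):

from bitarray import bitarray

bits = bitarray("1011")          # 4 bits: not byte-aligned
while len(bits) % 8 != 0:
    bits.append(0)               # pad to a multiple of 8, as save() does
raw = bits.tobytes()

restored = bitarray()
restored.frombytes(raw)          # frombytes always yields a whole number of bytes
assert restored[:4] == bitarray("1011")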
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target<fim_suffix>_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
targets: null
block_type: STATEMENT
scenario: complete_current_header_empty_completion
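instantiate() in the row above resolves Union targets by trying each member type in order and keeping the first that succeeds. A stripped-down sketch of that fallback (direct constructor calls stand in for the recursive instantiate call):

from typing import Union, get_args

def instantiate_union(data, union_type):
    for arg in get_args(union_type):
        if arg is type(None):
            if data is None:
                return None
            continue
        try:
            return arg(data)
        except (TypeError, ValueError):
            continue
    raise TypeError(f"Failed to instantiate {union_type} from {data!r}")

assert instantiate_union("3", Union[int, str]) == 3
assert instantiate_union(None, Union[int, None]) is None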
<filename>tanuki_py/src/tanuki/models/function_description.py<fim_prefix>import hashlib
from dataclasses import dataclass
from typing import Dict, Optional, Literal

from tanuki.models.function_type import FunctionType
from tanuki.utils import json_dumps


@dataclass(frozen=True)
class FunctionDescription:
    name: str
    docstring: str
    input_type_hints: Dict[str, type]
    input_class_definitions: Dict[str, str]
    output_type_hint: type
    output_class_definition: Optional[str]
    type: FunctionType = FunctionType.SYMBOLIC

    def __hash__(self, purpose: str = "general"):
        if purpose == "general":
            json_encoded = json_dumps(self).encode('utf-8')
            h = hashlib.md5(json_encoded).hexdigest()
            retur<fim_suffix>n str(h)
        if purpose == "finetune":
            json_encoded = json_dumps(self).encode('utf-8')
            h = hashlib.shake_256(json_encoded).hexdigest(8)
            return str(h)<fim_middle>
targets: null
block_type: STATEMENT
scenario: complete_current_header_empty_completion
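FunctionDescription above derives two identities from one serialised payload: an md5 digest for general lookups and a shorter shake_256 digest for embedding into finetune names. A minimal sketch of that idiom (json_dumps in the row is tanuki's own serialiser; plain json.dumps with sort_keys is assumed here):

import hashlib
import json

def describe_hash(payload: dict, purpose: str = "general") -> str:
    encoded = json.dumps(payload, sort_keys=True).encode("utf-8")
    if purpose == "finetune":
        # shake_256 takes the digest length: 8 bytes -> 16 hex characters
        return hashlib.shake_256(encoded).hexdigest(8)
    return hashlib.md5(encoded).hexdigest()

assert len(describe_hash({"name": "f", "docstring": ""})) == 32  # md5 hex digest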
<filename>tanuki_py/src/tanuki/function_modeler.py<fim_prefix>import ast import datetime import io import json from typing import List, Tuple, Dict, Union import logging from tanuki.constants import EXAMPLE_ELEMENT_LIMIT, PATCHES, SYMBOLIC_ALIGNMENTS, POSITIVE_EMBEDDABLE_ALIGNMENTS, \ NEGATIVE_EMBEDDABLE_ALIGNMENTS, OPENAI_PROVIDER from tanuki.models.function_type import FunctionType from tanuki.language_models.llm_configs import DEFAULT_TEACHER_MODELS, DEFAULT_EMBEDDING_MODELS, DEFAULT_STUDENT_MODELS from tanuki.language_models.llm_configs.abc_base_config import BaseModelConfig from tanuki.language_models.llm_finetune_api_abc import LLM_Finetune_API from tanuki.models.finetune_job import FinetuneJob from tanuki.models.function_description import FunctionDescription from tanuki.models.function_example import FunctionExample from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.utils import approximate_token_count, prepare_object_for_saving, encode_int, decode_int import copy from tanuki.models.function_config import FunctionConfig from tanuki.models.api_manager import APIManager class FunctionModeler(object): """ This class manages the registered function models and their datasets comprised of symbolic and embeddable alignments, and symbolic and embeddable patches """ def __init__(self, data_worker: DatasetWorker, api_provider: APIManager, environment_id=0, ) -> None: self.<fim_suffix>function_configs = {} self.data_worker = data_worker self.distillation_token_limit = 3000 # the token limit for finetuning self.symbolic_align_buffer = {} self.embeddable_align_buffer = {} self._get_datasets() self.environment_id = environment_id self.check_finetune_blacklist = [] self.execute_finetune_blacklist = [] self.store_data_blacklist = [] self.api_provider = api_provider self.teacher_models_override = {} self.student_model_override = {} self.startup_logging_checker = {} def _get_dataset_info(self, dataset_type, func_hash, type="length"): """ Get the dataset size for a function hash """ return self.data_worker.load_dataset(dataset_type, func_hash, return_type=type) def _configure_function_models(self, teacher_models: List[Union[str, BaseModelConfig]], student_model: str, func_hash: str, task_type: str): """ Configure the function models """ if teacher_models: self._configure_teacher_models(teacher_models, func_hash, task_type) if student_model: self._configure_student_model(student_model, func_hash, task_type) if teacher_models and not student_model: for model_config in self.teacher_models_override[func_hash]: # ban all non-openai models from finetuning if teacher is not openai and student is not specified because it doesnt make sense if model_config.provider != OPENAI_PROVIDER and func_hash not in self.check_finetune_blacklist: self.check_finetune_blacklist.append(func_hash) if model_config.provider != OPENAI_PROVIDER and func_hash not in self.execute_finetune_blacklist: self.execute_finetune_blacklist.append(func_hash) def _configure_teacher_models(self, teacher_models: List[Union[str, BaseModelConfig]], func_hash: str, task_type: str): """ Add custom teacher models to the function config First this is added to the teacher_models_override dict, which is used to override the teacher models Args: teacher_models: A list of teacher models to use for the function hash func_hash: The function hash to add the teacher models to """ if func_hash not in self.teacher_models_override: self.teacher_models_override[func_hash] = [] if task_type == FunctionType.EMBEDDABLE: preconfigured_models = 
DEFAULT_EMBEDDING_MODELS elif task_type == FunctionType.SYMBOLIC: preconfigured_models = DEFAULT_TEACHER_MODELS for model in teacher_models: if isinstance(model, str): if model not in preconfigured_models: raise Exception(f"Teacher model {model} not supported by default. Please include it in the list in extended config format") model_config = preconfigured_models[model] elif isinstance(model, BaseModelConfig): model_config = model self.teacher_models_override[func_hash].append(model_config) def _configure_student_model(self, student_model: str, func_hash: str, task_type: str): """ Add custom student models to the function config First this is added to the teacher_models_override dict, which is used to override the teacher models Args: teacher_models: A list of teacher models to use for the function hash func_hash: The function hash to add the teacher models to """ if task_type == FunctionType.EMBEDDABLE: logging.info("Embeddable function type does not support student models") preconfigured_models = DEFAULT_STUDENT_MODELS if student_model not in preconfigured_models: raise Exception(f"Student model {student_model} is currently not supported.") model_config = preconfigured_models[student_model] self.student_model_override[func_hash] = model_config def _get_datasets(self): """ Get the existing datasets from the data worker """ self.dataset_sizes = self.data_worker.load_existing_datasets() def save_embeddable_align_statements(self, function_hash: str, args, kwargs, positive_pairs: List[Tuple[List, Dict]], negative_pairs: List[Tuple[List, Dict]]): """ Save the contrastive align statements for the embeddable function. Do not save if the function hash is in the store data blacklist Args: function_hash: A unique hash for the function args: The arguments of the function kwargs: The keyword arguments of the function positive_pairs: A list of the other function invocations that are should have equivalent embeddings negative_pairs: A list of the other function invocations that are should have different embeddings """ # prepare args and kwargs for saving copy_args = copy.deepcopy(args) copy_kwargs = copy.deepcopy(kwargs) parsed_args = prepare_object_for_saving(copy_args) parsed_kwargs = prepare_object_for_saving(copy_kwargs) # prepare positive pairs for saving parsed_positive_pairs = [] for pair in positive_pairs: copy_pair = copy.deepcopy(pair) parsed_pair = prepare_object_for_saving(copy_pair) parsed_positive_pairs.append(parsed_pair) # prepare negative pairs for saving parsed_negative_pairs = [] for pair in negative_pairs: copy_pair = copy.deepcopy(pair) parsed_pair = prepare_object_for_saving(copy_pair) parsed_negative_pairs.append(parsed_pair) # save the contrastive pairs for pair in parsed_positive_pairs: self._save_contrastive_alignment_pair(function_hash, parsed_args, parsed_kwargs, pair, positive=True) for pair in parsed_negative_pairs: self._save_contrastive_alignment_pair(function_hash, parsed_args, parsed_kwargs, pair, positive=False) def _save_contrastive_alignment_pair(self, function_hash: str, args, kwargs, pair, positive=True): """ Save a contrastive pair """ example = FunctionExample(args, kwargs, pair) if function_hash not in self.store_data_blacklist: successfully_saved, new_datapoint = self.data_worker.log_embeddable_align(function_hash, example, positive) else: successfully_saved = False new_datapoint = True if successfully_saved: if positive: if function_hash in self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS]: self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS][function_hash] += 
1 else: self.dataset_sizes[POSITIVE_EMBEDDABLE_ALIGNMENTS][function_hash] = 1 if not positive: if function_hash in self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS]: self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS][function_hash] += 1 else: self.dataset_sizes[NEGATIVE_EMBEDDABLE_ALIGNMENTS][function_hash] = 1 if new_datapoint: # update align buffer if function_hash not in self.embeddable_align_buffer: self.embeddable_align_buffer[function_hash] = bytearray() self.embeddable_align_buffer[function_hash].extend(str(example.__dict__).encode('utf-8') + b'\r\n') def save_symbolic_align_statements(self, function_hash, args, kwargs, output): """ Save the align statements and add to the align buffer Do not save if the function hash is in the store data blacklist Then just add the datapoints to the align buffer """ # prepare output for saving and later parsing # make a deepcopy of the output to avoid changing the original object copy_output = copy.deepcopy(output) parsed_output = prepare_object_for_saving(copy_output) # prepare args and kwargs for saving copy_args = copy.deepcopy(args) copy_kwargs = copy.deepcopy(kwargs) parsed_args = prepare_object_for_saving(copy_args) parsed_kwargs = prepare_object_for_saving(copy_kwargs) example = FunctionExample(parsed_args, parsed_kwargs, parsed_output) if function_hash not in self.store_data_blacklist: successfully_saved, new_datapoint = self.data_worker.log_symbolic_align(function_hash, example) else: successfully_saved = False new_datapoint = True if successfully_saved: if function_hash in self.dataset_sizes[SYMBOLIC_ALIGNMENTS]: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] += 1 else: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = 1 if new_datapoint: # update align buffer if function_hash not in self.symbolic_align_buffer: self.symbolic_align_buffer[function_hash] = bytearray() self.symbolic_align_buffer[function_hash].extend(str(example.__dict__).encode('utf-8') + b'\r\n') def save_symbolic_datapoint(self, func_hash, example): """ Save datapoint to the training data """ written_datapoints = self.data_worker.log_symbolic_patch(func_hash, example) for func_hash, datapoints in written_datapoints.items(): if func_hash in self.dataset_sizes[PATCHES]: # if the dataset size is -1, it means we havent read in the dataset size yet if self.dataset_sizes[PATCHES][func_hash] == -1: self.dataset_sizes[PATCHES][func_hash] = self._get_dataset_info(PATCHES, func_hash, type="length") else: self.dataset_sizes[PATCHES][func_hash] += datapoints else: self.dataset_sizes[PATCHES][func_hash] = datapoints return len(written_datapoints) > 0 def get_symbolic_alignments(self, func_hash, max=20): """ Get all symbolic aligns for a function hash """ if func_hash not in self.symbolic_align_buffer: return [] buffer = self.symbolic_align_buffer[func_hash] return self._get_examples_from_alignment_buffer(buffer, max) def get_embeddable_alignments(self, func_hash, max=20): """ Get all embeddable aligns for a function hash """ if func_hash not in self.embeddable_align_buffer: return [] buffer = self.embeddable_align_buffer[func_hash] return self._get_examples_from_alignment_buffer(buffer, max) def _get_examples_from_alignment_buffer(self, buffer, max=20): """ Get examples from a buffer """ split_buffer = bytes(buffer).split(b"\n") # byte array of stringed python dicts into dict objects example_set = set() for example in split_buffer: if example == b"": continue example_set.add(example) # easy and straightforward way to get nr of words (not perfect but doesnt need to be) 
# Can do the proper way of tokenizing later, it might be slower and we dont need 100% accuracy example_element_limit = EXAMPLE_ELEMENT_LIMIT examples = [] for example_bytes in split_buffer: if example_bytes in example_set: nr_of_elements = approximate_token_count(example_bytes) example_element_limit -= nr_of_elements if example_element_limit < 0: break example = example_bytes.decode('utf-8') # json load the example try: example = json.loads(example) except: example = ast.literal_eval(example) examples.append(example) example_set.remove(example_bytes) return list(examples)[:max] def load_symbolic_align_statements(self, function_hash): """ Load all align statements First check the data storage blacklist, if the func hash is in the blacklist, then set the dataset size to 0 and the align buffer to empty bytearray """ if function_hash in self.store_data_blacklist: self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = 0 self.symbolic_align_buffer[function_hash] = bytearray() elif function_hash not in self.symbolic_align_buffer: dataset_size, align_dataset = self._get_dataset_info(SYMBOLIC_ALIGNMENTS, function_hash, type="both") if align_dataset: self.symbolic_align_buffer[function_hash] = bytearray(align_dataset) self.dataset_sizes[SYMBOLIC_ALIGNMENTS][function_hash] = dataset_size def postprocess_symbolic_datapoint(self, func_hash, function_description, example, repaired=True): """ Postprocess the datapoint First check if the datapoint should be added to the training data Add the datapoint if it should be added Then check if the function should be finetuned and execute finetuning if it should """ try: if func_hash not in self.store_data_blacklist: added = self.save_symbolic_datapoint(func_hash, example) if added: self._update_datapoint_config(repaired, func_hash) except Exception as e: print(e) print("Could not add datapoint to training data") if func_hash not in self.execute_finetune_blacklist: self.check_for_finetuning(function_description, func_hash) def load_function_config(self, func_hash, function_description): """ Load the config file for a function hash """ config, default = self.data_worker.load_function_config(func_hash) if func_hash in self.student_model_override and config.distilled_model.model_name == "": config.distilled_model = self.student_model_override[func_hash] if default and func_hash not in self.check_finetune_blacklist: finetuned, finetune_config = self._check_for_finetunes(function_description, config.distilled_model) if finetuned: config = finetune_config # update teachers if not default if func_hash in self.teacher_models_override: config.teacher_models = self.teacher_models_override[func_hash] self.function_configs[func_hash] = config return config def _check_for_finetunes(self, function_description: FunctionDescription, model_config : BaseModelConfig) -> Tuple[bool, Dict]: # hash the function_hash into 16 characters (to embed it into the name of OpenAI finetunes, for later retrieval) logging.info(f"Checking for finetunes for {function_description.name} using {model_config.provider}") finetune_hash = function_description.__hash__(purpose="finetune") + encode_int(self.environment_id) # List 10 fine-tuning jobs finetunes: List[FinetuneJob] = self.api_provider[model_config.provider].list_finetuned(model_config, limit=1000) # Check if the function_hash is in the fine-tuning jobs # the finetunes are in chronological order starting from newest # So this gets the latest finetune for finetune in finetunes: # check if the finetune hash is in the fine-tuned model name if 
finetune.status == "succeeded" and finetune_hash in finetune.fine_tuned_model.model_name: try: config = self._construct_config_from_finetune(finetune_hash, finetune) # save the config self.data_worker.update_function_config(function_description.__hash__(), config) logging.info(f"Found finetuned model for {function_description.name} [{config.distilled_model.model_name}]") return True, config except: logging.info(f"Found finetuned model for {function_description.name} [{finetune.fine_tuned_model.model_name}] but could not load it") return False, {} logging.info(f"No finetuned model found for {function_description.name}") return False, {} def _construct_config_from_finetune(self, finetune_hash: str, finetune: FinetuneJob): """ Construct a valid function config from a finetune job Args: finetune_hash: The hash of the function finetune: The finetune job Returns: config: The function config """ model = finetune.fine_tuned_model # get the ending location of finetune hash in the model name finetune_hash_end = model.model_name.find(finetune_hash) + len(finetune_hash) # get the next character after the finetune hash next_char = model.model_name[finetune_hash_end] # get the number of training runs nr_of_training_runs = decode_int(next_char) + 1 nr_of_training_points = (2 ** (nr_of_training_runs - 1)) * 200 config = { "distilled_model": model, "current_model_stats": { "trained_on_datapoints": nr_of_training_points, "running_faults": []}, "last_training_run": {"trained_on_datapoints": nr_of_training_points}, "current_training_run": {}, "teacher_models": [], # default teacher models, will be overwritten if needed "nr_of_training_runs": nr_of_training_runs} config = FunctionConfig().load_from_dict(config) return config def get_models(self, function_description): """ Return the current model from the config file """ func_hash = function_description.__hash__() if func_hash in self.function_configs: func_config = self.function_configs[func_hash] else: func_config = self.load_function_config(func_hash, function_description) return func_config.distilled_model, func_config.teacher_models def _update_datapoint_config(self, repaired, func_hash): """ Update the config to reflect the new datapoint in the training data First adds 1 to the current datapoints Then updates running faults depending if priority is True or not and takes last 100 Then checks the revert condition, i.e if last 10 datapoints are 50% faulty Finally updates the config file Args: priority (bool): whether the datapoint was fixed by the teacher model/should be added to the training data """ try: if repaired: self.function_configs[func_hash].current_model_stats["running_faults"].append(1) else: self.function_configs[func_hash].current_model_stats["running_faults"].append(0) # take the last 100 datapoints self.function_configs[func_hash].current_model_stats["running_faults"] = \ self.function_configs[func_hash].current_model_stats["running_faults"][-100:] # check if the last 10 datapoints are 50% faulty, this is the switch condition if sum(self.function_configs[func_hash].current_model_stats["running_faults"][-10:]) / 10 > 0.5: self.function_configs[func_hash].distilled_model.model_name = "" self.function_configs[func_hash].current_model_stats["trained_on_datapoints"] = 0 self.function_configs[func_hash].current_model_stats["running_faults"] = [] self._update_config_file(func_hash) except Exception as e: print(e) print("Could not update config file") pass def _update_config_file(self, func_hash): self.data_worker.update_function_config(func_hash, 
self.function_configs[func_hash]) def check_for_finetuning(self, function_description, func_hash): """ Check the finetuning status. If a job is already running, poll its status; otherwise check the finetuning condition and start finetuning if it is met """ try: # check if already finetuning if "job_id" in self.function_configs[func_hash].current_training_run: # check for job status self._check_finetuning_status(func_hash, function_description) else: # check for finetuning condition if self._check_finetuning_condition(func_hash, function_description): self._execute_finetuning(function_description, func_hash) except Exception as e: print(e) print("Error checking for finetuning") def _check_finetuning_condition(self, func_hash, function_description): """ Check if the finetuning condition is met. Currently the condition depends on the number of symbolic datapoints gathered since the last finetuning """ if func_hash not in self.function_configs: return False training_threshold = (2 ** self.function_configs[func_hash].nr_of_training_runs) * 200 align_dataset_size = self.dataset_sizes[SYMBOLIC_ALIGNMENTS][func_hash] if func_hash in self.dataset_sizes[ SYMBOLIC_ALIGNMENTS] else 0 patch_dataset_size = self.dataset_sizes[PATCHES][func_hash] if func_hash in self.dataset_sizes[PATCHES] else 0 if patch_dataset_size == -1: # if we haven't read in the patch dataset size yet, read it in patch_dataset_size = self._get_dataset_info(PATCHES, func_hash, type="length") self.dataset_sizes[PATCHES][func_hash] = patch_dataset_size if func_hash not in self.startup_logging_checker: logging.info(f"Function {function_description.name} [{align_dataset_size} aligns | {patch_dataset_size} runs] will be finetuned from"\ f" {self.function_configs[func_hash].teacher_models[0].model_name} using {self.function_configs[func_hash].distilled_model.provider} in "\ f"{training_threshold-(patch_dataset_size + align_dataset_size)} runs") self.startup_logging_checker[func_hash] = True return (patch_dataset_size + align_dataset_size) > training_threshold def _execute_finetuning(self, function_description, func_hash): """ Execute the finetuning. First create the OpenAI-compatible JSONL dataset and upload it, then submit the finetuning job, and finally update the config file to register the new run as current """ # get function description function_string = str(function_description.__dict__.__repr__() + "\n") # get the align dataset align_dataset = self._get_dataset_info(SYMBOLIC_ALIGNMENTS, func_hash, type="dataset") if not align_dataset: align_dataset = "" else: align_dataset = align_dataset.decode('utf-8') # get the patch dataset patch_dataset = self._get_dataset_info(PATCHES, func_hash, type="dataset") if not patch_dataset: patch_dataset = "" else: patch_dataset = patch_dataset.decode('utf-8') if align_dataset == "" and patch_dataset == "": return dataset = align_dataset + patch_dataset # str.replace returns a new string, so the result must be assigned dataset = dataset.replace("\\n", "[SEP_TOKEN]") dataset = dataset.split("\n") dataset = [x.replace("[SEP_TOKEN]", "\\n") for x in dataset if x != ""] # read in the dataset file dataset = [ast.literal_eval(x) for x in dataset] # create the OpenAI dataset instruction = "You are given below a function description and input data. The function description of what the function must carry out can be found in the Function section, with input and output type hints. The input data can be found in Input section. 
Using the function description, apply the function to the Input and return a valid output type, that is acceptable by the output_class_definition and output_class_hint. Return None if you can't apply the function to the input or if the output is optional and the correct output is None.\nINCREDIBLY IMPORTANT: Only output a JSON-compatible string in the correct response format." finetuning_dataset = [{"messages": [ { "role": "system", "content": f"You are a skillful and accurate language model, who applies a described function on input data. Make sure the function is applied accurately and correctly and the outputs follow the output type hints and are valid outputs given the output types." }, {"role": "user", "content": f"{instruction}\nFunction: {function_string}---\nInputs:\nArgs: {x['args']}\nKwargs: {x['kwargs']}\nOutput:"}, {"role": "assistant", "content": str(x['output']) if x['output'] is not None else "None"}]} for x in dataset] # Create an in-memory text stream temp_file = io.BytesIO() # Write data to the stream for idx, item in enumerate(finetuning_dataset): temp_file.write(json.dumps(item).encode('utf-8')) if idx != len(finetuning_dataset) - 1: temp_file.write("\n".encode('utf-8')) # Reset the stream position to the beginning temp_file.seek(0) # create the finetune hash finetune_hash = function_description.__hash__(purpose="finetune") nr_of_training_runs = self.function_configs[func_hash].nr_of_training_runs finetune_hash += encode_int(self.environment_id) finetune_hash += encode_int(nr_of_training_runs) # here can be sure that datasets were read in as that is checked in the finetune_check align_dataset_size = self.dataset_sizes[SYMBOLIC_ALIGNMENTS][func_hash] if func_hash in self.dataset_sizes[ SYMBOLIC_ALIGNMENTS] else 0 patch_dataset_size = self.dataset_sizes[PATCHES][func_hash] if func_hash in self.dataset_sizes[PATCHES] else 0 total_dataset_size = align_dataset_size + patch_dataset_size # Use the stream as a file try: finetune_provider = self.function_configs[func_hash].distilled_model.provider logging.info(f"Starting finetuning for {function_description.name} using {finetune_provider} for {self.function_configs[func_hash].distilled_model.base_model_for_sft}") finetuning_response: FinetuneJob = self.api_provider[finetune_provider].finetune(file=temp_file, suffix=finetune_hash, model_config = self.function_configs[func_hash].distilled_model,) except Exception as e: logging.info(f"Could not start finetuning for {function_description.name} using {finetune_provider}. 
Error: {e}") return self.function_configs[func_hash].current_training_run = {"job_id": finetuning_response.id, "trained_on_datapoints": total_dataset_size, "last_checked": datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S")} # update the config json file try: self._update_config_file(func_hash) except Exception as e: print(e) print("Could not update config file to register a finetuning run") def _check_finetuning_status(self, func_hash, function_description): """ Check the status of the current finetuning job If the job is finished, update the config file to reflect the new model """ job_id = self.function_configs[func_hash].current_training_run["job_id"] last_checked = self.function_configs[func_hash].current_training_run["last_checked"] # check if last checked was more than 30 mins ago if (datetime.datetime.now() - datetime.datetime.strptime(last_checked, "%Y-%m-%d %H:%M:%S")).total_seconds() > 1800: finetune_provider = self.function_configs[func_hash].distilled_model.provider response = self.api_provider[finetune_provider].get_finetuned(job_id, model_config = self.function_configs[func_hash].distilled_model) self.function_configs[func_hash].current_training_run["last_checked"] = datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S") if response.status == "succeeded" or response.status == "failed": self._update_finetune_config(response, func_hash, function_description) else: self._update_config_file(func_hash) def _update_finetune_config(self, response: FinetuneJob, func_hash, function_description): """ Update the config file to reflect the new model and switch the current model to the finetuned model """ self.function_configs[func_hash].update_with_finetuned_response(response) logging.info(f"Finetuning for {function_description.name} using {self.function_configs[func_hash].distilled_model.provider} finished with status: {response.status}."\ f" The id of the finetuned model is {response.fine_tuned_model.model_name}") try: self._update_config_file(func_hash) except Exception as e: logging.info(f"Could not update the function configuration file with the finetuned model for {function_description.name}. Error: {e}") pass <fim_middle>
null
STATEMENT
complete_current_header_empty_completion
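The record above exercises Tanuki's finetune trigger: _check_finetuning_condition fires once align plus patch datapoints exceed (2 ** nr_of_training_runs) * 200, and _construct_config_from_finetune inverts the same doubling schedule to estimate how many datapoints a recovered model was trained on. A minimal sketch of that arithmetic, with illustrative helper names that are not part of the library:

def training_threshold(nr_of_training_runs: int) -> int:
    # each completed run doubles the datapoints required before the next run
    return (2 ** nr_of_training_runs) * 200

def should_finetune(align_size: int, patch_size: int, runs: int) -> bool:
    return (align_size + patch_size) > training_threshold(runs)

def trained_on_datapoints(runs: int) -> int:
    # inverse used when reconstructing a config from a finetune job name
    return (2 ** (runs - 1)) * 200

assert training_threshold(0) == 200      # first finetune after ~200 datapoints
assert should_finetune(150, 100, 0)      # 250 > 200 -> trigger a run
assert trained_on_datapoints(2) == 400   # a model from run 2 saw ~400 datapoints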
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(<fim_suffix>instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception (Any is not usable with isinstance, so skip it) _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) # keep a tuple, since item_type[0] is used below instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
STATEMENT
complete_current_header_empty_completion
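This validator record hinges on typing.get_origin and typing.get_args: every parameterized annotation is split into its container and element types before check_type recurses. A small standalone sketch of that mechanism; the commented calls assume the Validator class shown above is importable, which is an assumption of this example:

from typing import Dict, List, get_args, get_origin

# get_origin/get_args decompose an annotation into container and parameters
assert get_origin(Dict[str, List[int]]) is dict
assert get_args(Dict[str, List[int]]) == (str, List[int])

# v = Validator()
# v.check_type({"a": [1, 2]}, Dict[str, List[int]])   # True
# v.check_type({"a": ["x"]}, Dict[str, List[int]])    # False: "x" is not an int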
<filename>tanuki_py/src/tanuki/trackers/abc_buffered_logger.py<fim_prefix>import json from abc import abstractmethod from typing import Dict, Any, Literal from tanuki.bloom_filter import BloomFilter from tanuki.constants import EXPECTED_ITEMS, FALSE_POSITIVE_RATE, ALIGN_FILE_EXTENSION, \ POSITIVE_FILE_EXTENSION, NEGATIVE_FILE_EXTENSION, PATCH_FILE_EXTENSION from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.models.function_config import FunctionConfig # PATCH_FILE_EXTENSION_TYPE = Literal[".patches"] # ALIGN_FILE_EXTENSION_TYPE = Literal[".alignments"] # POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".positive_embedding"] # NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".negative_embedding"] # # PATCH_FILE_EXTENSION: PATCH_FILE_EXTENSION_TYPE = ".patches" # ALIGN_FILE_EXTENSION: ALIGN_FILE_EXTENSION_TYPE = ".alignments" # POSITIVE_EMBEDDING_FILE_EXTENSION: POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_positives" # NEGATIVE_EMBEDDING_FILE_EXTENSION: NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_negatives" # # EXPECTED_ITEMS = 10000 # FALSE_POSITIVE_RATE = 0.01 # LIB_NAME = "tanuki" # ENVVAR = "TANUKI_LOG_DIR" class ABCBufferedLogger(DatasetWorker): def __init__(self, name, level=15): self.buffers = {} self.mapped_files = {} self.miss_count = 0 self.hit_count = 0 self.flush_limit = {} self.buffer_rolling_size = {} self.write_count = 0 self.write_limit = 1000 # Save the Bloom filter every 1000 writes super().__init__(name, level) self.bloom_filter = self.create_bloom_filter() self.load_bloom_filter() self.default_function_config = FunctionConfig() @abstractmethod def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: """ Get an instance of the bloom filter persistence provider. This exposes some persistent file storage, that must support reading and writing raw byte streams. :return: """ pass @abstractmethod def load_existing_datasets(self) -> Dict[str, Dict[str, Any]]: """ Get the lengths of all datasets backing the registered functions, including aligns. :return: """ pass @abstractmethod def ensure_persistence_location_exists(self): """ Ensure that the place we will be writing to actually exists. If not, create it. """ pass @abstractmethod def get_patch_location_for_function(self, func_hash, extension="") -> str: """ Get the address of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ pass @abstractmethod def write(self, path, data, mode="a") -> None: pass @abstractmethod def read(self, path) -> str: pass @abstractmethod def get_hash_from_path(self, path) -> str: pass @abstractmethod def does_object_exist(self, path) -> bool: pass def create_bloom_filter(self): bloom_filter_persistence = self.get_bloom_filter_persistence() bloom_filter = BloomFilter( bloom_filter_persistence, expected_number_of_elements=EXPECTED_ITEMS, false_positive_probability=FALSE_POSITIVE_RATE) return bloom_filter def load_bloom_filter(self): try: self.bloom_filter.load() except FileNotFoundError: self.debug("No Bloom filter found. 
Creating a new one.") def write_symbolic_align_call(self, func_hash, example) -> bool: log_file_path = self.get_patch_location_for_function(func_hash, extension=ALIGN_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def write_embeddable_align_call(self, func_hash, example, positive=True) -> bool: if positive: log_file_path = self.get_patch_location_for_function(func_hash, extension=POSITIVE_FILE_EXTENSION) else: log_file_path = self.get_patch_location_for_function(func_hash, extension=NEGATIVE_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def log_embeddable_align(self, func_hash, example, positive=True, **kws): """ Log a contrastive function invocation Args: func_hash: A string representation of the function signature and input parameters example: The example object positive: Whether the example is positive or negative **kws: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_embeddable_align_call(func_hash, example, positive) return successfully_saved, new_datapoint def log_symbolic_align(self, func_hash, *args, **kws): """ Log an align function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param args: Example objects :param kws: :return: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint example = args[0] # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_symbolic_align_call(func_hash, example) return successfully_saved, new_datapoint def log_symbolic_patch(self, func_hash, example): """ Log a patched function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param example: :return: """ if not isinstance(func_hash, str): func_hash = str(func_hash) example_data = str(example.__dict__).encode('utf-8') + b'\n' bloom_filter_representation = func_hash + '_' + example_data.decode('utf-8') # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): self.hit_count += 1 return {} self.miss_count += 1 # Add to Bloom Filter self.bloom_filter.add(bloom_filter_representation) try: self.ensure_persistence_location_exists() except Exception as e: return {} log_file_path = self.get_patch_location_for_function(func_hash, extension=PATCH_FILE_EXTENSION) if log_file_path not in self.buffers: self.buffers[log_file_path] = 
bytearray() if log_file_path not in self.flush_limit: self.flush_limit[log_file_path] = 1 self.buffers[log_file_path].extend(example_data) self.write_count += 1 if log_file_path not in self.buffer_rolling_size: self.buffer_rolling_size[log_file_path] = 1 else: self.buffer_rolling_size[log_file_path] += 1 if self.write_count >= self.write_limit: written_datapoints = self.flush() self.save_bloom_filter() self.write_count = 0 # Reset counter return written_datapoints if len(self.buffers[log_file_path]) >= min(self.flush_limit[log_file_path], 4096): # Flush after reaching 4KB written_datapoints = {} try: self.write(log_file_path, self.buffers[log_file_path], mode="a+b") # update buffers written_datapoints[func_hash] = self.buffer_rolling_size[log_file_path] self.buffers[log_file_path].clear() self.buffer_rolling_size[log_file_path] = 0 self.flush_limit[log_file_path] = 2 * self.flush_limit[log_file_path] self.save_bloom_filter() except Exception as e: pass return written_datapoints return {} def save_bloom_filter(self): try: self.bloom_filter.save() except Exception as e: self.warning("Could not save Bloom filter: {}".format(e)) def flush(self): # get log directory written_datapoints = {} for log_file_path, buffer in self.buffers.items(): if len(buffer) > 0: try: self.write(log_file_path, buffer, mode="a+b") written_datapoints[self.get_hash_from_path(log_file_path)] = self.buffer_rolling_size[log_file_path] self.buffer_rolling_size[log_file_path] = 0 buffer.clear() except Exception as e: pass return written_datapoints def load_function_config(self, func_hash): """ Get the config file for the function. Uses the message and log directory Config file has to be in .json """ default = False try: # try to get the config from the disk. If inaccessible, create a new default one self.ensure_persistence_location_exists() log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log<fim_suffix>_file_path}.json" if not self.does_object_exist(config_path): function_config = self.default_function_config default = True func_config_dict = function_config.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) else: function_config = FunctionConfig().load_from_dict(self.read_json(config_path)) except Exception as e: function_config = self.default_function_config default = True return function_config, default def update_function_config(self, func_hash, config_to_be_saved): """ Save the config file """ log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" try: func_config_dict = config_to_be_saved.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) except Exception as e: pass def write_json(self, path, data): self.write(path, json.dumps(data)) def read_json(self, path): return json.loads(self.read(path)) <fim_middle>
null
STATEMENT
complete_current_header_empty_completion
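The logger record combines two ideas: a Bloom filter for cheap duplicate suppression, and per-file byte buffers flushed once they cross a size threshold (4KB above, with a per-file flush limit that doubles after each flush). A simplified, self-contained sketch of the buffering half; the class name is made up and the Bloom filter is omitted:

from collections import defaultdict

class TinyBufferedWriter:
    """Accumulate records per file and append them to disk in batches."""
    def __init__(self, flush_bytes: int = 4096):
        self.buffers = defaultdict(bytearray)
        self.flush_bytes = flush_bytes

    def log(self, path: str, record: bytes) -> bool:
        buf = self.buffers[path]
        buf.extend(record + b"\n")
        if len(buf) >= self.flush_bytes:      # mirrors the 4KB flush above
            with open(path, "ab") as f:       # append bytes, like mode="a+b"
                f.write(bytes(buf))
            buf.clear()
            return True                       # batch hit disk
        return False                          # still buffered in memory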
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_typ<fim_suffix>e = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception (Any is not usable with isinstance, so skip it) _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) # keep a tuple, since item_type[0] is used below instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
STATEMENT
complete_current_header_empty_completion
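The rows above and below all lean on the same typing-introspection primitives, so a minimal standalone sketch of how get_origin and get_args decompose a parameterized annotation may help when reading them. This snippet is illustrative and not part of the dataset:

from typing import get_origin, get_args, List, Dict, Optional

print(get_origin(List[int]), get_args(List[int]))            # list (<class 'int'>,)
print(get_origin(Dict[str, int]), get_args(Dict[str, int]))  # dict (<class 'str'>, <class 'int'>)
print(get_origin(Optional[int]), get_args(Optional[int]))    # typing.Union (<class 'int'>, <class 'NoneType'>)
print(get_origin(int), get_args(int))                        # None () -- hence the "get_origin(t) or t" fallback above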
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_a<fim_suffix>rgs(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
STATEMENT
complete_current_header_empty_completion
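The completion target in the row above sits inside the deque branch, whose guard reads issubclass(origin, set), which looks like a copy-paste slip for deque. A corrected standalone sketch follows; the instantiate_sequence helper name is illustrative, not the library's API:

from collections import deque
from typing import get_origin, get_args, Any, Deque, FrozenSet

def instantiate_sequence(data: list, target_type: Any):
    origin = get_origin(target_type) or target_type
    args = get_args(target_type)
    item_type = args[0] if args else Any
    convert = (lambda x: x) if item_type is Any else item_type
    # Guard each branch with its own container type, not set
    if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)):
        return deque(convert(item) for item in data)
    if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)):
        return frozenset(convert(item) for item in data)
    raise TypeError(f"Unsupported sequence type: {target_type}")

print(instantiate_sequence(["1", "2"], Deque[int]))     # deque([1, 2])
print(instantiate_sequence([1, 1, 2], FrozenSet[int]))  # frozenset({1, 2})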
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] i<fim_suffix>f item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
STATEMENT
complete_current_header_empty_completion
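The row above completes the list branch. Note that its error messages format item_type[0].__name__ even though item_type is a single type rather than a tuple, which would itself raise. A hedged sketch of the same convert-then-check loop, with a hypothetical instantiate_list helper that formats the type directly:

from typing import get_args, Any, List

def instantiate_list(data: list, target_type: Any) -> list:
    args = get_args(target_type)
    item_type = args[0] if args else Any
    out = []
    for item in data:
        # Convert each element, then verify the conversion produced the right type
        value = item if item_type is Any else item_type(item)
        if item_type is not Any and not isinstance(value, item_type):
            raise TypeError(f"{value!r} is not a {item_type.__name__}")
        out.append(value)
    return out

print(instantiate_list(["1", "2", "3"], List[int]))  # [1, 2, 3]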
<filename>tanuki_py/src/tanuki/trackers/filesystem_buffered_logger.py<fim_prefix>import os from enum import Enum from typing import Literal, Union, Optional, Dict from appdirs import user_data_dir from tanuki.constants import * from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.persistence.filter.filesystem_bloom import BloomFilterFileSystemDriver from tanuki.trackers.abc_buffered_logger import ABCBufferedLogger class FilesystemBufferedLogger(ABCBufferedLogger): """ A class that handles the reading and writing of patch invocations and align statements. It includes the logic for a bloom filter, to ensure that we only store unique invocations. """ def __init__(self, name, level=15): self.log_directory = self._get_log_directory() super().__init__(name, level) def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: """ Get an instance of the bloom filter persistence provider. Typically this will be a file system provider. :return: A persistence provider """ return BloomFilterFileSystemDriver(log_directory=self.log_directory) def get_patch_location_for_function(self, func_hash, extension: Union[ ALIGN_FILE_EXTENSION_TYPE, PATCH_FILE_EXTENSION_TYPE] = "") -> str: """ Get the local location of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ return os.path.join(self.log_directory, func_hash + extension) def ensure_persistence_location_exists(self) -> None: """ Ensure that the location on the filesystem we will be writing to actually exists. If not, create it. """ log_directory = self.log_directory # Create the folder if it doesn't exist if not os.path.exists(log_directory): os.makedirs(log_directory) def does_object_exist(self, path: str) -> bool: """ Check to see if a path exists on the filesystem. :param path: :return: """ return os.path.exists(path) def _get_log_directory(self) -> str: """ Find a location on the filesystem to write our logs to. 
:return: """ filename = "functions" # If explicitly defined env_dir = os.getenv(ENVVAR) if env_dir and os.path.isdir(env_dir): return os.path.join(env_dir, filename) # If installed as a library library_dir = os.path.join(user_data_dir(LIB_NAME), filename) if os.path.isdir(library_dir) or not os.path.exists(library_dir): return library_dir # If installed in a<fim_suffix> project that contains a git repo - place it in the same folder as the git repo current_dir = os.getcwd() while current_dir != os.path.root: if ".git" in os.listdir(current_dir): return os.path.join(current_dir, filename) current_dir = os.path.dirname(current_dir) return os.path.join(os.getcwd(), filename) def load_dataset(self, dataset_type, func_hash, return_type="both") -> Optional[int]: """ Get the size of the dataset for a function hash """ log_directory = self._get_log_directory() dataset_type_map = {"alignments": ALIGN_FILE_EXTENSION, "positive": POSITIVE_FILE_EXTENSION, "negative": NEGATIVE_FILE_EXTENSION, "patches": PATCH_FILE_EXTENSION} log_file_path = os.path.join(log_directory, func_hash + dataset_type_map[dataset_type]) if not os.path.exists(log_file_path): if return_type == "both": return 0, None elif return_type == "dataset": return None elif return_type == "length": return 0 try: with open(log_file_path, "rb") as f: dataset = f.read() dataset_string = repr(dataset) dataset_length = dataset_string.count("\\n") - dataset_string.count("\\\\n") if return_type == "both": return dataset_length, dataset elif return_type == "dataset": return dataset elif return_type == "length": return dataset_length except Exception as e: if return_type == "both": return 0, None elif return_type == "dataset": return None elif return_type == "length": return 0 def load_existing_datasets(self) -> Dict[str, Dict[str, str]]: log_directory = self.log_directory dataset_lengths = { SYMBOLIC_ALIGNMENTS: {}, POSITIVE_EMBEDDABLE_ALIGNMENTS: {}, NEGATIVE_EMBEDDABLE_ALIGNMENTS: {}, PATCHES: {}, } try: if not os.path.exists(log_directory): os.makedirs(log_directory) # get all the files in the log directory files = os.listdir(log_directory) # discard all .json files files = [x for x in files if ".json" not in x] except Exception as e: return dataset_lengths for file in files: if ALIGN_FILE_EXTENSION not in file \ and PATCH_FILE_EXTENSION not in file \ and POSITIVE_FILE_EXTENSION not in file \ and NEGATIVE_FILE_EXTENSION not in file: continue elif ALIGN_FILE_EXTENSION in file: dataset_type = SYMBOLIC_ALIGNMENTS elif POSITIVE_FILE_EXTENSION in file: dataset_type = POSITIVE_EMBEDDABLE_ALIGNMENTS elif NEGATIVE_FILE_EXTENSION in file: dataset_type = NEGATIVE_EMBEDDABLE_ALIGNMENTS else: dataset_type = PATCHES func_hash = file.replace(ALIGN_FILE_EXTENSION, "").replace(PATCH_FILE_EXTENSION, "") dataset_lengths[dataset_type][func_hash] = -1 return dataset_lengths def write(self, path: str, data: str, mode: Literal["w", "a", "a+b"] = "w") -> None: """ Write data to a file """ with open(path, mode) as f: f.write(data) def read(self, path: str) -> str: """ Read data from a file """ with open(path, "r") as f: return f.read() def get_hash_from_path(self, path) -> str: """ Given a path with a hash, return only the hash :param path: The path to the file :return: The hash """ return path.replace(PATCH_FILE_EXTENSION, ""). \ replace(self.log_directory, ""). \ lstrip("/"). \ lstrip("\\") <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
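The row above completes _get_log_directory, whose loop compares against os.path.root, an attribute the os.path module does not define. Below is a standalone sketch of the same resolution order (explicit env var, then per-user data dir, then a .git-rooted project dir, then the working directory) using the conventional dirname fixed-point test for the filesystem root; the env var and app names are stand-ins:

import os
from appdirs import user_data_dir

def resolve_log_dir(envvar: str = "MYAPP_LOG_DIR", lib: str = "myapp") -> str:
    filename = "functions"
    # 1. An explicitly configured directory wins
    env_dir = os.getenv(envvar)
    if env_dir and os.path.isdir(env_dir):
        return os.path.join(env_dir, filename)
    # 2. Fall back to the per-user data directory
    library_dir = os.path.join(user_data_dir(lib), filename)
    if os.path.isdir(library_dir) or not os.path.exists(library_dir):
        return library_dir
    # 3. Walk upward looking for a git repository root
    current = os.getcwd()
    while current != os.path.dirname(current):  # stops at the filesystem root
        if ".git" in os.listdir(current):
            return os.path.join(current, filename)
        current = os.path.dirname(current)
    # 4. Last resort: the current working directory
    return os.path.join(os.getcwd(), filename)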
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate date<fim_suffix>time try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
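The `instantiate` logic in the row above recursively rebuilds typed Python objects from JSON-compatible data by dispatching on `get_origin`/`get_args`. Below is a minimal, self-contained sketch of that idea; it is a simplification for illustration, not the library's implementation, and the `instantiate` function and `Point` dataclass are hypothetical names introduced here.

import dataclasses
from dataclasses import dataclass
from typing import Any, get_args, get_origin, get_type_hints

def instantiate(data: Any, target: Any) -> Any:
    """Recursively build an instance of `target` from JSON-compatible `data` (simplified sketch)."""
    if target is Any or data is None:
        return data
    origin = get_origin(target) or target
    args = get_args(target)
    if origin in (int, float, str, bool):
        return origin(data)  # coerce primitives directly
    if origin is list:
        item_t = args[0] if args else Any
        return [instantiate(v, item_t) for v in data]
    if origin is dict:
        key_t, val_t = args if args else (Any, Any)
        return {instantiate(k, key_t): instantiate(v, val_t) for k, v in data.items()}
    if dataclasses.is_dataclass(origin):
        hints = get_type_hints(origin)
        fields = {f.name for f in dataclasses.fields(origin)}
        # keep only known fields, instantiating each against its annotation
        return origin(**{k: instantiate(v, hints.get(k, Any)) for k, v in data.items() if k in fields})
    return data

@dataclass
class Point:
    x: int
    y: int

print(instantiate([{"x": 1, "y": 2}], list[Point]))  # [Point(x=1, y=2)]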
<filename>tanuki_py/src/tanuki/trackers/abc_buffered_logger.py<fim_prefix>import json from abc import abstractmethod from typing import Dict, Any, Literal from tanuki.bloom_filter import BloomFilter from tanuki.constants import EXPECTED_ITEMS, FALSE_POSITIVE_RATE, ALIGN_FILE_EXTENSION, \ POSITIVE_FILE_EXTENSION, NEGATIVE_FILE_EXTENSION, PATCH_FILE_EXTENSION from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.models.function_config import FunctionConfig # PATCH_FILE_EXTENSION_TYPE = Literal[".patches"] # ALIGN_FILE_EXTENSION_TYPE = Literal[".alignments"] # POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".positive_embedding"] # NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".negative_embedding"] # # PATCH_FILE_EXTENSION: PATCH_FILE_EXTENSION_TYPE = ".patches" # ALIGN_FILE_EXTENSION: ALIGN_FILE_EXTENSION_TYPE = ".alignments" # POSITIVE_EMBEDDING_FILE_EXTENSION: POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_positives" # NEGATIVE_EMBEDDING_FILE_EXTENSION: NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_negatives" # # EXPECTED_ITEMS = 10000 # FALSE_POSITIVE_RATE = 0.01 # LIB_NAME = "tanuki" # ENVVAR = "TANUKI_LOG_DIR" class ABCBufferedLogger(DatasetWorker): def __init__(self, name, level=15): self.buffers = {} self.mapped_files = {} self.miss_count = 0 self.hit_count = 0 self.flush_limit = {} self.buffer_rolling_size = {} self.write_count = 0 self.write_limit = 1000 # Save the Bloom filter every 1000 writes super().__init__(name, level) self.bloom_filter = self.create_bloom_filter() self.load_bloom_filter() self.default_function_config = FunctionConfig() @abstractmethod def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: """ Get an instance of the bloom filter persistence provider. This exposes some persistent file storage, that must support reading and writing raw byte streams. :return: """ pass @abstractmethod def load_existing_datasets(self) -> Dict[str, Dict[str, Any]]: """ Get the lengths of all datasets backing the registered functions, including aligns. :return: """ pass @abstractmethod def ensure_persistence_location_exists(self): """ Ensure that the place we will be writing to actually exists. If not, create it. """ pass @abstractmethod def get_patch_location_for_function(self, func_hash, extension="") -> str: """ Get the address of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ pass @abstractmethod def write(self, path, data, mode="a") -> None: pass @abstractmethod def read(self, path) -> str: pass @abstractmethod def get_hash_from_path(self, path) -> str: pass @abstractmethod def does_object_exist(self, path) -> bool: pass def create_bloom_filter(self): bloom_filter_persistence = self.get_bloom_filter_persistence() bloom_filter = BloomFilter( bloom_filter_persistence, expected_number_of_elements=EXPECTED_ITEMS, false_positive_probability=FALSE_POSITIVE_RATE) return bloom_filter def load_bloom_filter(self): try: self.bloom_filter.load() except FileNotFoundError: self.debug("No Bloom filter found. 
Creating a new one.") def write_symbolic_align_call(self, func_hash, example) -> bool: log_file_path = self.get_patch_location_for_function(func_hash, extension=ALIGN_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def write_embeddable_align_call(self, func_hash, example, positive=True) -> bool: if positive: log_file_path = self.get_patch_location_for_function(func_hash, extension=POSITIVE_FILE_EXTENSION) else: log_file_path = self.get_patch_location_for_function(func_hash, extension=NEGATIVE_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def log_embeddable_align(self, func_hash, example, positive=True, **kws): """ Log a contrastive function invocation Args: func_hash: A string representation of the function signature and input parameters example: The example object positive: Whether the example is positive or negative **kws: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_embeddable_align_call(func_hash, example, positive) return successfully_saved, new_datapoint def log_symbolic_align(self, func_hash, *args, **kws): """ Log an align function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param args: Example objects :param kws: :return: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint example = args[0] # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_symbolic_align_call(func_hash, example) return successfully_saved, new_datapoint def log_symbolic_patch(self, func_hash, example): """ Log a patched function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param example: :return: """ if not isinstance(func_hash, str): func_hash = str(func_hash) example_data = str(example.__dict__).encode('utf-8') + b'\n' bloom_filter_representation = func_hash + '_' + example_data.decode('utf-8') # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): self.hit_count += 1 return {} self.miss_count += 1 # Add to Bloom Filter self.bloom_filter.add(bloom_filter_representation) try: self.ensure_persistence_location_exists() except Exception as e: return {} log_file_path = self.get_patch_location_for_function(func_hash, extension=PATCH_FILE_EXTENSION) if log_file_path not in self.buffers: self.buffers[log_file_path] = 
bytearray() if log_file_path not in self.flush_limit: self.flush_limit[log_file_path] = 1 self.buffers[log_file_path].extend(example_data) self.write_count += 1 if log_file_path not in self.buffer_rolling_size: self.buffer_rolling_size[log_file_path] = 1 else: self.buffer_rolling_size[log_file_path] += 1 if self.write_count >= self.write_limit: written_datapoints = self.flush() self.save_bloom_filter() self.write_count = 0 # Reset counter return written_datapoints if len(self.buffers[log_file_path]) >= min(self.flush_limit[log_file_path], 4096): # Flush after reaching 4KB written_datapoints = {} try: self.write(log_file_path, self.buffers[log_file_path], mode="a+b") # update buffers written_datapoints[func_hash] = self.buffer_rolling_size[log_file_path] self.buffers[log_file_path].clear() self.buffer_rolling_size[log_file_path] = 0 self.flush_limit[log_file_path] = 2 * self.flush_limit[log_file_path] self.save_bloom_filter() except Exception as e: pass return written_datapoints return {} def save_bloom_filter(self): try: self.bloom_filter.save() except Exception as e: self.warning("Could not save Bloom filter: {}".format(e)) def flush(self): # get log directory written_datapoints = {} for log_file_path, buffer in self.buffers.items(): if len(buffer) > 0: try: self.write(log_file_path, buffer, mode="a+b") written_datapoints[self.get_hash_from_path(log_file_path)] = self.buffer_rolling_size[log_file_path] self.buffer_rolling_size[log_file_path] = 0 buffer.clear() except Exception as e: pass return written_datapoints def load_function_config(self, func_hash): """ Get the config file for the function. Uses the message and log directory Config file has to be in .json """ default = False try: # try to get the config from the disk. If inaccessible, create a new default one self.ensure_persistence_location_exists() log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" if not self.does_object_exist(config_path): function_config = self.default_function_config default = True func_config_dict = function_config.to_dict() # remove teacher_models from the conf<fim_suffix>ig func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) else: function_config = FunctionConfig().load_from_dict(self.read_json(config_path)) except Exception as e: function_config = self.default_function_config default = True return function_config, default def update_function_config(self, func_hash, config_to_be_saved): """ Save the config file """ log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" try: func_config_dict = config_to_be_saved.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) except Exception as e: pass def write_json(self, path, data): self.write(path, json.dumps(data)) def read_json(self, path): return json.loads(self.read(path)) <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
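The buffered-logger row above combines a Bloom-filter membership check (to drop duplicate datapoints) with buffered file appends that flush past a size limit. A minimal sketch of that pattern follows; the `BufferedDedupLogger` class is a hypothetical name, and a plain `set` stands in for the Bloom filter (a real Bloom filter trades exact membership for bounded memory).

import hashlib

class BufferedDedupLogger:
    """Dedupe datapoints with a membership filter, buffer writes, flush past a limit."""
    def __init__(self, path: str, flush_limit: int = 4096):
        self.path = path
        self.flush_limit = flush_limit
        self.buffer = bytearray()
        self.seen = set()  # stand-in for a Bloom filter (exact, unlike the real thing)

    def log(self, func_hash: str, example: str) -> bool:
        key = hashlib.sha256(f"{func_hash}_{example}".encode()).hexdigest()
        if key in self.seen:   # duplicate datapoint: skip it
            return False
        self.seen.add(key)
        self.buffer.extend(example.encode() + b"\n")
        if len(self.buffer) >= self.flush_limit:
            self.flush()
        return True

    def flush(self) -> None:
        if self.buffer:
            with open(self.path, "ab") as f:  # append raw bytes, as the logger above does
                f.write(self.buffer)
            self.buffer.clear()

logger = BufferedDedupLogger("patches.log", flush_limit=64)
logger.log("abc123", '{"args": [1], "output": "2"}')
logger.flush()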
<filename>tanuki_py/src/tanuki/language_models/language_model_manager.py<fim_prefix>import json from typing import Any, Dict from tanuki.function_modeler import FunctionModeler from tanuki.language_models.llm_api_abc import LLM_API from tanuki.models.function_description import FunctionDescription from tanuki.models.function_example import FunctionExample from tanuki.models.language_model_output import LanguageModelOutput from tanuki.utils import approximate_token_count from tanuki.validator import Validator from tanuki.models.api_manager import APIManager from tanuki.language_models.llm_configs.abc_base_config import BaseModelConfig import logging class LanguageModelManager(object): """ The LanguageModelManager is responsible for managing the language models and their outputs operationally, this includes: - Generating outputs from the language models - Repairing outputs from the language models - Saving outputs from the language models - Finetuning the language models from the saved outputs """ def __init__(self, function_modeler: FunctionModeler, api_provider: APIManager, generation_token_limit=512,) -> None: self.api_provider = api_provider self.function_modeler = function_modeler self.default_generation_length = generation_token_limit self.initialized_functions = {} self.token_counts = {} def __call__(self, args, function_description: FunctionDescription, kwargs, validator: Validator, generation_parameters: dict) -> Any: # add the generation length if not there if "max_new_tokens" not in generation_parameters: generation_parameters["max_new_tokens"] = self.default_generation_length output = self.generate(args, kwargs, function_description, generation_parameters) # start parsing the object, very hacky way for the time being choice_parsed = self._parse_choice(output) valid = validator.check_type(choice_parsed, function_description.output_type_hint) if not valid: choice, choice_parsed, successful_repair = self.repair_output(args, kwargs, function_description, output.generated_response, validator, generation_parameters) if not successful_repair: raise TypeError( f"Output type was not valid. 
Expected an object of type {function_description.output_type_hint}, got '{output.generated_response}'") output.generated_response = choice output.distilled_model = False datapoint = FunctionExample(args, kwargs, output.generated_response) if output.suitable_for_finetuning and not output.distilled_model: self.function_modeler.postprocess_symbolic_datapoint(function_description.__hash__(), function_description, datapoint, repaired=not valid) instantiated = validator.instantiate(choice_parsed, function_description.output_type_hint) return instantiated def _parse_choice(self, output): try: # json load choice_parsed = json.loads(output.generated_response) except: # if it fails, it's not a json object, try eval try: choice_parsed = eval(output.generated_response) except: choice_parsed = output.generated_response return choice_parsed def generate(self, args, kwargs, function_description, llm_parameters={}): """ The main generation function, given the args, kwargs, function description and model type, generate a response and check if the datapoint can be saved to the finetune dataset """ func_hash = function_description.__hash__() prompt, model, save_to_finetune, is_distilled_model = self.get_generation_case(args, kwargs, function_description, llm_parameters, func_hash) # loggings current_function_setup = self.initialized_functions.get(func_hash, None) # getting the current function setup - model and align statements if current_function_setup: generator_model = current_function_setup["model"] if is_distilled_model: logging.info(f"Generating function outputs for {function_description.name} with a finetuned model: {model.model_name}.") self.initialized_functions[func_hash]["model"] = model.model_name elif generator_model == "": logging.info(f"Found {len(current_function_setup['examples'])} align statements for {function_description.name}. 
Generating function outputs with {model.model_name}.") self.initialized_functions[func_hash]["model"] = model.model_name elif generator_model != model.model_name: logging.info(f"Switching output generation from {generator_model} to {model.model_name} for function {function_description.name}.") self.initialized_functions[func_hash]["model"] = model.model_name choice = self._synthesise_answer(prompt, model, llm_parameters) output = LanguageModelOutput(choice, save_to_finetune, is_distilled_model) return output def _synthesise_answer(self, prompt, model, llm_parameters): """ Synthesise an answer given the prompt, model, model_type and llm_parameters Args: prompt (str): The prompt to send to the model model (BaseModelConfig): The model to use for generation llm_parameters (dict): The parameters to use for generation return: choice (str): The generated response """ system_message = model.system_message return self.api_provider[model.provider].generate(model, system_message, prompt, **llm_parameters) def get_generation_case(self, args, kwargs, function_description, llm_parameters, func_hash): """ Get the generation case with the correct prompt and model First get the current model, then if distilled model, do zero-shot prompt and return False as suitable_for_finetune If not distilled model, check if suitable for finetuning, create the prompt and return the correct model given the token count """ f = str(function_description.__dict__.__repr__()) distilled_model, teacher_models = self.function_modeler.get_models(function_description) is_distilled_model = distilled_model.model_name != "" suitable_for_distillation, input_prompt_token_count = self.suitable_for_finetuning_token_check(args, kwargs, f, distilled_model) if func_hash not in self.initialized_functions: # initialise the initialized_functions dic<fim_suffix>t self.initialized_functions[func_hash] = {"model": "", "examples": []} # no examples needed, using a finetuned model. Dont save to finetune dataset if is_distilled_model and suitable_for_distillation: prompt = self.construct_prompt(f, args, kwargs, [], distilled_model) return prompt, distilled_model, suitable_for_distillation, True else: aligns = self.function_modeler.get_symbolic_alignments(function_description.__hash__(), max=16) examples = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput: {align['output']}" for align in aligns] # update the examples in the initialized_functions dict self.initialized_functions[func_hash]["examples"] = examples examples_token_count = sum([approximate_token_count(example) for example in examples]) generation_tokens = llm_parameters.get("max_new_tokens", self.default_generation_length) model = self.choose_model_from_tokens(teacher_models, examples_token_count + input_prompt_token_count + generation_tokens, len(examples)) if model: examples_with_parsing_tokens = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput:{model.parsing_helper_tokens['start_token']}{align['output']}{model.parsing_helper_tokens['end_token']}" for align in aligns] prompt = self.construct_prompt(f, args, kwargs, examples_with_parsing_tokens, model) return prompt, model, suitable_for_distillation, False else: raise ValueError( "The input content and align statements combined are too long, please shorten it. 
The maximum currently allowed token limit is 32000") def suitable_for_finetuning_token_check(self, args, kwargs, f, distilled_model: BaseModelConfig): """ Check if the inputs are suitable for finetuning, i.e are below the finetuning token count """ # check if finetunable finetuning_prompt = f"Function: {f}\n---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:" input_prompt_token_count = approximate_token_count(finetuning_prompt) if distilled_model.system_message_token_count < 0: distilled_model.system_message_token_count = approximate_token_count(distilled_model.system_message) if distilled_model.instruction_token_count < 0: distilled_model.instruction_token_count = approximate_token_count(distilled_model.instructions) suitable_for_finetune = input_prompt_token_count + distilled_model.instruction_token_count + distilled_model.system_message_token_count < distilled_model.context_length return suitable_for_finetune, input_prompt_token_count def construct_prompt(self, f, args, kwargs, examples, model): """ Construct a prompt given the model, function description, args, kwargs and examples Args: model (BaseModelConfig): The model to use for generation f (str): The function description args (tuple): The args of the function kwargs (tuple): The kwargs of the function examples (list): The examples of the function Returns: content (str): The prompt to send to the model """ if examples: final_examples = "\n".join( [f"{align}" for align in examples]) example_input = f"Examples:{final_examples}\n" else: example_input = "" instruction_prompt = model.instructions content = f"{instruction_prompt}\nFunction: {f}\n{example_input}---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:" return content def repair_generate(self, args, kwargs, f, failed_outputs_list, aligns, models, llm_parameters): """ Repair the output given the input, function description, failed outputs list, examples and models """ # get the token counts examples = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput: {align['output']}" for align in aligns] examples_token_count = sum([approximate_token_count(example) for example in examples]) failed_examples_token_count = sum([approximate_token_count(failed_output[0]) + approximate_token_count(failed_output[1]) for failed_output in failed_outputs_list]) input_prompt_token_count = approximate_token_count(f"Function: {f}\n---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:") generation_tokens = llm_parameters.get("max_new_tokens", self.default_generation_length) model = self.choose_model_from_tokens(models, examples_token_count+input_prompt_token_count+generation_tokens+failed_examples_token_count, len(examples)) if model: prompt = self.generate_repair_prompt(args, kwargs, f, failed_outputs_list, examples, model) logging.info(f"Previous output failed type validation, attempting to repair with {model.model_name}") choice = self._synthesise_answer(prompt, model, llm_parameters) return choice else: return None def generate_repair_prompt(self, args, kwargs, f, failed_outputs_list, examples, model): """ Generate a repair prompt given the args, kwargs, function description, failed outputs list and examples """ if examples: final_examples = "\n".join( [f"{model.parsing_helper_tokens['start_token']}{align}{model.parsing_helper_tokens['end_token']}" for align in examples]) successful_examples = f"Examples:{final_examples}\n" else: successful_examples = "" failed_examples = "" for failed_output in failed_outputs_list: failed_examples += f"Output: {failed_output[0]}\nError: 
{failed_output[1]}\n\n" end_token_addition = "" if model.parsing_helper_tokens["end_token"]: end_token_addition = f"Make sure to add the {model.parsing_helper_tokens['end_token']} token at the end of the output." prompt = f"{model.repair_instruction}{end_token_addition}\nFUNCTION DESCRIPTION: {f}\n{successful_examples}---{model.parsing_helper_tokens['start_token']}Inputs:\nArgs: {args}\nKwargs: {kwargs}\nFAILED EXAMPLES: {failed_examples}Correct output:" return prompt def choose_model_from_tokens(self, models, input_token_count, nr_of_examples=0): """ Choose a model from the models given the token count and number of examples Args: models (list): The models to choose from input_token_count (int): The token count of the input nr_of_examples (int): The number of examples Returns: model (BaseModelConfig): The chosen model """ for model in models: # check if input token count is less than the context length # If the model config has custom messages, then use those, otherwise use the default ones if model.system_message_token_count < 0: model.system_message_token_count = approximate_token_count(model.system_message) if model.instruction_token_count < 0: model.instruction_token_count = approximate_token_count(model.instructions) if model.parsing_helper_tokens["start_token"]: input_token_count += 2*nr_of_examples if model.parsing_helper_tokens["end_token"]: input_token_count += 2*nr_of_examples total_token_count = input_token_count + model.instruction_token_count + model.system_message_token_count if total_token_count < model.context_length: return model return None def repair_output(self, args: tuple, kwargs: dict, function_description: FunctionDescription, choice, validator: Validator, generation_parameters: dict) -> tuple: """ Repair an output, that failed type validation by generating a new output using the teacher model and the error Args: args (tuple): The args of the function kwargs (dict): The kwargs of the function function_description (FunctionDescription): The function description choice: The output that failed type validation, type is arbitrary validator (Validator): The validator object Returns: choice (str): The choice that was generated by the language model choice_parsed: The parsed choice, type is arbitrary valid (bool): Whether the output was correctly repaired was valid """ # get the teacher models teacher_models = self.function_modeler.get_models(function_description)[1] valid = False retry_index = 5 f = str(function_description.__dict__.__repr__() + "\n") error = f"Output type was not valid. 
Expected a valid object of type {function_description.output_type_hint}, got '{choice}'" # instantiate the failed outputs list failed_outputs_list = [(choice, error)] while retry_index > 0 and not valid: # get the alignments aligns = self.function_modeler.get_symbolic_alignments(function_description.__hash__(), max=5) # Generate the repaired LLM output choice = self.repair_generate(args, kwargs, f, failed_outputs_list, aligns, teacher_models, generation_parameters) if not choice: # if no choice, then the input was too long for the model # no specific error, but the retry index goes down retry_index -= 1 continue # start parsing the object try: # json load choice_parsed = json.loads(choice) except: # if it fails, it's not a json object, try eval try: choice_parsed = eval(choice) except: choice_parsed = choice valid = validator.check_type(choice_parsed, function_description.output_type_hint) if not valid: # if it's not valid, add it to the failed outputs list error = f"Output type was not valid. Expected an object of type {function_description.output_type_hint}, got '{choice}'" failed_outputs_list.append((choice, error)) retry_index -= 1 if valid: logging.info("Successfully repaired output.") return choice, choice_parsed, valid <fim_middle>

null
LINE_COMMENT
complete_current_header_empty_completion
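The `choose_model_from_tokens` method in the row above walks an ordered model list and returns the first model whose context window can hold the prompt plus instruction overhead plus the generation budget. Here is a standalone sketch of that selection rule; the `ModelConfig` fields and the 4-characters-per-token heuristic are assumptions made for illustration, not the library's exact accounting.

from dataclasses import dataclass
from typing import Optional

@dataclass
class ModelConfig:
    name: str
    context_length: int
    overhead_tokens: int  # assumed: system message + instruction tokens, precomputed

def approximate_token_count(text: str) -> int:
    # crude heuristic: roughly 4 characters per token
    return len(text) // 4 + 1

def choose_model(models: list[ModelConfig], prompt: str, generation_tokens: int) -> Optional[ModelConfig]:
    """Return the first model whose context window fits prompt + overhead + generation budget."""
    needed = approximate_token_count(prompt) + generation_tokens
    for model in models:
        if needed + model.overhead_tokens < model.context_length:
            return model
    return None  # nothing fits: the caller should shorten the input

models = [ModelConfig("small", 4096, 200), ModelConfig("large", 32000, 200)]
print(choose_model(models, "Function: f(x) -> int ..." * 100, 512).name)  # "small"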
<filename>tanuki_py/src/tanuki/language_models/language_model_manager.py<fim_prefix>import json from typing import Any, Dict from tanuki.function_modeler import FunctionModeler from tanuki.language_models.llm_api_abc import LLM_API from tanuki.models.function_description import FunctionDescription from tanuki.models.function_example import FunctionExample from tanuki.models.language_model_output import LanguageModelOutput from tanuki.utils import approximate_token_count from tanuki.validator import Validator from tanuki.models.api_manager import APIManager from tanuki.language_models.llm_configs.abc_base_config import BaseModelConfig import logging class LanguageModelManager(object): """ The LanguageModelManager is responsible for managing the language models and their outputs operationally, this includes: - Generating outputs from the language models - Repairing outputs from the language models - Saving outputs from the language models - Finetuning the language models from the saved outputs """ def __init__(self, function_modeler: FunctionModeler, api_provider: APIManager, generation_token_limit=512,) -> None: self.api_provider = api_provider self.function_modeler = function_modeler self.default_generation_length = generation_token_limit self.initialized_functions = {} self.token_counts = {} def __call__(self, args, function_description: FunctionDescription, kwargs, validator: Validator, generation_parameters: dict) -> Any: # add the generation length if not there if "max_new_tokens" not in generation_parameters: generation_parameters["max_new_tokens"] = self.default_generation_length output = self.generate(args, kwargs, function_description, generation_parameters) # start parsing the object, very hacky way for the time being choice_parsed = self._parse_choice(output) valid = validator.check_type(choice_parsed, function_description.output_type_hint) if not valid: choice, choice_parsed, successful_repair = self.repair_output(args, kwargs, function_description, output.generated_response, validator, generation_parameters) if not successful_repair: raise TypeError( f"Output type was not valid. 
Expected an object of type {function_description.output_type_hint}, got '{output.generated_response}'") output.generated_response = choice output.distilled_model = False datapoint = FunctionExample(args, kwargs, output.generated_response) if output.suitable_for_finetuning and not output.distilled_model: self.function_modeler.postprocess_symbolic_datapoint(function_description.__hash__(), function_description, datapoint, repaired=not valid) instantiated = validator.instantiate(choice_parsed, function_description.output_type_hint) return instantiated def _parse_choice(self, output): try: # json load choice_parsed = json.loads(output.generated_response) except: # if it fails, it's not a json object, try eval try: choice_parsed = eval(output.generated_response) except: choice_parsed = output.generated_response return choice_parsed def generate(self, args, kwargs, function_description, llm_parameters={}): """ The main generation function, given the args, kwargs, function description and model type, generate a response and check if the datapoint can be saved to the finetune dataset """ func_hash = function_description.__hash__() prompt, model, save_to_finetune, is_distilled_model = self.get_generation_case(args, kwargs, function_description, llm_parameters, func_hash) # loggings current_function_setup = self.initialized_functions.get(func_hash, None) # getting the current function setup - model and align statements if current_function_setup: generator_model = current_function_setup["model"] if is_distilled_model: logging.info(f"Generating function outputs for {function_description.name} with a finetuned model: {model.model_name}.") self.initialized_functions[func_hash]["model"] = model.model_name elif generator_model == "": logging.info(f"Found {len(current_function_setup['examples'])} align statements for {function_description.name}. 
Generating function outputs with {model.model_name}.") self.initialized_functions[func_hash]["model"] = model.model_name elif generator_model != model.model_name: logging.info(f"Switching output generation from {generator_model} to {model.model_name} for function {function_description.name}.") self.initialized_functions[func_hash]["model"] = model.model_name choice = self._synthesise_answer(prompt, model, llm_parameters) output = LanguageModelOutput(choice, save_to_finetune, is_distilled_model) return output def _synthesise_answer(self, prompt, model, llm_parameters): """ Synthesise an answer given the prompt, model, model_type and llm_parameters Args: prompt (str): The prompt to send to the model model (BaseModelConfig): The model to use for generation llm_parameters (dict): The parameters to use for generation return: choice (str): The generated response """ system_message = model.system_message return self.api_provider[model.provider].generate(model, system_message, prompt, **llm_parameters) def get_generation_case(self, args, kwargs, function_description, llm_parameters, func_hash): """ Get the generation case with the correct prompt and model First get the current model, then if distilled model, do zero-shot prompt and return False as suitable_for_finetune If not distilled model, check if suitable for finetuning, create the prompt and return the correct model given the token count """ f = str(function_description.__dict__.__repr__()) distilled_model, teacher_models = self.function_modeler.get_models(function_description) is_distilled_model = distilled_model.model_name != "" suitable_for_distillation, input_prompt_token_count = self.suitable_for_finetuning_token_check(args, kwargs, f, distilled_model) if func_hash not in self.initialized_functions: # initialise the initialized_functions dict self.initialized_functions[func_hash] = {"model": "", "examples": []} # no examples needed, using a finetuned model. Dont save to finetune dataset if is_distilled_model and suitable_for_distillation: prompt = self.construct_prompt(f, args, kwargs, [], distilled_model) return prompt, distilled_model, suitable_for_distillation, True else: aligns = self.function_modeler.get_symbolic_alignments(function_description.__hash__(), max=16) examples = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput: {align['output']}" for align in aligns] # update the examp<fim_suffix>les in the initialized_functions dict self.initialized_functions[func_hash]["examples"] = examples examples_token_count = sum([approximate_token_count(example) for example in examples]) generation_tokens = llm_parameters.get("max_new_tokens", self.default_generation_length) model = self.choose_model_from_tokens(teacher_models, examples_token_count + input_prompt_token_count + generation_tokens, len(examples)) if model: examples_with_parsing_tokens = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput:{model.parsing_helper_tokens['start_token']}{align['output']}{model.parsing_helper_tokens['end_token']}" for align in aligns] prompt = self.construct_prompt(f, args, kwargs, examples_with_parsing_tokens, model) return prompt, model, suitable_for_distillation, False else: raise ValueError( "The input content and align statements combined are too long, please shorten it. 
The maximum currently allowed token limit is 32000") def suitable_for_finetuning_token_check(self, args, kwargs, f, distilled_model: BaseModelConfig): """ Check if the inputs are suitable for finetuning, i.e are below the finetuning token count """ # check if finetunable finetuning_prompt = f"Function: {f}\n---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:" input_prompt_token_count = approximate_token_count(finetuning_prompt) if distilled_model.system_message_token_count < 0: distilled_model.system_message_token_count = approximate_token_count(distilled_model.system_message) if distilled_model.instruction_token_count < 0: distilled_model.instruction_token_count = approximate_token_count(distilled_model.instructions) suitable_for_finetune = input_prompt_token_count + distilled_model.instruction_token_count + distilled_model.system_message_token_count < distilled_model.context_length return suitable_for_finetune, input_prompt_token_count def construct_prompt(self, f, args, kwargs, examples, model): """ Construct a prompt given the model, function description, args, kwargs and examples Args: model (BaseModelConfig): The model to use for generation f (str): The function description args (tuple): The args of the function kwargs (tuple): The kwargs of the function examples (list): The examples of the function Returns: content (str): The prompt to send to the model """ if examples: final_examples = "\n".join( [f"{align}" for align in examples]) example_input = f"Examples:{final_examples}\n" else: example_input = "" instruction_prompt = model.instructions content = f"{instruction_prompt}\nFunction: {f}\n{example_input}---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:" return content def repair_generate(self, args, kwargs, f, failed_outputs_list, aligns, models, llm_parameters): """ Repair the output given the input, function description, failed outputs list, examples and models """ # get the token counts examples = [f"Inputs:\nArgs: {align['args']}\nKwargs: {align['kwargs']}\nOutput: {align['output']}" for align in aligns] examples_token_count = sum([approximate_token_count(example) for example in examples]) failed_examples_token_count = sum([approximate_token_count(failed_output[0]) + approximate_token_count(failed_output[1]) for failed_output in failed_outputs_list]) input_prompt_token_count = approximate_token_count(f"Function: {f}\n---\nInputs:\nArgs: {args}\nKwargs: {kwargs}\nOutput:") generation_tokens = llm_parameters.get("max_new_tokens", self.default_generation_length) model = self.choose_model_from_tokens(models, examples_token_count+input_prompt_token_count+generation_tokens+failed_examples_token_count, len(examples)) if model: prompt = self.generate_repair_prompt(args, kwargs, f, failed_outputs_list, examples, model) logging.info(f"Previous output failed type validation, attempting to repair with {model.model_name}") choice = self._synthesise_answer(prompt, model, llm_parameters) return choice else: return None def generate_repair_prompt(self, args, kwargs, f, failed_outputs_list, examples, model): """ Generate a repair prompt given the args, kwargs, function description, failed outputs list and examples """ if examples: final_examples = "\n".join( [f"{model.parsing_helper_tokens['start_token']}{align}{model.parsing_helper_tokens['end_token']}" for align in examples]) successful_examples = f"Examples:{final_examples}\n" else: successful_examples = "" failed_examples = "" for failed_output in failed_outputs_list: failed_examples += f"Output: {failed_output[0]}\nError: 
{failed_output[1]}\n\n" end_token_addition = "" if model.parsing_helper_tokens["end_token"]: end_token_addition = f"Make sure to add the {model.parsing_helper_tokens['end_token']} token at the end of the output." prompt = f"{model.repair_instruction}{end_token_addition}\nFUNCTION DESCRIPTION: {f}\n{successful_examples}---{model.parsing_helper_tokens['start_token']}Inputs:\nArgs: {args}\nKwargs: {kwargs}\nFAILED EXAMPLES: {failed_examples}Correct output:" return prompt def choose_model_from_tokens(self, models, input_token_count, nr_of_examples=0): """ Choose a model from the models given the token count and number of examples Args: models (list): The models to choose from input_token_count (int): The token count of the input nr_of_examples (int): The number of examples Returns: model (BaseModelConfig): The chosen model """ for model in models: # check if input token count is less than the context length # If the model config has custom messages, then use those, otherwise use the default ones if model.system_message_token_count < 0: model.system_message_token_count = approximate_token_count(model.system_message) if model.instruction_token_count < 0: model.instruction_token_count = approximate_token_count(model.instructions) if model.parsing_helper_tokens["start_token"]: input_token_count += 2*nr_of_examples if model.parsing_helper_tokens["end_token"]: input_token_count += 2*nr_of_examples total_token_count = input_token_count + model.instruction_token_count + model.system_message_token_count if total_token_count < model.context_length: return model return None def repair_output(self, args: tuple, kwargs: dict, function_description: FunctionDescription, choice, validator: Validator, generation_parameters: dict) -> tuple: """ Repair an output, that failed type validation by generating a new output using the teacher model and the error Args: args (tuple): The args of the function kwargs (dict): The kwargs of the function function_description (FunctionDescription): The function description choice: The output that failed type validation, type is arbitrary validator (Validator): The validator object Returns: choice (str): The choice that was generated by the language model choice_parsed: The parsed choice, type is arbitrary valid (bool): Whether the output was correctly repaired was valid """ # get the teacher models teacher_models = self.function_modeler.get_models(function_description)[1] valid = False retry_index = 5 f = str(function_description.__dict__.__repr__() + "\n") error = f"Output type was not valid. 
Expected a valid object of type {function_description.output_type_hint}, got '{choice}'" # instantiate the failed outputs list failed_outputs_list = [(choice, error)] while retry_index > 0 and not valid: # get the alignments aligns = self.function_modeler.get_symbolic_alignments(function_description.__hash__(), max=5) # Generate the repaired LLM output choice = self.repair_generate(args, kwargs, f, failed_outputs_list, aligns, teacher_models, generation_parameters) if not choice: # if no choice, then the input was too long for the model # no specific error, but the retry index goes down retry_index -= 1 continue # start parsing the object try: # json load choice_parsed = json.loads(choice) except: # if it fails, it's not a json object, try eval try: choice_parsed = eval(choice) except: choice_parsed = choice valid = validator.check_type(choice_parsed, function_description.output_type_hint) if not valid: # if it's not valid, add it to the failed outputs list error = f"Output type was not valid. Expected an object of type {function_description.output_type_hint}, got '{choice}'" failed_outputs_list.append((choice, error)) retry_index -= 1 if valid: logging.info("Successfully repaired output.") return choice, choice_parsed, valid <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
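The `repair_output` method in the row above retries generation while feeding each failed output (and its error) back to the model as context. A simplified sketch of that retry loop follows; `generate` is a stubbed callable standing in for the LLM call, and the raw-string fallback replaces the library's `eval` fallback for safety in this sketch.

import json
from typing import Any, Callable, Tuple

def repair_loop(generate: Callable[[list], str],
                is_valid: Callable[[Any], bool],
                first_bad_output: str,
                max_retries: int = 5) -> Tuple[Any, bool]:
    """Retry generation, feeding the failure history back each round (simplified sketch)."""
    failed = [(first_bad_output, "output failed type validation")]
    for _ in range(max_retries):
        raw = generate(failed)        # hypothetical LLM call that sees the failure history
        try:
            parsed = json.loads(raw)
        except json.JSONDecodeError:
            parsed = raw              # not JSON: fall back to the raw string
        if is_valid(parsed):
            return parsed, True
        failed.append((raw, "output failed type validation"))
    return None, False

# Toy usage: the stub "model" succeeds only after seeing two failures.
fake_llm = lambda failed: "42" if len(failed) >= 2 else "not json"
print(repair_loop(fake_llm, lambda v: isinstance(v, int), "oops"))  # (42, True)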
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or (inspect.isclass(origin) and issubclass(origin, tuple(self.list_like_types))): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate the datetime object try: origin(**value) return True except Exception: return False # Handle dictionaries if origin is dict or (inspect.isclass(origin) and issubclass(origin, tuple(self.dict_like_types))): if not isinstance(value, (dict, Mapping)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: if isinstance(value, origin): return True # check if value is a dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones without default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are of the correct type # this is an additional check, because the check above only covers required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, at<fim_suffix>tempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # defaultdict requires a default factory; `int` is used here as a simple default # and may need to be adapted for other value types. return defaultdict(int, instantiated_items) # Handle dict-like types such as OrderedDict # the isclass check is needed first to ensure origin has the __mro__ attribute elif inspect.isclass(origin) and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type, e.g. Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {_type}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_types = self._find_generic_base_and_args(target_type) if not item_types: item_types = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_types[0] is not Any and not isinstance(instantiated_item, item_types[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_types[0]}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
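The rows above and below all embed the same `Validator` class from tanuki_py/src/tanuki/validator.py, whose `validate_output` method JSON-decodes a model response and defers to `check_type` for structural validation against a typing annotation. A minimal sketch of that behavior, assuming the class is importable as `tanuki.validator.Validator` (the path implied by the `<filename>` marker) and that the package is installed:

from typing import Dict, List, Optional
from tanuki.validator import Validator  # import path assumed from the <filename> marker

validator = Validator()

# validate_output JSON-decodes the string, then defers to check_type
assert validator.validate_output('[1, 2, 3]', List[int])
assert validator.validate_output('{"a": 1}', Dict[str, int])
assert not validator.validate_output('{"a": "x"}', Dict[str, int])
assert not validator.validate_output('not json', List[int])  # decode failure -> False

# Optional[T] is treated as Union[T, None], so both branches are accepted
assert validator.check_type(3, Optional[int])
assert validator.check_type(None, Optional[int])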
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibilit<fim_suffix>y with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
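The row above is holed inside `instantiate`'s dict branch (at the pydantic-v1 fallback comment), the code path that routes a JSON object to a dataclass, a Pydantic model, or a plain class. A hedged sketch of the dataclass path, using a hypothetical `Person` dataclass; note that keys outside the declared fields are silently dropped by the field filter before construction:

from dataclasses import dataclass
from tanuki.validator import Validator  # import path assumed as above

@dataclass
class Person:  # hypothetical target type for illustration
    name: str
    age: int

validator = Validator()
# "ignored" is not a dataclass field, so it never reaches Person(**...)
person = validator.instantiate({"name": "Ada", "age": 36, "ignored": True}, Person)
assert person == Person(name="Ada", age=36)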
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the<fim_suffix> data as-is return data <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
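The same `instantiate` method resolves `Union` annotations by trying each member type in declared order and returning the first successful conversion, so member ordering decides how ambiguous inputs are coerced. A small sketch, under the same import assumption as above:

from typing import Optional, Union
from tanuki.validator import Validator

validator = Validator()
# int is tried before str, so the numeric string is coerced to an int
assert validator.instantiate("7", Union[int, str]) == 7
# None short-circuits before any member type is tried
assert validator.instantiate(None, Optional[int]) is None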
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type check<fim_suffix>ing for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
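Because JSON has no tuple or set literals, `instantiate` accepts a decoded list and re-shapes it into the annotated container, instantiating each element against its positional (tuple) or uniform (list) item type. A sketch, again assuming the `tanuki.validator` import path:

from typing import List, Tuple
from tanuki.validator import Validator

validator = Validator()
# a decoded JSON list is re-shaped into the annotated tuple
assert validator.instantiate([1, "a"], Tuple[int, str]) == (1, "a")
# list items are coerced element-wise, here via int("1") and int("2")
assert validator.instantiate(["1", "2"], List[int]) == [1, 2]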
<filename>tanuki_py/src/tanuki/trackers/filesystem_buffered_logger.py<fim_prefix>import os
from enum import Enum
from typing import Literal, Union, Optional, Dict, Tuple

from appdirs import user_data_dir

from tanuki.constants import *
from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence
from tanuki.persistence.filter.filesystem_bloom import BloomFilterFileSystemDriver
from tanuki.trackers.abc_buffered_logger import ABCBufferedLogger


class FilesystemBufferedLogger(ABCBufferedLogger):
    """
    A class that handles the reading and writing of patch invocations and align statements.
    It includes the logic for a bloom filter, to ensure that we only store unique invocations.
    """

    def __init__(self, name, level=15):
        self.log_directory = self._get_log_directory()
        super().__init__(name, level)

    def get_bloom_filter_persistence(self) -> IBloomFilterPersistence:
        """
        Get an instance of the bloom filter persistence provider.
        Typically this will be a file system provider.
        :return: A persistence provider
        """
        return BloomFilterFileSystemDriver(log_directory=self.log_directory)

    def get_patch_location_for_function(self, func_hash, extension: Union[
        ALIGN_FILE_EXTENSION_TYPE, PATCH_FILE_EXTENSION_TYPE] = "") -> str:
        """
        Get the local location of the function patch file.
        :param func_hash: The representation of the function
        :param extension: Whether this is a patch or an alignment
        :return:
        """
        return os.path.join(self.log_directory, func_hash + extension)

    def ensure_persistence_location_exists(self) -> None:
        """
        Ensure that the location on the filesystem we will be writing to actually exists.
        If not, create it.
        """
        log_directory = self.log_directory
        # Create the folder if it doesn't exist
        if not os.path.exists(log_directory):
            os.makedirs(log_directory)

    def does_object_exist(self, path: str) -> bool:
        """
        Check to see if a path exists on the filesystem.
        :param path:
        :return:
        """
        return os.path.exists(path)

    def _get_log_directory(self) -> str:
        """
        Find a location on the filesystem to write our logs to.
        :return:
        """
        filename = "functions"

        # If explicitly defined
        env_dir = os.getenv(ENVVAR)
        if env_dir and os.path.isdir(env_dir):
            return os.path.join(env_dir, filename)

        # If installed a<fim_suffix>s a library
        library_dir = os.path.join(user_data_dir(LIB_NAME), filename)
        if os.path.isdir(library_dir) or not os.path.exists(library_dir):
            return library_dir

        # If installed in a project that contains a git repo - place it in the same folder as the git repo
        current_dir = os.getcwd()
        # os.path has no `root` attribute; walk up until dirname() stops changing (the filesystem root)
        while current_dir != os.path.dirname(current_dir):
            if ".git" in os.listdir(current_dir):
                return os.path.join(current_dir, filename)
            current_dir = os.path.dirname(current_dir)

        return os.path.join(os.getcwd(), filename)

    def load_dataset(self, dataset_type, func_hash, return_type="both") -> Union[
            Tuple[int, Optional[bytes]], Optional[bytes], Optional[int]]:
        """
        Get the size of the dataset for a function hash
        """
        log_directory = self._get_log_directory()
        dataset_type_map = {"alignments": ALIGN_FILE_EXTENSION,
                            "positive": POSITIVE_FILE_EXTENSION,
                            "negative": NEGATIVE_FILE_EXTENSION,
                            "patches": PATCH_FILE_EXTENSION}

        log_file_path = os.path.join(log_directory, func_hash + dataset_type_map[dataset_type])
        if not os.path.exists(log_file_path):
            if return_type == "both":
                return 0, None
            elif return_type == "dataset":
                return None
            elif return_type == "length":
                return 0
        try:
            with open(log_file_path, "rb") as f:
                dataset = f.read()
            dataset_string = repr(dataset)
            dataset_length = dataset_string.count("\\n") - dataset_string.count("\\\\n")
            if return_type == "both":
                return dataset_length, dataset
            elif return_type == "dataset":
                return dataset
            elif return_type == "length":
                return dataset_length
        except Exception as e:
            if return_type == "both":
                return 0, None
            elif return_type == "dataset":
                return None
            elif return_type == "length":
                return 0

    def load_existing_datasets(self) -> Dict[str, Dict[str, str]]:
        log_directory = self.log_directory
        dataset_lengths = {
            SYMBOLIC_ALIGNMENTS: {},
            POSITIVE_EMBEDDABLE_ALIGNMENTS: {},
            NEGATIVE_EMBEDDABLE_ALIGNMENTS: {},
            PATCHES: {},
        }
        try:
            if not os.path.exists(log_directory):
                os.makedirs(log_directory)
            # get all the files in the log directory
            files = os.listdir(log_directory)
            # discard all .json files
            files = [x for x in files if ".json" not in x]
        except Exception as e:
            return dataset_lengths
        for file in files:
            if ALIGN_FILE_EXTENSION not in file \
                    and PATCH_FILE_EXTENSION not in file \
                    and POSITIVE_FILE_EXTENSION not in file \
                    and NEGATIVE_FILE_EXTENSION not in file:
                continue
            elif ALIGN_FILE_EXTENSION in file:
                dataset_type = SYMBOLIC_ALIGNMENTS
            elif POSITIVE_FILE_EXTENSION in file:
                dataset_type = POSITIVE_EMBEDDABLE_ALIGNMENTS
            elif NEGATIVE_FILE_EXTENSION in file:
                dataset_type = NEGATIVE_EMBEDDABLE_ALIGNMENTS
            else:
                dataset_type = PATCHES
            func_hash = file.replace(ALIGN_FILE_EXTENSION, "").replace(PATCH_FILE_EXTENSION, "")
            dataset_lengths[dataset_type][func_hash] = -1
        return dataset_lengths

    def write(self, path: str, data: str, mode: Literal["w", "a", "a+b"] = "w") -> None:
        """
        Write data to a file
        """
        with open(path, mode) as f:
            f.write(data)

    def read(self, path: str) -> str:
        """
        Read data from a file
        """
        with open(path, "r") as f:
            return f.read()

    def get_hash_from_path(self, path) -> str:
        """
        Given a path with a hash, return only the hash
        :param path: The path to the file
        :return: The hash
        """
        return path.replace(PATCH_FILE_EXTENSION, ""). \
            replace(self.log_directory, ""). \
            lstrip("/"). \
            lstrip("\\")<fim_middle>
null
LINE_COMMENT
complete_current_header_empty_completion
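The row above masks a span inside a line comment of _get_log_directory (block_type LINE_COMMENT) under an empty-completion scenario, which is presumably why targets is null. A minimal sketch of how such a row can be assembled from a source file, assuming the sentinel strings <filename>, <fim_prefix>, <fim_suffix> and <fim_middle> visible in the inputs column; the build_fim_row helper is hypothetical, not this dataset's actual preprocessing code:

def build_fim_row(filename: str, source: str, span_start: int, span_end: int) -> dict:
    # Split the file around the masked span; the model sees prefix + suffix
    # and is trained to produce the span after <fim_middle>.
    prefix = source[:span_start]
    suffix = source[span_end:]
    return {
        "inputs": f"<filename>{filename}<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>",
        "targets": source[span_start:span_end],
    }

For an empty-completion scenario like this one, span_start == span_end, so targets degenerates to the empty string.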
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try<fim_suffix>: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
TRY
complete_current_header_empty_completion
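This row's block_type is TRY: the <fim_suffix> sits immediately after a try keyword in Validator.instantiate's base-type fallback. A self-contained sketch of the coercion idea that fallback implements (the coerce_int name is illustrative, not part of tanuki):

def coerce_int(data):
    # int("3.0") raises ValueError, so route the string through float first.
    try:
        return int(data)
    except (ValueError, TypeError):
        return int(float(data))

assert coerce_int("3.0") == 3

Note that the cell's own float branch also returns int(float(data)), so a float target is silently truncated to an integer; that reads as a copy-paste slip in the recorded source rather than intended behaviour.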
<filename>tanuki_py/src/tanuki/trackers/abc_buffered_logger.py<fim_prefix>import json from abc import abstractmethod from typing import Dict, Any, Literal from tanuki.bloom_filter import BloomFilter from tanuki.constants import EXPECTED_ITEMS, FALSE_POSITIVE_RATE, ALIGN_FILE_EXTENSION, \ POSITIVE_FILE_EXTENSION, NEGATIVE_FILE_EXTENSION, PATCH_FILE_EXTENSION from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.models.function_config import FunctionConfig # PATCH_FILE_EXTENSION_TYPE = Literal[".patches"] # ALIGN_FILE_EXTENSION_TYPE = Literal[".alignments"] # POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".positive_embedding"] # NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".negative_embedding"] # # PATCH_FILE_EXTENSION: PATCH_FILE_EXTENSION_TYPE = ".patches" # ALIGN_FILE_EXTENSION: ALIGN_FILE_EXTENSION_TYPE = ".alignments" # POSITIVE_EMBEDDING_FILE_EXTENSION: POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_positives" # NEGATIVE_EMBEDDING_FILE_EXTENSION: NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_negatives" # # EXPECTED_ITEMS = 10000 # FALSE_POSITIVE_RATE = 0.01 # LIB_NAME = "tanuki" # ENVVAR = "TANUKI_LOG_DIR" class ABCBufferedLogger(DatasetWorker): def __init__(self, name, level=15): self.buffers = {} self.mapped_files = {} self.miss_count = 0 self.hit_count = 0 self.flush_limit = {} self.buffer_rolling_size = {} self.write_count = 0 self.write_limit = 1000 # Save the Bloom filter every 1000 writes super().__init__(name, level) self.bloom_filter = self.create_bloom_filter() self.load_bloom_filter() self.default_function_config = FunctionConfig() @abstractmethod def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: """ Get an instance of the bloom filter persistence provider. This exposes some persistent file storage, that must support reading and writing raw byte streams. :return: """ pass @abstractmethod def load_existing_datasets(self) -> Dict[str, Dict[str, Any]]: """ Get the lengths of all datasets backing the registered functions, including aligns. :return: """ pass @abstractmethod def ensure_persistence_location_exists(self): """ Ensure that the place we will be writing to actually exists. If not, create it. """ pass @abstractmethod def get_patch_location_for_function(self, func_hash, extension="") -> str: """ Get the address of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ pass @abstractmethod def write(self, path, data, mode="a") -> None: pass @abstractmethod def read(self, path) -> str: pass @abstractmethod def get_hash_from_path(self, path) -> str: pass @abstractmethod def does_object_exist(self, path) -> bool: pass def create_bloom_filter(self): bloom_filter_persistence = self.get_bloom_filter_persistence() bloom_filter = BloomFilter( bloom_filter_persistence, expected_number_of_elements=EXPECTED_ITEMS, false_positive_probability=FALSE_POSITIVE_RATE) return bloom_filter def load_bloom_filter(self): try: self.bloom_filter.load() except FileNotFoundError: self.debug("No Bloom filter found. 
Creating a new one.") def write_symbolic_align_call(self, func_hash, example) -> bool: log_file_path = self.get_patch_location_for_function(func_hash, extension=ALIGN_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def write_embeddable_align_call(self, func_hash, example, positive=True) -> bool: if positive: log_file_path = self.get_patch_location_for_function(func_hash, extension=POSITIVE_FILE_EXTENSION) else: log_file_path = self.get_patch_location_for_function(func_hash, extension=NEGATIVE_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def log_embeddable_align(self, func_hash, example, positive=True, **kws): """ Log a contrastive function invocation Args: func_hash: A string representation of the function signature and input parameters example: The example object positive: Whether the example is positive or negative **kws: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_embeddable_align_call(func_hash, example, positive) return successfully_saved, new_datapoint def log_symbolic_align(self, func_hash, *args, **kws): """ Log an align function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param args: Example objects :param kws: :return: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint example = args[0] # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_symbolic_align_call(func_hash, example) return successfully_saved, new_datapoint def log_symbolic_patch(self, func_hash, example): """ Log a patched function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param example: :return: """ if not isinstance(func_hash, str): func_hash = str(func_hash) example_data = str(example.__dict__).encode('utf-8') + b'\n' bloom_filter_representation = func_hash + '_' + example_data.decode('utf-8') # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): self.hit_count += 1 return {} self.miss_count += 1 # Add to Bloom Filter self.bloom_filter.add(bloom_filter_representation) try: self.ensure_persistence_location_exists() except Exception as e: return {} log_file_path = self.get_patch_location_for_function(func_hash, extension=PATCH_FILE_EXTENSION) if log_file_path not in self.buffers: self.buffers[log_file_path] = 
bytearray() if log_file_path not in self.flush_limit: self.flush_limit[log_file_path] = 1 self.buffers[log_file_path].extend(example_data) self.write_count += 1 if log_file_path not in self.buffer_rolling_size: self.buffer_rolling_size[log_file_path] = 1 else: self.buffer_rolling_size[log_file_path] += 1 if self.write_count >= self.write_limit: written_datapoints = self.flush() self.save_bloom_filter() self.write_count = 0 # Reset counter return written_datapoints if len(self.buffers[log_file_path]) >= min(self.flush_limit[log_file_path], 4096): # Flush after reaching 4KB written_datapoints = {} try<fim_suffix>: self.write(log_file_path, self.buffers[log_file_path], mode="a+b") # update buffers written_datapoints[func_hash] = self.buffer_rolling_size[log_file_path] self.buffers[log_file_path].clear() self.buffer_rolling_size[log_file_path] = 0 self.flush_limit[log_file_path] = 2 * self.flush_limit[log_file_path] self.save_bloom_filter() except Exception as e: pass return written_datapoints return {} def save_bloom_filter(self): try: self.bloom_filter.save() except Exception as e: self.warning("Could not save Bloom filter: {}".format(e)) def flush(self): # get log directory written_datapoints = {} for log_file_path, buffer in self.buffers.items(): if len(buffer) > 0: try: self.write(log_file_path, buffer, mode="a+b") written_datapoints[self.get_hash_from_path(log_file_path)] = self.buffer_rolling_size[log_file_path] self.buffer_rolling_size[log_file_path] = 0 buffer.clear() except Exception as e: pass return written_datapoints def load_function_config(self, func_hash): """ Get the config file for the function. Uses the message and log directory Config file has to be in .json """ default = False try: # try to get the config from the disk. If inaccessible, create a new default one self.ensure_persistence_location_exists() log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" if not self.does_object_exist(config_path): function_config = self.default_function_config default = True func_config_dict = function_config.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) else: function_config = FunctionConfig().load_from_dict(self.read_json(config_path)) except Exception as e: function_config = self.default_function_config default = True return function_config, default def update_function_config(self, func_hash, config_to_be_saved): """ Save the config file """ log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" try: func_config_dict = config_to_be_saved.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) except Exception as e: pass def write_json(self, path, data): self.write(path, json.dumps(data)) def read_json(self, path): return json.loads(self.read(path)) <fim_middle>
null
TRY
complete_current_header_empty_completion
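The masked try in this row guards the buffered write in log_symbolic_patch; the dedup gate in front of it is a Bloom filter keyed on func_hash + '_' + example. A toy illustration of that probabilistic-membership idea (TinyBloom is not tanuki's BloomFilter class; the size and hash count are arbitrary):

import hashlib

class TinyBloom:
    def __init__(self, size: int = 8192, hashes: int = 3):
        self.size, self.hashes = size, hashes
        self.bits = bytearray(size // 8)

    def _positions(self, item: str):
        # Derive k bit positions from salted SHA-256 digests.
        for i in range(self.hashes):
            digest = hashlib.sha256(f"{i}:{item}".encode()).digest()
            yield int.from_bytes(digest[:4], "big") % self.size

    def add(self, item: str) -> None:
        for p in self._positions(item):
            self.bits[p // 8] |= 1 << (p % 8)

    def lookup(self, item: str) -> bool:
        # True means "probably seen before"; False is always exact.
        return all(self.bits[p // 8] & (1 << (p % 8)) for p in self._positions(item))

A lookup hit makes the logger increment hit_count and skip the datapoint, trading a bounded false-positive rate (FALSE_POSITIVE_RATE in the cell) for constant memory per function.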
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): t<fim_suffix>ry: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
TRY
complete_current_header_empty_completion
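The pydantic branch shown in this row derives required fields from annotations: anything not typed as a Union that includes None counts as required. A sketch of that heuristic in isolation (required_fields and the User class are illustrative, not part of tanuki):

import typing
from typing import Optional, Union

def required_fields(cls) -> list:
    out = []
    for name, ann in cls.__annotations__.items():
        optional = typing.get_origin(ann) is Union and type(None) in typing.get_args(ann)
        if not optional:
            out.append(name)
    return out

class User:
    name: str
    nickname: Optional[str] = None

assert required_fields(User) == ["name"]

This matches the cell's required_fields comprehension, including its limitation: a field with a non-None default but a plain annotation is still treated as required.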
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try<fim_suffix>: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
TRY
complete_current_header_empty_completion
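Each record above packs an entire source file into a single inputs string using fill-in-the-middle (FIM) markers: the text between <fim_prefix> and <fim_suffix> is the code before the masked span, the text after <fim_suffix> is the code following it, and the trailing <fim_middle> cues a model to emit the masked span itself. Below is a minimal sketch of recovering those parts from a row; the helper name and the assumption that each marker occurs exactly once per row are ours, not part of the dataset.

from typing import NamedTuple

class FimExample(NamedTuple):
    filename: str
    prefix: str   # source text before the masked span
    suffix: str   # source text after the masked span

def parse_fim_row(inputs: str) -> FimExample:
    # Rows look like "<filename>path<fim_prefix>...<fim_suffix>...<fim_middle>";
    # splitting on the markers recovers the pieces a model is conditioned on.
    header, rest = inputs.split("<fim_prefix>", 1)
    filename = header.replace("<filename>", "").strip()
    prefix, rest = rest.split("<fim_suffix>", 1)
    suffix = rest.split("<fim_middle>", 1)[0]
    return FimExample(filename=filename, prefix=prefix, suffix=suffix)

A generated completion is consistent with a row when prefix + completion + suffix reproduces the original file.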
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: tr<fim_suffix>y: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
TRY
complete_current_header_empty_completion
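The file being masked in every row, tanuki_py/src/tanuki/validator.py, implements recursive runtime type checking against typing annotations: Literal is checked by membership in its arguments, Union by trying each alternative, and parameterized containers by validating every element. A hedged usage sketch follows; the import path is inferred from the row's filename and is an assumption on our part.

from typing import List, Literal, Union
from tanuki.validator import Validator  # import path inferred from the filename; an assumption

validator = Validator()

# Base and parameterized container types are validated recursively.
assert validator.check_type(3, int)
assert validator.check_type([1, 2, 3], List[int])
assert not validator.check_type([1, "two"], List[int])

# Literal checks membership in its arguments; Union accepts any alternative.
assert validator.check_type("red", Literal["red", "green"])
assert validator.check_type(1.5, Union[int, float])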
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it tr<fim_suffix>y: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
TRY
complete_current_header_empty_completion
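Besides validation, the masked file's instantiate method converts JSON-compatible data into typed objects; for a dataclass it filters the input dict down to the declared fields and recursively instantiates each value against its type hint. A small sketch under the same assumed import path; the Person dataclass is our own example, not part of the library.

import json
from dataclasses import dataclass
from typing import List
from tanuki.validator import Validator  # assumed import path, as above

@dataclass
class Person:
    name: str
    scores: List[int]

validator = Validator()
data = json.loads('{"name": "Ada", "scores": [1, 2, 3], "extra": true}')

# Unknown keys such as "extra" are dropped; declared fields are instantiated
# recursively against their annotations (here List[int]).
person = validator.instantiate(data, Person)
assert person == Person(name="Ada", scores=[1, 2, 3])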
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime t<fim_suffix>ry: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict):
    if inspect.isclass(target_type) and not self.is_base_type(target_type):
        # Special handling for dataclasses
        if is_dataclass(target_type):
            fields = [f.name for f in dataclasses.fields(target_type)]
            type_hints = get_type_hints(target_type)
            filtered_data = {k: self.instantiate(v, type_hints.get(k, Any))
                             for k, v in data.items() if k in fields}
            return target_type(**filtered_data)
        # Special handling for Pydantic models
        if issubclass(target_type, BaseModel):
            # instantiate the sub attributes
            for attr, attr_type in target_type.__annotations__.items():
                if attr in data:
                    data[attr] = self.instantiate(data[attr], attr_type)
            try:
                return target_type.model_validate(data)
            except AttributeError:
                # backwards compatibility with pydantic < 2
                return target_type.parse_obj(data)
        # For general classes, attempt instantiation
        try:
            return target_type(**data)
        except TypeError:
            raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.")
    # Handle dictionary-like types
    # Check if the target type is or inherits from defaultdict
    if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)):
        key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any)
        instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type)
                              for k, v in data.items()}
        # defaultdict needs a default factory; `int` is used here for simplicity,
        # and callers may want to adapt it to their needs.
        return defaultdict(int, instantiated_items)
    # Handle other dict subclasses such as OrderedDict
    # (the isclass check ensures origin has an __mro__ attribute)
    elif inspect.isclass(origin) and any(issubclass(base, dict) for base in origin.__mro__):
        key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any)
        instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type)
                              for k, v in data.items()}
        return origin(instantiated_items)
    # Handle other dictionary-like types
    elif origin is dict or self._is_subclass_of_generic(origin, dict):
        key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any)
        instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type)
                             for k, v in data.items()}
        # If the target_type is a non-generic subclass of dict, return an instance of target_type
        if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type):
            return target_type(instantiated_dict)
        else:
            return dict(instantiated_dict)
# Tuples aren't supported in JSONable types, so we look for lists instead
if isinstance(data, list):
    try:
        # If the origin or target type is a list-like type, or if it implements
        # a list-like collections type, e.g. Sequence[int]
        if origin is list or self._is_subclass_of_generic(origin, list):
            base, item_types = self._find_generic_base_and_args(target_type)
            item_type = item_types[0] if item_types else Any
            instantiated_items = []
            for item in data:
                # For each item, validate and instantiate it
                try:
                    instantiated_item = self.instantiate(item, item_type)
                except ValueError:
                    raise TypeError(
                        f"Item of type {type(item).__name__} does not match expected type {item_type}.")
                safe = self.check_type(instantiated_item, item_type)
                if not safe:
                    raise TypeError(
                        f"Item of type {type(item).__name__} does not match expected type {item_type}.")
                instantiated_items.append(instantiated_item)
            # If target_type is a non-generic subclass of list, return an instance of target_type
            if self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type):
                return target_type(instantiated_items)
            return instantiated_items
        # Handle tuples
        if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)):
            base, item_types = self._find_generic_base_and_args(target_type)
            instantiated_items = []
            # If there are no subscripted types, assume Any
            if not item_types:
                item_types = (Any,) * len(data)
            for i, item in enumerate(data):
                # For each item, validate and instantiate it
                instantiated_item = self.instantiate(item, item_types[i])
                instantiated_items.append(instantiated_item)
                # If the instantiated item does not match the expected type, raise an exception
                _type = item_types[i]
                if _type is not Any and not isinstance(instantiated_item, _type):
                    raise TypeError(
                        f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i]}.")
            # Convert the list of instantiated items to a tuple
            instantiated_tuple = tuple(instantiated_items)
            # If target_type is a subclass of tuple, return an instance of target_type
            if self._is_subclass_of_generic(target_type, tuple):
                return target_type(instantiated_tuple)
            return instantiated_tuple
        # Handle sets
        if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)):
            base, item_types = self._find_generic_base_and_args(target_type)
            item_type = item_types[0] if item_types else Any
            instantiated_items = set()
            for item in data:
                # For each item, validate and instantiate it
                instantiated_item = self.instantiate(item, item_type)
                instantiated_items.add(instantiated_item)
                # If the instantiated item does not match the expected type, raise an exception
                if item_type is not Any and not isinstance(instantiated_item, item_type):
                    raise TypeError(
                        f"Item of type {type(item).__name__} does not match expected type {item_type}.")
            # If target_type is a subclass of set, return an instance of target_type
            if self._is_subclass_of_generic(target_type, set):
                return target_type(instantiated_items)
            return instantiated_items
        # Handle deques
        if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)):
            item_type = get_args(target_type)[0] if get_args(target_type) else Any
            return deque(self.instantiate(item, item_type) for item in data)
        # Handle frozensets
        if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)):
            item_type = get_args(target_type)[0] if get_args(target_type) else Any
            return frozenset(self.instantiate(item, item_type) for item in data)
    except TypeError as e:
        print(e)
        raise TypeError(f"Failed to instantiate {target_type} from list. {e}")
# If none of the above, return the data as-is
return data <fim_middle>
null
TRY
complete_current_header_empty_completion
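The row above exercises the collection branches of Validator.instantiate. As a minimal standalone sketch of the same technique (the coerce helper below is hypothetical, not part of the library), a parameterized annotation is split into its origin and type arguments with typing.get_origin/get_args, and the JSON-decoded payload is rebuilt recursively:

from typing import Any, Dict, List, get_args, get_origin

def coerce(data: Any, target: Any) -> Any:
    """Recursively rebuild `data` as an instance of `target` (sketch)."""
    if target is Any:
        return data
    origin = get_origin(target) or target
    args = get_args(target)
    if origin is list:
        item_type = args[0] if args else Any
        return [coerce(item, item_type) for item in data]
    if origin is dict:
        key_type, value_type = args if args else (Any, Any)
        return {coerce(k, key_type): coerce(v, value_type) for k, v in data.items()}
    return origin(data)  # base types such as int, float, str

assert coerce(["1", "2"], List[int]) == [1, 2]
assert coerce({"a": "0.5"}, Dict[str, float]) == {"a": 0.5}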
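The dataclass branch of instantiate filters the incoming mapping down to declared fields before calling the constructor, so unknown keys in the payload cannot break instantiation. A self-contained sketch of that idea (Point and dataclass_from_dict are illustrative names; the real method additionally coerces each value via its get_type_hints annotation):

import dataclasses

@dataclasses.dataclass
class Point:
    x: int
    y: int

def dataclass_from_dict(cls, data: dict):
    # Keep only keys that are declared dataclass fields.
    field_names = {f.name for f in dataclasses.fields(cls)}
    kwargs = {k: v for k, v in data.items() if k in field_names}
    return cls(**kwargs)

assert dataclass_from_dict(Point, {"x": 1, "y": 2, "junk": 3}) == Point(1, 2)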
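The Pydantic branch calls model_validate and falls back to parse_obj for pydantic < 2. Isolated, the compatibility pattern looks like this (sketch, assuming an ordinary BaseModel subclass):

from pydantic import BaseModel

class User(BaseModel):
    name: str
    age: int

def validate_model(model_cls, data: dict):
    try:
        # pydantic >= 2 entry point
        return model_cls.model_validate(data)
    except AttributeError:
        # backwards compatibility with pydantic < 2
        return model_cls.parse_obj(data)

user = validate_model(User, {"name": "a", "age": "3"})  # age coerced to 3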
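instantiate handles Union annotations by attempting each member type in declaration order and returning the first success. A reduced sketch of that strategy (the real method recurses into itself rather than calling the member's constructor directly, as simplified here):

from typing import Union, get_args

def coerce_union(data, union_type):
    for member in get_args(union_type):
        if member is type(None):
            if data is None:
                return None
            continue
        try:
            return member(data)  # simplified; the source recurses into instantiate()
        except (TypeError, ValueError):
            continue
    raise TypeError(f"Failed to instantiate {union_type} from provided data.")

assert coerce_union("3", Union[int, str]) == 3
assert coerce_union(None, Union[int, None]) is None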
<filename>tanuki_py/src/tanuki/trackers/abc_buffered_logger.py<fim_prefix>import json from abc import abstractmethod from typing import Dict, Any, Literal from tanuki.bloom_filter import BloomFilter from tanuki.constants import EXPECTED_ITEMS, FALSE_POSITIVE_RATE, ALIGN_FILE_EXTENSION, \ POSITIVE_FILE_EXTENSION, NEGATIVE_FILE_EXTENSION, PATCH_FILE_EXTENSION from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence from tanuki.trackers.dataset_worker import DatasetWorker from tanuki.models.function_config import FunctionConfig # PATCH_FILE_EXTENSION_TYPE = Literal[".patches"] # ALIGN_FILE_EXTENSION_TYPE = Literal[".alignments"] # POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".positive_embedding"] # NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = Literal[".negative_embedding"] # # PATCH_FILE_EXTENSION: PATCH_FILE_EXTENSION_TYPE = ".patches" # ALIGN_FILE_EXTENSION: ALIGN_FILE_EXTENSION_TYPE = ".alignments" # POSITIVE_EMBEDDING_FILE_EXTENSION: POSITIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_positives" # NEGATIVE_EMBEDDING_FILE_EXTENSION: NEGATIVE_EMBEDDING_FILE_EXTENSION_TYPE = ".contrastive_negatives" # # EXPECTED_ITEMS = 10000 # FALSE_POSITIVE_RATE = 0.01 # LIB_NAME = "tanuki" # ENVVAR = "TANUKI_LOG_DIR" class ABCBufferedLogger(DatasetWorker): def __init__(self, name, level=15): self.buffers = {} self.mapped_files = {} self.miss_count = 0 self.hit_count = 0 self.flush_limit = {} self.buffer_rolling_size = {} self.write_count = 0 self.write_limit = 1000 # Save the Bloom filter every 1000 writes super().__init__(name, level) self.bloom_filter = self.create_bloom_filter() self.load_bloom_filter() self.default_function_config = FunctionConfig() @abstractmethod def get_bloom_filter_persistence(self) -> IBloomFilterPersistence: """ Get an instance of the bloom filter persistence provider. This exposes some persistent file storage, that must support reading and writing raw byte streams. :return: """ pass @abstractmethod def load_existing_datasets(self) -> Dict[str, Dict[str, Any]]: """ Get the lengths of all datasets backing the registered functions, including aligns. :return: """ pass @abstractmethod def ensure_persistence_location_exists(self): """ Ensure that the place we will be writing to actually exists. If not, create it. """ pass @abstractmethod def get_patch_location_for_function(self, func_hash, extension="") -> str: """ Get the address of the function patch file. :param func_hash: The representation of the function :param extension: Whether this is a patch or an alignment :return: """ pass @abstractmethod def write(self, path, data, mode="a") -> None: pass @abstractmethod def read(self, path) -> str: pass @abstractmethod def get_hash_from_path(self, path) -> str: pass @abstractmethod def does_object_exist(self, path) -> bool: pass def create_bloom_filter(self): bloom_filter_persistence = self.get_bloom_filter_persistence() bloom_filter = BloomFilter( bloom_filter_persistence, expected_number_of_elements=EXPECTED_ITEMS, false_positive_probability=FALSE_POSITIVE_RATE) return bloom_filter def load_bloom_filter(self): try: self.bloom_filter.load() except FileNotFoundError: self.debug("No Bloom filter found. 
Creating a new one.") def write_symbolic_align_call(self, func_hash, example) -> bool: log_file_path = self.get_patch_location_for_function(func_hash, extension=ALIGN_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def write_embeddable_align_call(self, func_hash, example, positive=True) -> bool: if positive: log_file_path = self.get_patch_location_for_function(func_hash, extension=POSITIVE_FILE_EXTENSION) else: log_file_path = self.get_patch_location_for_function(func_hash, extension=NEGATIVE_FILE_EXTENSION) try: # Now, write to the file dumpable_object = str(example.__dict__) self.write(log_file_path, dumpable_object + "\n", mode="a") return True except Exception as e: return False def log_embeddable_align(self, func_hash, example, positive=True, **kws): """ Log a contrastive function invocation Args: func_hash: A string representation of the function signature and input parameters example: The example object positive: Whether the example is positive or negative **kws: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_embeddable_align_call(func_hash, example, positive) return successfully_saved, new_datapoint def log_symbolic_align(self, func_hash, *args, **kws): """ Log an align function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param args: Example objects :param kws: :return: """ successfully_saved, new_datapoint = False, False try: self.ensure_persistence_location_exists() except Exception as e: return successfully_saved, new_datapoint example = args[0] # prepend the function hash to the example bloom_filter_representation = func_hash + '_' + str(example.__dict__) + '\n' # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): return successfully_saved, new_datapoint new_datapoint = True # add to bloom filter self.bloom_filter.add(bloom_filter_representation) self.save_bloom_filter() successfully_saved = self.write_symbolic_align_call(func_hash, example) return successfully_saved, new_datapoint def log_symbolic_patch(self, func_hash, example): """ Log a patched function invocation to the file system :param func_hash: A string representation of the function signature and input parameters :param example: :return: """ if not isinstance(func_hash, str): func_hash = str(func_hash) example_data = str(example.__dict__).encode('utf-8') + b'\n' bloom_filter_representation = func_hash + '_' + example_data.decode('utf-8') # Check Bloom Filter if self.bloom_filter.lookup(bloom_filter_representation): self.hit_count += 1 return {} self.miss_count += 1 # Add to Bloom Filter self.bloom_filter.add(bloom_filter_representation) t<fim_suffix>ry: self.ensure_persistence_location_exists() except Exception as e: return {} log_file_path = self.get_patch_location_for_function(func_hash, extension=PATCH_FILE_EXTENSION) if log_file_path not in self.buffers: 
self.buffers[log_file_path] = bytearray() if log_file_path not in self.flush_limit: self.flush_limit[log_file_path] = 1 self.buffers[log_file_path].extend(example_data) self.write_count += 1 if log_file_path not in self.buffer_rolling_size: self.buffer_rolling_size[log_file_path] = 1 else: self.buffer_rolling_size[log_file_path] += 1 if self.write_count >= self.write_limit: written_datapoints = self.flush() self.save_bloom_filter() self.write_count = 0 # Reset counter return written_datapoints if len(self.buffers[log_file_path]) >= min(self.flush_limit[log_file_path], 4096): # Flush after reaching 4KB written_datapoints = {} try: self.write(log_file_path, self.buffers[log_file_path], mode="a+b") # update buffers written_datapoints[func_hash] = self.buffer_rolling_size[log_file_path] self.buffers[log_file_path].clear() self.buffer_rolling_size[log_file_path] = 0 self.flush_limit[log_file_path] = 2 * self.flush_limit[log_file_path] self.save_bloom_filter() except Exception as e: pass return written_datapoints return {} def save_bloom_filter(self): try: self.bloom_filter.save() except Exception as e: self.warning("Could not save Bloom filter: {}".format(e)) def flush(self): # get log directory written_datapoints = {} for log_file_path, buffer in self.buffers.items(): if len(buffer) > 0: try: self.write(log_file_path, buffer, mode="a+b") written_datapoints[self.get_hash_from_path(log_file_path)] = self.buffer_rolling_size[log_file_path] self.buffer_rolling_size[log_file_path] = 0 buffer.clear() except Exception as e: pass return written_datapoints def load_function_config(self, func_hash): """ Get the config file for the function. Uses the message and log directory Config file has to be in .json """ default = False try: # try to get the config from the disk. If inaccessible, create a new default one self.ensure_persistence_location_exists() log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" if not self.does_object_exist(config_path): function_config = self.default_function_config default = True func_config_dict = function_config.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) else: function_config = FunctionConfig().load_from_dict(self.read_json(config_path)) except Exception as e: function_config = self.default_function_config default = True return function_config, default def update_function_config(self, func_hash, config_to_be_saved): """ Save the config file """ log_file_path = self.get_patch_location_for_function(func_hash) config_path = f"{log_file_path}.json" try: func_config_dict = config_to_be_saved.to_dict() # remove teacher_models from the config func_config_dict.pop("teacher_models") self.write_json(config_path, func_config_dict) except Exception as e: pass def write_json(self, path, data): self.write(path, json.dumps(data)) def read_json(self, path): return json.loads(self.read(path)) <fim_middle>
null
TRY
complete_current_header_empty_completion
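The logger row above gates every datapoint behind a Bloom filter, so probable duplicates are dropped before any disk I/O, and it batches writes per file, doubling the flush threshold after each flush to amortize write costs. A minimal sketch of that shape, with an in-memory set standing in for the Bloom filter (a real Bloom filter can false-positive, which here only means occasionally dropping a genuinely new datapoint):

class BufferedLog:
    """Sketch: dedupe via a membership test, buffer bytes per path,
    and flush once the buffer reaches a growing threshold (capped at 4 KiB)."""
    def __init__(self, write_fn):
        self.seen = set()          # stand-in for the Bloom filter
        self.buffers = {}
        self.flush_limit = {}
        self.write_fn = write_fn   # e.g. appends bytes to a file at `path`

    def log(self, path: str, record: bytes) -> None:
        key = path + record.decode("utf-8", errors="replace")
        if key in self.seen:       # probable duplicate: skip entirely
            return
        self.seen.add(key)
        buf = self.buffers.setdefault(path, bytearray())
        self.flush_limit.setdefault(path, 1)
        buf.extend(record)
        if len(buf) >= min(self.flush_limit[path], 4096):
            self.write_fn(path, bytes(buf))
            buf.clear()
            self.flush_limit[path] *= 2   # back off: flush less often over time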
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
targets: null
block_type: CATCH
scenario: complete_current_header_empty_completion
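For orientation, here is a minimal usage sketch of the Validator class quoted in the row above. It assumes the module is importable as tanuki.validator (a guess based on the row's <filename> field), and the Point dataclass is a hypothetical stand-in for any user-defined type; only methods defined in the quoted source are exercised.

from dataclasses import dataclass
from typing import Dict, List

from tanuki.validator import Validator  # assumed import path, inferred from the <filename> field


@dataclass
class Point:  # hypothetical user-defined type, for illustration only
    x: int
    y: int


validator = Validator()

# check_type validates an already-deserialized value against a type definition
assert validator.check_type([1, 2, 3], List[int])
assert not validator.check_type([1, "a"], List[int])

# validate_output parses a JSON string first, then delegates to check_type
assert validator.validate_output('{"a": 1, "b": 2}', Dict[str, int])
assert not validator.validate_output('not json', Dict[str, int])

# instantiate converts JSON-compatible data into an instance of the target type
assert validator.instantiate([1, 2, 3], List[int]) == [1, 2, 3]
assert validator.instantiate({"x": 1, "y": 2}, Point) == Point(x=1, y=2)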
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) e<fim_suffix>xcept: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueEr<fim_suffix>ror, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True e<fim_suffix>xcept Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or 
not self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) ex<fim_suffix>cept (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as<fim_suffix> e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeErro<fim_suffix>r: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
<filename>tanuki_py/src/tanuki/language_models/openai_api.py<fim_prefix>from typing import List import logging import time # import abstract base class from openai import OpenAI from openai.types import CreateEmbeddingResponse from openai.types.fine_tuning import FineTuningJob from tanuki.language_models.llm_finetune_api_abc import LLM_Finetune_API from tanuki.models.embedding import Embedding from tanuki.language_models.embedding_api_abc import Embedding_API from tanuki.language_models.llm_api_abc import LLM_API import os from tanuki.constants import DEFAULT_DISTILLED_MODEL_NAME from tanuki.language_models.llm_configs.openai_config import OpenAIConfig from tanuki.models.finetune_job import FinetuneJob import copy OPENAI_URL = "https://api.openai.com/v1/chat/completions" import requests LLM_GENERATION_PARAMETERS = ["temperature", "top_p", "max_new_tokens", "frequency_penalty", "presence_penalty"] class OpenAI_API(LLM_API, Embedding_API, LLM_Finetune_API): def __init__(self) -> None: # initialise the abstract base class super().__init__() self.api_key = os.environ.get("OPENAI_API_KEY") self.client = None def embed(self, texts: List[str], model: OpenAIConfig, **kwargs) -> List[Embedding]: """ Generate embeddings for the provided texts using the specified OpenAI model. Lightweight wrapper over the OpenAI client. :param texts: A list of texts to embed. :param model: The model to use for embeddings. :return: A list of embeddings. """ self.check_api_key() try: response: CreateEmbeddingResponse = self.client.embeddings.create( input=texts, model=model.model_name, **kwargs ) assert response.object == "list" assert len(response.data) == len(texts) embeddings = [] for embedding_response in response.data: assert embedding_response.object == "embedding" embeddings.append(Embedding(embedding_response.embedding)) return embeddings except Exception as e: print(f"An error occurred: {e}") return None def generate(self, model, system_message, prompt, **kwargs): """ The main generation function, given the args, kwargs, function_modeler, function description and model type, generate a response Args model (OpenAIConfig): The model to use for generation. system_message (str): The system message to use for generation. prompt (str): The prompt to use for generation. kwargs (dict): Additional generation parameters. 
""" self.check_api_key() temperature = kwargs.get("temperature", 0.1) top_p = kwargs.get("top_p", 1) frequency_penalty = kwargs.get("frequency_penalty", 0) presence_penalty = kwargs.get("presence_penalty", 0) max_new_tokens = kwargs.get("max_new_tokens") # check if there are any generation parameters that are not supported unsupported_params = [param for param in kwargs.keys() if param not in LLM_GENERATION_PARAMETERS] if len(unsupported_params) > 0: # log warning logging.warning(f"Unused generation parameters sent as input: {unsupported_params}."\ f"For OpenAI, only the following parameters are supported: {LLM_GENERATION_PARAMETERS}") params = { "model": model.model_name, "temperature": temperature, "max_tokens": max_new_tokens, "top_p": top_p, "frequency_penalty": frequency_penalty, "presence_penalty": presence_penalty, } if model.parsing_helper_tokens["start_token"]: prompt += model.parsing_helper_tokens["start_token"] messages = [ { "role": "system", "content": system_message }, { "role": "user", "content": prompt } ] params["messages"] = messages counter = 0 choice = None # initiate response so exception logic doesnt error out when checking for error in response response = {} while counter <= 5: try: openai_headers = { "Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json", } response = requests.post( OPENAI_URL, headers=openai_headers, json=params, timeout=50 ) response = response.json() choice = response["choices"][0]["message"]["content"].strip("'") break except Excepti<fim_suffix>on as e: if ("error" in response and "code" in response["error"] and response["error"]["code"] == 'invalid_api_key'): raise Exception(f"The supplied OpenAI API key {self.api_key} is invalid") if counter == 5: raise Exception(f"OpenAI API failed to generate a response: {e}") counter += 1 time.sleep(2 ** counter) continue if not choice: raise Exception("OpenAI API failed to generate a response") if model.parsing_helper_tokens["end_token"]: # remove the end token from the choice choice = choice.split(model.parsing_helper_tokens["end_token"])[0] # check if starting token is in choice if model.parsing_helper_tokens["start_token"] in choice: # remove the starting token from the choice choice = choice.split(model.parsing_helper_tokens["start_token"])[-1] return choice def list_finetuned(self, model_config, limit=100, **kwargs) -> List[FinetuneJob]: self.check_api_key() response = self.client.fine_tuning.jobs.list(limit=limit) jobs = [] for job in response.data: finetune_job = self.create_finetune_job(job, model_config) jobs.append(finetune_job) return jobs def get_finetuned(self, job_id, model_config: OpenAIConfig) -> FinetuneJob: self.check_api_key() response = self.client.fine_tuning.jobs.retrieve(job_id) finetune_job = self.create_finetune_job(response, model_config= model_config) return finetune_job def finetune(self, file, suffix, model_config, **kwargs) -> FinetuneJob: self.check_api_key() # Use the stream as a file response = self.client.files.create(file=file, purpose='fine-tune') training_file_id = response.id if not model_config.base_model_for_sft: model_config.base_model_for_sft = DEFAULT_DISTILLED_MODEL_NAME # submit the finetuning job finetuning_response: FineTuningJob = self.client.fine_tuning.jobs.create(training_file=training_file_id, model=model_config.base_model_for_sft, suffix=suffix) finetune_job = self.create_finetune_job(finetuning_response, model_config) return finetune_job def create_finetune_job(self, response: FineTuningJob, model_config: OpenAIConfig) -> 
FinetuneJob: finetuned_model_config = copy.deepcopy(model_config) finetuned_model_config.model_name = response.fine_tuned_model finetune_job = FinetuneJob(response.id, response.status, finetuned_model_config) return finetune_job def check_api_key(self): # check if api key is not none if not self.api_key: # try to get the api key from the environment, maybe it has been set later self.api_key = os.getenv("OPENAI_API_KEY") if not self.api_key: raise ValueError("OpenAI API key is not set") if not self.client: self.client = OpenAI(api_key=self.api_key) <fim_middle>
null
CATCH
complete_current_header_empty_completion
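The OpenAI_API.generate method in the record above retries the chat-completions request up to five times, sleeping 2 ** counter seconds between attempts. Stripped of the API specifics, that is plain exponential backoff; a minimal sketch of the same pattern (the retry_with_backoff name and the call placeholder are mine):

    import time

    def retry_with_backoff(call, max_retries=5):
        for attempt in range(max_retries + 1):
            try:
                return call()  # e.g. the requests.post(...) in the record above
            except Exception:
                if attempt == max_retries:
                    raise
                # 2, 4, 8, ... seconds, mirroring time.sleep(2 ** counter)
                time.sleep(2 ** (attempt + 1))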
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError<fim_suffix>: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
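The masked span in this record sits inside Validator.instantiate's list branch, which converts each element recursively and then re-checks it with check_type. Based on a reading of that code, a parametrised list annotation should coerce JSON-typed elements — a usage sketch, assuming the Validator class exactly as dumped:

    from typing import List

    v = Validator()
    # Each element is passed through instantiate(item, int), so strings and
    # floats that survive int(...) are coerced; expected result: [1, 2, 3].
    print(v.instantiate([1, "2", 3.0], List[int]))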
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except <fim_suffix>TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if 
self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
CATCH
complete_current_header_empty_completion
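Here the masked span is the except TypeError handler guarding general-class instantiation from a dict. The dataclass branch just above it filters the payload down to declared fields and recurses on each value, so extra keys are silently dropped — a usage sketch, assuming the Validator class as dumped (Point is my example type, not from the source):

    from dataclasses import dataclass

    @dataclass
    class Point:
        x: int
        y: int

    v = Validator()
    # "z" is not a declared field, so it is filtered out before Point(**...).
    print(v.instantiate({"x": 1, "y": "2", "z": 99}, Point))  # Point(x=1, y=2)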
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target<fim_suffix>_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle set-like dict types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin)and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) 
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = Any instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, set)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
FOR
complete_current_header_empty_completion
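Each `inputs` value above embeds the full tanuki_py/src/tanuki/validator.py source around the FIM markers. For orientation, here is a minimal usage sketch of the Validator class those rows exercise; the Point dataclass and the import path are illustrative assumptions, not part of the dataset:

from dataclasses import dataclass
from typing import Dict, List

from tanuki.validator import Validator  # assumed import path, per the <filename> tag above

@dataclass
class Point:
    x: int
    y: int

v = Validator()
print(v.check_type({"x": 1, "y": 2}, Dict[str, int]))  # True: str keys, int values
print(v.validate_output('[1, 2, 3]', List[int]))       # True: the JSON parses and every item is an int
print(v.instantiate({"x": 1, "y": 2}, Point))          # Point(x=1, y=2), via the dataclass branch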
<filename>tanuki_py/src/tanuki/bloom_filter.py<fim_prefix>import hashlib import logging import math import numpy as np from bitarray import bitarray from tanuki.persistence.filter.bloom_interface import IBloomFilterPersistence class BloomFilter: def __init__(self, persistence: IBloomFilterPersistence, size=None, hash_count=None, expected_number_of_elements=None, false_positive_probability=None): if not persistence: raise ValueError("Persistence cannot be None, it must be an instance of IBloomFilterPersistence") if not size and not hash_count and not expected_number_of_elements and not false_positive_probability: raise ValueError("Must specify either (size, hash_count) or (expected_number_of_elements, false_positive_probability)") if expected_number_of_elements and false_positive_probability: size, hash_count = BloomFilter.optimal_bloom_filter_params(expected_number_of_elements, false_positive_probability) if not size and not hash_count: raise ValueError("Size and hash_count not set. This should never happen.") self.size = size self.hash_count = hash_count self.bit_array, self.indices = self.init_bit_array(size) self.persistence = persistence def init_bit_array(self, size): _bit_array = bitarray(size) _bit_array.setall(0) _indices = np.zeros(size, dtype=np.int32) return _bit_array, _indices def hash_functions(self, string): # h1(x) hash1 = int(hashlib.sha256(string.encode('utf-8')).hexdigest(), 16) # h2(x) hash2 = int(hashlib.md5(string.encode('utf-8')).hexdigest(), 16) return hash1, hash2 def lookup(self, string): hash1, hash2 = self.hash_functions(string) for seed in range(self.hash_count): index = (hash1 + seed * hash2) % self.size #print(f"Lookup: Seed={seed}, Digest={index}, BitValue={self.bit_array[index]}") if self.bit_array[index] == 0: return False return True def add(self, string): hash1, hash2 = self.hash_functions(string) for seed in rang<fim_suffix>e(self.hash_count): index = (hash1 + seed * hash2) % self.size self.bit_array[index] = 1 #print(f"Add: Seed={seed}, Digest={index}, BitValue={self.bit_array[index]}") def save(self): self.persistence.save(self.bit_array) def load(self): self.bit_array = self.persistence.load() length_in_bytes = int(len(self.bit_array)/8) expected_length = math.ceil(self.size / 8) if length_in_bytes != expected_length: logging.warning("Bit array length does not match expected size, and so might be corrupted. Reinitializing.") self.bit_array, self.indices = self.init_bit_array(self.size) self.save() @staticmethod def optimal_bloom_filter_params(n, p): """ Calculate the optimal bit array size (m) and number of hash functions (k) for a Bloom filter. n: expected number of items to be stored p: acceptable false positive probability Returns a tuple (m, k) """ m = - (n * math.log(p)) / (math.log(2) ** 2) k = (m / n) * math.log(2) return int(math.ceil(m)), int(math.ceil(k))<fim_middle>
null
FOR
complete_current_header_empty_completion
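The docstring of optimal_bloom_filter_params in the row above encodes the standard formulas m = -n*ln(p) / (ln 2)^2 and k = (m/n)*ln 2, and lookup/add derive the k probe positions by double hashing, index_i = (h1 + i*h2) mod m. A quick worked check of the parameter formulas, using only the stdlib:

import math

n, p = 1000, 0.01                            # 1,000 items, 1% false-positive target
m = -(n * math.log(p)) / (math.log(2) ** 2)  # ~9585.1 bits
k = (m / n) * math.log(2)                    # ~6.64 hash functions
print(math.ceil(m), math.ceil(k))            # 9586 7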
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # defaultdict needs a default factory; `int` is used here for simplicity and may need to be adapted to the use case. return defaultdict(int, instantiated_items) # Handle other dict subclasses such as OrderedDict # the isclass check ensures origin has the __mro__ attribute elif inspect.isclass(origin) and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type, e.g. Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i]}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() fo<fim_suffix>r item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0]}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) # Handle frozensets if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
FOR
complete_current_header_empty_completion
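Rows in this dump follow the fill-in-the-middle convention: a span is cut out of the source file, the remainder is serialized as <fim_prefix>…<fim_suffix>…<fim_middle>, and the model is asked to produce the removed span after <fim_middle>. A hedged sketch of how such a pair might be built; the helper name is illustrative, not from this repo:

def make_fim_example(source: str, start: int, end: int) -> tuple[str, str]:
    """Serialize source in prefix/suffix order; the cut span [start:end) is the target."""
    prefix, middle, suffix = source[:start], source[start:end], source[end:]
    inputs = f"<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>"
    return inputs, middle

# An empty cut (start == end) reproduces rows like the one above, where the
# suffix begins mid-token ("fo<fim_suffix>r item in data:"), targets is null,
# and the scenario is complete_current_header_empty_completion.
inp, target = make_fim_example("for item in data:", 2, 2)
print(inp)     # <fim_prefix>fo<fim_suffix>r item in data:<fim_middle>
print(target)  # "" (empty completion)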
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # defaultdict needs a default factory; `int` is used here for simplicity and may need to be adapted to the use case. return defaultdict(int, instantiated_items) # Handle other dict subclasses such as OrderedDict # the isclass check ensures origin has the __mro__ attribute elif inspect.isclass(origin) and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type, e.g. Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enum<fim_suffix>erate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i]}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0]}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) # Handle frozensets if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
FOR
complete_current_header_empty_completion
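The check_type walk in these rows normalizes every annotation with get_origin/get_args and then recurses on the arguments; that dispatch can be seen with the stdlib alone:

from typing import Dict, List, Optional, get_args, get_origin

for ann in (List[int], Dict[str, float], Optional[int]):
    print(ann, "->", get_origin(ann), get_args(ann))
# typing.List[int]        -> <class 'list'> (<class 'int'>,)
# typing.Dict[str, float] -> <class 'dict'> (<class 'str'>, <class 'float'>)
# typing.Optional[int]    -> typing.Union (<class 'int'>, <class 'NoneType'>)
# Optional[int] is Union[int, None], which is why the Union branch in
# check_type also covers optional fields.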
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # defaultdict needs a default factory; `int` is used here for simplicity and may need to be adapted to the use case. return defaultdict(int, instantiated_items) # Handle other dict subclasses such as OrderedDict # the isclass check ensures origin has the __mro__ attribute elif inspect.isclass(origin) and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type, e.g. Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for ite<fim_suffix>m in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list) and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i]}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0]}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) # Handle frozensets if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
FOR
complete_current_header_empty_completion
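One behavior of the instantiate method repeated above is worth noting: for Union targets it returns the result of the first member type that instantiates without raising, so member order matters. A small illustration, assuming the class behaves as written; the import path is an assumption per the <filename> tag:

from typing import Union

from tanuki.validator import Validator

v = Validator()
print(v.instantiate("3", Union[int, str]))    # 3     -- int is tried first and int("3") succeeds
print(v.instantiate("abc", Union[int, str]))  # 'abc' -- int("abc") fails, so the str member is used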
<filename>tanuki_py/src/tanuki/validator.py<fim_prefix>import abc from collections import defaultdict import collections import typing from collections import deque import dataclasses import inspect import json from dataclasses import is_dataclass from typing import get_origin, get_args, Any, Mapping, MutableMapping, OrderedDict, Literal, Union, get_type_hints, \ Type, Sequence, Tuple, Optional from pydantic import BaseModel, create_model import datetime class Validator: def __init__(self): # Extract types from collections and collections.abc collection_types = {cls for name, cls in collections.__dict__.items() if isinstance(cls, type)} abc_collection_types = {cls for name, cls in collections.abc.__dict__.items() if isinstance(cls, type)} # Filter out types that have dictionary-like methods self.dict_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'keys') and hasattr(cls, 'items') } self.list_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'append') and hasattr(cls, 'pop') } self.set_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, 'add') and hasattr(cls, 'discard') } # Add the general Sequence to list-like types # if python version is 3.9 or above, use collections.abc.Sequence if hasattr(collections.abc, 'Sequence'): self.list_like_types.add(collections.abc.Sequence) else: self.list_like_types.add(collections.Sequence) self.list_like_types.add(typing.List) # Add the general Mapping to dict-like types if hasattr(collections.abc, 'Mapping'): self.dict_like_types.add(collections.abc.Mapping) else: self.dict_like_types.add(collections.Mapping) self.dict_like_types.add(typing.Dict) # Add the general Set to set-like types if hasattr(collections.abc, 'Set'): self.set_like_types.add(collections.abc.Set) else: self.set_like_types.add(collections.Set) self.set_like_types.add(typing.Set) # Add the general Tuple to tuple-like types self.tuple_like_types = { cls for cls in collection_types.union(abc_collection_types) if hasattr(cls, '__getitem__') and hasattr(cls, '__len__') } self.tuple_like_types.add(typing.Tuple) def is_base_type(self, _type: Any) -> bool: """Determine if a type is a base type.""" return _type in {int, float, str, bool, None} def validate_base_type(self, value: Any, typ: Any) -> bool: """Validate base types.""" if typ is None: return value is None return isinstance(value, typ) def validate_output(self, output: str, type_definition: Any) -> bool: try: deserialized_output = json.loads(output) except json.JSONDecodeError: return False return self.check_type(deserialized_output, type_definition) def check_type(self, value: Any, type_definition: Any) -> bool: """ Validate a value against a type definition. 
Args: value: Any object or primitive value type_definition: The type definition to validate against Returns: Whether the value is valid for the type definition """ if type_definition is Any: return True if self.is_base_type(type_definition): return self.validate_base_type(value, type_definition) origin = get_origin(type_definition) or type_definition args = get_args(type_definition) # Handle base types if self.is_base_type(origin): return self.validate_base_type(value, origin) if origin == Literal: return value in args if origin == Union: return any(self.check_type(value, union_type) for union_type in args) # Handle tuples if origin == tuple: if not isinstance(value, tuple): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle lists if origin == list: if not isinstance(value, list): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle more complex types that are collections and list-like if origin is list or issubclass(origin, tuple(self.list_like_types)): if not any(isinstance(value, t) for t in self.list_like_types): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle sets if origin == set: if not isinstance(value, set): return False item_type = args[0] if args else Any return all(self.check_type(v, item_type) for v in value) # Handle datetime if origin in [datetime.datetime, datetime.date, datetime.time]: # try to instantiate datetime try: obj = origin(**value) return True except: return False # Handle dictionaries if origin is dict or issubclass(origin, tuple(self.dict_like_types)): if not isinstance(value, (dict, Mapping)):#, MutableMapping, OrderedDict)): return False if args: if len(args) == 1: key_type = args[0] value_type = Any # General assumption; specific dict-like types might differ elif len(args) == 2: key_type, value_type = args else: key_type = value_type = Any else: key_type = value_type = Any return all( self.check_type(k, key_type) and self.check_type(v, value_type) for k, v in value.items() ) # Handle pydantic models if self.is_pydantic_model(origin): try: #temp_model = create_model('TempModel', **value) if isinstance(value, origin): return True #return isinstance(temp_model, origin) # check if value is dict if not isinstance(value, dict): return False # get all required init arguments for origin # required arguments are the ones withouyt default values required_fields = [field for field, field_type in origin.__annotations__.items() if not (typing.get_origin(field_type) is Union and type(None) in typing.get_args(field_type))] # check that all required arguments are in value and do type checking for arg in required_fields: # check if it is in value if arg not in value: return False # get the type of the argument arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False # check that all arguments in value are correct type # this is additional check, because the above check only checks required arguments for arg, obj in value.items(): if arg in required_fields: continue arg_type = origin.__annotations__[arg] if not self.check_type(value[arg], arg_type): return False #origin.parse_obj(value) return True except Exception as e: print(e) return False # Handle dataclasses if self.is_dataclass_instance(origin): try: # for field in dataclasses.fields(origin): # field_name = field.name # field_type = field.type # if field_name not in value or not 
self.check_type(value[field_name], field_type): # return False # return True obj = origin(**value) return dataclasses.asdict(obj) == value except: return False # Handle dataclasses and arbitrary class types if inspect.isclass(origin) and not self.is_base_type(origin): # Ensure the value is an instance of the class if not isinstance(value, origin): return False # Gather type hints from the class and its bases type_hints = {} for cls in reversed(origin.__mro__): type_hints.update(get_type_hints(cls)) # Validate each attribute of the class for attr, attr_type in type_hints.items(): attr_value = getattr(value, attr, None) if not self.check_type(attr_value, attr_type): return False return True return False @staticmethod def is_pydantic_model(cls): return hasattr(cls, 'parse_obj') @staticmethod def is_dataclass_instance(cls): return hasattr(cls, '__annotations__') and hasattr(cls, '__dataclass_fields__') @staticmethod def _is_subclass_of_generic(cls: Type, generic: Type) -> bool: """Determine if the class is a subclass of a generic type.""" try: return issubclass(cls, generic) and cls is not generic except TypeError: if not hasattr(cls, '__origin__'): return False return cls.__origin__ is generic @staticmethod def _is_generic(cls: Type) -> bool: """Check if the provided type is a generic.""" return hasattr(cls, "__origin__") def _get_recursive_args(self, target_type: Type) -> Tuple[Type, ...]: """ Recursively check the base classes (i.e., the superclass chain) of the target type until we find one that retains the type arguments. :return: Type chain """ if get_args(target_type): return get_args(target_type) for base in target_type.__bases__: args = self._get_recursive_args(base) if args: return args return () def _find_generic_base_and_args(self, target_type: Type) -> Tuple[Type, Tuple[Type, ...]]: """ Navigate up the MRO to find the first generic base and its arguments. """ # First, check if target_type is a type annotation. # If so, directly return its origin and arguments. origin = get_origin(target_type) args = get_args(target_type) if origin and args: return origin, args # If target_type is a real class, then navigate its MRO. 
if hasattr(target_type, '__mro__'): if hasattr(target_type, '__orig_bases__'): for base in<fim_suffix> target_type.__orig_bases__: if get_args(base): return base, get_args(base) for base in target_type.__mro__: if get_args(base): return base, get_args(base) return None, () def _is_list_like(self, target_type: Type) -> bool: """Determine if the target type is list-like.""" if target_type in {list, typing.List}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {list, typing.List}: return True return False def _is_tuple_like(self, target_type: Type) -> bool: """Determine if the target type is tuple-like.""" if target_type in {tuple, typing.Tuple}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {tuple, typing.Tuple}: return True return False def _is_dict_like(self, target_type: Type) -> bool: """Determine if the target type is dict-like.""" if target_type in {dict, typing.Dict}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {dict, typing.Dict}: return True return False def _is_set_like(self, target_type: Type) -> bool: """Determine if the target type is set-like.""" if target_type in {set, typing.Set}: return True if hasattr(target_type, "__origin__") and target_type.__origin__ in {set, typing.Set}: return True return False def instantiate(self, data: Any, target_type: Type) -> Any: """ Attempts to convert a JSON-compatible data structure into an instance of the specified type. Args: data: JSON-compatible data structure to instantiate the target type. target_type: The type to instantiate from the given data. Returns: An instance of the target type initialized with the data. """ # Handle None type if data is None: return None origin = get_origin(target_type) or target_type # If the target type is a built-in, attempt to instantiate and return if self.is_base_type(target_type) or target_type is Any: # If the parsed data is a string and target type is str, return it directly if isinstance(data, str) and target_type is str: return data # If any, return the data directly if target_type is Any: return data try: return target_type(data) except (ValueError, TypeError): # Handle the special case where the string represents a float but we want an integer if target_type is int: try: return int(float(data)) except (ValueError, TypeError): pass if target_type is float: try: return int(float(data)) except (ValueError, TypeError): pass raise TypeError(f"Failed to instantiate {target_type} from provided data.") # special handling for datetime if origin == datetime.datetime: # try to instantiate datetime try: return datetime.datetime(**data) except: raise TypeError(f"Failed to instantiate {target_type} from provided data.") # check if origin is Union, if so, instantiate the first type that works if origin == Union: for arg in get_args(target_type): try: return self.instantiate(data, arg) except: continue raise TypeError(f"Failed to instantiate {target_type} from provided data.") # If the data is a dictionary and the target is a custom class that can be instantiated from a dictionary. 
if isinstance(data, dict): if inspect.isclass(target_type) and not self.is_base_type(target_type): # Special handling for dataclasses if is_dataclass(target_type): fields = [f.name for f in dataclasses.fields(target_type)] type_hints = get_type_hints(target_type) filtered_data = {k: self.instantiate(v, type_hints.get(k, Any)) for k, v in data.items() if k in fields} return target_type(**filtered_data) # Special handling for Pydantic models if issubclass(target_type, BaseModel): # instantiate the sub attributes for attr, attr_type in target_type.__annotations__.items(): if attr in data: data[attr] = self.instantiate(data[attr], attr_type) try: return target_type.model_validate(data) except AttributeError as e: # backwards compatibility with pydantic < 2 return target_type.parse_obj(data) # For general classes, attempt instantiation try: return target_type(**data) except TypeError: raise TypeError(f"Failed to instantiate {target_type.__name__} from dictionary.") # Handle dictionary-like types # Check if the target type is or inherits from defaultdict if origin is defaultdict or (isinstance(origin, type) and issubclass(origin, defaultdict)): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # For defaultdict, you'll need a default factory. Here, I'm using `int` for simplicity, # but you might want to adapt this based on your needs. return defaultdict(int, instantiated_items) # Handle dict-like types like OrderedDict # the first check needs to be done to ensure origin has the __mro__ attribute elif inspect.isclass(origin) and any(issubclass(base, dict) for base in origin.__mro__): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_items = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} return origin(instantiated_items) # Handle other dictionary-like types elif origin is dict or self._is_subclass_of_generic(origin, dict): key_type, value_type = get_args(target_type) if get_args(target_type) else (Any, Any) instantiated_dict = {self.instantiate(k, key_type): self.instantiate(v, value_type) for k, v in data.items()} # If the target_type is a subclass of dict, return an instance of target_type if self._is_subclass_of_generic(target_type, dict) and not self._is_generic(target_type): return target_type(instantiated_dict) else: return dict(instantiated_dict) # Tuples aren't supported in JSONable types, so we look for lists instead if isinstance(data, list): try: # If the origin or target type is a list-like type, or if it implements a list-like collections type # e.g. Sequence[int] if origin is list or self._is_subclass_of_generic(origin, list): base, item_types = self._find_generic_base_and_args(target_type) item_type = item_types[0] if item_types else Any instantiated_items = [] for item in data: # For each item, validate and instantiate it try: instantiated_item = self.instantiate(item, item_type) except ValueError: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") safe = self.check_type(instantiated_item, item_type) if not safe: raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type}.") instantiated_items.append(instantiated_item) # If target_type is a subclass of list, return an instance of target_type if self._is_subclass_of_generic(target_type, list)
and not self._is_generic(target_type): return target_type(instantiated_items) return instantiated_items # Handle tuples if self._is_tuple_like(target_type) or (isinstance(origin, type) and issubclass(origin, tuple)): base, item_types = self._find_generic_base_and_args(target_type) instantiated_items = [] # If there are no subscripted types, assume Any if not item_types: item_types = (Any,) * len(data) for i, item in enumerate(data): # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_types[i]) instantiated_items.append(instantiated_item) # If the instantiated item does not match the expected type, raise an exception # (skip the isinstance check for Any, which cannot be used with isinstance) _type = item_types[i] if _type is not Any and not isinstance(instantiated_item, _type): raise TypeError( f"Item {i} of type {type(item).__name__} does not match expected type {item_types[i].__name__}.") # Convert the list of instantiated items to a tuple instantiated_tuple = tuple(instantiated_items) # If target_type is a subclass of tuple, return an instance of target_type if self._is_subclass_of_generic(target_type, tuple): return target_type(instantiated_tuple) return instantiated_tuple # Handle sets if self._is_set_like(target_type) or (isinstance(origin, type) and issubclass(origin, set)): base, item_type = self._find_generic_base_and_args(target_type) if not item_type: item_type = (Any,) instantiated_items = set() for item in data: # For each item, validate and instantiate it instantiated_item = self.instantiate(item, item_type[0]) instantiated_items.add(instantiated_item) # If the instantiated item does not match the expected type, raise an exception if item_type[0] is not Any and not isinstance(instantiated_item, item_type[0]): raise TypeError( f"Item of type {type(item).__name__} does not match expected type {item_type[0].__name__}.") # If target_type is a subclass of set, return an instance of target_type if self._is_subclass_of_generic(target_type, set): return target_type(instantiated_items) return instantiated_items # Handle deques if origin is deque or (isinstance(origin, type) and issubclass(origin, deque)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return deque(self.instantiate(item, item_type) for item in data) if origin is frozenset or (isinstance(origin, type) and issubclass(origin, frozenset)): item_type = get_args(target_type)[0] if get_args(target_type) else Any return frozenset(self.instantiate(item, item_type) for item in data) except TypeError as e: print(e) raise TypeError(f"Failed to instantiate {target_type} from list. {e}") # If none of the above, return the data as-is return data <fim_middle>
null
FOR
complete_current_header_empty_completion
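The completion hole in the row above sits inside _find_generic_base_and_args, which recovers subscripted type arguments by walking __orig_bases__ and the MRO. A short sketch of what that lookup enables, under the same import assumption as above (note it also calls a private helper, purely for illustration):

from typing import Dict, List

from tanuki.validator import Validator

v = Validator()

# Nested generics are resolved recursively when rebuilding from JSON:
assert v.instantiate({"a": [1, 2]}, Dict[str, List[int]]) == {"a": [1, 2]}

# A user subclass keeps its type arguments in __orig_bases__,
# which is exactly what the helper walks:
class IntList(List[int]):
    pass

base, args = v._find_generic_base_and_args(IntList)
assert args == (int,)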
<filename>tanuki_py/src/tanuki/language_models/openai_api.py<fim_prefix>from typing import List import logging import time # import abstract base class from openai import OpenAI from openai.types import CreateEmbeddingResponse from openai.types.fine_tuning import FineTuningJob from tanuki.language_models.llm_finetune_api_abc import LLM_Finetune_API from tanuki.models.embedding import Embedding from tanuki.language_models.embedding_api_abc import Embedding_API from tanuki.language_models.llm_api_abc import LLM_API import os from tanuki.constants import DEFAULT_DISTILLED_MODEL_NAME from tanuki.language_models.llm_configs.openai_config import OpenAIConfig from tanuki.models.finetune_job import FinetuneJob import copy OPENAI_URL = "https://api.openai.com/v1/chat/completions" import requests LLM_GENERATION_PARAMETERS = ["temperature", "top_p", "max_new_tokens", "frequency_penalty", "presence_penalty"] class OpenAI_API(LLM_API, Embedding_API, LLM_Finetune_API): def __init__(self) -> None: # initialise the abstract base class super().__init__() self.api_key = os.environ.get("OPENAI_API_KEY") self.client = None def embed(self, texts: List[str], model: OpenAIConfig, **kwargs) -> List[Embedding]: """ Generate embeddings for the provided texts using the specified OpenAI model. Lightweight wrapper over the OpenAI client. :param texts: A list of texts to embed. :param model: The model to use for embeddings. :return: A list of embeddings. """ self.check_api_key() try: response: CreateEmbeddingResponse = self.client.embeddings.create( input=texts, model=model.model_name, **kwargs ) assert response.object == "list" assert len(response.data) == len(texts) embeddings = [] for embedding_response in response.data: assert embedding_response.object == "embedding" embeddings.append(Embedding(embedding_response.embedding)) return embeddings except Exception as e: print(f"An error occurred: {e}") return None def generate(self, model, system_message, prompt, **kwargs): """ The main generation function, given the args, kwargs, function_modeler, function description and model type, generate a response Args model (OpenAIConfig): The model to use for generation. system_message (str): The system message to use for generation. prompt (str): The prompt to use for generation. kwargs (dict): Additional generation parameters. 
""" self.check_api_key() temperature = kwargs.get("temperature", 0.1) top_p = kwargs.get("top_p", 1) frequency_penalty = kwargs.get("frequency_penalty", 0) presence_penalty = kwargs.get("presence_penalty", 0) max_new_tokens = kwargs.get("max_new_tokens") # check if there are any generation parameters that are not supported unsupported_params = [param for param in kwargs.keys() if param not in LLM_GENERATION_PARAMETERS] if len(unsupported_params) > 0: # log warning logging.warning(f"Unused generation parameters sent as input: {unsupported_params}."\ f"For OpenAI, only the following parameters are supported: {LLM_GENERATION_PARAMETERS}") params = { "model": model.model_name, "temperature": temperature, "max_tokens": max_new_tokens, "top_p": top_p, "frequency_penalty": frequency_penalty, "presence_penalty": presence_penalty, } if model.parsing_helper_tokens["start_token"]: prompt += model.parsing_helper_tokens["start_token"] messages = [ { "role": "system", "content": system_message }, { "role": "user", "content": prompt } ] params["messages"] = messages counter = 0 choice = None # initiate response so exception logic doesnt error out when checking for error in response response = {} while counter <= 5<fim_suffix>: try: openai_headers = { "Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json", } response = requests.post( OPENAI_URL, headers=openai_headers, json=params, timeout=50 ) response = response.json() choice = response["choices"][0]["message"]["content"].strip("'") break except Exception as e: if ("error" in response and "code" in response["error"] and response["error"]["code"] == 'invalid_api_key'): raise Exception(f"The supplied OpenAI API key {self.api_key} is invalid") if counter == 5: raise Exception(f"OpenAI API failed to generate a response: {e}") counter += 1 time.sleep(2 ** counter) continue if not choice: raise Exception("OpenAI API failed to generate a response") if model.parsing_helper_tokens["end_token"]: # remove the end token from the choice choice = choice.split(model.parsing_helper_tokens["end_token"])[0] # check if starting token is in choice if model.parsing_helper_tokens["start_token"] in choice: # remove the starting token from the choice choice = choice.split(model.parsing_helper_tokens["start_token"])[-1] return choice def list_finetuned(self, model_config, limit=100, **kwargs) -> List[FinetuneJob]: self.check_api_key() response = self.client.fine_tuning.jobs.list(limit=limit) jobs = [] for job in response.data: finetune_job = self.create_finetune_job(job, model_config) jobs.append(finetune_job) return jobs def get_finetuned(self, job_id, model_config: OpenAIConfig) -> FinetuneJob: self.check_api_key() response = self.client.fine_tuning.jobs.retrieve(job_id) finetune_job = self.create_finetune_job(response, model_config= model_config) return finetune_job def finetune(self, file, suffix, model_config, **kwargs) -> FinetuneJob: self.check_api_key() # Use the stream as a file response = self.client.files.create(file=file, purpose='fine-tune') training_file_id = response.id if not model_config.base_model_for_sft: model_config.base_model_for_sft = DEFAULT_DISTILLED_MODEL_NAME # submit the finetuning job finetuning_response: FineTuningJob = self.client.fine_tuning.jobs.create(training_file=training_file_id, model=model_config.base_model_for_sft, suffix=suffix) finetune_job = self.create_finetune_job(finetuning_response, model_config) return finetune_job def create_finetune_job(self, response: FineTuningJob, model_config: OpenAIConfig) -> 
FinetuneJob: finetuned_model_config = copy.deepcopy(model_config) finetuned_model_config.model_name = response.fine_tuned_model finetune_job = FinetuneJob(response.id, response.status, finetuned_model_config) return finetune_job def check_api_key(self): # check if api key is not none if not self.api_key: # try to get the api key from the environment, maybe it has been set later self.api_key = os.getenv("OPENAI_API_KEY") if not self.api_key: raise ValueError("OpenAI API key is not set") if not self.client: self.client = OpenAI(api_key=self.api_key) <fim_middle>
null
WHILE
complete_current_header_empty_completion
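The WHILE-block target in the last row is the retry loop inside OpenAI_API.generate. A standalone sketch of the same pattern, illustrative only and not part of the tanuki API: up to six attempts, exponential backoff between failures, and the last error re-raised.

import time

def call_with_backoff(make_request, max_retries=5):
    # mirrors the loop above: attempts 0..max_retries, sleep 2**(attempt+1)
    # seconds after each failure, re-raise once retries are exhausted
    for attempt in range(max_retries + 1):
        try:
            return make_request()
        except Exception:
            if attempt == max_retries:
                raise
            time.sleep(2 ** (attempt + 1))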