diff --git "a/codereval/python/CEPythonHumanLabel.jsonl" "b/codereval/python/CEPythonHumanLabel.jsonl" new file mode 100644--- /dev/null +++ "b/codereval/python/CEPythonHumanLabel.jsonl" @@ -0,0 +1,230 @@ +{"question_id": "62e60f43d76274f8a4026e28", "input": "def hydrate_time(nanoseconds, tz=None):\n\t\"\"\"\n\tConvert nanoseconds to a time in fixed format.\n\t\"\"\"\n\t", "signature": "def hydrate_time(nanoseconds, tz=None):", "docstring": "Convert nanoseconds to a time in fixed format."} +{"question_id": "62e60f3bd76274f8a4026e10", "input": "def dehydrate_timedelta(value):\n\t\"\"\"\n\tUse the value in timedelta to generate the Structure class.\n\t\"\"\"\n\t", "signature": "def dehydrate_timedelta(value):", "docstring": "Use the value in timedelta to generate the Structure class."} +{"question_id": "62e60f37d76274f8a4026dfd", "input": "def dehydrate_time(value):\n\t\"\"\"\n\tUse ticks in the Time class to generate the Structure class.\n\t\"\"\"\n\t", "signature": "def dehydrate_time(value):", "docstring": "Use ticks in the Time class to generate the Structure class."} +{"question_id": "62e60f33d76274f8a4026de9", "input": "def dehydrate_point(value):\n\t\"\"\"\n\tThe structure class is generated based on the value length.\n\t\"\"\"\n\t", "signature": "def dehydrate_point(value):", "docstring": "The structure class is generated based on the value length."} +{"question_id": "62e60ed4d76274f8a4026da0", "input": "def keys(self):\n\t\"\"\"\n\tReturns the __keys property of a class as a List\n\t\"\"\"\n\t", "signature": "def keys(self):", "docstring": "Returns the __keys property of a class as a List"} +{"question_id": "62e60ecfd76274f8a4026d6a", "input": "def protocol_handlers(cls, protocol_version=None):\n\t\"\"\"\n\tDifferent forms of Bolt protocol handlers are returned based on the value of protocol_version.\n\t\"\"\"\n\t", "signature": "def protocol_handlers(cls, protocol_version=None):", "docstring": "Different forms of Bolt protocol handlers are returned based on the value of protocol_version."} +{"question_id": "62e60e49d76274f8a4026d25", "input": "def unit_of_work(metadata=None, timeout=None):\n\t\"\"\"\n\tReturns a decorator with metadata and timeout attributes.\n\t\"\"\"\n\t", "signature": "def unit_of_work(metadata=None, timeout=None):", "docstring": "Returns a decorator with metadata and timeout attributes."} +{"question_id": "62e60e05d76274f8a4026cfd", "input": "def index(self, key):\n\t\"\"\"\n\tReturns the key in the form of int.\n\t\"\"\"\n\t", "signature": "def index(self, key):", "docstring": "Returns the key in the form of int."} +{"question_id": "62e60da4d76274f8a4026cf1", "input": "def values(self, *keys):\n\t\"\"\"\n\tReturns the key filtered by self.index in the form of a list.\n\t\"\"\"\n\t", "signature": "def values(self, *keys):", "docstring": "Returns the key filtered by self.index in the form of a list."} +{"question_id": "62e60b10d76274f8a4026ccd", "input": "def data(self, *keys):\n\t\"\"\"\n\tReturns the keys processed by the transform method of the RecordExporter class.\n\t\"\"\"\n\t", "signature": "def data(self, *keys):", "docstring": "Returns the keys processed by the transform method of the RecordExporter class."} +{"question_id": "62e6087bd76274f8a4026bfa", "input": "def pop_u16(self):\n\t\"\"\"\n\tRemove the last two elements in self.data and return\n\t\"\"\"\n\t", "signature": "def pop_u16(self):", "docstring": "Remove the last two elements in self.data and return"} +{"question_id": "62e6087ad76274f8a4026bf2", "input": "def discard(self, n=-1, qid=-1, 
dehydration_hooks=None,\n hydration_hooks=None, **handlers):\n\t\"\"\"\n\tAppends a DISCARD message to the output queue.\n\n:param n: number of records to discard, default = -1 (ALL)\n:param qid: query ID to discard for, default = -1 (last query)\n:param dehydration_hooks:\n Hooks to dehydrate types (dict from type (class) to dehydration\n function). Dehydration functions receive the value and returns an\n object of type understood by packstream.\n:param hydration_hooks:\n Hooks to hydrate types (mapping from type (class) to\n dehydration function). Dehydration functions receive the value of\n type understood by packstream and are free to return anything.\n:param handlers: handler functions passed into the returned Response object\n\t\"\"\"\n\t", "signature": "def discard(self, n=-1, qid=-1, dehydration_hooks=None,\n hydration_hooks=None, **handlers):", "docstring": "Appends a DISCARD message to the output queue.\n\n:param n: number of records to discard, default = -1 (ALL)\n:param qid: query ID to discard for, default = -1 (last query)\n:param dehydration_hooks:\n Hooks to dehydrate types (dict from type (class) to dehydration\n function). Dehydration functions receive the value and returns an\n object of type understood by packstream.\n:param hydration_hooks:\n Hooks to hydrate types (mapping from type (class) to\n dehydration function). Dehydration functions receive the value of\n type understood by packstream and are free to return anything.\n:param handlers: handler functions passed into the returned Response object"} +{"question_id": "62e60879d76274f8a4026bec", "input": "def begin(self, mode=None, bookmarks=None, metadata=None, timeout=None,\n db=None, imp_user=None, dehydration_hooks=None,\n hydration_hooks=None, **handlers):\n\t\"\"\"\n\tAppends a BEGIN message to the output queue.\n\n:param mode: access mode for routing - \"READ\" or \"WRITE\" (default)\n:param bookmarks: iterable of bookmark values after which this transaction should begin\n:param metadata: custom metadata dictionary to attach to the transaction\n:param timeout: timeout for transaction execution (seconds)\n:param db: name of the database against which to begin the transaction\n Requires Bolt 4.0+.\n:param imp_user: the user to impersonate\n Requires Bolt 4.4+\n:param dehydration_hooks:\n Hooks to dehydrate types (dict from type (class) to dehydration\n function). Dehydration functions receive the value and returns an\n object of type understood by packstream.\n:param hydration_hooks:\n Hooks to hydrate types (mapping from type (class) to\n dehydration function). 
Dehydration functions receive the value of\n type understood by packstream and are free to return anything.\n:param handlers: handler functions passed into the returned Response object\n:return: Response object\n\t\"\"\"\n\t", "signature": "def begin(self, mode=None, bookmarks=None, metadata=None, timeout=None,\n db=None, imp_user=None, dehydration_hooks=None,\n hydration_hooks=None, **handlers):", "docstring": "Appends a BEGIN message to the output queue.\n\n:param mode: access mode for routing - \"READ\" or \"WRITE\" (default)\n:param bookmarks: iterable of bookmark values after which this transaction should begin\n:param metadata: custom metadata dictionary to attach to the transaction\n:param timeout: timeout for transaction execution (seconds)\n:param db: name of the database against which to begin the transaction\n Requires Bolt 4.0+.\n:param imp_user: the user to impersonate\n Requires Bolt 4.4+\n:param dehydration_hooks:\n Hooks to dehydrate types (dict from type (class) to dehydration\n function). Dehydration functions receive the value and returns an\n object of type understood by packstream.\n:param hydration_hooks:\n Hooks to hydrate types (mapping from type (class) to\n dehydration function). Dehydration functions receive the value of\n type understood by packstream and are free to return anything.\n:param handlers: handler functions passed into the returned Response object\n:return: Response object"} +{"question_id": "62e60723d76274f8a4026b75", "input": "def round_half_to_even(n):\n\t\"\"\"\n\tRound a floating-point number\n\t\"\"\"\n\t", "signature": "def round_half_to_even(n):", "docstring": "Round a floating-point number"} +{"question_id": "62e60707d76274f8a4026b69", "input": "def point_type(name, fields, srid_map):\n\t\"\"\"\n\tDynamically Generating Point Class\n\t\"\"\"\n\t", "signature": "def point_type(name, fields, srid_map):", "docstring": "Dynamically Generating Point Class"} +{"question_id": "62e5dc9ed76274f8a4026b5b", "input": "def deprecated(message):\n\t\"\"\"\n\tReturn a decorator function for deprecating functions and methods.\n\t\"\"\"\n\t", "signature": "def deprecated(message):", "docstring": "Return a decorator function for deprecating functions and methods."} +{"question_id": "62e4fc3c85ea98643089041e", "input": "def _inline_r_setup(code: str) -> str:\n\t\"\"\"\n\tSome behaviour of R cannot be configured via env variables, but can\nonly be configured via R options once R has started. These are set here.\n\t\"\"\"\n\t", "signature": "def _inline_r_setup(code: str) -> str:", "docstring": "Some behaviour of R cannot be configured via env variables, but can\nonly be configured via R options once R has started. 
These are set here."} +{"question_id": "62e4fbda85ea986430890405", "input": "def xargs(\n cmd: tuple[str, ...],\n varargs: Sequence[str],\n *,\n color: bool = False,\n target_concurrency: int = 1,\n _max_length: int = _get_platform_max_length(),\n **kwargs:\n\t\"\"\"\n\tSimplified Implementation of Xargs in Linux\n\t\"\"\"\n\t", "signature": "def xargs(\n cmd: tuple[str, ...],\n varargs: Sequence[str],\n *,\n color: bool = False,\n target_concurrency: int = 1,\n _max_length: int = _get_platform_max_length(),\n **kwargs:", "docstring": "Simplified Implementation of Xargs in Linux"} +{"question_id": "62e4fbda85ea986430890403", "input": "def _shuffled(seq: Sequence[str]) -> list[str]:\n\t\"\"\"\n\tShuffle a given seq with the given FIXED_RANDOM_SEED\n\t\"\"\"\n\t", "signature": "def _shuffled(seq: Sequence[str]) -> list[str]:", "docstring": "Shuffle a given seq with the given FIXED_RANDOM_SEED"} +{"question_id": "62e4fb6585ea98643089032b", "input": "def parse_version(s: str) -> tuple[int, ...]:\n\t\"\"\"\n\tConverts a string concatenated by dot to a tuple consisting of integers.\n\t\"\"\"\n\t", "signature": "def parse_version(s: str) -> tuple[int, ...]:", "docstring": "Converts a string concatenated by dot to a tuple consisting of integers."} +{"question_id": "62e4fb4d85ea9864308902e7", "input": "def normalize_cmd(cmd: tuple[str, ...]) -> tuple[str, ...]:\n\t\"\"\"\n\tComplement the full path to exe and return it in its original form\n\t\"\"\"\n\t", "signature": "def normalize_cmd(cmd: tuple[str, ...]) -> tuple[str, ...]:", "docstring": "Complement the full path to exe and return it in its original form"} +{"question_id": "62b8d27a48ba5a41d1c3f4c6", "input": "def cached(cache, key=hashkey, lock=None):\n\t\"\"\"\n\tReturns a decorator function that saves the results in the cache\n\t\"\"\"\n\t", "signature": "def cached(cache, key=hashkey, lock=None):", "docstring": "Returns a decorator function that saves the results in the cache"} +{"question_id": "62b8d24048ba5a41d1c3f49f", "input": "def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):\n\t\"\"\"\n\tDecorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Least Recently Used (LRU)\nalgorithm with a per-item time-to-live (TTL) value.\n\t\"\"\"\n\t", "signature": "def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):", "docstring": "Decorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Least Recently Used (LRU)\nalgorithm with a per-item time-to-live (TTL) value."} +{"question_id": "62b8d23b48ba5a41d1c3f49a", "input": "def mru_cache(maxsize=128, typed=False):\n\t\"\"\"\n\tDecorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Most Recently Used (MRU)\nalgorithm.\n\t\"\"\"\n\t", "signature": "def mru_cache(maxsize=128, typed=False):", "docstring": "Decorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Most Recently Used (MRU)\nalgorithm."} +{"question_id": "62b8d23948ba5a41d1c3f498", "input": "def lru_cache(maxsize=128, typed=False):\n\t\"\"\"\n\tDecorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Least Recently Used (LRU)\nalgorithm.\n\t\"\"\"\n\t", "signature": "def lru_cache(maxsize=128, typed=False):", "docstring": "Decorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Least Recently Used (LRU)\nalgorithm."} +{"question_id": 
"62b8d23748ba5a41d1c3f496", "input": "def lfu_cache(maxsize=128, typed=False):\n\t\"\"\"\n\tDecorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Least Frequently Used (LFU)\nalgorithm.\n\t\"\"\"\n\t", "signature": "def lfu_cache(maxsize=128, typed=False):", "docstring": "Decorator to wrap a function with a memoizing callable that saves\nup to `maxsize` results based on a Least Frequently Used (LFU)\nalgorithm."} +{"question_id": "62b8d22f48ba5a41d1c3f488", "input": "def popitem(self):\n\t\"\"\"\n\tRemove the value corresponding to the first inserted key and returns the key and value in tuple format.\n\t\"\"\"\n\t", "signature": "def popitem(self):", "docstring": "Remove the value corresponding to the first inserted key and returns the key and value in tuple format."} +{"question_id": "62b8d22a48ba5a41d1c3f47e", "input": "def setdefault(self, key, default=None):\n\t\"\"\"\n\tIf a key exists in the class, the value corresponding to the key is returned. Otherwise, the value corresponding to the key is set to default.\n\t\"\"\"\n\t", "signature": "def setdefault(self, key, default=None):", "docstring": "If a key exists in the class, the value corresponding to the key is returned. Otherwise, the value corresponding to the key is set to default."} +{"question_id": "62b8d22948ba5a41d1c3f47c", "input": "def get(self, key, default=None):\n\t\"\"\"\n\tIf a key exists in the class, the value corresponding to the key is returned. Otherwise, default is returned.\n\t\"\"\"\n\t", "signature": "def get(self, key, default=None):", "docstring": "If a key exists in the class, the value corresponding to the key is returned. Otherwise, default is returned."} +{"question_id": "62b8d22548ba5a41d1c3f472", "input": "def cachedmethod(cache, key=hashkey, lock=None):\n\t\"\"\"\n\tReturns a decorator function that can call a result in the cache.\n\t\"\"\"\n\t", "signature": "def cachedmethod(cache, key=hashkey, lock=None):", "docstring": "Returns a decorator function that can call a result in the cache."} +{"question_id": "62b8c517e0d34b282c18122e", "input": "def extostr(cls, e, max_level=30, max_path_level=5):\n\t\"\"\"\n\tFormat the exception as a string\n\t\"\"\"\n\t", "signature": "def extostr(cls, e, max_level=30, max_path_level=5):", "docstring": "Format the exception as a string"} +{"question_id": "62b8bbbfe0d34b282c181210", "input": "def append_text_to_file(file_name, text_buffer, encoding, overwrite=False):\n\t\"\"\"\n\tWrites the data in the text buffer to a file\n\t\"\"\"\n\t", "signature": "def append_text_to_file(file_name, text_buffer, encoding, overwrite=False):", "docstring": "Writes the data in the text buffer to a file"} +{"question_id": "62b8bbbfe0d34b282c18120f", "input": "def file_to_textbuffer(file_name, encoding):\n\t\"\"\"\n\tLoad a file toward a text buffer\n\t\"\"\"\n\t", "signature": "def file_to_textbuffer(file_name, encoding):", "docstring": "Load a file toward a text buffer"} +{"question_id": "62b8bbbce0d34b282c18120d", "input": "def is_file_exist(file_name):\n\t\"\"\"\n\tCheck whether file_name is an existing file.\n\t\"\"\"\n\t", "signature": "def is_file_exist(file_name):", "docstring": "Check whether file_name is an existing file."} +{"question_id": "62b8b99de0d34b282c1811f8", "input": "def _reset_logging(cls):\n\t\"\"\"\n\tReset the logging system\n\t\"\"\"\n\t", "signature": "def _reset_logging(cls):", "docstring": "Reset the logging system"} +{"question_id": "62b8b59feb7e40a82d2d1291", "input": "def 
_getTargetClass(self):\n\t\"\"\"\n\tDefine this to return the implementation in use,\nwithout the 'Py' or 'Fallback' suffix.\n\t\"\"\"\n\t", "signature": "def _getTargetClass(self):", "docstring": "Define this to return the implementation in use,\nwithout the 'Py' or 'Fallback' suffix."} +{"question_id": "62b8b590eb7e40a82d2d1275", "input": "def _legacy_mergeOrderings(orderings):\n\t\"\"\"\n\tCombine multiple lists in the sequence of occurrence into a list with no duplicate elements.\n\t\"\"\"\n\t", "signature": "def _legacy_mergeOrderings(orderings):", "docstring": "Combine multiple lists in the sequence of occurrence into a list with no duplicate elements."} +{"question_id": "62b8b58deb7e40a82d2d1269", "input": "def directlyProvidedBy(object):\n\t\"\"\"\n\tReturn the interfaces directly provided by the given object\n\nThe value returned is an `~zope.interface.interfaces.IDeclaration`.\n\t\"\"\"\n\t", "signature": "def directlyProvidedBy(object):", "docstring": "Return the interfaces directly provided by the given object\n\nThe value returned is an `~zope.interface.interfaces.IDeclaration`."} +{"question_id": "62b8b559eb7e40a82d2d11f8", "input": "def minimalBases(classes):\n\t\"\"\"\n\tReturns all classes without subclasses as a list.\n\t\"\"\"\n\t", "signature": "def minimalBases(classes):", "docstring": "Returns all classes without subclasses as a list."} +{"question_id": "62b8b4b9eb7e40a82d2d1134", "input": "def namesAndDescriptions(self, all=False):\n\t\"\"\"\n\tReturns the name and description of the current class attribute.\n\t\"\"\"\n\t", "signature": "def namesAndDescriptions(self, all=False):", "docstring": "Returns the name and description of the current class attribute."} +{"question_id": "62b8b416eb7e40a82d2d1129", "input": "def names(self, all=False):\n\t\"\"\"\n\tReturn the attribute names of current class.\n\t\"\"\"\n\t", "signature": "def names(self, all=False):", "docstring": "Return the attribute names of current class."} +{"question_id": "62b8b3d6eb7e40a82d2d111c", "input": "def _normalizeargs(sequence, output=None):\n\t\"\"\"\n\tNormalize declaration arguments\n\nNormalization arguments might contain Declarions, tuples, or single\ninterfaces.\n\nAnything but individial interfaces or implements specs will be expanded.\n\t\"\"\"\n\t", "signature": "def _normalizeargs(sequence, output=None):", "docstring": "Normalize declaration arguments\n\nNormalization arguments might contain Declarions, tuples, or single\ninterfaces.\n\nAnything but individial interfaces or implements specs will be expanded."} +{"question_id": "62b8b3d5eb7e40a82d2d1110", "input": "def _c_optimizations_available():\n\t\"\"\"\n\tIf available, return the C optimization module, otherwise a false value.\n\t\"\"\"\n\t", "signature": "def _c_optimizations_available():", "docstring": "If available, return the C optimization module, otherwise a false value."} +{"question_id": "62b8b3d4eb7e40a82d2d110f", "input": "def _should_attempt_c_optimizations():\n\t\"\"\"\n\tReturn a true value if we use the C optimizations.\n\t\"\"\"\n\t", "signature": "def _should_attempt_c_optimizations():", "docstring": "Return a true value if we use the C optimizations."} +{"question_id": "62b8b3d4eb7e40a82d2d110e", "input": "def _c_optimizations_ignored():\n\t\"\"\"\n\tReturn True if get \"PURE_PYTHON\" environ is not none and not 0, else return False\n\t\"\"\"\n\t", "signature": "def _c_optimizations_ignored():", "docstring": "Return True if get \"PURE_PYTHON\" environ is not none and not 0, else return False"} +{"question_id": 
"62b8b3d4eb7e40a82d2d110d", "input": "def _c_optimizations_required():\n\t\"\"\"\n\tReturn a true value if the C optimizations are required.\n\t\"\"\"\n\t", "signature": "def _c_optimizations_required():", "docstring": "Return a true value if the C optimizations are required."} +{"question_id": "62b87b989a0c4fa8b80b35ee", "input": "def reset(self):\n\t\"\"\"\n\tCurrent context is reset to an empty dict, bins of the class are reinitialized with the *initial_value* or with *make_bins()*.\n\t\"\"\"\n\t", "signature": "def reset(self):", "docstring": "Current context is reset to an empty dict, bins of the class are reinitialized with the *initial_value* or with *make_bins()*."} +{"question_id": "62b87b859a0c4fa8b80b35d7", "input": "def to_csv(self, separator=\",\", header=None):\n\t\"\"\"\n\t.. deprecated:: 0.5 in Lena 0.5 to_csv is not used.\n Iterables are converted to tables.\n\nConvert graph's points to CSV.\n\n*separator* delimits values, the default is comma.\n\n*header*, if not ``None``, is the first string of the output\n(new line is added automatically).\n\nSince a graph can be multidimensional,\nfor each point first its coordinate is converted to string\n(separated by *separator*), then each part of its value.\n\nTo convert :class:`Graph` to CSV inside a Lena sequence,\nuse :class:`lena.output.ToCSV`.\n\t\"\"\"\n\t", "signature": "def to_csv(self, separator=\",\", header=None):", "docstring": ".. deprecated:: 0.5 in Lena 0.5 to_csv is not used.\n Iterables are converted to tables.\n\nConvert graph's points to CSV.\n\n*separator* delimits values, the default is comma.\n\n*header*, if not ``None``, is the first string of the output\n(new line is added automatically).\n\nSince a graph can be multidimensional,\nfor each point first its coordinate is converted to string\n(separated by *separator*), then each part of its value.\n\nTo convert :class:`Graph` to CSV inside a Lena sequence,\nuse :class:`lena.output.ToCSV`."} +{"question_id": "62b87b839a0c4fa8b80b35cb", "input": "def _get_err_indices(self, coord_name):\n\t\"\"\"\n\tFind all error indexes corresponding to coord_name.\n\t\"\"\"\n\t", "signature": "def _get_err_indices(self, coord_name):", "docstring": "Find all error indexes corresponding to coord_name."} +{"question_id": "62b87b7e9a0c4fa8b80b35bc", "input": "def _update_context(self, context):\n\t\"\"\"\n\tUpdate *context* with the properties of this graph.\n\n*context.error* is appended with indices of errors.\nExample subcontext for a graph with fields \"E,t,error_E_low\":\n{\"error\": {\"x_low\": {\"index\": 2}}}.\nNote that error names are called \"x\", \"y\" and \"z\"\n(this corresponds to first three coordinates,\nif they are present), which allows to simplify plotting.\nExisting values are not removed\nfrom *context.value* and its subcontexts.\n\nCalled on \"destruction\" of the graph (for example,\nin :class:`.ToCSV`). 
By destruction we mean conversion\nto another structure (like text) in the flow.\nThe graph object is not really destroyed in this process.\n\t\"\"\"\n\t", "signature": "def _update_context(self, context):", "docstring": "Update *context* with the properties of this graph.\n\n*context.error* is appended with indices of errors.\nExample subcontext for a graph with fields \"E,t,error_E_low\":\n{\"error\": {\"x_low\": {\"index\": 2}}}.\nNote that error names are called \"x\", \"y\" and \"z\"\n(this corresponds to first three coordinates,\nif they are present), which allows to simplify plotting.\nExisting values are not removed\nfrom *context.value* and its subcontexts.\n\nCalled on \"destruction\" of the graph (for example,\nin :class:`.ToCSV`). By destruction we mean conversion\nto another structure (like text) in the flow.\nThe graph object is not really destroyed in this process."} +{"question_id": "62b87b4f9a0c4fa8b80b3580", "input": "def integral(bins, edges):\n\t\"\"\"\n\tCalculate the area of the overall graph.\n\t\"\"\"\n\t", "signature": "def integral(bins, edges):", "docstring": "Calculate the area of the overall graph."} +{"question_id": "62b87b199a0c4fa8b80b354e", "input": "def is_fill_request_seq(seq):\n\t\"\"\"\n\tCheck whether seq can be converted to FillRequestSeq and bool is returned.\n\t\"\"\"\n\t", "signature": "def is_fill_request_seq(seq):", "docstring": "Check whether seq can be converted to FillRequestSeq and bool is returned."} +{"question_id": "62b87b099a0c4fa8b80b3538", "input": "def is_fill_request_el(obj):\n\t\"\"\"\n\tCheck whether the obj class has the fill and request attributes.\n\t\"\"\"\n\t", "signature": "def is_fill_request_el(obj):", "docstring": "Check whether the obj class has the fill and request attributes."} +{"question_id": "62b87af99a0c4fa8b80b3524", "input": "def is_run_el(obj):\n\t\"\"\"\n\tCheck whether the obj class has the run method.\n\t\"\"\"\n\t", "signature": "def is_run_el(obj):", "docstring": "Check whether the obj class has the run method."} +{"question_id": "62b87af69a0c4fa8b80b351a", "input": "def is_fill_compute_el(obj):\n\t\"\"\"\n\tCheck whether the obj class has the fill and compute methods.\n\t\"\"\"\n\t", "signature": "def is_fill_compute_el(obj):", "docstring": "Check whether the obj class has the fill and compute methods."} +{"question_id": "62b87af19a0c4fa8b80b34f7", "input": "def difference(d1, d2, level=-1):\n\t\"\"\"\n\tReturn a dictionary with items from d1 not contained in d2.\n\t\"\"\"\n\t", "signature": "def difference(d1, d2, level=-1):", "docstring": "Return a dictionary with items from d1 not contained in d2."} +{"question_id": "62b87af09a0c4fa8b80b34f1", "input": "def fill(self, coord, weight=1):\n\t\"\"\"\n\tFill histogram at *coord* with the given *weight*.\n\nCoordinates outside the histogram edges are ignored.\n\t\"\"\"\n\t", "signature": "def fill(self, coord, weight=1):", "docstring": "Fill histogram at *coord* with the given *weight*.\n\nCoordinates outside the histogram edges are ignored."} +{"question_id": "62b86aa3b4d922cb0e688d36", "input": "def _validate_labels(labels):\n\t\"\"\"\n\tCheck that keys and values in the given labels by validate_key() and validate_value().\n\t\"\"\"\n\t", "signature": "def _validate_labels(labels):", "docstring": "Check that keys and values in the given labels by validate_key() and validate_value()."} +{"question_id": "62b86a9eb4d922cb0e688d25", "input": "def _get_resource_name_regex():\n\t\"\"\"\n\tReturn the regular expressions that are used to validate the name of the 
Krake resources\n\t\"\"\"\n\t", "signature": "def _get_resource_name_regex():", "docstring": "Return the regular expressions that are used to validate the name of the Krake resources"} +{"question_id": "62b86a4fb4d922cb0e688cf8", "input": "def validate_value(value):\n\t\"\"\"\n\tValidate the given value against the corresponding regular expression.\n\t\"\"\"\n\t", "signature": "def validate_value(value):", "docstring": "Validate the given value against the corresponding regular expression."} +{"question_id": "62b86a4fb4d922cb0e688cf7", "input": "def validate_key(key):\n\t\"\"\"\n\tValidate the given key against the corresponding regular expression.\n\t\"\"\"\n\t", "signature": "def validate_key(key):", "docstring": "Validate the given key against the corresponding regular expression."} +{"question_id": "62b86a01b4d922cb0e688ccc", "input": "def generate_default_observer_schema_dict(manifest_dict, first_level=False):\n\t\"\"\"\n\tThe values corresponding to different keys in the new dict are generated based on the value type (such as dict and list) in the manifest_dict file. Then new dictionary is returned.\n\t\"\"\"\n\t", "signature": "def generate_default_observer_schema_dict(manifest_dict, first_level=False):", "docstring": "The values corresponding to different keys in the new dict are generated based on the value type (such as dict and list) in the manifest_dict file. Then new dictionary is returned."} +{"question_id": "62b869ebb4d922cb0e688cc6", "input": "def update_last_applied_manifest_list_from_resp(\n last_applied_manifest, observer_schema, response\n):\n\t\"\"\"\n\tTogether with :func:``update_last_applied_manifest_dict_from_resp``, this\nfunction is called recursively to update a partial ``last_applied_manifest``\nfrom a partial Kubernetes response\n\nArgs:\n last_applied_manifest (list): partial ``last_applied_manifest`` being\n updated\n observer_schema (list): partial ``observer_schema``\n response (list): partial response from the Kubernetes API.\n\nThis function go through all observed fields, and initialized their value in\nlast_applied_manifest if they are not yet present\n\t\"\"\"\n\t", "signature": "def update_last_applied_manifest_list_from_resp(\n last_applied_manifest, observer_schema, response\n):", "docstring": "Together with :func:``update_last_applied_manifest_dict_from_resp``, this\nfunction is called recursively to update a partial ``last_applied_manifest``\nfrom a partial Kubernetes response\n\nArgs:\n last_applied_manifest (list): partial ``last_applied_manifest`` being\n updated\n observer_schema (list): partial ``observer_schema``\n response (list): partial response from the Kubernetes API.\n\nThis function go through all observed fields, and initialized their value in\nlast_applied_manifest if they are not yet present"} +{"question_id": "62b869eab4d922cb0e688cc5", "input": "def update_last_applied_manifest_dict_from_resp(\n last_applied_manifest, observer_schema, response\n):\n\t\"\"\"\n\tTogether with :func:``update_last_applied_manifest_list_from_resp``, this\nfunction is called recursively to update a partial ``last_applied_manifest``\nfrom a partial Kubernetes response\n\nArgs:\n last_applied_manifest (dict): partial ``last_applied_manifest`` being\n updated\n observer_schema (dict): partial ``observer_schema``\n response (dict): partial response from the Kubernetes API.\n\nRaises:\n KeyError: If the observed field is not present in the Kubernetes response\n\nThis function go through all observed fields, and initialized their value 
in\nlast_applied_manifest if they are not yet present\n\t\"\"\"\n\t", "signature": "def update_last_applied_manifest_dict_from_resp(\n last_applied_manifest, observer_schema, response\n):", "docstring": "Together with :func:``update_last_applied_manifest_list_from_resp``, this\nfunction is called recursively to update a partial ``last_applied_manifest``\nfrom a partial Kubernetes response\n\nArgs:\n last_applied_manifest (dict): partial ``last_applied_manifest`` being\n updated\n observer_schema (dict): partial ``observer_schema``\n response (dict): partial response from the Kubernetes API.\n\nRaises:\n KeyError: If the observed field is not present in the Kubernetes response\n\nThis function go through all observed fields, and initialized their value in\nlast_applied_manifest if they are not yet present"} +{"question_id": "62b869eab4d922cb0e688cbf", "input": "def generate_default_observer_schema(app):\n\t\"\"\"\n\tGenerate the default observer schema for each Kubernetes resource present in ``spec.manifest`` for which a custom observer schema hasn't been specified.\n\t\"\"\"\n\t", "signature": "def generate_default_observer_schema(app):", "docstring": "Generate the default observer schema for each Kubernetes resource present in ``spec.manifest`` for which a custom observer schema hasn't been specified."} +{"question_id": "62b43427903eeb48555d3ea5", "input": "def format(\n\t\tself,\n\t\tsql: AnyStr,\n\t\tparams: Union[Dict[Union[str, int], Any], Sequence[Any]],\n\t) -> Tuple[AnyStr, Union[Dict[Union[str, int], Any], Sequence[Any]]]:\n\t\"\"\"\n\tConvert sql using self._converter.convert\n\t\"\"\"\n\t", "signature": "def format(\n\t\tself,\n\t\tsql: AnyStr,\n\t\tparams: Union[Dict[Union[str, int], Any], Sequence[Any]],\n\t) -> Tuple[AnyStr, Union[Dict[Union[str, int], Any], Sequence[Any]]]:", "docstring": "Convert sql using self._converter.convert"} +{"question_id": "62b43428903eeb48555d3eaa", "input": "def formatmany(\n\t\tself,\n\t\tsql: AnyStr,\n\t\tmany_params: Union[Iterable[Dict[Union[str, int], Any]], Iterable[Sequence[Any]]],\n\t) -> Tuple[AnyStr, Union[List[Dict[Union[str, int], Any]], List[Sequence[Any]]]]:\n\t\"\"\"\n\tConvert sql using self._converter.convert_many\n\t\"\"\"\n\t", "signature": "def formatmany(\n\t\tself,\n\t\tsql: AnyStr,\n\t\tmany_params: Union[Iterable[Dict[Union[str, int], Any]], Iterable[Sequence[Any]]],\n\t) -> Tuple[AnyStr, Union[List[Dict[Union[str, int], Any]], List[Sequence[Any]]]]:", "docstring": "Convert sql using self._converter.convert_many"} +{"question_id": "62b45df05108cfac7f2109ce", "input": "def validate(self, path):\n\t\"\"\"\n\tReturns True if OCFL object at path or pyfs root, False otherwise.\n\t\"\"\"\n\t", "signature": "def validate(self, path):", "docstring": "Returns True if OCFL object at path or pyfs root, False otherwise."} +{"question_id": "62b45df15108cfac7f2109dc", "input": "def status_str(self, prefix=''):\n\t\"\"\"\n\tReturn a string with visiting the sorted self.messages list, each visit add prefix and the element in the sorted self.messages list.\n\t\"\"\"\n\t", "signature": "def status_str(self, prefix=''):", "docstring": "Return a string with visiting the sorted self.messages list, each visit add prefix and the element in the sorted self.messages list."} +{"question_id": "62b45df15108cfac7f2109dd", "input": "def status_str(self, prefix=''):\n\t\"\"\"\n\tReturn string representation with self.log.status_str, with optional prefix.\n\t\"\"\"\n\t", "signature": "def status_str(self, prefix=''):", "docstring": "Return string 
representation with self.log.status_str, with optional prefix."} +{"question_id": "62b45e135108cfac7f2109f4", "input": "def is_valid(self, identifier):\n\t\"\"\"\n\tReturn True if identifier is valid. In this base implementation, always return True. (D)\n\t\"\"\"\n\t", "signature": "def is_valid(self, identifier):", "docstring": "Return True if identifier is valid. In this base implementation, always return True. (D)"} +{"question_id": "62b45e145108cfac7f210a07", "input": "def validate(self, inventory, extract_spec_version=False):\n\t\"\"\"\n\tValidate a given inventory. If extract_spec_version is True then will look at the type value to determine the specification version. In the case that there is no type value or it isn't valid, then other tests will be based on the version given in self.spec_version. (D)\n\t\"\"\"\n\t", "signature": "def validate(self, inventory, extract_spec_version=False):", "docstring": "Validate a given inventory. If extract_spec_version is True then will look at the type value to determine the specification version. In the case that there is no type value or it isn't valid, then other tests will be based on the version given in self.spec_version. (D)"} +{"question_id": "62b45e145108cfac7f210a09", "input": "def check_digests_present_and_used(self, manifest_files, digests_used):\n\t\"\"\"\n\tCheck all digests in manifest that are needed are present and used. Return error() in the class.\n\t\"\"\"\n\t", "signature": "def check_digests_present_and_used(self, manifest_files, digests_used):", "docstring": "Check all digests in manifest that are needed are present and used. Return error() in the class."} +{"question_id": "62b45e165108cfac7f210a16", "input": "def validate_as_prior_version(self, prior):\n\t\"\"\"\n\tCheck that prior is a valid prior version of the current inventory object. The input variable prior is also expected to be an InventoryValidator object and both self and prior inventories are assumed to have been checked for internal consistency. Return error() in the class.\n\t\"\"\"\n\t", "signature": "def validate_as_prior_version(self, prior):", "docstring": "Check that prior is a valid prior version of the current inventory object. The input variable prior is also expected to be an InventoryValidator object and both self and prior inventories are assumed to have been checked for internal consistency. Return error() in the class."} +{"question_id": "62b45e165108cfac7f210a17", "input": "def get_logical_path_map(inventory, version):\n\t\"\"\"\n\tReturns the file paths of the states in the inventory in the dict type.\n\t\"\"\"\n\t", "signature": "def get_logical_path_map(inventory, version):", "docstring": "Returns the file paths of the states in the inventory in the dict type."} +{"question_id": "62b45e175108cfac7f210a19", "input": "def validate_fixity(self, fixity, manifest_files):\n\t\"\"\"\n\tValidate fixity block in inventory. Check the structure of the fixity block and makes sure that only files listed in the manifest are referenced. Return error() in the class.\n\t\"\"\"\n\t", "signature": "def validate_fixity(self, fixity, manifest_files):", "docstring": "Validate fixity block in inventory. Check the structure of the fixity block and makes sure that only files listed in the manifest are referenced. 
Return error() in the class."} +{"question_id": "62b463153879012d19481498", "input": "def files_list(path):\n\t\"\"\"\n\tReturn the files in given path.\n\t\"\"\"\n\t", "signature": "def files_list(path):", "docstring": "Return the files in given path."} +{"question_id": "62b463153879012d1948149a", "input": "def _group_files_by_xml_filename(source, xmls, files):\n\t\"\"\"\n\tGroups files by xmls and returns data in dict format.\n\t\"\"\"\n\t", "signature": "def _group_files_by_xml_filename(source, xmls, files):", "docstring": "Groups files by xmls and returns data in dict format."} +{"question_id": "62b463153879012d1948149b", "input": "def match_file_by_prefix(prefix, file_path):\n\t\"\"\"\n\tGiven a filepath, return true if the basename of the filepath is startswith the given prefix plus \"-\" or the given prefix plus \".\"\n\t\"\"\"\n\t", "signature": "def match_file_by_prefix(prefix, file_path):", "docstring": "Given a filepath, return true if the basename of the filepath is startswith the given prefix plus \"-\" or the given prefix plus \".\""} +{"question_id": "62b463153879012d1948149c", "input": "def select_filenames_by_prefix(prefix, files):\n\t\"\"\"\n\tFor each file in files, return all files taht match the given prefix\n\t\"\"\"\n\t", "signature": "def select_filenames_by_prefix(prefix, files):", "docstring": "For each file in files, return all files taht match the given prefix"} +{"question_id": "62b463153879012d1948149d", "input": "def _explore_folder(folder):\n\t\"\"\"\n\tGroups files in the given group by using _group_files_by_xml_filename.\n\t\"\"\"\n\t", "signature": "def _explore_folder(folder):", "docstring": "Groups files in the given group by using _group_files_by_xml_filename."} +{"question_id": "62b463153879012d1948149f", "input": "def _eval_file(prefix, file_path):\n\t\"\"\"\n\tIdentify the type of the given file. Return None if the file do not match the given prefix or the type of the file is xml. Return dict with the key of component_id, file_path if the type of the file is \"pdf\", return dict with the key of component_id, file_path, ftype, file_path if the type of the file is not \"pdf\".\n\t\"\"\"\n\t", "signature": "def _eval_file(prefix, file_path):", "docstring": "Identify the type of the given file. Return None if the file do not match the given prefix or the type of the file is xml. 
Return dict with the key of component_id, file_path if the type of the file is \"pdf\", return dict with the key of component_id, file_path, ftype, file_path if the type of the file is not \"pdf\"."} +{"question_id": "62b463153879012d194814a1", "input": "def add_rendition(self, lang, file_path):\n\t\"\"\"\n\tAssign the filepath invoke by filepath() in the class to \"lang\" in _renditions in the class.\n\t\"\"\"\n\t", "signature": "def add_rendition(self, lang, file_path):", "docstring": "Assign the filepath invoke by filepath() in the class to \"lang\" in _renditions in the class."} +{"question_id": "62b463163879012d194814a2", "input": "def add_asset(self, basename, file_path):\n\t\"\"\"\n\tAssign the filepath invoke by filepath() in the class to \"basename\" in _assets in the class.\n\t\"\"\"\n\t", "signature": "def add_asset(self, basename, file_path):", "docstring": "Assign the filepath invoke by filepath() in the class to \"basename\" in _assets in the class."} +{"question_id": "62b463163879012d194814a4", "input": "def _explore_zipfile(zip_path):\n\t\"\"\"\n\tGroups the given zip path by using _group_files_by_xml_filename.\n\t\"\"\"\n\t", "signature": "def _explore_zipfile(zip_path):", "docstring": "Groups the given zip path by using _group_files_by_xml_filename."} +{"question_id": "62b463163879012d194814a6", "input": "def files_list_from_zipfile(zip_path):\n\t\"\"\"\n\tReturn the files in the given zip path.\n\t\"\"\"\n\t", "signature": "def files_list_from_zipfile(zip_path):", "docstring": "Return the files in the given zip path."} +{"question_id": "62b4631b3879012d194814dd", "input": "def fix_namespace_prefix_w(content):\n\t\"\"\"\n\tReplace \"w:st=\" in content with \"w-st=\".\n\t\"\"\"\n\t", "signature": "def fix_namespace_prefix_w(content):", "docstring": "Replace \"w:st=\" in content with \"w-st=\"."} +{"question_id": "62b463283879012d1948153d", "input": "def match_pubdate(node, pubdate_xpaths):\n\t\"\"\"\n\tFor the given node, returns the first match in the pubdate_xpaths list.\n\t\"\"\"\n\t", "signature": "def match_pubdate(node, pubdate_xpaths):", "docstring": "For the given node, returns the first match in the pubdate_xpaths list."} +{"question_id": "62b463303879012d19481579", "input": "def _extract_number_and_supplment_from_issue_element(issue):\n\t\"\"\"\n\tReturn the possible values of number and sup from the contents of issue.\n\t\"\"\"\n\t", "signature": "def _extract_number_and_supplment_from_issue_element(issue):", "docstring": "Return the possible values of number and sup from the contents of issue."} +{"question_id": "62b46740d2f69a53b466171a", "input": "def pretty(self, indent=0, debug=False):\n\t\"\"\"\n\tReturn a pretty formatted representation of self. obj = f\"'{self.obj}'\" if isinstance(self.obj, str) else repr(self.obj) return (\" \" * indent) + f\"{self.__class__.__name__}({debug_details}{obj})\"\n\t\"\"\"\n\t", "signature": "def pretty(self, indent=0, debug=False):", "docstring": "Return a pretty formatted representation of self. 
obj = f\"'{self.obj}'\" if isinstance(self.obj, str) else repr(self.obj) return (\" \" * indent) + f\"{self.__class__.__name__}({debug_details}{obj})\""} +{"question_id": "62b46746d2f69a53b4661722", "input": "def absorb(self, args):\n\t\"\"\"\n\tGiven an `args` sequence of expressions, return a new list of expression applying absorption.\n\t\"\"\"\n\t", "signature": "def absorb(self, args):", "docstring": "Given an `args` sequence of expressions, return a new list of expression applying absorption."} +{"question_id": "62b86707b4d922cb0e688c2a", "input": "def on(self, hook):\n\t\"\"\"\n\tReturn a decorator function to add a new handler to the \"hook\" in the registry in the class.\n\t\"\"\"\n\t", "signature": "def on(self, hook):", "docstring": "Return a decorator function to add a new handler to the \"hook\" in the registry in the class."} +{"question_id": "62b86729b4d922cb0e688c2f", "input": "def base_config(user, etcd_host=\"localhost\", etcd_port=2379):\n\t\"\"\"\n\tCreates a configuration with some simple parameters, the key parameters contains \"tls\", \"authentication\", \"authorization\", \"etcd\", \"docs\", and \"log\".\n\t\"\"\"\n\t", "signature": "def base_config(user, etcd_host=\"localhost\", etcd_port=2379):", "docstring": "Creates a configuration with some simple parameters, the key parameters contains \"tls\", \"authentication\", \"authorization\", \"etcd\", \"docs\", and \"log\"."} +{"question_id": "62b8a4a4755ee91dce50a3d2", "input": "def _fromutc(self, dt):\n\t\"\"\"\n\tGiven a timezone datetime in a given timezone, calculates a timezone datetime in a new timezone.\n\t\"\"\"\n\t", "signature": "def _fromutc(self, dt):", "docstring": "Given a timezone datetime in a given timezone, calculates a timezone datetime in a new timezone."} +{"question_id": "62b8982f755ee91dce50a241", "input": "def normalized(self):\n\t\"\"\"\n\tNormalize all units of time to integers.\n\t\"\"\"\n\t", "signature": "def normalized(self):", "docstring": "Normalize all units of time to integers."} +{"question_id": "62b89640755ee91dce50a114", "input": "def tzname_in_python2(namefunc):\n\t\"\"\"\n\tChange unicode output into bytestrings in Python 2\n\t\"\"\"\n\t", "signature": "def tzname_in_python2(namefunc):", "docstring": "Change unicode output into bytestrings in Python 2"} +{"question_id": "62b87d24d292efb640a55670", "input": "def get_versions():\n\t\"\"\"\n\tObtains the version information. If the version information cannot be obtained, the default value is returned.\n\t\"\"\"\n\t", "signature": "def get_versions():", "docstring": "Obtains the version information. 
If the version information cannot be obtained, the default value is returned."} +{"question_id": "62b87d24d292efb640a5566f", "input": "def render(pieces, style):\n\t\"\"\"\n\tInput pieces and a style, render the pieces to the corresponding style.\n\t\"\"\"\n\t", "signature": "def render(pieces, style):", "docstring": "Input pieces and a style, render the pieces to the corresponding style."} +{"question_id": "62b87d24d292efb640a5566d", "input": "def plus_or_dot(pieces):\n\t\"\"\"\n\tReturn \".\" If the closet-tag of the pieces contains \"+\", otherwise, return \"+\".\n\t\"\"\"\n\t", "signature": "def plus_or_dot(pieces):", "docstring": "Return \".\" If the closet-tag of the pieces contains \"+\", otherwise, return \"+\"."} +{"question_id": "62b87d23d292efb640a5566b", "input": "def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):\n\t\"\"\"\n\tCall the given command(s).\n\t\"\"\"\n\t", "signature": "def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):", "docstring": "Call the given command(s)."} +{"question_id": "62b87d23d292efb640a55668", "input": "def get_config():\n\t\"\"\"\n\tReturn a new VersioneerConfig() and set various attribute of it.\n\t\"\"\"\n\t", "signature": "def get_config():", "docstring": "Return a new VersioneerConfig() and set various attribute of it."} +{"question_id": "62b87d23d292efb640a55667", "input": "def register_vcs_handler(vcs, method):\n\t\"\"\"\n\tCreate decorator to mark a method as the handler of a object\n\t\"\"\"\n\t", "signature": "def register_vcs_handler(vcs, method):", "docstring": "Create decorator to mark a method as the handler of a object"} +{"question_id": "62b45e945108cfac7f210a4a", "input": "def validate_hierarchy(self, validate_objects=True, check_digests=True, show_warnings=False):\n\t\"\"\"\n\tValidate storage root hierarchy.\n\nReturns:\n num_objects - number of objects checked\n good_objects - number of objects checked that were found to be valid\n\t\"\"\"\n\t", "signature": "def validate_hierarchy(self, validate_objects=True, check_digests=True, show_warnings=False):", "docstring": "Validate storage root hierarchy.\n\nReturns:\n num_objects - number of objects checked\n good_objects - number of objects checked that were found to be valid"} +{"question_id": "62b45e515108cfac7f210a3c", "input": "def initialize(self):\n\t\"\"\"\n\tCreate and initialize a new OCFL storage root.\n\t\"\"\"\n\t", "signature": "def initialize(self):", "docstring": "Create and initialize a new OCFL storage root."} +{"question_id": "62b45e2eb89c9fd354170232", "input": "def next_version(version):\n\t\"\"\"\n\tGiven next version identifier following existing pattern\n\t\"\"\"\n\t", "signature": "def next_version(version):", "docstring": "Given next version identifier following existing pattern"} +{"question_id": "62b45e23e0d4551b0392c90a", "input": "def validate_version_inventories(self, version_dirs):\n\t\"\"\"\n\tEach version SHOULD have an inventory up to that point.\n\nAlso keep a record of any content digests different from those in the root inventory\nso that we can also check them when validating the content.\n\nversion_dirs is an array of version directory names and is assumed to be in\nversion sequence (1, 2, 3...).\n\t\"\"\"\n\t", "signature": "def validate_version_inventories(self, version_dirs):", "docstring": "Each version SHOULD have an inventory up to that point.\n\nAlso keep a record of any content digests different from those in the root inventory\nso that we can also check them when 
validating the content.\n\nversion_dirs is an array of version directory names and is assumed to be in\nversion sequence (1, 2, 3...)."} +{"question_id": "62b45e21e0d4551b0392c8ed", "input": "def find_path_type(path):\n\t\"\"\"\n\tReturn a string indicating the type of thing at the given path\n\t\"\"\"\n\t", "signature": "def find_path_type(path):", "docstring": "Return a string indicating the type of thing at the given path"} +{"question_id": "62b45b396decaeff903e1001", "input": "def amend_bzparams(self, params, bug_ids):\n\t\"\"\"\n\tAmend the Bugzilla params\n\t\"\"\"\n\t", "signature": "def amend_bzparams(self, params, bug_ids):", "docstring": "Amend the Bugzilla params"} +{"question_id": "62b4567ed7d32e5b55cc83d9", "input": "def deep_merge_nodes(nodes):\n\t\"\"\"\n\tMerge any node values corresponding to duplicate keys and return the result. If there are colliding keys with non-MappingNode values, the last of the values remains.\n\t\"\"\"\n\t", "signature": "def deep_merge_nodes(nodes):", "docstring": "Merge any node values corresponding to duplicate keys and return the result. If there are colliding keys with non-MappingNode values, the last of the values remains."} +{"question_id": "62b4567ad7d32e5b55cc83af", "input": "def parse_arguments(*arguments):\n\t\"\"\"\n\tGiven command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance\n\t\"\"\"\n\t", "signature": "def parse_arguments(*arguments):", "docstring": "Given command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance"} +{"question_id": "62b45679d7d32e5b55cc83a9", "input": "def parser_flags(parser):\n\t\"\"\"\n\tGiven an argparse.ArgumentParser instance, return its argument flags in a space-separated\nstring.\n\t\"\"\"\n\t", "signature": "def parser_flags(parser):", "docstring": "Given an argparse.ArgumentParser instance, return its argument flags in a space-separated\nstring."} +{"question_id": "62b45665d7d32e5b55cc8365", "input": "def parse_arguments(*unparsed_arguments):\n\t\"\"\"\n\tParses parameters and returns them as dict maps\n\t\"\"\"\n\t", "signature": "def parse_arguments(*unparsed_arguments):", "docstring": "Parses parameters and returns them as dict maps"} +{"question_id": "62b45665d7d32e5b55cc8364", "input": "def parse_subparser_arguments(unparsed_arguments, subparsers):\n\t\"\"\"\n\tGiven a sequence of arguments and a dict from subparser name to argparse.ArgumentParser\ninstance, give each requested action's subparser a shot at parsing all arguments. This allows\ncommon arguments like \"--repository\" to be shared across multiple subparsers.\n\nReturn the result as a tuple of (a dict mapping from subparser name to a parsed namespace of\narguments, a list of remaining arguments not claimed by any subparser).\n\t\"\"\"\n\t", "signature": "def parse_subparser_arguments(unparsed_arguments, subparsers):", "docstring": "Given a sequence of arguments and a dict from subparser name to argparse.ArgumentParser\ninstance, give each requested action's subparser a shot at parsing all arguments. 
This allows\ncommon arguments like \"--repository\" to be shared across multiple subparsers.\n\nReturn the result as a tuple of (a dict mapping from subparser name to a parsed namespace of\narguments, a list of remaining arguments not claimed by any subparser)."} +{"question_id": "62b45665d7d32e5b55cc8363", "input": "def make_parsers():\n\t\"\"\"\n\tBuild a parser and its subparsers and return them as a tuple.\n\t\"\"\"\n\t", "signature": "def make_parsers():", "docstring": "Build a parser and its subparsers and return them as a tuple."} +{"question_id": "62b438ba66fea644fe22cca2", "input": "def deep_merge_nodes(nodes):\n\t\"\"\"\n\tmerge any node values corresponding to duplicate keys and return the result. If there are colliding keys with non-MappingNode values, the last of the values remains.\n\t\"\"\"\n\t", "signature": "def deep_merge_nodes(nodes):", "docstring": "merge any node values corresponding to duplicate keys and return the result. If there are colliding keys with non-MappingNode values, the last of the values remains."} +{"question_id": "62b438b666fea644fe22cc78", "input": "def parse_arguments(*arguments):\n\t\"\"\"\n\tGiven command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance\n\t\"\"\"\n\t", "signature": "def parse_arguments(*arguments):", "docstring": "Given command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance"} +{"question_id": "62b438b666fea644fe22cc72", "input": "def parser_flags(parser):\n\t\"\"\"\n\tGiven an argparse.ArgumentParser instance, return its argument flags in a space-separated\nstring.\n\t\"\"\"\n\t", "signature": "def parser_flags(parser):", "docstring": "Given an argparse.ArgumentParser instance, return its argument flags in a space-separated\nstring."} +{"question_id": "62b438b566fea644fe22cc70", "input": "def bash_completion():\n\t\"\"\"\n\tProduce the borgmatic command by introspecting borgmatic's command-line argument parsers.\n\t\"\"\"\n\t", "signature": "def bash_completion():", "docstring": "Produce the borgmatic command by introspecting borgmatic's command-line argument parsers."} +{"question_id": "62b438a266fea644fe22cc2e", "input": "def parse_arguments(*unparsed_arguments):\n\t\"\"\"\n\tGiven command-line arguments with which this script was invoked, parse the arguments and return\nthem as a dict mapping from subparser name (or \"global\") to an argparse.Namespace instance.\n\t\"\"\"\n\t", "signature": "def parse_arguments(*unparsed_arguments):", "docstring": "Given command-line arguments with which this script was invoked, parse the arguments and return\nthem as a dict mapping from subparser name (or \"global\") to an argparse.Namespace instance."} +{"question_id": "62b438a266fea644fe22cc2d", "input": "def parse_subparser_arguments(unparsed_arguments, subparsers):\n\t\"\"\"\n\tGiven a sequence of arguments and a dict from subparser name to argparse.ArgumentParser\ninstance, give each requested action's subparser a shot at parsing all arguments. 
This allows\ncommon arguments like \"--repository\" to be shared across multiple subparsers.\n\nReturn the result as a tuple of (a dict mapping from subparser name to a parsed namespace of\narguments, a list of remaining arguments not claimed by any subparser).\n\t\"\"\"\n\t", "signature": "def parse_subparser_arguments(unparsed_arguments, subparsers):", "docstring": "Given a sequence of arguments and a dict from subparser name to argparse.ArgumentParser\ninstance, give each requested action's subparser a shot at parsing all arguments. This allows\ncommon arguments like \"--repository\" to be shared across multiple subparsers.\n\nReturn the result as a tuple of (a dict mapping from subparser name to a parsed namespace of\narguments, a list of remaining arguments not claimed by any subparser)."} +{"question_id": "62b438a266fea644fe22cc2c", "input": "def make_parsers():\n\t\"\"\"\n\tBuild a top-level parser and its subparsers and return them as a tuple.\n\t\"\"\"\n\t", "signature": "def make_parsers():", "docstring": "Build a top-level parser and its subparsers and return them as a tuple."} +{"question_id": "62ece4982e6aefcf4aabbd5f", "input": "def paging(response, max_results):\n\t\"\"\"\n\tReturn every response with the length of max_results\n Args:\n response (list): WAPI response.\n max_results (int): Maximum number of objects to be returned in one page.\n Returns:\n Generator object with WAPI response split page by page.\n\t\"\"\"\n\t", "signature": "def paging(response, max_results):", "docstring": "Return every response with the length of max_results\n Args:\n response (list): WAPI response.\n max_results (int): Maximum number of objects to be returned in one page.\n Returns:\n Generator object with WAPI response split page by page."} +{"question_id": "62ece4982e6aefcf4aabbd60", "input": "def size_to_bytes(size: str) -> int:\n\t\"\"\"\n\tConvert human readable file size to bytes\n\n Args:\n size: str, a string representing a human readable file size (eg: '500K')\n Returns:\n int: file size in bytes\n\t\"\"\"\n\t", "signature": "def size_to_bytes(size: str) -> int:", "docstring": "Convert human readable file size to bytes\n\n Args:\n size: str, a string representing a human readable file size (eg: '500K')\n Returns:\n int: file size in bytes"} +{"question_id": "62ece4982e6aefcf4aabbd61", "input": "def _dictsum(dicts):\n\t\"\"\"\n\tSum values with the same key in the dictionaries. And return a new dictionary.\n Example:\n given two dictionaries: dict1: {'a': 1, 'b': 2}, dict2: {'a': 5, 'b': 0}\n return a dictionary: dic:{'a': 6, 'b': 2}\n Returns: dict\n\t\"\"\"\n\t", "signature": "def _dictsum(dicts):", "docstring": "Sum values with the same key in the dictionaries. 
And return a new dictionary.\n Example:\n given two dictionaries: dict1: {'a': 1, 'b': 2}, dict2: {'a': 5, 'b': 0}\n return a dictionary: dic:{'a': 6, 'b': 2}\n Returns: dict"} +{"question_id": "62ece4982e6aefcf4aabbd62", "input": "def _replace_url_args(url, url_args):\n\t\"\"\"\n\tReplace the value in url with the value in url_args\n If url_args has a value, iterate over the keys and values from url_args.\n Then replace the keys of the first parameter with values.\n Returns: the modified url.\n\t\"\"\"\n\t", "signature": "def _replace_url_args(url, url_args):", "docstring": "Replace the value in url with the value in url_args\n If url_args has a value, iterate over the keys and values from url_args.\n Then replace the keys of the first parameter with values.\n Returns: the modified url."} +{"question_id": "62ece4982e6aefcf4aabbd63", "input": "def is_none_string(val: any) -> bool:\n\t\"\"\"\n\tCheck if a string represents a None value.\n Returns:\n Return True if the type of val is string and the lowercase of val is equal to 'none', otherwise return False\n\t\"\"\"\n\t", "signature": "def is_none_string(val: any) -> bool:", "docstring": "Check if a string represents a None value.\n Returns:\n Return True if the type of val is string and the lowercase of val is equal to 'none', otherwise return False"} +{"question_id": "62ece4982e6aefcf4aabbd64", "input": "def strip_root(path, root):\n\t\"\"\"\n\tRemove root from path. If fails, throw exception\n\n Returns:\n A path without root\n\t\"\"\"\n\t", "signature": "def strip_root(path, root):", "docstring": "Remove root from path. If fails, throw exception\n\n Returns:\n A path without root"} +{"question_id": "62ece4982e6aefcf4aabbd65", "input": "def parser_flags(parser):\n\t\"\"\"\n\tGiven an argparse.ArgumentParser instance, return its argument flags in a space-separated string.\n Args:\n script: argparse.ArgumentParser instance\n\n Returns:\n argument flags in a space-separated string\n\t\"\"\"\n\t", "signature": "def parser_flags(parser):", "docstring": "Given an argparse.ArgumentParser instance, return its argument flags in a space-separated string.\n Args:\n script: argparse.ArgumentParser instance\n\n Returns:\n argument flags in a space-separated string"} +{"question_id": "62ece4982e6aefcf4aabbd66", "input": "def was_processed(processed, path_name, verbose):\n\t\"\"\"\n\tCheck if a file or directory has already been processed.\n\n To prevent recursion, expand the path name to an absolution path\n call this function with a set that will store all the entries and\n the entry to test. If the entry is already in the set, report the issue\n and return ``True``. Otherwise, add the entry to the set and return\n ``False`` to allow the path to be processed.\n\n Args:\n processed: Set to store processed pathnames\n path_name: Path to a directory or file\n verbose: True if verbose output is requested\n\n Returns:\n True if it's already in the set. False if not.\n\t\"\"\"\n\t", "signature": "def was_processed(processed, path_name, verbose):", "docstring": "Check if a file or directory has already been processed.\n\n To prevent recursion, expand the path name to an absolution path\n call this function with a set that will store all the entries and\n the entry to test. If the entry is already in the set, report the issue\n and return ``True``. 
Otherwise, add the entry to the set and return\n ``False`` to allow the path to be processed.\n\n Args:\n processed: Set to store processed pathnames\n path_name: Path to a directory or file\n verbose: True if verbose output is requested\n\n Returns:\n True if it's already in the set. False if not."} +{"question_id": "62ece4982e6aefcf4aabbd67", "input": "def vertex3tuple(vertices):\n\t\"\"\"\n\tGet 3 points for each vertex of the polygon.\n This will include the vertex and the 2 points on both sides of the vertex\n If the subscript is out of bounds, take the value of index as 0\n Args:\n vertices: vertices to be converted\n\n Returns:\n A list where the elements of vertices represented by tuple\n\t\"\"\"\n\t", "signature": "def vertex3tuple(vertices):", "docstring": "Get 3 points for each vertex of the polygon.\n This will include the vertex and the 2 points on both sides of the vertex\n If the subscript is out of bounds, take the value of index as 0\n Args:\n vertices: vertices to be converted\n\n Returns:\n A list where the elements of vertices represented by tuple"} +{"question_id": "62ece4982e6aefcf4aabbd68", "input": "def int_to_string(number: int, alphabet: List[str], padding: Optional[int] = None) -> str:\n\t\"\"\"\n\tConvert a number to a string, using the given alphabet.\n The number represents a short uuid.\n The output has the most significant digit first.\n @param number: Int value\n @param alphabet : List with letters\n @param padding : Optional with int value\n @return string value corresponded to int\n\t\"\"\"\n\t", "signature": "def int_to_string(number: int, alphabet: List[str], padding: Optional[int] = None) -> str:", "docstring": "Convert a number to a string, using the given alphabet.\n The number represents a short uuid.\n The output has the most significant digit first.\n @param number: Int value\n @param alphabet : List with letters\n @param padding : Optional with int value\n @return string value corresponded to int"} +{"question_id": "62ece4982e6aefcf4aabbd69", "input": "def _replace_register(flow_params, register_number, register_value):\n\t\"\"\"\n\tReplace the value of flow_params[register_number] with flow_params[register_value] and delete flow_params[register_value]\n\t\"\"\"\n\t", "signature": "def _replace_register(flow_params, register_number, register_value):", "docstring": "Replace the value of flow_params[register_number] with flow_params[register_value] and delete flow_params[register_value]"} +{"question_id": "62ece4982e6aefcf4aabbd6a", "input": "def replace_dots(value, arg):\n\t\"\"\"\n\tReplaces all values of '.' to arg from the given string\n Args:\n value: old string\n arg: new string to replace '.'\n Returns:\n str, the replaced string\n\t\"\"\"\n\t", "signature": "def replace_dots(value, arg):", "docstring": "Replaces all values of '.' 
to arg from the given string\n Args:\n value: old string\n arg: new string to replace '.'\n Returns:\n str, the replaced string"} +{"question_id": "62ece4982e6aefcf4aabbd6b", "input": "def subclasses(cls):\n\t\"\"\"\n\tReturn all subclasses of a class, recursively\n\t\"\"\"\n\t", "signature": "def subclasses(cls):", "docstring": "Return all subclasses of a class, recursively"} +{"question_id": "62ece4982e6aefcf4aabbd6d", "input": "def string_to_int(string: str, alphabet: List[str]) -> int:\n\t\"\"\"\n\tConvert a string to a number, using the given alphabet.\n :param string: a string consist of the letters in alphabet\n :param alphabet: list of letters\n :return: int, the corresponding number of the given string using the given transition rule.\n\t\"\"\"\n\t", "signature": "def string_to_int(string: str, alphabet: List[str]) -> int:", "docstring": "Convert a string to a number, using the given alphabet.\n :param string: a string consist of the letters in alphabet\n :param alphabet: list of letters\n :return: int, the corresponding number of the given string using the given transition rule."} +{"question_id": "62ece4982e6aefcf4aabbd6f", "input": "def get_repo_archive(url: str, destination_path: Path) -> Path:\n\t\"\"\"\n\tGiven an url and a destination path, retrieve and extract .tar.gz archive which contains 'desc' file for each package\n\n Args:\n url: url of the .tar.gz archive to download\n destination_path: the path on disk where to extract archive\n Returns:\n path where the archive is extracted to\n\t\"\"\"\n\t", "signature": "def get_repo_archive(url: str, destination_path: Path) -> Path:", "docstring": "Given an url and a destination path, retrieve and extract .tar.gz archive which contains 'desc' file for each package\n\n Args:\n url: url of the .tar.gz archive to download\n destination_path: the path on disk where to extract archive\n Returns:\n path where the archive is extracted to"} +{"question_id": "62ece4982e6aefcf4aabbd70", "input": "def os_is_mac():\n\t\"\"\"\n\tChecks if the os is macOS\n\n :return: bool, True is macOS, otherwise False.\n\t\"\"\"\n\t", "signature": "def os_is_mac():", "docstring": "Checks if the os is macOS\n\n :return: bool, True is macOS, otherwise False."} +{"question_id": "62ece4982e6aefcf4aabbd71", "input": "def regex_dict(item):\n\t\"\"\"\n\tConvert *.cpp keys to regex keys.\nGiven a dict where the keys are all filenames with wildcards,\nconvert only the keys into equivalent regexes and leave the values intact.\n\nArgs:\n item: dict to convert\nReturns:\n dict with keys converted to regexes\n\t\"\"\"\n\t", "signature": "def regex_dict(item):", "docstring": "Convert *.cpp keys to regex keys.\nGiven a dict where the keys are all filenames with wildcards,\nconvert only the keys into equivalent regexes and leave the values intact.\n\nArgs:\n item: dict to convert\nReturns:\n dict with keys converted to regexes"} +{"question_id": "62ece4982e6aefcf4aabbd72", "input": "def unquote(name):\n\t\"\"\"\n\tRemove quote from the given name with regular expression.\n Args:\n name: input name\n Returns:\n name after removal\n\t\"\"\"\n\t", "signature": "def unquote(name):", "docstring": "Remove quote from the given name with regular expression.\n Args:\n name: input name\n Returns:\n name after removal"} +{"question_id": "62ece4982e6aefcf4aabbd73", "input": "def split(s, platform='this'):\n\t\"\"\"\n\tSplit the input str under given platform, return the splitting result\n If platform equals 'this', auto-detect current platform.\n If platform equals 1, use POSIX 
style.\n If platform equals 0, use Windows/CMD style.\n Args:\n s: input str\n platform: 'this' = auto from current platform; 1 = POSIX; 0 = Windows/CMD\n Returns:\n a list of splitting str\n\t\"\"\"\n\t", "signature": "def split(s, platform='this'):", "docstring": "Split the input str under given platform, return the splitting result\n If platform equals 'this', auto-detect current platform.\n If platform equals 1, use POSIX style.\n If platform equals 0, use Windows/CMD style.\n Args:\n s: input str\n platform: 'this' = auto from current platform; 1 = POSIX; 0 = Windows/CMD\n Returns:\n a list of splitting str"} +{"question_id": "62ece4982e6aefcf4aabbd74", "input": "def prepare_repository_from_archive(\n archive_path: str,\n filename: Optional[str] = None,\n tmp_path: Union[PosixPath, str] = \"/tmp\",\n) -> str:\n\t\"\"\"\n\tGiven an existing archive_path, uncompress it.\n Returns a file repo url which can be used as origin url.\n\n This does not deal with the case where the archive passed along does not exist.\n @param archive_path : archive file path\n @param filename: File name\n @param tmp_path: Temporary file path\n @return Repo URL\n\t\"\"\"\n\t", "signature": "def prepare_repository_from_archive(\n archive_path: str,\n filename: Optional[str] = None,\n tmp_path: Union[PosixPath, str] = \"/tmp\",\n) -> str:", "docstring": "Given an existing archive_path, uncompress it.\n Returns a file repo url which can be used as origin url.\n\n This does not deal with the case where the archive passed along does not exist.\n @param archive_path : archive file path\n @param filename: File name\n @param tmp_path: Temporary file path\n @return Repo URL"} +{"question_id": "62ece4982e6aefcf4aabbd75", "input": "def addignored(ignored):\n\t\"\"\"\n\tUse the git command to obtain the file names\u3002\n Turn the file names into a list, sort the list for only ignored files\n return those files as a single string with each filename separated by a comma.\n\t\"\"\"\n\t", "signature": "def addignored(ignored):", "docstring": "Use the git command to obtain the file names\u3002\n Turn the file names into a list, sort the list for only ignored files\n return those files as a single string with each filename separated by a comma."} +{"question_id": "62ece4982e6aefcf4aabbd76", "input": "def match(filename):\n\t\"\"\"\n\tCheck if the type of the given filename is 'doxyfile'\n\n Args:\n filename: filename to be check\n Returns:\n Return True if the type of the given filename in lower case is 'doxyfile'\n\t\"\"\"\n\t", "signature": "def match(filename):", "docstring": "Check if the type of the given filename is 'doxyfile'\n\n Args:\n filename: filename to be check\n Returns:\n Return True if the type of the given filename in lower case is 'doxyfile'"} +{"question_id": "62ece4982e6aefcf4aabbd77", "input": "def parse_frequency(frequency):\n\t\"\"\"\n\tGiven a frequency string with a number and a unit of time, return a corresponding\n datetime.timedelta instance.\n If the frequency is None or \"always\", return None.\n Raise ValueError if the given frequency cannot be parsed.\n For instance, given \"3 timeunit\", return datetime.timedelta(timeunit=3)\n\n @param frequency : A frequency string \"number timeunit\"\n\n @return str, the corresponding datetime\n\t\"\"\"\n\t", "signature": "def parse_frequency(frequency):", "docstring": "Given a frequency string with a number and a unit of time, return a corresponding\n datetime.timedelta instance.\n If the frequency is None or \"always\", return None.\n Raise ValueError if 
the given frequency cannot be parsed.\n For instance, given \"3 timeunit\", return datetime.timedelta(timeunit=3)\n\n @param frequency : A frequency string \"number timeunit\"\n\n @return str, the corresponding datetime"} +{"question_id": "62ece4992e6aefcf4aabbd78", "input": "def is_local(host):\n\t\"\"\"\n\tChecks if the host is the localhost,\n the localhost include local IP, user name, local domain name, `localhost` and `127.0.0.1`\n\n Args:\n host: The hostname or ip\n\n Returns:\n True if the host is the localhost else False\n\t\"\"\"\n\t", "signature": "def is_local(host):", "docstring": "Checks if the host is the localhost,\n the localhost include local IP, user name, local domain name, `localhost` and `127.0.0.1`\n\n Args:\n host: The hostname or ip\n\n Returns:\n True if the host is the localhost else False"} +{"question_id": "62ece4992e6aefcf4aabbd79", "input": "def make_find_paths(find_paths):\n\t\"\"\"\n\tGiven a sequence of path, transform all path into glob patterns. Pass through existing patterns untouched.\n\n Args:\n find_paths: sequence of path\n Returns:\n tuple of transformed path\n\t\"\"\"\n\t", "signature": "def make_find_paths(find_paths):", "docstring": "Given a sequence of path, transform all path into glob patterns. Pass through existing patterns untouched.\n\n Args:\n find_paths: sequence of path\n Returns:\n tuple of transformed path"} +{"question_id": "62ece4992e6aefcf4aabbd7a", "input": "def is_gitbash():\n\t\"\"\"\n\tCheck whether the current program is run in a Windows gitbash terminal\n\n Returns:\n boolean, True if gitbash else False\n\t\"\"\"\n\t", "signature": "def is_gitbash():", "docstring": "Check whether the current program is run in a Windows gitbash terminal\n\n Returns:\n boolean, True if gitbash else False"} +{"question_id": "62ece4992e6aefcf4aabbd7b", "input": "def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=False):\n\t\"\"\"\n\tGiven a rendered config YAML, write it out to target file.\n But if the file already exists and overwrite is False,\n abort before writing anything.\n If the file does not exist, create it.\n Write to the file otherwise.\n\n Returns: None\n\t\"\"\"\n\t", "signature": "def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=False):", "docstring": "Given a rendered config YAML, write it out to target file.\n But if the file already exists and overwrite is False,\n abort before writing anything.\n If the file does not exist, create it.\n Write to the file otherwise.\n\n Returns: None"} +{"question_id": "62ece4992e6aefcf4aabbd7c", "input": "def oneline(script, seperator=\" && \"):\n\t\"\"\"\n\tConvert a script to one line command with the given seperator.\n\n Args:\n script: str\n separator: str\n Returns:\n str, the one-line command.\n\t\"\"\"\n\t", "signature": "def oneline(script, seperator=\" && \"):", "docstring": "Convert a script to one line command with the given seperator.\n\n Args:\n script: str\n separator: str\n Returns:\n str, the one-line command."} +{"question_id": "62ece4992e6aefcf4aabbd7d", "input": "def subprocess_run_helper(func, *args, timeout, extra_env=None):\n\t\"\"\"\n\tRun a function in a sub-process\n\n Args:\n func: function, the function to be run. It must be in a module that is importable.\n *args: str. Any additional command line arguments to be passed in\n the first argument to ``subprocess.run``.\n extra_env: dict[str, str]. 
Any additional environment variables to be set for the subprocess.\n Returns:\n CompletedProcess instance\n\t\"\"\"\n\t", "signature": "def subprocess_run_helper(func, *args, timeout, extra_env=None):", "docstring": "Run a function in a sub-process\n\n Args:\n func: function, the function to be run. It must be in a module that is importable.\n *args: str. Any additional command line arguments to be passed in\n the first argument to ``subprocess.run``.\n extra_env: dict[str, str]. Any additional environment variables to be set for the subprocess.\n Returns:\n CompletedProcess instance"} +{"question_id": "62ece4992e6aefcf4aabbd7e", "input": "def _resolve_string(matcher):\n\t\"\"\"\n\tGet the value from environment given a matcher containing a name and an optional default value in its group dict.\n If the variable is not defined in environment and no default value is provided, an Error is raised.\n\t\"\"\"\n\t", "signature": "def _resolve_string(matcher):", "docstring": "Get the value from environment given a matcher containing a name and an optional default value in its group dict.\n If the variable is not defined in environment and no default value is provided, an Error is raised."} +{"question_id": "62ece4992e6aefcf4aabbd7f", "input": "def _parse_image_ref(image_href: str) -> Tuple[str, str, bool]:\n\t\"\"\"\n\tParse an image href into composite parts import urllib.\n\n :param image_href: href of an image\n :returns: a tuple of the form (image_id, netloc, use_ssl)\n :raises ValueError:\n\t\"\"\"\n\t", "signature": "def _parse_image_ref(image_href: str) -> Tuple[str, str, bool]:", "docstring": "Parse an image href into composite parts import urllib.\n\n :param image_href: href of an image\n :returns: a tuple of the form (image_id, netloc, use_ssl)\n :raises ValueError:"} +{"question_id": "62ece4992e6aefcf4aabbd80", "input": "def remove_ending_os_sep(input_list):\n\t\"\"\"\n\tIf input list is None, return []\n Iterate over a string list and remove trailing os seperator characters.\n Each string is tested if its length is greater than one and if the last\n character is the pathname seperator.\n Returns:\n A list after removing trailing os seperator characters.\n\t\"\"\"\n\t", "signature": "def remove_ending_os_sep(input_list):", "docstring": "If input list is None, return []\n Iterate over a string list and remove trailing os seperator characters.\n Each string is tested if its length is greater than one and if the last\n character is the pathname seperator.\n Returns:\n A list after removing trailing os seperator characters."} +{"question_id": "62ece4992e6aefcf4aabbd82", "input": "def get_pattern(pattern, strip=True):\n\t\"\"\"\n\tConvert a string to a regex pattern object\n\n Args:\n pattern: str or regex pattern object\n strip: boolean, whether the spaces in the start/end will be stripped\n Returns:\n regex pattern object\n\t\"\"\"\n\t", "signature": "def get_pattern(pattern, strip=True):", "docstring": "Convert a string to a regex pattern object\n\n Args:\n pattern: str or regex pattern object\n strip: boolean, whether the spaces in the start/end will be stripped\n Returns:\n regex pattern object"} +{"question_id": "62ece4992e6aefcf4aabbd83", "input": "def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):\n\t\"\"\"\n\tA list of command is given. Run the given command(s) with the given arguments use a subprocess. 
Return the running result (stdout and returncode)\n\t\"\"\"\n\t", "signature": "def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):", "docstring": "A list of command is given. Run the given command(s) with the given arguments use a subprocess. Return the running result (stdout and returncode)"} +{"question_id": "62ece4992e6aefcf4aabbd84", "input": "def is_ipv4(target):\n\t\"\"\"\n\tTest if IPv4 address or not.\n\n Returns: Boolean, True if target is IPv4 address, else False.\n\t\"\"\"\n\t", "signature": "def is_ipv4(target):", "docstring": "Test if IPv4 address or not.\n\n Returns: Boolean, True if target is IPv4 address, else False."} +{"question_id": "62ece4992e6aefcf4aabbd85", "input": "def find_roots(\n graph: \"Graph\", prop: \"URIRef\", roots: Optional[Set[\"Node\"]] = None\n) -> Set[\"Node\"]:\n\t\"\"\"\n\t Find the roots in some sort of transitive hierarchy.\n\n find_roots(graph, rdflib.RDFS.subClassOf)\n will return a set of all roots of the sub-class hierarchy\n\n Assumes triple of the form (child, prop, parent), i.e. the direction of\n RDFS.subClassOf or SKOS.broader\n\n Args:\n graph: Graph Class Object\n prop: URIRef Class Object\n roots: Optional list with set type\n Return:\n roots: a set with nodes\n\t\"\"\"\n\t", "signature": "def find_roots(\n graph: \"Graph\", prop: \"URIRef\", roots: Optional[Set[\"Node\"]] = None\n) -> Set[\"Node\"]:", "docstring": " Find the roots in some sort of transitive hierarchy.\n\n find_roots(graph, rdflib.RDFS.subClassOf)\n will return a set of all roots of the sub-class hierarchy\n\n Assumes triple of the form (child, prop, parent), i.e. the direction of\n RDFS.subClassOf or SKOS.broader\n\n Args:\n graph: Graph Class Object\n prop: URIRef Class Object\n roots: Optional list with set type\n Return:\n roots: a set with nodes"} +{"question_id": "62ece4992e6aefcf4aabbd86", "input": "def _dump_string(obj, dumper=None):\n\t\"\"\"\n\tGiven a python object and serialize it into a YAML stream\n\n Args:\n obj: python object\n\n Returns:\n YAML stream\n\t\"\"\"\n\t", "signature": "def _dump_string(obj, dumper=None):", "docstring": "Given a python object and serialize it into a YAML stream\n\n Args:\n obj: python object\n\n Returns:\n YAML stream"} +{"question_id": "62ece4992e6aefcf4aabbd87", "input": "def build_app_logger(name='app', logfile='app.log', debug=True):\n\t\"\"\"\n\tGeneral purpose application logger. Useful mainly for debugging\n Args:\n name: The name of logger\n logfile: The log file need to save\n debug: is need to debug\n\n Returns:\n Returns an instantiated logger object\n\t\"\"\"\n\t", "signature": "def build_app_logger(name='app', logfile='app.log', debug=True):", "docstring": "General purpose application logger. 
Useful mainly for debugging\n Args:\n name: The name of logger\n logfile: The log file need to save\n debug: is need to debug\n\n Returns:\n Returns an instantiated logger object"} +{"question_id": "62ece4992e6aefcf4aabbd88", "input": "def make_array(shape, dtype=np.dtype(\"float32\")):\n\t\"\"\"\n\tReturns a 0-filled array of the given shape and type.\n\n Args:\n shape : tuple\n shape of the array to create\n dtype : `numpy.dtype`\n data-type of the array to create\n Return: array\n\t\"\"\"\n\t", "signature": "def make_array(shape, dtype=np.dtype(\"float32\")):", "docstring": "Returns a 0-filled array of the given shape and type.\n\n Args:\n shape : tuple\n shape of the array to create\n dtype : `numpy.dtype`\n data-type of the array to create\n Return: array"} +{"question_id": "62ece4992e6aefcf4aabbd89", "input": "def gaussian(x):\n\t\"\"\"\n\tCalculate Gaussian centered with u is 0.2 and sigma is 0.1.\n\t\"\"\"\n\t", "signature": "def gaussian(x):", "docstring": "Calculate Gaussian centered with u is 0.2 and sigma is 0.1."} +{"question_id": "62ece4992e6aefcf4aabbd8a", "input": "def load_configurations(config_filenames, overrides=None, resolve_env=True):\n\t\"\"\"\n\tGiven a sequence of configuration filenames, load and validate each configuration file. If the configuration file\ncannot be read due to insufficient permissions or error parsing configuration file, the error log will\nbe recorded. Otherwise, return the results as a tuple of: dict of configuration filename to corresponding parsed configuration,\nand sequence of logging.LogRecord instances containing any parse errors.\n\t\"\"\"\n\t", "signature": "def load_configurations(config_filenames, overrides=None, resolve_env=True):", "docstring": "Given a sequence of configuration filenames, load and validate each configuration file. If the configuration file\ncannot be read due to insufficient permissions or error parsing configuration file, the error log will\nbe recorded. 
Otherwise, return the results as a tuple of: dict of configuration filename to corresponding parsed configuration,\nand sequence of logging.LogRecord instances containing any parse errors."} +{"question_id": "62ece4992e6aefcf4aabbd8b", "input": "def force_string(obj):\n\t\"\"\"\n\tForce decoding obj with 'utf-8' if it is a numpy.bytes or bytes\n\n Args:\n obj: consistency config\n\n Returns:\n returns the bytes object corresponding to `obj` in case it is a string using UTF-8 else return `obj`\n\t\"\"\"\n\t", "signature": "def force_string(obj):", "docstring": "Force decoding obj with 'utf-8' if it is a numpy.bytes or bytes\n\n Args:\n obj: consistency config\n\n Returns:\n returns the bytes object corresponding to `obj` in case it is a string using UTF-8 else return `obj`"} +{"question_id": "62e60723d76274f8a4026b76", "input": "def from_ticks(cls, ticks, tz=None):\n\t\"\"\"\n\tCreate a time from ticks (nanoseconds since midnight).\n\n:param ticks: nanoseconds since midnight\n:type ticks: int\n:param tz: optional timezone\n:type tz: datetime.tzinfo\n\n:rtype: Time\n\n:raises ValueError: if ticks is out of bounds\n (0 <= ticks < 86400000000000)\n\t\"\"\"\n\t", "signature": "def from_ticks(cls, ticks, tz=None):", "docstring": "Create a time from ticks (nanoseconds since midnight).\n\n:param ticks: nanoseconds since midnight\n:type ticks: int\n:param tz: optional timezone\n:type tz: datetime.tzinfo\n\n:rtype: Time\n\n:raises ValueError: if ticks is out of bounds\n (0 <= ticks < 86400000000000)"} +{"question_id": "62e60873d76274f8a4026bd8", "input": "def protocol_handlers(cls, protocol_version=None):\n\t\"\"\"\n\tReturn a dictionary of available Bolt protocol handlers,\nkeyed by version tuple. If an explicit protocol version is\nprovided, the dictionary will contain either zero or one items,\ndepending on whether that version is supported. If no protocol\nversion is provided, all available versions will be returned.\n\n:param protocol_version: tuple identifying a specific protocol\n version (e.g. (3, 5)) or None\n:return: dictionary of version tuple to handler class for all\n relevant and supported protocol versions\n:raise TypeError: if protocol version is not passed in a tuple\n\t\"\"\"\n\t", "signature": "def protocol_handlers(cls, protocol_version=None):", "docstring": "Return a dictionary of available Bolt protocol handlers,\nkeyed by version tuple. If an explicit protocol version is\nprovided, the dictionary will contain either zero or one items,\ndepending on whether that version is supported. If no protocol\nversion is provided, all available versions will be returned.\n\n:param protocol_version: tuple identifying a specific protocol\n version (e.g. 
(3, 5)) or None\n:return: dictionary of version tuple to handler class for all\n relevant and supported protocol versions\n:raise TypeError: if protocol version is not passed in a tuple"} +{"question_id": "62e60e3bd76274f8a4026d1a", "input": "def from_raw_values(cls, values):\n\t\"\"\"\n\tCreate a Bookmarks object from a list of raw bookmark string values.\n\t\"\"\"\n\t", "signature": "def from_raw_values(cls, values):", "docstring": "Create a Bookmarks object from a list of raw bookmark string values."} +{"question_id": "62b87b199a0c4fa8b80b354c", "input": "def _get_seq_with_type(seq, bufsize=None):\n\t\"\"\"\n\tReturn a (sequence, type) pair.\nSequence is derived from *seq*\n(or is *seq*, if that is of a sequence type).\n\t\"\"\"\n\t", "signature": "def _get_seq_with_type(seq, bufsize=None):", "docstring": "Return a (sequence, type) pair.\nSequence is derived from *seq*\n(or is *seq*, if that is of a sequence type)."} +{"question_id": "62b87b4f9a0c4fa8b80b3581", "input": "def scale(self, other=None, recompute=False):\n\t\"\"\"\n\tCompute or set scale (integral of the histogram).\n\nIf *other* is ``None``, return scale of this histogram.\nIf its scale was not computed before,\nit is computed and stored for subsequent use\n(unless explicitly asked to *recompute*).\nNote that after changing (filling) the histogram\none must explicitly recompute the scale\nif it was computed before.\n\nIf a float *other* is provided, rescale self to *other*.\n\nHistograms with scale equal to zero can't be rescaled.\n:exc:`.LenaValueError` is raised if one tries to do that.\n\t\"\"\"\n\t", "signature": "def scale(self, other=None, recompute=False):", "docstring": "Compute or set scale (integral of the histogram).\n\nIf *other* is ``None``, return scale of this histogram.\nIf its scale was not computed before,\nit is computed and stored for subsequent use\n(unless explicitly asked to *recompute*).\nNote that after changing (filling) the histogram\none must explicitly recompute the scale\nif it was computed before.\n\nIf a float *other* is provided, rescale self to *other*.\n\nHistograms with scale equal to zero can't be rescaled.\n:exc:`.LenaValueError` is raised if one tries to do that."} +{"question_id": "62b87b519a0c4fa8b80b3583", "input": "def scale(self, other=None):\n\t\"\"\"\n\tGet or set the scale of the graph.\n\nIf *other* is ``None``, return the scale of this graph.\n\nIf a numeric *other* is provided, rescale to that value.\nIf the graph has unknown or zero scale,\nrescaling that will raise :exc:`~.LenaValueError`.\n\nTo get meaningful results, graph's fields are used.\nOnly the last coordinate is rescaled.\nFor example, if the graph has *x* and *y* coordinates,\nthen *y* will be rescaled, and for a 3-dimensional graph\n*z* will be rescaled.\nAll errors are rescaled together with their coordinate.\n\t\"\"\"\n\t", "signature": "def scale(self, other=None):", "docstring": "Get or set the scale of the graph.\n\nIf *other* is ``None``, return the scale of this graph.\n\nIf a numeric *other* is provided, rescale to that value.\nIf the graph has unknown or zero scale,\nrescaling that will raise :exc:`~.LenaValueError`.\n\nTo get meaningful results, graph's fields are used.\nOnly the last coordinate is rescaled.\nFor example, if the graph has *x* and *y* coordinates,\nthen *y* will be rescaled, and for a 3-dimensional graph\n*z* will be rescaled.\nAll errors are rescaled together with their coordinate."} +{"question_id": "62b87b869a0c4fa8b80b35e1", "input": "def hist_to_graph(hist, make_value=None, 
get_coordinate=\"left\",\n field_names=(\"x\", \"y\"), scale=None):\n\t\"\"\"\n\tConvert a :class:`.histogram` to a :class:`.graph`.\n\n*make_value* is a function to set the value of a graph's point.\nBy default it is bin content.\n*make_value* accepts a single value (bin content) without context.\n\nThis option could be used to create graph's error bars.\nFor example, to create a graph with errors\nfrom a histogram where bins contain\na named tuple with fields *mean*, *mean_error* and a context\none could use\n\n>>> make_value = lambda bin_: (bin_.mean, bin_.mean_error)\n\n*get_coordinate* defines what the coordinate\nof a graph point created from a histogram bin will be.\nIt can be \"left\" (default), \"right\" and \"middle\".\n\n*field_names* set field names of the graph. Their number\nmust be the same as the dimension of the result.\nFor a *make_value* above they would be\n*(\"x\", \"y_mean\", \"y_mean_error\")*.\n\n*scale* becomes the graph's scale (unknown by default).\nIf it is ``True``, it uses the histogram scale.\n\n*hist* must contain only numeric bins (without context)\nor *make_value* must remove context when creating a numeric graph.\n\nReturn the resulting graph.\n\t\"\"\"\n\t", "signature": "def hist_to_graph(hist, make_value=None, get_coordinate=\"left\",\n field_names=(\"x\", \"y\"), scale=None):", "docstring": "Convert a :class:`.histogram` to a :class:`.graph`.\n\n*make_value* is a function to set the value of a graph's point.\nBy default it is bin content.\n*make_value* accepts a single value (bin content) without context.\n\nThis option could be used to create graph's error bars.\nFor example, to create a graph with errors\nfrom a histogram where bins contain\na named tuple with fields *mean*, *mean_error* and a context\none could use\n\n>>> make_value = lambda bin_: (bin_.mean, bin_.mean_error)\n\n*get_coordinate* defines what the coordinate\nof a graph point created from a histogram bin will be.\nIt can be \"left\" (default), \"right\" and \"middle\".\n\n*field_names* set field names of the graph. Their number\nmust be the same as the dimension of the result.\nFor a *make_value* above they would be\n*(\"x\", \"y_mean\", \"y_mean_error\")*.\n\n*scale* becomes the graph's scale (unknown by default).\nIf it is ``True``, it uses the histogram scale.\n\n*hist* must contain only numeric bins (without context)\nor *make_value* must remove context when creating a numeric graph.\n\nReturn the resulting graph."} +{"question_id": "62b8b4baeb7e40a82d2d1136", "input": "def _verify(iface, candidate, tentative=False, vtype=None):\n\t\"\"\"\n\tVerify that *candidate* might correctly provide *iface*.\n\nThis involves:\n\n- Making sure the candidate claims that it provides the\n interface using ``iface.providedBy`` (unless *tentative* is `True`,\n in which case this step is skipped). This means that the candidate's class\n declares that it `implements ` the interface,\n or the candidate itself declares that it `provides `\n the interface\n\n- Making sure the candidate defines all the necessary methods\n\n- Making sure the methods have the correct signature (to the\n extent possible)\n\n- Making sure the candidate defines all the necessary attributes\n\n:return bool: Returns a true value if everything that could be\n checked passed.\n:raises zope.interface.Invalid: If any of the previous\n conditions does not hold.\n\n.. versionchanged:: 5.0\n If multiple methods or attributes are invalid, all such errors\n are collected and reported. 
Previously, only the first error was reported.\n As a special case, if only one such error is present, it is raised\n alone, like before.\n\t\"\"\"\n\t", "signature": "def _verify(iface, candidate, tentative=False, vtype=None):", "docstring": "Verify that *candidate* might correctly provide *iface*.\n\nThis involves:\n\n- Making sure the candidate claims that it provides the\n interface using ``iface.providedBy`` (unless *tentative* is `True`,\n in which case this step is skipped). This means that the candidate's class\n declares that it `implements ` the interface,\n or the candidate itself declares that it `provides `\n the interface\n\n- Making sure the candidate defines all the necessary methods\n\n- Making sure the methods have the correct signature (to the\n extent possible)\n\n- Making sure the candidate defines all the necessary attributes\n\n:return bool: Returns a true value if everything that could be\n checked passed.\n:raises zope.interface.Invalid: If any of the previous\n conditions does not hold.\n\n.. versionchanged:: 5.0\n If multiple methods or attributes are invalid, all such errors\n are collected and reported. Previously, only the first error was reported.\n As a special case, if only one such error is present, it is raised\n alone, like before."} +{"question_id": "62b8b4baeb7e40a82d2d1137", "input": "def verifyObject(iface, candidate, tentative=False):\n\t\"\"\"\n\tVerify that *candidate* might correctly provide *iface*.\n\nThis involves:\n\n- Making sure the candidate claims that it provides the\n interface using ``iface.providedBy`` (unless *tentative* is `True`,\n in which case this step is skipped). This means that the candidate's class\n declares that it `implements ` the interface,\n or the candidate itself declares that it `provides `\n the interface\n\n- Making sure the candidate defines all the necessary methods\n\n- Making sure the methods have the correct signature (to the\n extent possible)\n\n- Making sure the candidate defines all the necessary attributes\n\n:return bool: Returns a true value if everything that could be\n checked passed.\n:raises zope.interface.Invalid: If any of the previous\n conditions does not hold.\n\n.. versionchanged:: 5.0\n If multiple methods or attributes are invalid, all such errors\n are collected and reported. Previously, only the first error was reported.\n As a special case, if only one such error is present, it is raised\n alone, like before.\n\t\"\"\"\n\t", "signature": "def verifyObject(iface, candidate, tentative=False):", "docstring": "Verify that *candidate* might correctly provide *iface*.\n\nThis involves:\n\n- Making sure the candidate claims that it provides the\n interface using ``iface.providedBy`` (unless *tentative* is `True`,\n in which case this step is skipped). This means that the candidate's class\n declares that it `implements ` the interface,\n or the candidate itself declares that it `provides `\n the interface\n\n- Making sure the candidate defines all the necessary methods\n\n- Making sure the methods have the correct signature (to the\n extent possible)\n\n- Making sure the candidate defines all the necessary attributes\n\n:return bool: Returns a true value if everything that could be\n checked passed.\n:raises zope.interface.Invalid: If any of the previous\n conditions does not hold.\n\n.. versionchanged:: 5.0\n If multiple methods or attributes are invalid, all such errors\n are collected and reported. 
Previously, only the first error was reported.\n As a special case, if only one such error is present, it is raised\n alone, like before."} +{"question_id": "62b8b4c1eb7e40a82d2d1139", "input": "def verifyClass(iface, candidate, tentative=False):\n\t\"\"\"\n\tVerify that the *candidate* might correctly provide *iface*.\n\t\"\"\"\n\t", "signature": "def verifyClass(iface, candidate, tentative=False):", "docstring": "Verify that the *candidate* might correctly provide *iface*."} +{"question_id": "62b8b559eb7e40a82d2d11f6", "input": "def determineMetaclass(bases, explicit_mc=None):\n\t\"\"\"\n\tDetermine metaclass from 1+ bases and optional explicit __metaclass__\n\t\"\"\"\n\t", "signature": "def determineMetaclass(bases, explicit_mc=None):", "docstring": "Determine metaclass from 1+ bases and optional explicit __metaclass__"} +{"question_id": "62b8d22a48ba5a41d1c3f47d", "input": "def pop(self, key, default=__marker):\n\t\"\"\"\n\tD.pop(k[,d]) -> v, remove specified key and return the corresponding value.\nIf key is not found, d is returned if given, otherwise KeyError is raised.\n\t\"\"\"\n\t", "signature": "def pop(self, key, default=__marker):", "docstring": "D.pop(k[,d]) -> v, remove specified key and return the corresponding value.\nIf key is not found, d is returned if given, otherwise KeyError is raised."} +{"question_id": "62b8d23748ba5a41d1c3f497", "input": "def popitem(self):\n\t\"\"\"\n\tRemove and return the `(key, value)` pair least frequently used.\n\t\"\"\"\n\t", "signature": "def popitem(self):", "docstring": "Remove and return the `(key, value)` pair least frequently used."} +{"question_id": "62b8d23a48ba5a41d1c3f499", "input": "def popitem(self):\n\t\"\"\"\n\tRemove and return the `(key, value)` pair least recently used.\n\t\"\"\"\n\t", "signature": "def popitem(self):", "docstring": "Remove and return the `(key, value)` pair least recently used."} +{"question_id": "62b8d23c48ba5a41d1c3f49b", "input": "def popitem(self):\n\t\"\"\"\n\t\"Remove and return the (key, value) pair most recently used.\n\t\"\"\"\n\t", "signature": "def popitem(self):", "docstring": "\"Remove and return the (key, value) pair most recently used."} +{"question_id": "62b8d23e48ba5a41d1c3f49e", "input": "def popitem(self):\n\t\"\"\"\n\tFind, remove and return a random `(key, value)` pair via __choice in the class\n\t\"\"\"\n\t", "signature": "def popitem(self):", "docstring": "Find, remove and return a random `(key, value)` pair via __choice in the class"} +{"question_id": "62b43425903eeb48555d3ea1", "input": "def _create_in_regex(self) -> Pattern:\n\t\"\"\"\n\tCreate the in-style parameter regular expression.\n\nReturns the in-style parameter regular expression (:class:`re.Pattern`).\n\t\"\"\"\n\t", "signature": "def _create_in_regex(self) -> Pattern:", "docstring": "Create the in-style parameter regular expression.\n\nReturns the in-style parameter regular expression (:class:`re.Pattern`)."} +{"question_id": "62b43426903eeb48555d3ea2", "input": "def _create_converter(self) -> _converting._Converter:\n\t\"\"\"\n\tCreate the parameter style converter.\n\nReturns the parameter style converter (:class:`._converting._Converter`).\n\t\"\"\"\n\t", "signature": "def _create_converter(self) -> _converting._Converter:", "docstring": "Create the parameter style converter.\n\nReturns the parameter style converter (:class:`._converting._Converter`)."} +{"question_id": "62b8966c755ee91dce50a154", "input": "def isoparse(self, dt_str):\n\t\"\"\"\n\tParse an ISO-8601 datetime string into a 
:class:`datetime.datetime`.\n\nAn ISO-8601 datetime string consists of a date portion, followed\noptionally by a time portion - the date and time portions are separated\nby a single character separator, which is ``T`` in the official\nstandard. Incomplete date formats (such as ``YYYY-MM``) may *not* be\ncombined with a time portion.\n\nSupported date formats are:\n\nCommon:\n\n- ``YYYY``\n- ``YYYY-MM`` or ``YYYYMM``\n- ``YYYY-MM-DD`` or ``YYYYMMDD``\n\nUncommon:\n\n- ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)\n- ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day\n\nThe ISO week and day numbering follows the same logic as\n:func:`datetime.date.isocalendar`.\n\nSupported time formats are:\n\n- ``hh``\n- ``hh:mm`` or ``hhmm``\n- ``hh:mm:ss`` or ``hhmmss``\n- ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)\n\nMidnight is a special case for `hh`, as the standard supports both\n00:00 and 24:00 as a representation. The decimal separator can be\neither a dot or a comma.\n\n\n.. caution::\n\n Support for fractional components other than seconds is part of the\n ISO-8601 standard, but is not currently implemented in this parser.\n\nSupported time zone offset formats are:\n\n- `Z` (UTC)\n- `\u00b1HH:MM`\n- `\u00b1HHMM`\n- `\u00b1HH`\n\nOffsets will be represented as :class:`dateutil.tz.tzoffset` objects,\nwith the exception of UTC, which will be represented as\n:class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such\nas `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.\n\n:param dt_str:\n A string or stream containing only an ISO-8601 datetime string\n\n:return:\n Returns a :class:`datetime.datetime` representing the string.\n Unspecified components default to their lowest value.\n\n.. warning::\n\n As of version 2.7.0, the strictness of the parser should not be\n considered a stable part of the contract. Any valid ISO-8601 string\n that parses correctly with the default settings will continue to\n parse correctly in future versions, but invalid strings that\n currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not\n guaranteed to continue failing in future versions if they encode\n a valid date.\n\n.. versionadded:: 2.7.0\n\t\"\"\"\n\t", "signature": "def isoparse(self, dt_str):", "docstring": "Parse an ISO-8601 datetime string into a :class:`datetime.datetime`.\n\nAn ISO-8601 datetime string consists of a date portion, followed\noptionally by a time portion - the date and time portions are separated\nby a single character separator, which is ``T`` in the official\nstandard. Incomplete date formats (such as ``YYYY-MM``) may *not* be\ncombined with a time portion.\n\nSupported date formats are:\n\nCommon:\n\n- ``YYYY``\n- ``YYYY-MM`` or ``YYYYMM``\n- ``YYYY-MM-DD`` or ``YYYYMMDD``\n\nUncommon:\n\n- ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0)\n- ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day\n\nThe ISO week and day numbering follows the same logic as\n:func:`datetime.date.isocalendar`.\n\nSupported time formats are:\n\n- ``hh``\n- ``hh:mm`` or ``hhmm``\n- ``hh:mm:ss`` or ``hhmmss``\n- ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits)\n\nMidnight is a special case for `hh`, as the standard supports both\n00:00 and 24:00 as a representation. The decimal separator can be\neither a dot or a comma.\n\n\n.. 
caution::\n\n Support for fractional components other than seconds is part of the\n ISO-8601 standard, but is not currently implemented in this parser.\n\nSupported time zone offset formats are:\n\n- `Z` (UTC)\n- `\u00b1HH:MM`\n- `\u00b1HHMM`\n- `\u00b1HH`\n\nOffsets will be represented as :class:`dateutil.tz.tzoffset` objects,\nwith the exception of UTC, which will be represented as\n:class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such\nas `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`.\n\n:param dt_str:\n A string or stream containing only an ISO-8601 datetime string\n\n:return:\n Returns a :class:`datetime.datetime` representing the string.\n Unspecified components default to their lowest value.\n\n.. warning::\n\n As of version 2.7.0, the strictness of the parser should not be\n considered a stable part of the contract. Any valid ISO-8601 string\n that parses correctly with the default settings will continue to\n parse correctly in future versions, but invalid strings that\n currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not\n guaranteed to continue failing in future versions if they encode\n a valid date.\n\n.. versionadded:: 2.7.0"} +{"question_id": "62b896de755ee91dce50a183", "input": "def parse(self, timestr, default=None,\n ignoretz=False, tzinfos=None, **kwargs):\n\t\"\"\"\n\tParse the date/time string into a :class:`datetime.datetime` object.\n\n:param timestr:\n Any date/time string using the supported formats.\n\n:param default:\n The default datetime object, if this is a datetime object and not\n ``None``, elements specified in ``timestr`` replace elements in the\n default object.\n\n:param ignoretz:\n If set ``True``, time zones in parsed strings are ignored and a\n naive :class:`datetime.datetime` object is returned.\n\n:param tzinfos:\n Additional time zone names / aliases which may be present in the\n string. This argument maps time zone names (and optionally offsets\n from those time zones) to time zones. This parameter can be a\n dictionary with timezone aliases mapping time zone names to time\n zones or a function taking two parameters (``tzname`` and\n ``tzoffset``) and returning a time zone.\n\n The timezones to which the names are mapped can be an integer\n offset from UTC in seconds or a :class:`tzinfo` object.\n\n .. 
doctest::\n :options: +NORMALIZE_WHITESPACE\n\n >>> from dateutil.parser import parse\n >>> from dateutil.tz import gettz\n >>> tzinfos = {\"BRST\": -7200, \"CST\": gettz(\"America/Chicago\")}\n >>> parse(\"2012-01-19 17:21:00 BRST\", tzinfos=tzinfos)\n datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200))\n >>> parse(\"2012-01-19 17:21:00 CST\", tzinfos=tzinfos)\n datetime.datetime(2012, 1, 19, 17, 21,\n tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago'))\n\n This parameter is ignored if ``ignoretz`` is set.\n\n:param \\*\\*kwargs:\n Keyword arguments as passed to ``_parse()``.\n\n:return:\n Returns a :class:`datetime.datetime` object or, if the\n ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the\n first element being a :class:`datetime.datetime` object, the second\n a tuple containing the fuzzy tokens.\n\n:raises ParserError:\n Raised for invalid or unknown string format, if the provided\n :class:`tzinfo` is not in a valid format, or if an invalid date\n would be created.\n\n:raises TypeError:\n Raised for non-string or character stream input.\n\n:raises OverflowError:\n Raised if the parsed date exceeds the largest valid C integer on\n your system.\n\t\"\"\"\n\t", "signature": "def parse(self, timestr, default=None,\n ignoretz=False, tzinfos=None, **kwargs):", "docstring": "Parse the date/time string into a :class:`datetime.datetime` object.\n\n:param timestr:\n Any date/time string using the supported formats.\n\n:param default:\n The default datetime object, if this is a datetime object and not\n ``None``, elements specified in ``timestr`` replace elements in the\n default object.\n\n:param ignoretz:\n If set ``True``, time zones in parsed strings are ignored and a\n naive :class:`datetime.datetime` object is returned.\n\n:param tzinfos:\n Additional time zone names / aliases which may be present in the\n string. This argument maps time zone names (and optionally offsets\n from those time zones) to time zones. This parameter can be a\n dictionary with timezone aliases mapping time zone names to time\n zones or a function taking two parameters (``tzname`` and\n ``tzoffset``) and returning a time zone.\n\n The timezones to which the names are mapped can be an integer\n offset from UTC in seconds or a :class:`tzinfo` object.\n\n .. 
doctest::\n :options: +NORMALIZE_WHITESPACE\n\n >>> from dateutil.parser import parse\n >>> from dateutil.tz import gettz\n >>> tzinfos = {\"BRST\": -7200, \"CST\": gettz(\"America/Chicago\")}\n >>> parse(\"2012-01-19 17:21:00 BRST\", tzinfos=tzinfos)\n datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200))\n >>> parse(\"2012-01-19 17:21:00 CST\", tzinfos=tzinfos)\n datetime.datetime(2012, 1, 19, 17, 21,\n tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago'))\n\n This parameter is ignored if ``ignoretz`` is set.\n\n:param \\*\\*kwargs:\n Keyword arguments as passed to ``_parse()``.\n\n:return:\n Returns a :class:`datetime.datetime` object or, if the\n ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the\n first element being a :class:`datetime.datetime` object, the second\n a tuple containing the fuzzy tokens.\n\n:raises ParserError:\n Raised for invalid or unknown string format, if the provided\n :class:`tzinfo` is not in a valid format, or if an invalid date\n would be created.\n\n:raises TypeError:\n Raised for non-string or character stream input.\n\n:raises OverflowError:\n Raised if the parsed date exceeds the largest valid C integer on\n your system."} +{"question_id": "62b8a4a4755ee91dce50a3d3", "input": "def fromutc(self, dt):\n\t\"\"\"\n\tGiven a timezone-aware datetime in a given timezone, calculates a\ntimezone-aware datetime in a new timezone.\n\nSince this is the one time that we *know* we have an unambiguous\ndatetime object, we take this opportunity to determine whether the\ndatetime is ambiguous and in a \"fold\" state (e.g. if it's the first\noccurrence, chronologically, of the ambiguous datetime).\n\n:param dt:\n A timezone-aware :class:`datetime.datetime` object.\n\t\"\"\"\n\t", "signature": "def fromutc(self, dt):", "docstring": "Given a timezone-aware datetime in a given timezone, calculates a\ntimezone-aware datetime in a new timezone.\n\nSince this is the one time that we *know* we have an unambiguous\ndatetime object, we take this opportunity to determine whether the\ndatetime is ambiguous and in a \"fold\" state (e.g. if it's the first\noccurrence, chronologically, of the ambiguous datetime).\n\n:param dt:\n A timezone-aware :class:`datetime.datetime` object."} +{"question_id": "62b8a7b2755ee91dce50a4a7", "input": "def default_tzinfo(dt, tzinfo):\n\t\"\"\"\n\tSets the ``tzinfo`` parameter on naive datetimes only\n\nThis is useful for example when you are provided a datetime that may have\neither an implicit or explicit time zone, such as when parsing a time zone\nstring.\n\n.. doctest::\n\n >>> from dateutil.tz import tzoffset\n >>> from dateutil.parser import parse\n >>> from dateutil.utils import default_tzinfo\n >>> dflt_tz = tzoffset(\"EST\", -18000)\n >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz))\n 2014-01-01 12:30:00+00:00\n >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz))\n 2014-01-01 12:30:00-05:00\n\n:param dt:\n The datetime on which to replace the time zone\n\n:param tzinfo:\n The :py:class:`datetime.tzinfo` subclass instance to assign to\n ``dt`` if (and only if) it is naive.\n\n:return:\n Returns an aware :py:class:`datetime.datetime`.\n\t\"\"\"\n\t", "signature": "def default_tzinfo(dt, tzinfo):", "docstring": "Sets the ``tzinfo`` parameter on naive datetimes only\n\nThis is useful for example when you are provided a datetime that may have\neither an implicit or explicit time zone, such as when parsing a time zone\nstring.\n\n.. 
doctest::\n\n >>> from dateutil.tz import tzoffset\n >>> from dateutil.parser import parse\n >>> from dateutil.utils import default_tzinfo\n >>> dflt_tz = tzoffset(\"EST\", -18000)\n >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz))\n 2014-01-01 12:30:00+00:00\n >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz))\n 2014-01-01 12:30:00-05:00\n\n:param dt:\n The datetime on which to replace the time zone\n\n:param tzinfo:\n The :py:class:`datetime.tzinfo` subclass instance to assign to\n ``dt`` if (and only if) it is naive.\n\n:return:\n Returns an aware :py:class:`datetime.datetime`."} +{"question_id": "6305f9991d275c6667163c50", "input": "def set_cut_chars(self, before: bytes, after: bytes) -> None:\n\t\"\"\"\n\tSet the bytes used to delimit slice points.\n\nArgs:\n before: Split file before these delimiters.\n after: Split file after these delimiters.\n\t\"\"\"\n\t", "signature": "def set_cut_chars(self, before: bytes, after: bytes) -> None:", "docstring": "Set the bytes used to delimit slice points.\n\nArgs:\n before: Split file before these delimiters.\n after: Split file after these delimiters."} +{"question_id": "6306292052e177c0ba469f09", "input": "def identify_request(request: RequestType):\n\t\"\"\"\n\tCheck whether the request body loaded using JSON contains events. If yes, True is returned, otherwise, check whether the tag of the XML loaded from the request body is Magic_ENV_TAG, if yes, return True. If neither of the preceding conditions is met, return False.\n\t\"\"\"\n\t", "signature": "def identify_request(request: RequestType):", "docstring": "Check whether the request body loaded using JSON contains events. If yes, True is returned, otherwise, check whether the tag of the XML loaded from the request body is Magic_ENV_TAG, if yes, return True. If neither of the preceding conditions is met, return False."} +{"question_id": "6306292152e177c0ba469f0d", "input": "def identify_request(request: RequestType) -> bool:\n\t\"\"\"\n\tCheck whether the request body loaded using JSON contains events. If yes, True is returned. Otherwise, False is returned.\n\t\"\"\"\n\t", "signature": "def identify_request(request: RequestType) -> bool:", "docstring": "Check whether the request body loaded using JSON contains events. If yes, True is returned. 
Otherwise, False is returned."} +{"question_id": "6306292252e177c0ba469f11", "input": "def format_dt(dt):\n\t\"\"\"\n\tUse the ensure_timezone function to format the time of dt and return the time.\n\t\"\"\"\n\t", "signature": "def format_dt(dt):", "docstring": "Use the ensure_timezone function to format the time of dt and return the time."} +{"question_id": "6306292352e177c0ba469f1d", "input": "def find_tags(text: str, replacer: callable = None) -> Tuple[Set, str]:\n\t\"\"\"\n\tFind tags in text.\n\nTries to ignore tags inside code blocks.\n\nOptionally, if passed a \"replacer\", will also replace the tag word with the result\nof the replacer function called with the tag word.\n\nReturns a set of tags and the original or replaced text.\n\t\"\"\"\n\t", "signature": "def find_tags(text: str, replacer: callable = None) -> Tuple[Set, str]:", "docstring": "Find tags in text.\n\nTries to ignore tags inside code blocks.\n\nOptionally, if passed a \"replacer\", will also replace the tag word with the result\nof the replacer function called with the tag word.\n\nReturns a set of tags and the original or replaced text."} +{"question_id": "6306292352e177c0ba469f1e", "input": "def process_text_links(text):\n\t\"\"\"\n\tProcess links in text, adding some attributes and linkifying textual links.\n\t\"\"\"\n\t", "signature": "def process_text_links(text):", "docstring": "Process links in text, adding some attributes and linkifying textual links."} +{"question_id": "6306292652e177c0ba469f34", "input": "def fetch_content_type(url: str) -> Optional[str]:\n\t\"\"\"\n\tSet the head of the request through the URL and USER_AGENT.\n\t\"\"\"\n\t", "signature": "def fetch_content_type(url: str) -> Optional[str]:", "docstring": "Set the head of the request through the URL and USER_AGENT."} +{"question_id": "6306292a52e177c0ba469f41", "input": "def test_tag(tag: str) -> bool:\n\t\"\"\"\n\tChecks whether each character in the LEEGAL_TAG_CHARS belongs to a tag. If any character belongs to a tag, the value False is returned. Otherwise, the value True is returned.\n\t\"\"\"\n\t", "signature": "def test_tag(tag: str) -> bool:", "docstring": "Checks whether each character in the LEEGAL_TAG_CHARS belongs to a tag. If any character belongs to a tag, the value False is returned. Otherwise, the value True is returned."} +{"question_id": "6306298b52e177c0ba469fdc", "input": "def xml_children_as_dict(node):\n\t\"\"\"\n\tTurn the children of node into a dict, keyed by tag name.\n\t\"\"\"\n\t", "signature": "def xml_children_as_dict(node):", "docstring": "Turn the children of node into a dict, keyed by tag name."} +{"question_id": "6306299052e177c0ba469fe8", "input": "def check_sender_and_entity_handle_match(sender_handle, entity_handle):\n\t\"\"\"\n\tCheck whether sender_handle is the same as entity_handle. If yes, return True. Otherwise, return False.\n\t\"\"\"\n\t", "signature": "def check_sender_and_entity_handle_match(sender_handle, entity_handle):", "docstring": "Check whether sender_handle is the same as entity_handle. If yes, return True. 
Otherwise, return False."} +{"question_id": "630629b952e177c0ba46a043", "input": "def get_nodeinfo_well_known_document(url, document_path=None):\n\t\"\"\"\n\tReturns a formatted dictionary, including information such as url and document_path.\n\t\"\"\"\n\t", "signature": "def get_nodeinfo_well_known_document(url, document_path=None):", "docstring": "Returns a formatted dictionary, including information such as url and document_path."} +{"question_id": "630629d052e177c0ba46a0a1", "input": "def verify_relayable_signature(public_key, doc, signature):\n\t\"\"\"\n\tVerify the signed XML elements to have confidence that the claimed\nauthor did actually generate this message.\n\t\"\"\"\n\t", "signature": "def verify_relayable_signature(public_key, doc, signature):", "docstring": "Verify the signed XML elements to have confidence that the claimed\nauthor did actually generate this message."} +{"question_id": "630629e052e177c0ba46a0c4", "input": "def parse_diaspora_webfinger(document: str) -> Dict:\n\t\"\"\"\n\tThe webfinger is obtained by reading the document in JSON format, the value of hcard_url in the webfinger is the value of href in links in the document.\n\t\"\"\"\n\t", "signature": "def parse_diaspora_webfinger(document: str) -> Dict:", "docstring": "The webfinger is obtained by reading the document in JSON format, the value of hcard_url in the webfinger is the value of href in links in the document."} +{"question_id": "630629e152e177c0ba46a0d1", "input": "def try_retrieve_webfinger_document(handle: str) -> Optional[str]:\n\t\"\"\"\n\tTry to retrieve an RFC7033 webfinger document. Does not raise if it fails.\n\t\"\"\"\n\t", "signature": "def try_retrieve_webfinger_document(handle: str) -> Optional[str]:", "docstring": "Try to retrieve an RFC7033 webfinger document. 
Does not raise if it fails."} +{"question_id": "630629e152e177c0ba46a0d2", "input": "def retrieve_and_parse_diaspora_webfinger(handle):\n\t\"\"\"\n\tRetrieve a and parse a remote Diaspora webfinger document.\n\n:arg handle: Remote handle to retrieve\n:returns: dict\n\t\"\"\"\n\t", "signature": "def retrieve_and_parse_diaspora_webfinger(handle):", "docstring": "Retrieve a and parse a remote Diaspora webfinger document.\n\n:arg handle: Remote handle to retrieve\n:returns: dict"} +{"question_id": "630629e252e177c0ba46a0d6", "input": "def retrieve_diaspora_host_meta(host):\n\t\"\"\"\n\tRetrieve a remote Diaspora host-meta document.\n\n:arg host: Host to retrieve from\n:returns: ``XRD`` instance\n\t\"\"\"\n\t", "signature": "def retrieve_diaspora_host_meta(host):", "docstring": "Retrieve a remote Diaspora host-meta document.\n\n:arg host: Host to retrieve from\n:returns: ``XRD`` instance"} +{"question_id": "630629e752e177c0ba46a0fb", "input": "def send_document(url, data, timeout=10, method=\"post\", *args, **kwargs):\n\t\"\"\"\n\tSend a response containing data through the POST method.\n\t\"\"\"\n\t", "signature": "def send_document(url, data, timeout=10, method=\"post\", *args, **kwargs):", "docstring": "Send a response containing data through the POST method."} +{"question_id": "6306091073426c38ae68acac", "input": "def dict_insert(dic, val, key, *keys):\n\t\"\"\"\n\tinsert a value of a nested key into a dictionary\n\nto insert value for a nested key, all ancestor keys should be given as\nmethod's arguments\n\nexample:\n dict_insert({}, 'val', 'key1.key2'.split('.'))\n\n:param dic: a dictionary object to insert the nested key value into\n:param val: a value to insert to the given dictionary\n:param key: first key in a chain of key that will store the value\n:param keys: sub keys in the keys chain\n\t\"\"\"\n\t", "signature": "def dict_insert(dic, val, key, *keys):", "docstring": "insert a value of a nested key into a dictionary\n\nto insert value for a nested key, all ancestor keys should be given as\nmethod's arguments\n\nexample:\n dict_insert({}, 'val', 'key1.key2'.split('.'))\n\n:param dic: a dictionary object to insert the nested key value into\n:param val: a value to insert to the given dictionary\n:param key: first key in a chain of key that will store the value\n:param keys: sub keys in the keys chain"} +{"question_id": "6306091a73426c38ae68acc8", "input": "def list_of_file_names(settings_dirs, spec_option):\n\t\"\"\"\n\tCreate and return a new IniType complex type via cli.ListOfFileNames()\n\t\"\"\"\n\t", "signature": "def list_of_file_names(settings_dirs, spec_option):", "docstring": "Create and return a new IniType complex type via cli.ListOfFileNames()"} +{"question_id": "6306091b73426c38ae68acd7", "input": "def ansible_config_manager(cls):\n\t\"\"\"\n\tGets the ansible config manager via ServiceName.ANSIBLE_CONFIG_MANAGER in cls._get_service()\n\t\"\"\"\n\t", "signature": "def ansible_config_manager(cls):", "docstring": "Gets the ansible config manager via ServiceName.ANSIBLE_CONFIG_MANAGER in cls._get_service()"} +{"question_id": "6306091b73426c38ae68acd9", "input": "def workspace_manager(cls):\n\t\"\"\"\n\tGets the workspace manager via ServiceName.WORKSPACE_MANAGER in cls._get_service()\n\t\"\"\"\n\t", "signature": "def workspace_manager(cls):", "docstring": "Gets the workspace manager via ServiceName.WORKSPACE_MANAGER in cls._get_service()"} +{"question_id": "6306091b73426c38ae68acda", "input": "def plugins_manager(cls):\n\t\"\"\"\n\tGets the plugin manager via 
ServiceName.PLUGINS_MANAGER in cls._get_service()\n\t\"\"\"\n\t", "signature": "def plugins_manager(cls):", "docstring": "Gets the plugin manager via ServiceName.PLUGINS_MANAGER in cls._get_service()"} +{"question_id": "6306091c73426c38ae68acdc", "input": "def validate_from_content(cls, spec_content=None):\n\t\"\"\"\n\tvalidates that spec (YAML) content has all required fields\n\n:param spec_content: content of spec file\n:raise IRValidatorException: when mandatory data\nis missing in spec file\n:return: Dictionary with data loaded from a spec (YAML) file\n\t\"\"\"\n\t", "signature": "def validate_from_content(cls, spec_content=None):", "docstring": "validates that spec (YAML) content has all required fields\n\n:param spec_content: content of spec file\n:raise IRValidatorException: when mandatory data\nis missing in spec file\n:return: Dictionary with data loaded from a spec (YAML) file"} +{"question_id": "6306091c73426c38ae68acdd", "input": "def validate_from_file(cls, yaml_file=None):\n\t\"\"\"\n\tLoads & validates that a YAML file has all required fields\n\n:param yaml_file: Path to YAML file\n:raise IRValidatorException: when mandatory data is missing in file\n:return: Dictionary with data loaded from a YAML file\n\t\"\"\"\n\t", "signature": "def validate_from_file(cls, yaml_file=None):", "docstring": "Loads & validates that a YAML file has all required fields\n\n:param yaml_file: Path to YAML file\n:raise IRValidatorException: when mandatory data is missing in file\n:return: Dictionary with data loaded from a YAML file"} +{"question_id": "6306091d73426c38ae68ace5", "input": "def _include_groups(self, parser_dict):\n\t\"\"\"\n\tResolves the include dict directive in the spec files.\n\t\"\"\"\n\t", "signature": "def _include_groups(self, parser_dict):", "docstring": "Resolves the include dict directive in the spec files."} +{"question_id": "6306092373426c38ae68acfa", "input": "def get_spec_defaults(self):\n\t\"\"\"\n\tResolve arguments' values from spec and other sources with self._get_defaults()\n\t\"\"\"\n\t", "signature": "def get_spec_defaults(self):", "docstring": "Resolve arguments' values from spec and other sources with self._get_defaults()"} +{"question_id": "6306092973426c38ae68ad01", "input": "def get_deprecated_args(self):\n\t\"\"\"\n\tReturning dict with options which deprecate others. options from self.spec_helper.iterate_option_specs()\n\t\"\"\"\n\t", "signature": "def get_deprecated_args(self):", "docstring": "Returning dict with options which deprecate others. 
options from self.spec_helper.iterate_option_specs()"} +{"question_id": "6306092c73426c38ae68ad02", "input": "def validate_arg_deprecation(self, cli_args, answer_file_args):\n\t\"\"\"\n\tValidates and prints the deprecated arguments.\n\n:param cli_args: the dict of arguments from cli\n:param answer_file_args: the dict of arguments from files\n\t\"\"\"\n\t", "signature": "def validate_arg_deprecation(self, cli_args, answer_file_args):", "docstring": "Validates and prints the deprecated arguments.\n\n:param cli_args: the dict of arguments from cli\n:param answer_file_args: the dict of arguments from files"} +{"question_id": "6306092d73426c38ae68ad04", "input": "def get_parser_option_specs(self, command_name):\n\t\"\"\"\n\tGets all the options for the specified command\n\n:param command_name: the command name (main, virsh, ospd, etc...)\n:return: the list of all command options\n\t\"\"\"\n\t", "signature": "def get_parser_option_specs(self, command_name):", "docstring": "Gets all the options for the specified command\n\n:param command_name: the command name (main, virsh, ospd, etc...)\n:return: the list of all command options"} +{"question_id": "6306092d73426c38ae68ad05", "input": "def get_option_spec(self, command_name, argument_name):\n\t\"\"\"\n\tGets the specification for the specified option name. Get options of the given command_name with self.get_parser_option_specs()\n\t\"\"\"\n\t", "signature": "def get_option_spec(self, command_name, argument_name):", "docstring": "Gets the specification for the specified option name. Get options of the given command_name with self.get_parser_option_specs()"} +{"question_id": "6306092d73426c38ae68ad06", "input": "def get_silent_args(self, args):\n\t\"\"\"\n\tlist of silenced arguments\n\n:param args: The received arguments.\n:return: list, silenced argument names\n\t\"\"\"\n\t", "signature": "def get_silent_args(self, args):", "docstring": "list of silenced arguments\n\n:param args: The received arguments.\n:return: list, silenced argument names"} +{"question_id": "6306092d73426c38ae68ad07", "input": "def validate_requires_args(self, args):\n\t\"\"\"\n\tCheck if all the required arguments have been provided.\n\t\"\"\"\n\t", "signature": "def validate_requires_args(self, args):", "docstring": "Check if all the required arguments have been provided."} +{"question_id": "6306092d73426c38ae68ad08", "input": "def _get_conditionally_required_args(self, command_name, options_spec,\n args):\n\t\"\"\"\n\tList arguments with ``required_when`` condition matched.\n\n:param command_name: the command name.\n:param options_spec: the list of command spec options.\n:param args: the received input arguments\n:return: list, list of argument names with matched ``required_when``\n condition\n\t\"\"\"\n\t", "signature": "def _get_conditionally_required_args(self, command_name, options_spec,\n args):", "docstring": "List arguments with ``required_when`` condition matched.\n\n:param command_name: the command name.\n:param options_spec: the list of command spec options.\n:param args: the received input arguments\n:return: list, list of argument names with matched ``required_when``\n condition"} +{"question_id": "6306092e73426c38ae68ad09", "input": "def validate_length_args(self, args):\n\t\"\"\"\n\tCheck if value of the given args is not longer than length specified. \\n:param args: The received arguments.\n\t\"\"\"\n\t", "signature": "def validate_length_args(self, args):", "docstring": "Check if value of the given args is not longer than length specified. 
\\n:param args: The received arguments."} +{"question_id": "6306092e73426c38ae68ad0a", "input": "def validate_choices_args(self, args):\n\t\"\"\"\n\tCheck if value of the given args is one of the available choices. \\n:param args: The received arguments.\n\t\"\"\"\n\t", "signature": "def validate_choices_args(self, args):", "docstring": "Check if value of the given args is one of the available choices. \\n:param args: The received arguments."} +{"question_id": "6306092e73426c38ae68ad0b", "input": "def validate_min_max_args(self, args):\n\t\"\"\"\n\tCheck if value of the given args is between minimum and maximum values\n\t\"\"\"\n\t", "signature": "def validate_min_max_args(self, args):", "docstring": "Check if value of the given args is between minimum and maximum values"} +{"question_id": "6306092e73426c38ae68ad0d", "input": "def create_complex_argumet_type(self, subcommand, type_name, option_name,\n spec_option):\n\t\"\"\"\n\tReturns the corresponding function in COMPLEX_TYPES based on type_name and returns the result obtained from complex_action after option_name, (self.vars, self.defaults, self.plugin_path), subcommand and spec_option are passed in as input.\n\t\"\"\"\n\t", "signature": "def create_complex_argumet_type(self, subcommand, type_name, option_name,\n spec_option):", "docstring": "Returns the corresponding function in COMPLEX_TYPES based on type_name and returns the result obtained from complex_action after option_name, (self.vars, self.defaults, self.plugin_path), subcommand and spec_option are passed in as input."} +{"question_id": "6306092e73426c38ae68ad0f", "input": "def get_nested_custom_and_control_args(self, args):\n\t\"\"\"\n\tSplit input arguments to control nested and custom.\n\nControls arguments: control the IR behavior. These arguments\n will not be put into the spec yml file\nNested arguments: are used by the Ansible playbooks and will be put\n into the spec yml file.\nCustom arguments: Custom ansible variables to be used instead of the\n normal nested usage.\n\n:param args: the collected list of args.\n:return: (dict, dict): flat dicts (control_args, nested_args)\n\t\"\"\"\n\t", "signature": "def get_nested_custom_and_control_args(self, args):", "docstring": "Split input arguments to control nested and custom.\n\nControls arguments: control the IR behavior. 
These arguments\n will not be put into the spec yml file\nNested arguments: are used by the Ansible playbooks and will be put\n into the spec yml file.\nCustom arguments: Custom ansible variables to be used instead of the\n normal nested usage.\n\n:param args: the collected list of args.\n:return: (dict, dict): flat dicts (control_args, nested_args)"} +{"question_id": "6306092e73426c38ae68ad11", "input": "def merge_extra_vars(vars_dict, extra_vars=None):\n\t\"\"\"\n\tExtend ``vars_dict`` with ``extra-vars``\n\n:param vars_dict: Dictionary to merge extra-vars into\n:param extra_vars: List of extra-vars\n\t\"\"\"\n\t", "signature": "def merge_extra_vars(vars_dict, extra_vars=None):", "docstring": "Extend ``vars_dict`` with ``extra-vars``\n\n:param vars_dict: Dictionary to merge extra-vars into\n:param extra_vars: List of extra-vars"} +{"question_id": "6306092f73426c38ae68ad13", "input": "def ansible_playbook(ir_workspace, ir_plugin, playbook_path, verbose=None,\n extra_vars=None, ansible_args=None):\n\t\"\"\"\n\tWraps the 'ansible-playbook' CLI.\n\n:param ir_workspace: An Infrared Workspace object represents the active\nworkspace\n:param ir_plugin: An InfraredPlugin object of the current plugin\n:param playbook_path: the playbook to invoke\n:param verbose: Ansible verbosity level\n:param extra_vars: dict. Passed to Ansible as extra-vars\n:param ansible_args: dict of ansible-playbook arguments to plumb down\n directly to Ansible.\n\t\"\"\"\n\t", "signature": "def ansible_playbook(ir_workspace, ir_plugin, playbook_path, verbose=None,\n extra_vars=None, ansible_args=None):", "docstring": "Wraps the 'ansible-playbook' CLI.\n\n:param ir_workspace: An Infrared Workspace object represents the active\nworkspace\n:param ir_plugin: An InfraredPlugin object of the current plugin\n:param playbook_path: the playbook to invoke\n:param verbose: Ansible verbosity level\n:param extra_vars: dict. Passed to Ansible as extra-vars\n:param ansible_args: dict of ansible-playbook arguments to plumb down\n directly to Ansible."} +{"question_id": "6306093273426c38ae68ad15", "input": "def _run_playbook(cli_args, vars_dict, ir_workspace, ir_plugin):\n\t\"\"\"\n\tRuns ansible cli with vars dict\n\n:param vars_dict: dict, Will be passed as Ansible extra-vars\n:param cli_args: the list of command line arguments\n:param ir_workspace: An Infrared Workspace object represents the active\n workspace\n:param ir_plugin: An InfraredPlugin object of the current plugin\n:return: ansible results\n\t\"\"\"\n\t", "signature": "def _run_playbook(cli_args, vars_dict, ir_workspace, ir_plugin):", "docstring": "Runs ansible cli with vars dict\n\n:param vars_dict: dict, Will be passed as Ansible extra-vars\n:param cli_args: the list of command line arguments\n:param ir_workspace: An Infrared Workspace object represents the active\n workspace\n:param ir_plugin: An InfraredPlugin object of the current plugin\n:return: ansible results"} +{"question_id": "63060ada73426c38ae68ad31", "input": "def _convert_non_cli_args(self, parser_name, values_dict):\n\t\"\"\"\n\tCasts arguments to correct types by modifying values_dict param.\n\nBy default all the values are strings.\n\n:param parser_name: The command name, e.g. main, virsh, ospd, etc\n:param values_dict: The dict with arguments\n\t\"\"\"\n\t", "signature": "def _convert_non_cli_args(self, parser_name, values_dict):", "docstring": "Casts arguments to correct types by modifying values_dict param.\n\nBy default all the values are strings.\n\n:param parser_name: The command name, e.g. 
main, virsh, ospd, etc\n:param values_dict: The dict with arguments"} +{"question_id": "63060b1a73426c38ae68ad3e", "input": "def get_plugin_spec_flatten_dict(plugin_dir):\n\t\"\"\"\n\tUse YAML to read various information in plugin_dir and return the information in dictionary form.\n\t\"\"\"\n\t", "signature": "def get_plugin_spec_flatten_dict(plugin_dir):", "docstring": "Use YAML to read various information in plugin_dir and return the information in dictionary form."} +{"question_id": "63060b1b73426c38ae68ad42", "input": "def inject_config(self):\n\t\"\"\"\n\tIf the ANSIBLE_CONFIG property does not exist in os.environ, set it to self.ansible_config_path.\n\t\"\"\"\n\t", "signature": "def inject_config(self):", "docstring": "If the ANSIBLE_CONFIG property does not exist in os.environ, set it to self.ansible_config_path."} +{"question_id": "63060b1b73426c38ae68ad43", "input": "def extend_cli(self, root_subparsers):\n\t\"\"\"\n\tAdds the spec cli options to the main entry point.\n\n:param subparser: the subparser object to extend.\n\t\"\"\"\n\t", "signature": "def extend_cli(self, root_subparsers):", "docstring": "Adds the spec cli options to the main entry point.\n\n:param subparser: the subparser object to extend."}