#vtb
def get_stories(self, userids: Optional[List[int]] = None) -> Iterator[Story]:
    if not userids:
        data = self.context.graphql_query(
            "d15efd8c0c5b23f0ef71f18bf363c704",
            {"only_stories": True})["data"]["user"]
        if data is None:
            raise BadResponseException()
        userids = list(edge["node"]["id"] for edge in
                       data["feed_reels_tray"]["edge_reels_tray_to_reel"]["edges"])

    def _userid_chunks():
        userids_per_query = 100
        for i in range(0, len(userids), userids_per_query):
            yield userids[i:i + userids_per_query]

    for userid_chunk in _userid_chunks():
        stories = self.context.graphql_query(
            "bf41e22b1c4ba4c9f31b844ebb7d9056",
            {"reel_ids": userid_chunk, "precomposed_overlay": False})["data"]
        # The subscript key was elided in the dump; upstream Instaloader reads
        # the reels from stories["reels_media"].
        yield from (Story(self.context, media) for media in stories["reels_media"])
Get available stories from followees, or all stories of the users whose IDs are given. Does not mark stories as seen. To use this, one needs to be logged in.

:param userids: List of user IDs to be processed in terms of downloading their stories, or None.
#vtb
def raise_exception_if_baseline_file_is_unstaged(filename):
    try:
        # The command strings were elided in the dump; 'git diff --name-only'
        # is the invocation used upstream in detect-secrets.
        files_changed_but_not_staged = subprocess.check_output(
            [
                'git',
                'diff',
                '--name-only',
            ],
        ).split()
    except subprocess.CalledProcessError:
        # Since we don't pipe stderr, we get free logging through git.
        raise ValueError
    if filename.encode() in files_changed_but_not_staged:
        # The log message was elided in the dump; reconstructed from the
        # upstream project.
        log.error((
            'Your baseline file ({}) is unstaged.\n'
            '`git add {}` to fix this.'
        ).format(
            filename,
            filename,
        ))
        raise ValueError
We want to make sure that if there are changes to the baseline file, they will be included in the commit. This way, we can keep our baselines up-to-date. :raises: ValueError
#vtb
async def display_columns_and_rows(
    self, database, table, description, rows, link_column=False, truncate_cells=0
):
    "Returns columns, rows for specified table - including fancy foreign key treatment"
    table_metadata = self.ds.table_metadata(database, table)
    sortable_columns = await self.sortable_columns_for_table(database, table, True)
    columns = [
        {"name": r[0], "sortable": r[0] in sortable_columns} for r in description
    ]
    pks = await self.ds.execute_against_connection_in_thread(
        database, lambda conn: detect_primary_keys(conn, table)
    )
    column_to_foreign_key_table = {
        fk["column"]: fk["other_table"]
        for fk in await self.ds.foreign_keys_for_table(database, table)
    }
    cell_rows = []
    for row in rows:
        cells = []
        if link_column:
            # The row-link HTML template was elided in the dump; the string
            # below approximates the upstream datasette template.
            cells.append(
                {
                    "column": pks[0] if len(pks) == 1 else "Link",
                    "value": jinja2.Markup(
                        '<a href="/{database}/{table}/{flat_pks_quoted}">{flat_pks}</a>'.format(
                            database=database,
                            table=urllib.parse.quote_plus(table),
                            flat_pks=str(
                                jinja2.escape(
                                    path_from_row_pks(row, pks, not pks, False)
                                )
                            ),
                            flat_pks_quoted=path_from_row_pks(row, pks, not pks),
                        )
                    ),
                }
            )
        for value, column_dict in zip(row, columns):
            column = column_dict["name"]
            if link_column and len(pks) == 1 and column == pks[0]:
                label = value["label"]
                value = value["value"]
                other_table = column_to_foreign_key_table[column]
                link_template = (
                    LINK_WITH_LABEL if (label != value) else LINK_WITH_VALUE
                )
                display_value = jinja2.Markup(link_template.format(
                    database=database,
                    table=urllib.parse.quote_plus(other_table),
                    link_id=urllib.parse.quote_plus(str(value)),
                    id=str(jinja2.escape(value)),
                    label=str(jinja2.escape(label)),
                ))
            elif value in ("", None):
                # "&nbsp;" restores the non-breaking-space entity that rendered
                # as a plain space in the dump.
                display_value = jinja2.Markup("&nbsp;")
            elif is_url(str(value).strip()):
                # The anchor template was elided in the dump.
                display_value = jinja2.Markup(
                    '<a href="{url}">{url}</a>'.format(
                        url=jinja2.escape(value.strip())
                    )
                )
            elif column in table_metadata.get("units", {}) and value != "":
                value = value * ureg(table_metadata["units"][column])
                value = round(value.to_compact(), 6)
                display_value = jinja2.Markup(
                    "{:~P}".format(value).replace(" ", "&nbsp;")
                )
            else:
                display_value = str(value)
                if truncate_cells and len(display_value) > truncate_cells:
                    display_value = display_value[:truncate_cells] + u"\u2026"
            cells.append({"column": column, "value": display_value})
        cell_rows.append(cells)

    if link_column:
        if len(pks) == 1:
            columns = [col for col in columns if col["name"] != pks[0]]
        columns = [
            {"name": pks[0] if len(pks) == 1 else "Link", "sortable": len(pks) == 1}
        ] + columns
    return columns, cell_rows
Returns columns, rows for specified table - including fancy foreign key treatment
#vtb
def from_uint8(arr_uint8, shape, min_value=0.0, max_value=1.0):
    arr_0to1 = arr_uint8.astype(np.float32) / 255.0
    return HeatmapsOnImage.from_0to1(arr_0to1, shape,
                                     min_value=min_value, max_value=max_value)
Create a heatmaps object from a heatmap array containing values ranging from 0 to 255.

Parameters
----------
arr_uint8 : (H,W) ndarray or (H,W,C) ndarray
    Heatmap(s) array, where ``H`` is height, ``W`` is width and ``C`` is the number of heatmap channels. Expected dtype is uint8.
shape : tuple of int
    Shape of the image on which the heatmap(s) is/are placed. NOT the shape of the heatmap(s) array, unless it is identical to the image shape (note the likely difference between the arrays in the number of channels). If there is no corresponding image, use the shape of the heatmaps array.
min_value : float, optional
    Minimum value for the heatmaps that the 0-to-255 array represents. This will usually be 0.0. It is used when calling :func:`imgaug.HeatmapsOnImage.get_arr`, which converts the underlying ``(0, 255)`` array to value range ``(min_value, max_value)``.
max_value : float, optional
    Maximum value for the heatmaps that the 0-to-255 array represents. See parameter `min_value` for details.

Returns
-------
imgaug.HeatmapsOnImage
    Heatmaps object.
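A brief usage sketch for the entry above (hedged; it assumes imgaug's HeatmapsOnImage class, where this classmethod lives, is importable):

import numpy as np
from imgaug import HeatmapsOnImage

heat = np.zeros((4, 4), dtype=np.uint8)
heat[1:3, 1:3] = 255  # hot square in the middle
hm = HeatmapsOnImage.from_uint8(heat, shape=(4, 4, 3))
print(hm.get_arr().max())  # 1.0 -- uint8 values are mapped into [0.0, 1.0]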
#vtb
def with_mfa(self, mfa_token):
    # The attribute name was elided in the dump; the mfa_token.__call__()
    # invocation below makes '__call__' the evident check.
    if hasattr(mfa_token, '__call__'):
        self.context.mfa_token = mfa_token.__call__()
    else:
        self.context.mfa_token = mfa_token
    return self
Set the MFA token for the next request. `mfa_token`s are only good for one request. Use this method to chain into the protected action you want to perform.

Note: Only useful for Application authentication.

Usage:
    account.with_mfa(application.totp.now()).pay(...)

Args:
    mfa_token (str/function, optional): TOTP token for the Application OR a callable/function which will generate such a token when called.

Returns:
    self
#vtb
def get_best_splitting_attr(self):
    best = (-1e999999, None)  # effectively (-inf, None) as a sentinel
    for attr in self.attributes:
        best = max(best, (self.get_gain(attr), attr))
    best_gain, best_attr = best
    return best_attr
Returns the name of the attribute with the highest gain.
#vtb
def decrease_writes_in_units(
        current_provisioning, units, min_provisioned_writes, log_tag):
    updated_provisioning = int(current_provisioning) - int(units)
    min_provisioned_writes = __get_min_writes(
        current_provisioning, min_provisioned_writes, log_tag)
    if updated_provisioning < min_provisioned_writes:
        # The message strings were elided in the dump; the wording below
        # approximates the upstream dynamic-dynamodb log messages.
        logger.info(
            '{0} - Reached provisioned writes min limit: {1:d}'.format(
                log_tag, int(min_provisioned_writes)))
        return min_provisioned_writes
    logger.debug(
        '{0} - Write provisioning will be decreased to {1:d} units'.format(
            log_tag, int(updated_provisioning)))
    return updated_provisioning
Decrease current_provisioning by `units` units.

:type current_provisioning: int
:param current_provisioning: The current provisioning
:type units: int
:param units: How many units we should decrease by
:type min_provisioned_writes: int
:param min_provisioned_writes: Configured min provisioned writes
:type log_tag: str
:param log_tag: Prefix for the log
:returns: int -- New provisioning value
#vtb
def safe_call(cls, method, *args):
    return cls.call(method, *args, safe=True)
Call a remote api method but don't raise if an error occurred.
#vtb
def ap_state(value, failure_string=None):
    try:
        return statestyle.get(value).ap
    except:  # noqa: E722 -- upstream deliberately swallows all lookup errors
        if failure_string:
            return failure_string
        else:
            return value
Converts a state's name, postal abbreviation or FIPS to A.P. style.

Example usage:

    >>> ap_state("California")
    'Calif.'
#vtb
def _get_path(entity_id):
    try:
        path = entity_id.path()
    except AttributeError:
        path = entity_id
    # The prefix literal was elided in the dump; the 3-character slice below
    # suggests the charm store 'cs:' scheme prefix.
    if path.startswith('cs:'):
        path = path[3:]
    return path
Get the entity_id as a string if it is a Reference.

@param entity_id: The ID of the entity to get, either a Reference or a string.
@return: entity_id as a string
#vtb
def downgrades(src):
    def _(f):
        destination = src - 1

        @do(operator.setitem(_downgrade_methods, destination))
        @wraps(f)
        def wrapper(op, conn, version_info_table):
            conn.execute(version_info_table.delete())
            f(op)
            write_version_info(conn, version_info_table, destination)

        return wrapper

    return _
Decorator for marking that a method is a downgrade from a version to the previous version.

Parameters
----------
src : int
    The version this downgrades from.

Returns
-------
decorator : callable[(callable) -> callable]
    The decorator to apply.
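A hedged usage sketch for the decorator above; the registry plumbing (`_downgrade_methods`, `do`, `write_version_info`) comes from the surrounding module, and the alembic call shown is purely illustrative:

# Registers a downgrade from schema version 5 to version 4.
@downgrades(5)
def _downgrade_v5(op):
    op.drop_column('equities', 'exchange_full')  # hypothetical column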
#vtb
def observe(self, seconds=None):
    if self._observer.isRunning:
        return False
    if seconds is not None:
        timeout = time.time() + seconds
    else:
        timeout = None
    while (not self._observer.isStopped) and (seconds is None or time.time() < timeout):
        self._observer.check_events()
        time.sleep(1 / self.getObserveScanRate())
    return True
Begins the observer loop (synchronously). Loops for ``seconds`` or until this region's stopObserver() method is called. If ``seconds`` is None, the observer loop cycles until stopped. If this method is called while the observer loop is already running, it returns False. Returns True if the observer could be started, False otherwise.
#vtb
def infer_named_tuple(node, context=None):
    tuple_base_name = nodes.Name(name="tuple", parent=node.root())
    class_node, name, attributes = infer_func_form(
        node, tuple_base_name, context=context
    )
    call_site = arguments.CallSite.from_call(node)
    func = next(extract_node("import collections; collections.namedtuple").infer())
    try:
        rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
    except InferenceError:
        rename = False
    if rename:
        attributes = _get_renamed_namedtuple_attributes(attributes)
    replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)
    # The docstring literal inside the property template was elided in the
    # dump; upstream astroid uses "Alias for field number {index:d}".
    field_def = (
        "    {name} = property(lambda self: self[{index:d}], "
        "doc='Alias for field number {index:d}')"
    )
    field_defs = "\n".join(
        field_def.format(name=name, index=index)
        for index, name in enumerate(attributes)
    )
    # The fake class source template was elided in the dump; the body below is
    # an approximation of the template used upstream.
    fake = AstroidBuilder(MANAGER).string_build(
        """
class %(name)s(tuple):
    __slots__ = ()
    _fields = %(fields)r
    def _asdict(self):
        return self.__dict__
    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        return new(cls, iterable)
    def _replace(self, %(replace_args)s):
        return self
    def __getnewargs__(self):
        return tuple(self)
%(field_defs)s
"""
        % {
            "name": name,
            "fields": attributes,
            "field_defs": field_defs,
            "replace_args": replace_args,
        }
    )
    class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
    class_node.locals["_make"] = fake.body[0].locals["_make"]
    class_node.locals["_replace"] = fake.body[0].locals["_replace"]
    class_node.locals["_fields"] = fake.body[0].locals["_fields"]
    for attr in attributes:
        class_node.locals[attr] = fake.body[0].locals[attr]
    return iter([class_node])
Specific inference function for namedtuple Call node
#vtb
def all_devices(cl_device_type=None, platform=None):
    if isinstance(cl_device_type, str):
        cl_device_type = device_type_from_string(cl_device_type)
    runtime_list = []
    if platform is None:
        platforms = cl.get_platforms()
    else:
        platforms = [platform]
    for platform in platforms:
        if cl_device_type:
            devices = platform.get_devices(device_type=cl_device_type)
        else:
            devices = platform.get_devices()
        for device in devices:
            if device_supports_double(device):
                env = CLEnvironment(platform, device)
                runtime_list.append(env)
    return runtime_list
Get multiple device environments, optionally only of the indicated type. This will only fetch devices that support double point precision.

Args:
    cl_device_type (cl.device_type.* or string): The type of the device we want; can be an OpenCL device type or a string matching 'GPU' or 'CPU'.
    platform (opencl platform): The opencl platform to select the devices from

Returns:
    list of CLEnvironment: List with the CL device environments.
#vtb
def resolve_polytomy(self, dist=1.0, support=100, recursive=True):
    nself = self.copy()
    nself.treenode.resolve_polytomy(
        default_dist=dist,
        default_support=support,
        recursive=recursive)
    nself._coords.update()
    return nself
Returns a copy of the tree with all polytomies randomly resolved. Does not transform tree in-place.
#vtb
def enr_at_fpr(fg_vals, bg_vals, fpr=0.01):
    pos = np.array(fg_vals)
    neg = np.array(bg_vals)
    s = scoreatpercentile(neg, 100 - fpr * 100)
    neg_matches = float(len(neg[neg >= s]))
    if neg_matches == 0:
        return float("inf")
    return len(pos[pos >= s]) / neg_matches * len(neg) / float(len(pos))
Computes the enrichment at a specific FPR (default 1%).

Parameters
----------
fg_vals : array_like
    The list of values for the positive set.
bg_vals : array_like
    The list of values for the negative set.
fpr : float, optional
    The FPR (between 0.0 and 1.0).

Returns
-------
enrichment : float
    The enrichment at the specified FPR.
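A quick, hedged usage sketch of the entry above (it assumes numpy and scipy's scoreatpercentile are in scope, as in the original module):

import numpy as np

fg = np.array([0.9, 0.85, 0.8, 0.4])               # positive-set scores
bg = np.random.RandomState(0).uniform(size=1000)   # negative-set scores
print(enr_at_fpr(fg, bg, fpr=0.01))  # enrichment of fg over bg at 1% FPR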
#vtb
def main(params=None):
    if params is None:
        parser = getParser()
        args = parser.parse_args(params)
    else:
        args = params
    results = []
    print(general.title(banner.text))
    # The greeting text was elided in the dump; a minimal placeholder keeps the
    # .format() call well-formed.
    sayingHello = 'License details can be found at: {}'.format(general.LICENSE_URL)
    print(general.info(sayingHello))
    if args.license:
        general.showLicense()
    else:
        startTime = dt.datetime.now()
        print(str(startTime) + "\tStarting search in different platform(s)... Relax!\n")
        print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))
        try:
            results = performSearch(platformNames=args.platforms,
                                    queries=args.queries,
                                    process=args.process,
                                    excludePlatformNames=args.exclude)
        except KeyboardInterrupt:
            print(general.error("\n[!] Process manually stopped by the user. Workers terminated without providing any result.\n"))
            results = []
        if args.extension:
            if not os.path.exists(args.output_folder):
                os.makedirs(args.output_folder)
            fileHeader = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                general.exportUsufy(results, ext, fileHeader)
        now = dt.datetime.now()
        print("\n{}\tResults obtained:\n".format(str(now)))
        print(general.success(general.usufyToTextExport(results)))
        if args.web_browser:
            general.openResultsInBrowser(results)
        now = dt.datetime.now()
        print("\n{date}\tYou can find all the information collected in the following files:".format(date=str(now)))
        for ext in args.extension:
            print("\t" + general.emphasis(fileHeader + "." + ext))
        endTime = dt.datetime.now()
        print("\n{date}\tFinishing execution...\n".format(date=str(endTime)))
        print("Total time used:\t" + general.emphasis(str(endTime - startTime)))
        print("Average seconds/query:\t" + general.emphasis(str((endTime - startTime).total_seconds() / len(args.platforms))) + " seconds\n")
        print(banner.footer)
    if params:
        return results
Main function to launch usufy. The function is created in this way so as to let other applications make use of the full configuration capabilities of the application. The parameters received are used as parsed by this module's `getParser()`.

Args:
-----
    params: A list with the parameters as grabbed by the terminal. It is None when this is called by an entry_point. If it is called by osrf the data is already parsed.

Returns:
--------
    A list of i3visio entities.
#vtb
def _find_ancestor(self, task_spec):
    if self.parent is None:
        return self
    if self.parent.task_spec == task_spec:
        return self.parent
    return self.parent._find_ancestor(task_spec)
Returns the ancestor that has the given task spec assigned. If no such ancestor was found, the root task is returned. :type task_spec: TaskSpec :param task_spec: The wanted task spec. :rtype: Task :returns: The ancestor.
#vtb
def det_curve(y_true, scores, distances=False):
    if distances:
        scores = -scores
    fpr, tpr, thresholds = sklearn.metrics.roc_curve(
        y_true, scores, pos_label=True)
    fnr = 1 - tpr
    if distances:
        thresholds = -thresholds
    eer_index = np.where(fpr > fnr)[0][0]
    eer = .25 * (fpr[eer_index - 1] + fpr[eer_index] +
                 fnr[eer_index - 1] + fnr[eer_index])
    return fpr, fnr, thresholds, eer
DET curve

Parameters
----------
y_true : (n_samples, ) array-like
    Boolean reference.
scores : (n_samples, ) array-like
    Predicted score.
distances : boolean, optional
    When True, indicate that `scores` are actually `distances`

Returns
-------
fpr : numpy array
    False alarm rate
fnr : numpy array
    False rejection rate
thresholds : numpy array
    Corresponding thresholds
eer : float
    Equal error rate
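A small, hedged example for the entry above (assumes numpy and sklearn are importable as in the original module); with perfectly separable toy scores the EER lands near zero:

import numpy as np

y_true = np.array([True, True, True, False, False])
scores = np.array([0.9, 0.8, 0.7, 0.6, 0.2])
fpr, fnr, thresholds, eer = det_curve(y_true, scores)
print(eer)  # averaged around the first fpr/fnr crossing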
#vtb
def slice(self, start, stop=None, axis=0):
    if stop is None:
        stop = start
    axis = self.get_axis_number(axis)
    start_bin = max(0, self.get_axis_bin_index(start, axis))
    stop_bin = min(len(self.bin_centers(axis)) - 1,
                   self.get_axis_bin_index(stop, axis))
    new_bin_edges = self.bin_edges.copy()
    new_bin_edges[axis] = new_bin_edges[axis][start_bin:stop_bin + 2]
    return Histdd.from_histogram(
        np.take(self.histogram, np.arange(start_bin, stop_bin + 1), axis=axis),
        bin_edges=new_bin_edges,
        axis_names=self.axis_names)
Restrict histogram to bins whose data values (not bin numbers) along axis are between start and stop (both inclusive). Returns d dimensional histogram.
#vtb
def get_between_times(self, t1, t2, target=None):
    try:
        # datetime objects are converted to ISO format strings; plain strings
        # pass through unchanged.
        t1 = t1.isoformat()
        t2 = t2.isoformat()
    except AttributeError:
        pass
    myquery = self._get_time_query(t1, t2)
    if target is not None:
        myquery["target"] = target
    self.create_files_request(myquery, fmt="json")
    self.unpack_json_response()
Query for OPUS data between times t1 and t2.

Parameters
----------
t1, t2 : datetime.datetime, strings
    Start and end time for the query. If type is datetime, will be converted to isoformat string. If type is string already, it needs to be in an accepted international format for time strings.
target : str
    Potential target for the observation query. Most likely will reduce the amount of data matching the query a lot.

Returns
-------
None, but sets the state of the object to have the new query results stored in self.obsids.
#vtb
def filter_queryset(self, request, queryset, view):
    start_value = self.get_start(request)
    if start_value:
        queryset = self.apply_published_filter(queryset, "after", start_value)
    end_value = self.get_end(request)
    if end_value:
        queryset = self.apply_published_filter(queryset, "before", end_value)
    return queryset
Apply the relevant behaviors to the view queryset.
#vtb
def read(self, nrml_file, validate=False,
         simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
         mfd_spacing=0.1):
    self.source_file = nrml_file
    if validate:
        converter = SourceConverter(1.0, simple_fault_spacing,
                                    complex_mesh_spacing,
                                    mfd_spacing,
                                    10.0)
        converter.fname = nrml_file
    root = nrml.read(nrml_file)
    # The attribute name and literal were elided in the dump; NRML 0.4 files
    # carry a single source model node, while 0.5 uses source groups.
    if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
        sg_nodes = [root.sourceModel.nodes]
    else:  # NRML 0.5
        sg_nodes = root.sourceModel.nodes
    sources = []
    for sg_node in sg_nodes:
        for no, src_node in enumerate(sg_node, 1):
            if validate:
                print("Validating Source %s" % src_node.attrib["id"])
                converter.convert_node(src_node)
            sources.append(src_node)
    return SourceModel(sources)
Build the source model from nrml format
#vtb
def local_temp_dir():
    # Presumably decorated with @contextlib.contextmanager upstream; this dump
    # strips decorators.
    path = tempfile.mkdtemp()
    yield path
    shutil.rmtree(path, ignore_errors=True)
Creates a local temporary directory. The directory is removed when no longer needed. Failure to do so will be ignored. :return: Path to the temporary directory. :rtype: unicode
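A hedged usage sketch; since the function body yields, it is presumably registered as a context manager upstream, which the wrapper below makes explicit:

from contextlib import contextmanager

local_temp_dir = contextmanager(local_temp_dir)  # only if not already decorated

with local_temp_dir() as tmp:
    print(tmp)  # directory exists here and is removed on exit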
#vtb
def _incr_exceptions(self, conn):
    self._pool_manager.get_connection(self.pid, conn).exceptions += 1
Increment the number of exceptions for the current connection. :param psycopg2.extensions.connection conn: the psycopg2 connection
#vtb
def QRatio(s1, s2, force_ascii=True, full_process=True):
    if full_process:
        p1 = utils.full_process(s1, force_ascii=force_ascii)
        p2 = utils.full_process(s2, force_ascii=force_ascii)
    else:
        p1 = s1
        p2 = s2
    if not utils.validate_string(p1):
        return 0
    if not utils.validate_string(p2):
        return 0
    return ratio(p1, p2)
Quick ratio comparison between two strings. Runs full_process from utils on both strings. Short circuits if either of the strings is empty after processing.

:param s1:
:param s2:
:param force_ascii: Allow only ASCII characters (Default: True)
:param full_process: Process inputs, used here to avoid double processing in extract functions (Default: True)
:return: similarity ratio
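A short usage sketch for the entry above; QRatio ships in fuzzywuzzy's fuzz module, so case and punctuation differences are normalized away by full_process:

from fuzzywuzzy import fuzz

print(fuzz.QRatio("new york mets", "NEW YORK METS!"))  # 100 after processing
print(fuzz.QRatio("new york mets", "!!!"))             # 0 -- empty after processing short-circuits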
#vtb
def _parse_handler_result(self, result):
    if isinstance(result, (list, tuple)):
        payload = result[0]
        list_result = list(result)
    else:
        payload = result
        list_result = [""]
    return payload, list_result
Parses the item(s) returned by your handler implementation. Handlers may return a single item (payload), or a tuple that gets passed to the Response class __init__ method of your HTTP layer. _parse_handler_result separates the payload from the rest of the tuple, as well as providing the tuple so that it can be re-composed after the payload has been run through the `_returns` Resource's renderer.
#vtb
def merge_keywords(x, y):
    z = x.copy()
    z.update(y)
    return z
Given two dicts, merge them into a new dict as a shallow copy.
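A one-liner demonstration of the shallow-merge semantics above: values from `y` win on key collisions, and neither input is mutated.

defaults = {"color": "red", "size": 2}
overrides = {"size": 5}
print(merge_keywords(defaults, overrides))  # {'color': 'red', 'size': 5}
print(defaults)                             # unchanged: {'color': 'red', 'size': 2}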
#vtb
def get_function_from_config(item):
    config = get_configuration()
    func_path = config.get(item)
    module_path, func_name = func_path.rsplit(".", 1)
    module = importlib.import_module(module_path)
    func = getattr(module, func_name)
    return func
Import the function to get profile by handle.
#vtb
def split_bezier(bpoints, t):
    def split_bezier_recursion(bpoints_left_, bpoints_right_, bpoints_, t_):
        if len(bpoints_) == 1:
            bpoints_left_.append(bpoints_[0])
            bpoints_right_.append(bpoints_[0])
        else:
            new_points = [None] * (len(bpoints_) - 1)
            bpoints_left_.append(bpoints_[0])
            bpoints_right_.append(bpoints_[-1])
            for i in range(len(bpoints_) - 1):
                new_points[i] = (1 - t_) * bpoints_[i] + t_ * bpoints_[i + 1]
            bpoints_left_, bpoints_right_ = split_bezier_recursion(
                bpoints_left_, bpoints_right_, new_points, t_)
        return bpoints_left_, bpoints_right_

    bpoints_left = []
    bpoints_right = []
    bpoints_left, bpoints_right = \
        split_bezier_recursion(bpoints_left, bpoints_right, bpoints, t)
    bpoints_right.reverse()
    return bpoints_left, bpoints_right
Uses de Casteljau's recursion to split the Bezier curve at t into two Bezier curves of the same order.
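A hedged usage sketch of the de Casteljau split above; control points are given as complex numbers (the svgpathtools convention), and the two halves share the on-curve split point:

bpoints = [0 + 0j, 1 + 2j, 3 + 2j, 4 + 0j]  # a cubic Bezier
left, right = split_bezier(bpoints, 0.5)
print(left[-1] == right[0])  # True -- both halves meet at the curve point for t=0.5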
#vtb
def get_parser(segmenter, **options):
    # The segmenter-name literals and the error message were elided in the
    # dump; the names below follow the parser classes being returned.
    if segmenter == 'nlapi':
        return NLAPIParser(**options)
    elif segmenter == 'mecab':
        return MecabParser()
    elif segmenter == 'tinysegmenter':
        return TinysegmenterParser()
    else:
        raise ValueError('Segmenter {} is not supported.'.format(segmenter))
Gets a parser.

Args:
    segmenter (str): Segmenter to use.
    options (:obj:`dict`, optional): Optional settings.

Returns:
    Parser (:obj:`budou.parser.Parser`)

Raises:
    ValueError: If an unsupported segmenter is specified.
#vtb
def _read_opt_type(self, kind):
    bin_ = bin(kind)[2:].zfill(8)
    type_ = dict(
        value=kind,
        action=_IPv6_Opts_ACT.get(bin_[:2]),
        change=True if int(bin_[2], base=2) else False,
    )
    return type_
Read option type field.

Positional arguments:
    * kind -- int, option kind value

Returns:
    * dict -- extracted IPv6_Opts option

Structure of option type field [RFC 791]:

    Octets  Bits  Name                         Description
    0       0     ipv6_opts.opt.type.value     Option Number
    0       0     ipv6_opts.opt.type.action    Action (00-11)
    0       2     ipv6_opts.opt.type.change    Change Flag (0/1)
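A worked bit-layout example for the entry above, using the real IPv6 Jumbo Payload option kind 0xC2: the two high bits give the action code and the third bit the change flag:

kind = 0xC2                      # Jumbo Payload option
bits = bin(kind)[2:].zfill(8)    # '11000010'
print(bits[:2], bits[2])         # '11' '0' -> action code '11', change flag False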
#vtb
def find_types_removed_from_unions(
    old_schema: GraphQLSchema, new_schema: GraphQLSchema
) -> List[BreakingChange]:
    old_type_map = old_schema.type_map
    new_type_map = new_schema.type_map
    types_removed_from_union = []
    for old_type_name, old_type in old_type_map.items():
        new_type = new_type_map.get(old_type_name)
        if not (is_union_type(old_type) and is_union_type(new_type)):
            continue
        old_type = cast(GraphQLUnionType, old_type)
        new_type = cast(GraphQLUnionType, new_type)
        type_names_in_new_union = {type_.name for type_ in new_type.types}
        for type_ in old_type.types:
            type_name = type_.name
            if type_name not in type_names_in_new_union:
                types_removed_from_union.append(
                    BreakingChange(
                        BreakingChangeType.TYPE_REMOVED_FROM_UNION,
                        f"{type_name} was removed from union type {old_type_name}.",
                    )
                )
    return types_removed_from_union
Find types removed from unions. Given two schemas, returns a list containing descriptions of any breaking changes in the new_schema related to removing types from a union type.
#vtb
def strip_possessives(self, word):
    # The suffix literals were elided in the dump; "'s" and "'" are the natural
    # possessive endings. The original sliced word[:-3] for the first branch,
    # which would match a 3-byte curly-quote suffix in Python 2 bytes; the
    # slice here is adjusted to the plain ASCII "'s" reconstruction.
    if word.endswith("'s"):
        return word[:-2]
    elif word.endswith("'"):
        return word[:-1]
    else:
        return word
Get rid of apostrophes indicating possession.
#vtb
def list_snapshots(self):
    return [snap for snap in self.manager.list_snapshots()
            if snap.volume_id == self.id]
Returns a list of all snapshots of this volume.
#vtb
def matches(self, pattern, flags=0):
    if not re.match(pattern, self._subject, flags):
        raise self._error_factory(_format("Expected {} to match {}",
                                          self._subject, pattern))
    return ChainInspector(self._subject)
Ensures :attr:`subject` matches regular expression *pattern*.
#vtb
def describe_config_variable(self, config_id):
    config = self._config_variables.get(config_id)
    if config is None:
        return [Error.INVALID_ARRAY_KEY, 0, 0, 0, 0]
    packed_size = config.total_size
    packed_size |= int(config.variable) << 15
    return [0, 0, 0, config_id, packed_size]
Describe the config variable by its id.
#vtb
def loadPng(varNumVol, tplPngSize, strPathPng):
    # The status message was elided in the dump.
    print('---Loading png files')
    lstPngPaths = [None] * varNumVol
    for idx01 in range(0, varNumVol):
        # The extension literal was elided; '.png' follows from the
        # function's purpose.
        lstPngPaths[idx01] = (strPathPng + str(idx01) + '.png')
    aryPngData = np.zeros((tplPngSize[0], tplPngSize[1], varNumVol))
    for idx01 in range(0, varNumVol):
        aryPngData[:, :, idx01] = np.array(Image.open(lstPngPaths[idx01]))
    aryPngData = (aryPngData > 0).astype(int)
    return aryPngData
Load PNG files.

Parameters
----------
varNumVol : int
    Number of volumes, i.e. number of time points in all runs.
tplPngSize : tuple
    Shape of the stimulus image (i.e. png).
strPathPng : str
    Path to the folder containing the png files.

Returns
-------
aryPngData : 2d numpy array, shape [png_x, png_y, n_vols]
    Stack of stimulus data.
#vtb
def print_output(self, per_identity_data: 'RDD') -> None:
    # The annotation was elided in the dump; 'RDD' (a pyspark RDD) matches the
    # flatMap/map/collect usage below.
    if not self._window_bts:
        data = per_identity_data.flatMap(
            lambda x: [json.dumps(data, cls=BlurrJSONEncoder)
                       for data in x[1][0].items()])
    else:
        data = per_identity_data.map(
            lambda x: json.dumps((x[0], x[1][1]), cls=BlurrJSONEncoder))
    for row in data.collect():
        print(row)
Basic helper function to write data to stdout. If window BTS was provided then the window BTS output is written, otherwise, the streaming BTS output is written to stdout. WARNING - For large datasets this will be extremely slow. :param per_identity_data: Output of the `execute()` call.
#vtb
def _type_string(label, case=None):
    return (label, abstractSearch.in_string,
            lambda s: abstractRender.default(s, case=case), "")
Shortcut for string-like fields.
#vtb
def serialize(self, data=None):
    if data is not None and self.response is not None:
        # The header name was elided in the dump; 'Content-Type' is the
        # evident key for a media type.
        self.response['Content-Type'] = self.media_types[0]
        self.response.write(data)
    return data
Transforms the object into an acceptable format for transmission. @throws ValueError To indicate this serializer does not support the encoding of the specified object.
#vtb
def is_vert_aligned(c):
    return all(
        [
            _to_span(c[i]).sentence.is_visual()
            and bbox_vert_aligned(
                bbox_from_span(_to_span(c[i])), bbox_from_span(_to_span(c[0]))
            )
            for i in range(len(c))
        ]
    )
Return true if all the components of c are vertically aligned. Vertical alignment means that the bounding boxes of each Mention of c shares a similar x-axis value in the visual rendering of the document. :param c: The candidate to evaluate :rtype: boolean
#vtb
def from_function(cls, function):
    module_name = function.__module__
    function_name = function.__name__
    class_name = ""
    function_source_hasher = hashlib.sha1()
    try:
        source = inspect.getsource(function)
        if sys.version_info[0] >= 3:
            source = source.encode()
        function_source_hasher.update(source)
        function_source_hash = function_source_hasher.digest()
    except (IOError, OSError, TypeError):
        function_source_hash = b""
    return cls(module_name, function_name, class_name, function_source_hash)
Create a FunctionDescriptor from a function instance. This function is used to create the function descriptor from a python function. If a function is a class function, it should not be used by this function. Args: cls: Current class which is required argument for classmethod. function: the python function used to create the function descriptor. Returns: The FunctionDescriptor instance created according to the function.
#vtb
def write_to_file(filename, content):
    if not config["destdir"]:
        print("{destdir} config variable not present. Did you forget to run init()?")
        sys.exit(8)
    abs_filename = os.path.abspath(config["destdir"] + "/" + filename)
    abs_filepath = os.path.dirname(abs_filename)
    if not os.path.exists(abs_filepath):
        try:
            os.makedirs(abs_filepath)
        except OSError as e:
            print("Cannot create directory " + abs_filepath)
            print("Error %d: %s" % (e.errno, e.strerror))
            sys.exit(6)
    with codecs.open(abs_filename, "w", "utf-8") as out:
        if isinstance(content, str):
            content = [content]
        for line in content:
            if line is not None:
                out.write(line)
                out.write("\n")
Writes content to the given file. The file's directory will be created if needed. :param filename: name of the output file, relative to the "destination folder" provided by the user :param content: iterable (line-by-line) that should be written to the file. Either a list or a generator. Each line will be appended with a "\n". Lines containing None will be skipped.
#vtb def namer(cls, imageUrl, pageUrl): index = int(compile(r"(\d+)").search(pageUrl).group(1)) ext = imageUrl.rsplit(".", 1)[1] return "SnowFlakes-%d.%s" % (index, ext)
Use strip index number for image name.
### Input: Use strip index number for image name. ### Response: #vtb def namer(cls, imageUrl, pageUrl): index = int(compile(r"(\d+)").search(pageUrl).group(1)) ext = imageUrl.rsplit(".", 1)[1] return "SnowFlakes-%d.%s" % (index, ext)
#vtb def visit(self, event): to_visit = False if event.arr_time_ut <= self.min_transfer_time+self.get_min_visit_time(): to_visit = True else: for ve in self.visit_events: if (event.trip_I == ve.trip_I) and event.arr_time_ut < ve.arr_time_ut: to_visit = True if to_visit: self.visit_events.append(event) min_time = self.get_min_visit_time() self.visit_events = [v for v in self.visit_events if v.arr_time_ut <= min_time+self.min_transfer_time] return to_visit
Visit the stop if it has not been visited already by an event with earlier arr_time_ut (or with other trip that does not require a transfer) Parameters ---------- event : Event an instance of the Event (namedtuple) Returns ------- visited : bool if visit is stored, returns True, otherwise False
### Input: Visit the stop if it has not been visited already by an event with earlier arr_time_ut (or with other trip that does not require a transfer) Parameters ---------- event : Event an instance of the Event (namedtuple) Returns ------- visited : bool if visit is stored, returns True, otherwise False ### Response: #vtb def visit(self, event): to_visit = False if event.arr_time_ut <= self.min_transfer_time+self.get_min_visit_time(): to_visit = True else: for ve in self.visit_events: if (event.trip_I == ve.trip_I) and event.arr_time_ut < ve.arr_time_ut: to_visit = True if to_visit: self.visit_events.append(event) min_time = self.get_min_visit_time() self.visit_events = [v for v in self.visit_events if v.arr_time_ut <= min_time+self.min_transfer_time] return to_visit
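A self-contained demo of the visit logic; Event is assumed to be the namedtuple from the docstring with at least arr_time_ut and trip_I fields, and the Stop stub below carries only what visit() touches.

from collections import namedtuple

Event = namedtuple("Event", ["arr_time_ut", "trip_I"])

class Stop:
    def __init__(self, min_transfer_time):
        self.min_transfer_time = min_transfer_time
        self.visit_events = []

    def get_min_visit_time(self):
        # Infinite until the stop has been reached at least once.
        return min((e.arr_time_ut for e in self.visit_events), default=float("inf"))

    visit = visit  # reuse the function defined above as a method

stop = Stop(min_transfer_time=120)
print(stop.visit(Event(arr_time_ut=1000, trip_I=7)))  # True: first visit
print(stop.visit(Event(arr_time_ut=5000, trip_I=9)))  # False: arrives too late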
#vtb def autocommit(data_access): if not data_access.autocommit: data_access.commit() old_autocommit = data_access.autocommit data_access.autocommit = True try: yield data_access finally: data_access.autocommit = old_autocommit
Make statements autocommit. :param data_access: a DataAccess instance
### Input: Make statements autocommit. :param data_access: a DataAccess instance ### Response: #vtb def autocommit(data_access): if not data_access.autocommit: data_access.commit() old_autocommit = data_access.autocommit data_access.autocommit = True try: yield data_access finally: data_access.autocommit = old_autocommit
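A usage sketch: the function reads as a generator-based context manager, so it is presumably wrapped with contextlib.contextmanager in the original module; FakeDataAccess below is a stand-in for the real DataAccess interface.

import contextlib

class FakeDataAccess:
    def __init__(self):
        self.autocommit = False
    def commit(self):
        print("committed pending work")

autocommit_cm = contextlib.contextmanager(autocommit)

da = FakeDataAccess()
with autocommit_cm(da) as da:
    assert da.autocommit is True   # statements in this block autocommit
assert da.autocommit is False      # original setting restored on exit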
#vtb def _node_le(self, node_self, node_other):
    for x in ['tag', 'text', 'tail']:
        if node_self.__getattribute__(x) != node_other.__getattribute__(x):
            return False
    for a in node_self.attrib:
        if a not in node_other.attrib or \
           node_self.attrib[a] != node_other.attrib[a]:
            return False
    for child in node_self.getchildren():
        peers = self._get_peers(child, node_other)
        if len(peers) < 1:
            return False
        elif len(peers) > 1:
            raise ConfigError('not unique peer of node {}' \
                              .format(self.device.get_xpath(child)))
        else:
            schma_node = self.device.get_schema_node(child)
            if schma_node.get('type') == 'leaf-list' and \
               schma_node.get('ordered-by') == 'user' or \
               schma_node.get('type') == 'list' and \
               schma_node.get('ordered-by') == 'user':
                elder_siblings = list(child.itersiblings(tag=child.tag,
                                                         preceding=True))
                if elder_siblings:
                    immediate_elder_sibling = elder_siblings[0]
                    peers_of_immediate_elder_sibling = \
                        self._get_peers(immediate_elder_sibling, node_other)
                    if len(peers_of_immediate_elder_sibling) < 1:
                        return False
                    elif len(peers_of_immediate_elder_sibling) > 1:
                        p = self.device.get_xpath(immediate_elder_sibling)
                        raise ConfigError('not unique peer of node {}' \
                                          .format(p))
                    elder_siblings_of_peer = \
                        list(peers[0].itersiblings(tag=child.tag,
                                                   preceding=True))
                    if peers_of_immediate_elder_sibling[0] not in \
                       elder_siblings_of_peer:
                        return False
            if not self._node_le(child, peers[0]):
                return False
    return True
_node_le Low-level api: Return True if all descendants of one node exist in the other node. Otherwise False. This is a recursive method. Parameters ---------- node_self : `Element` A node to be compared. node_other : `Element` Another node to be compared. Returns ------- bool True if all descendants of node_self exist in node_other, otherwise False.
### Input: _node_le Low-level api: Return True if all descendants of one node exist in the other node. Otherwise False. This is a recursive method. Parameters ---------- node_self : `Element` A node to be compared. node_other : `Element` Another node to be compared. Returns ------- bool True if all descendants of node_self exist in node_other, otherwise False. ### Response: #vtb def _node_le(self, node_self, node_other):
    for x in ['tag', 'text', 'tail']:
        if node_self.__getattribute__(x) != node_other.__getattribute__(x):
            return False
    for a in node_self.attrib:
        if a not in node_other.attrib or \
           node_self.attrib[a] != node_other.attrib[a]:
            return False
    for child in node_self.getchildren():
        peers = self._get_peers(child, node_other)
        if len(peers) < 1:
            return False
        elif len(peers) > 1:
            raise ConfigError('not unique peer of node {}' \
                              .format(self.device.get_xpath(child)))
        else:
            schma_node = self.device.get_schema_node(child)
            if schma_node.get('type') == 'leaf-list' and \
               schma_node.get('ordered-by') == 'user' or \
               schma_node.get('type') == 'list' and \
               schma_node.get('ordered-by') == 'user':
                elder_siblings = list(child.itersiblings(tag=child.tag,
                                                         preceding=True))
                if elder_siblings:
                    immediate_elder_sibling = elder_siblings[0]
                    peers_of_immediate_elder_sibling = \
                        self._get_peers(immediate_elder_sibling, node_other)
                    if len(peers_of_immediate_elder_sibling) < 1:
                        return False
                    elif len(peers_of_immediate_elder_sibling) > 1:
                        p = self.device.get_xpath(immediate_elder_sibling)
                        raise ConfigError('not unique peer of node {}' \
                                          .format(p))
                    elder_siblings_of_peer = \
                        list(peers[0].itersiblings(tag=child.tag,
                                                   preceding=True))
                    if peers_of_immediate_elder_sibling[0] not in \
                       elder_siblings_of_peer:
                        return False
            if not self._node_le(child, peers[0]):
                return False
    return True
#vtb def cli(env, identifier, enabled, port, weight, healthcheck_type, ip_address): mgr = SoftLayer.LoadBalancerManager(env.client) loadbal_id, group_id = loadbal.parse_id(identifier) ip_address_id = None if ip_address: ip_service = env.client['Network_Subnet_IpAddress'] ip_record = ip_service.getByIpAddress(ip_address) if len(ip_record) > 0: ip_address_id = ip_record['id'] mgr.add_service(loadbal_id, group_id, ip_address_id=ip_address_id, enabled=enabled, port=port, weight=weight, hc_type=healthcheck_type) env.fout('Load balancer service is being added!')
Adds a new load balancer service.
### Input: Adds a new load balancer service. ### Response: #vtb def cli(env, identifier, enabled, port, weight, healthcheck_type, ip_address): mgr = SoftLayer.LoadBalancerManager(env.client) loadbal_id, group_id = loadbal.parse_id(identifier) ip_address_id = None if ip_address: ip_service = env.client['Network_Subnet_IpAddress'] ip_record = ip_service.getByIpAddress(ip_address) if len(ip_record) > 0: ip_address_id = ip_record['id'] mgr.add_service(loadbal_id, group_id, ip_address_id=ip_address_id, enabled=enabled, port=port, weight=weight, hc_type=healthcheck_type) env.fout('Load balancer service is being added!')
#vtb def update_checkplotdict_nbrlcs( checkplotdict, timecol, magcol, errcol, lcformat=, lcformatdir=None, verbose=True, ): bests checkplot. This is used to extract the correct times-series from the neighborsve stored your lcformat description JSONs, other than the usual directories lcproc knows to search for them in. Use this along with `lcformat` to specify an LC format JSON file that try: formatinfo = get_lcformat(lcformat, use_lcformat_dir=lcformatdir) if formatinfo: (dfileglob, readerfunc, dtimecols, dmagcols, derrcols, magsarefluxes, normfunc) = formatinfo else: LOGERROR("cant figure out the light curve format") return checkplotdict if not ( in checkplotdict and checkplotdict[] and len(checkplotdict[]) > 0): LOGERROR( % (checkplotdict[])) return checkplotdict nbrmagkeys = {} for mc in objmagkeys: if (( in lcdict) and (isinstance(lcdict[], dict)) and (mc in lcdict[]) and (lcdict[][mc] is not None) and (np.isfinite(lcdict[][mc]))): nbrmagkeys[mc] = lcdict[][mc] magdiffs = {} for omc in objmagkeys: if omc in nbrmagkeys: magdiffs[omc] = objmagkeys[omc] - nbrmagkeys[omc] colordiffs = {} "for target: %s, neighbor: %s, neighbor LC: %s" % (checkplotdict[], nbr[], nbr[])) continue nbrdict = _pkl_magseries_plot(xtimes, xmags, xerrs, magsarefluxes=magsarefluxes) nbr.update(nbrdict) if in checkplotdict: pfmethods = checkplotdict[] else: pfmethods = [] for cpkey in checkplotdict: for pfkey in PFMETHODS: if pfkey in cpkey: pfmethods.append(pfkey) for lspt in pfmethods: nbr[lspt] = {} operiod, oepoch = (checkplotdict[lspt][0][], checkplotdict[lspt][0][]) (ophasewrap, ophasesort, ophasebin, ominbinelems, oplotxlim) = ( checkplotdict[lspt][0][], checkplotdict[lspt][0][], checkplotdict[lspt][0][], checkplotdict[lspt][0][], checkplotdict[lspt][0][], ) nbr = _pkl_phased_magseries_plot( nbr, lspt.split()[1], 0, xtimes, xmags, xerrs, operiod, oepoch, phasewrap=ophasewrap, phasesort=ophasesort, phasebin=ophasebin, minbinelems=ominbinelems, plotxlim=oplotxlim, magsarefluxes=magsarefluxes, verbose=verbose, override_pfmethod=lspt ) return checkplotdict
For all neighbors in a checkplotdict, make LCs and phased LCs. Parameters ---------- checkplotdict : dict This is the checkplot to process. The light curves for the neighbors to the object here will be extracted from the stored file paths, and this function will make plots of these time-series. If the object has 'best' periods and epochs generated by period-finder functions in this checkplotdict, phased light curve plots of each neighbor will be made using these to check the effects of blending. timecol,magcol,errcol : str The timecol, magcol, and errcol keys used to generate this object's checkplot. This is used to extract the correct times-series from the neighbors' light curves. lcformat : str This is the `formatkey` associated with your light curve format, which you previously passed in to the `lcproc.register_lcformat` function. This will be used to look up how to find and read the light curves specified in `basedir` or `use_list_of_filenames`. lcformatdir : str or None If this is provided, gives the path to a directory when you've stored your lcformat description JSONs, other than the usual directories lcproc knows to search for them in. Use this along with `lcformat` to specify an LC format JSON file that's not currently registered with lcproc. Returns ------- dict The input checkplotdict is returned with the neighor light curve plots added in.
### Input: For all neighbors in a checkplotdict, make LCs and phased LCs. Parameters ---------- checkplotdict : dict This is the checkplot to process. The light curves for the neighbors to the object here will be extracted from the stored file paths, and this function will make plots of these time-series. If the object has 'best' periods and epochs generated by period-finder functions in this checkplotdict, phased light curve plots of each neighbor will be made using these to check the effects of blending. timecol,magcol,errcol : str The timecol, magcol, and errcol keys used to generate this object's checkplot. This is used to extract the correct times-series from the neighbors' light curves. lcformat : str This is the `formatkey` associated with your light curve format, which you previously passed in to the `lcproc.register_lcformat` function. This will be used to look up how to find and read the light curves specified in `basedir` or `use_list_of_filenames`. lcformatdir : str or None If this is provided, gives the path to a directory when you've stored your lcformat description JSONs, other than the usual directories lcproc knows to search for them in. Use this along with `lcformat` to specify an LC format JSON file that's not currently registered with lcproc. Returns ------- dict The input checkplotdict is returned with the neighor light curve plots added in. ### Response: #vtb def update_checkplotdict_nbrlcs( checkplotdict, timecol, magcol, errcol, lcformat=, lcformatdir=None, verbose=True, ): bests checkplot. This is used to extract the correct times-series from the neighborsve stored your lcformat description JSONs, other than the usual directories lcproc knows to search for them in. Use this along with `lcformat` to specify an LC format JSON file that try: formatinfo = get_lcformat(lcformat, use_lcformat_dir=lcformatdir) if formatinfo: (dfileglob, readerfunc, dtimecols, dmagcols, derrcols, magsarefluxes, normfunc) = formatinfo else: LOGERROR("cant figure out the light curve format") return checkplotdict if not ( in checkplotdict and checkplotdict[] and len(checkplotdict[]) > 0): LOGERROR( % (checkplotdict[])) return checkplotdict nbrmagkeys = {} for mc in objmagkeys: if (( in lcdict) and (isinstance(lcdict[], dict)) and (mc in lcdict[]) and (lcdict[][mc] is not None) and (np.isfinite(lcdict[][mc]))): nbrmagkeys[mc] = lcdict[][mc] magdiffs = {} for omc in objmagkeys: if omc in nbrmagkeys: magdiffs[omc] = objmagkeys[omc] - nbrmagkeys[omc] colordiffs = {} "for target: %s, neighbor: %s, neighbor LC: %s" % (checkplotdict[], nbr[], nbr[])) continue nbrdict = _pkl_magseries_plot(xtimes, xmags, xerrs, magsarefluxes=magsarefluxes) nbr.update(nbrdict) if in checkplotdict: pfmethods = checkplotdict[] else: pfmethods = [] for cpkey in checkplotdict: for pfkey in PFMETHODS: if pfkey in cpkey: pfmethods.append(pfkey) for lspt in pfmethods: nbr[lspt] = {} operiod, oepoch = (checkplotdict[lspt][0][], checkplotdict[lspt][0][]) (ophasewrap, ophasesort, ophasebin, ominbinelems, oplotxlim) = ( checkplotdict[lspt][0][], checkplotdict[lspt][0][], checkplotdict[lspt][0][], checkplotdict[lspt][0][], checkplotdict[lspt][0][], ) nbr = _pkl_phased_magseries_plot( nbr, lspt.split()[1], 0, xtimes, xmags, xerrs, operiod, oepoch, phasewrap=ophasewrap, phasesort=ophasesort, phasebin=ophasebin, minbinelems=ominbinelems, plotxlim=oplotxlim, magsarefluxes=magsarefluxes, verbose=verbose, override_pfmethod=lspt ) return checkplotdict
#vtb def parse_field(fld, selectable, aggregated=True, default_aggregation='sum'):
    aggregation_lookup = {
        'sum': func.sum,
        'min': func.min,
        'max': func.max,
        'avg': func.avg,
        'count': func.count,
        'count_distinct': lambda fld: func.count(distinct(fld)),
        'month': lambda fld: func.date_trunc('month', fld),
        'week': lambda fld: func.date_trunc('week', fld),
        'year': lambda fld: func.date_trunc('year', fld),
        'quarter': lambda fld: func.date_trunc('quarter', fld),
        'age': lambda fld: func.date_part('year', func.age(fld)),
        None: lambda fld: fld,
    }
    if isinstance(fld, basestring):
        fld = {
            'value': fld,
        }
    if not isinstance(fld, dict):
        raise BadIngredient('fields must be a string or a dict')
    if 'value' not in fld:
        raise BadIngredient('fields must contain a value')
    if not isinstance(fld['value'], basestring):
        raise BadIngredient('field value must be a string')
    if 'condition' in fld:
        if not isinstance(fld['condition'], dict) and \
                not fld['condition'] is None:
            raise BadIngredient('condition must be a dict or None')
    else:
        fld['condition'] = None
    initial_aggregation = default_aggregation if aggregated else None
    if 'aggregation' in fld:
        if not isinstance(fld['aggregation'], basestring) and \
                not fld['aggregation'] is None:
            raise BadIngredient('aggregation must be a string or None')
        if fld['aggregation'] is None:
            fld['aggregation'] = initial_aggregation
    else:
        fld['aggregation'] = initial_aggregation
    value = fld.get('value', None)
    if value is None:
        raise BadIngredient('field value is not defined')
    field_parts = []
    for word in tokenize(value):
        if word in ('+', '-', '/', '*'):
            field_parts.append(word)
        else:
            field_parts.append(find_column(selectable, word))
    if len(field_parts) == 0:
        raise BadIngredient('field is not defined')
    if len(field_parts) % 2 != 1:
        raise BadIngredient('field has an invalid operator arrangement')
    field = field_parts[0]
    if len(field_parts) > 1:
        for operator, other_field in zip(field_parts[1::2], field_parts[2::2]):
            if operator == '+':
                field = field.__add__(other_field)
            elif operator == '-':
                field = field.__sub__(other_field)
            elif operator == '/':
                field = field.__div__(other_field)
            elif operator == '*':
                field = field.__mul__(other_field)
            else:
                raise BadIngredient('Unknown operator {}'.format(operator))
    aggr = fld.get('aggregation', 'sum')
    if aggr is not None:
        aggr = aggr.strip()
    if aggr not in aggregation_lookup:
        raise BadIngredient('unknown aggregation {}'.format(aggr))
    aggregator = aggregation_lookup[aggr]
    condition = parse_condition(
        fld.get('condition', None),
        selectable,
        aggregated=False,
        default_aggregation=default_aggregation
    )
    if condition is not None:
        field = case([(condition, field)])
    return aggregator(field)
Parse a field object from yaml into a sqlalchemy expression
### Input: Parse a field object from yaml into a sqlalchemy expression ### Response: #vtb def parse_field(fld, selectable, aggregated=True, default_aggregation='sum'):
    aggregation_lookup = {
        'sum': func.sum,
        'min': func.min,
        'max': func.max,
        'avg': func.avg,
        'count': func.count,
        'count_distinct': lambda fld: func.count(distinct(fld)),
        'month': lambda fld: func.date_trunc('month', fld),
        'week': lambda fld: func.date_trunc('week', fld),
        'year': lambda fld: func.date_trunc('year', fld),
        'quarter': lambda fld: func.date_trunc('quarter', fld),
        'age': lambda fld: func.date_part('year', func.age(fld)),
        None: lambda fld: fld,
    }
    if isinstance(fld, basestring):
        fld = {
            'value': fld,
        }
    if not isinstance(fld, dict):
        raise BadIngredient('fields must be a string or a dict')
    if 'value' not in fld:
        raise BadIngredient('fields must contain a value')
    if not isinstance(fld['value'], basestring):
        raise BadIngredient('field value must be a string')
    if 'condition' in fld:
        if not isinstance(fld['condition'], dict) and \
                not fld['condition'] is None:
            raise BadIngredient('condition must be a dict or None')
    else:
        fld['condition'] = None
    initial_aggregation = default_aggregation if aggregated else None
    if 'aggregation' in fld:
        if not isinstance(fld['aggregation'], basestring) and \
                not fld['aggregation'] is None:
            raise BadIngredient('aggregation must be a string or None')
        if fld['aggregation'] is None:
            fld['aggregation'] = initial_aggregation
    else:
        fld['aggregation'] = initial_aggregation
    value = fld.get('value', None)
    if value is None:
        raise BadIngredient('field value is not defined')
    field_parts = []
    for word in tokenize(value):
        if word in ('+', '-', '/', '*'):
            field_parts.append(word)
        else:
            field_parts.append(find_column(selectable, word))
    if len(field_parts) == 0:
        raise BadIngredient('field is not defined')
    if len(field_parts) % 2 != 1:
        raise BadIngredient('field has an invalid operator arrangement')
    field = field_parts[0]
    if len(field_parts) > 1:
        for operator, other_field in zip(field_parts[1::2], field_parts[2::2]):
            if operator == '+':
                field = field.__add__(other_field)
            elif operator == '-':
                field = field.__sub__(other_field)
            elif operator == '/':
                field = field.__div__(other_field)
            elif operator == '*':
                field = field.__mul__(other_field)
            else:
                raise BadIngredient('Unknown operator {}'.format(operator))
    aggr = fld.get('aggregation', 'sum')
    if aggr is not None:
        aggr = aggr.strip()
    if aggr not in aggregation_lookup:
        raise BadIngredient('unknown aggregation {}'.format(aggr))
    aggregator = aggregation_lookup[aggr]
    condition = parse_condition(
        fld.get('condition', None),
        selectable,
        aggregated=False,
        default_aggregation=default_aggregation
    )
    if condition is not None:
        field = case([(condition, field)])
    return aggregator(field)
#vtb def stop(opts, bot, event): name = opts['--name'] slack_username = opts['--notify'] now = datetime.datetime.now() delta = now - bot.timers.pop(name) response = bot.stop_fmt.format(delta) if slack_username: mention = slack_username users = bot.slack.users.list().body['members'] for user in users: if user['name'] == slack_username: mention = "<@%s>" % user['id'] break response = "%s: %s" % (mention, response) return response
Usage: stop [--name=<name>] [--notify=<slack_username>] Stop a timer. _name_ works the same as for `start`. If given _slack_username_, reply with an at-mention to the given user.
### Input: Usage: stop [--name=<name>] [--notify=<slack_username>] Stop a timer. _name_ works the same as for `start`. If given _slack_username_, reply with an at-mention to the given user. ### Response: #vtb def stop(opts, bot, event): name = opts['--name'] slack_username = opts['--notify'] now = datetime.datetime.now() delta = now - bot.timers.pop(name) response = bot.stop_fmt.format(delta) if slack_username: mention = slack_username users = bot.slack.users.list().body['members'] for user in users: if user['name'] == slack_username: mention = "<@%s>" % user['id'] break response = "%s: %s" % (mention, response) return response
#vtb def _gpdfit(x): prior_bs = 3 prior_k = 10 len_x = len(x) m_est = 30 + int(len_x ** 0.5) b_ary = 1 - np.sqrt(m_est / (np.arange(1, m_est + 1, dtype=float) - 0.5)) b_ary /= prior_bs * x[int(len_x / 4 + 0.5) - 1] b_ary += 1 / x[-1] k_ary = np.log1p(-b_ary[:, None] * x).mean(axis=1) len_scale = len_x * (np.log(-(b_ary / k_ary)) - k_ary - 1) weights = 1 / np.exp(len_scale - len_scale[:, None]).sum(axis=1) real_idxs = weights >= 10 * np.finfo(float).eps if not np.all(real_idxs): weights = weights[real_idxs] b_ary = b_ary[real_idxs] weights /= weights.sum() b_post = np.sum(b_ary * weights) k_post = np.log1p(-b_post * x).mean() k_post = (len_x * k_post + prior_k * 0.5) / (len_x + prior_k) sigma = -k_post / b_post return k_post, sigma
Estimate the parameters for the Generalized Pareto Distribution (GPD). Empirical Bayes estimate for the parameters of the generalized Pareto distribution given the data. Parameters ---------- x : array sorted 1D data array Returns ------- k : float estimated shape parameter sigma : float estimated scale parameter
### Input: Estimate the parameters for the Generalized Pareto Distribution (GPD). Empirical Bayes estimate for the parameters of the generalized Pareto distribution given the data. Parameters ---------- x : array sorted 1D data array Returns ------- k : float estimated shape parameter sigma : float estimated scale parameter ### Response: #vtb def _gpdfit(x): prior_bs = 3 prior_k = 10 len_x = len(x) m_est = 30 + int(len_x ** 0.5) b_ary = 1 - np.sqrt(m_est / (np.arange(1, m_est + 1, dtype=float) - 0.5)) b_ary /= prior_bs * x[int(len_x / 4 + 0.5) - 1] b_ary += 1 / x[-1] k_ary = np.log1p(-b_ary[:, None] * x).mean(axis=1) len_scale = len_x * (np.log(-(b_ary / k_ary)) - k_ary - 1) weights = 1 / np.exp(len_scale - len_scale[:, None]).sum(axis=1) real_idxs = weights >= 10 * np.finfo(float).eps if not np.all(real_idxs): weights = weights[real_idxs] b_ary = b_ary[real_idxs] weights /= weights.sum() b_post = np.sum(b_ary * weights) k_post = np.log1p(-b_post * x).mean() k_post = (len_x * k_post + prior_k * 0.5) / (len_x + prior_k) sigma = -k_post / b_post return k_post, sigma
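A quick numerical sanity check under stated assumptions: we draw a sorted sample from a known generalized Pareto via inverse-CDF sampling and see whether the recovered shape lands in the right neighbourhood (the estimator's shrinkage prior means it will not match exactly).

import numpy as np

rng = np.random.default_rng(0)
true_k, true_sigma = 0.3, 1.0
u = rng.uniform(size=2000)
# Inverse CDF of GPD(k, sigma): x = sigma/k * ((1 - u)**-k - 1)
x = np.sort(true_sigma / true_k * ((1 - u) ** -true_k - 1))

k_hat, sigma_hat = _gpdfit(x)
print(round(k_hat, 2), round(sigma_hat, 2))  # roughly (0.3, 1.0)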
#vtb def l2traceroute_input_rbridge_id(self, **kwargs): config = ET.Element("config") l2traceroute = ET.Element("l2traceroute") config = l2traceroute input = ET.SubElement(l2traceroute, "input") rbridge_id = ET.SubElement(input, "rbridge-id") rbridge_id.text = kwargs.pop('rbridge_id') callback = kwargs.pop('callback', self._callback) return callback(config)
Auto Generated Code
### Input: Auto Generated Code ### Response: #vtb def l2traceroute_input_rbridge_id(self, **kwargs): config = ET.Element("config") l2traceroute = ET.Element("l2traceroute") config = l2traceroute input = ET.SubElement(l2traceroute, "input") rbridge_id = ET.SubElement(input, "rbridge-id") rbridge_id.text = kwargs.pop('rbridge_id') callback = kwargs.pop('callback', self._callback) return callback(config)
#vtb def to_grpc_address(target: str) -> str: u = urlparse(target) if u.scheme == "dns": raise ValueError("dns:// not supported") if u.scheme == "unix": return "unix:"+u.path return u.netloc
Converts a standard gRPC target to one that is supported by grpcio :param target: the server address. :returns: the converted address.
### Input: Converts a standard gRPC target to one that is supported by grpcio :param target: the server address. :returns: the converted address. ### Response: #vtb def to_grpc_address(target: str) -> str: u = urlparse(target) if u.scheme == "dns": raise ValueError("dns:// not supported") if u.scheme == "unix": return "unix:"+u.path return u.netloc
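A couple of illustrative conversions, plus the deliberate failure for dns:// targets.

print(to_grpc_address("ipv4://127.0.0.1:50051"))  # 127.0.0.1:50051
print(to_grpc_address("unix:///tmp/app.sock"))    # unix:/tmp/app.sock
try:
    to_grpc_address("dns://example.com:443")
except ValueError as exc:
    print(exc)                                    # dns:// not supported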
#vtb def download(ctx): user, project_name = get_project_or_local(ctx.obj.get('project')) try: PolyaxonClient().project.download_repo(user, project_name) except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e: Printer.print_error('Error while downloading code for project `{}`.'.format(project_name)) Printer.print_error('Error message `{}`.'.format(e)) sys.exit(1) Printer.print_success('Files downloaded.')
Download code of the current project.
### Input: Download code of the current project. ### Response: #vtb def download(ctx): user, project_name = get_project_or_local(ctx.obj.get('project')) try: PolyaxonClient().project.download_repo(user, project_name) except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e: Printer.print_error('Error while downloading code for project `{}`.'.format(project_name)) Printer.print_error('Error message `{}`.'.format(e)) sys.exit(1) Printer.print_success('Files downloaded.')
#vtb def passed(self): return [test for test in self.all() if not test.failed() and not test.skipped()]
Return all the passing testcases :return:
### Input: Return all the passing testcases :return: ### Response: #vtb def passed(self): return [test for test in self.all() if not test.failed() and not test.skipped()]
#vtb def user_identity_show(self, user_id, id, **kwargs): api_path = "/api/v2/users/{user_id}/identities/{id}.json" api_path = api_path.format(user_id=user_id, id=id) return self.call(api_path, **kwargs)
https://developer.zendesk.com/rest_api/docs/core/user_identities#show-identity
### Input: https://developer.zendesk.com/rest_api/docs/core/user_identities#show-identity ### Response: #vtb def user_identity_show(self, user_id, id, **kwargs): api_path = "/api/v2/users/{user_id}/identities/{id}.json" api_path = api_path.format(user_id=user_id, id=id) return self.call(api_path, **kwargs)
#vtb def bell(self, percent = 0, onerror = None): request.Bell(display = self.display, onerror = onerror, percent = percent)
Ring the bell at the volume percent which is relative the base volume. See XBell(3X11).
### Input: Ring the bell at the volume percent which is relative the base volume. See XBell(3X11). ### Response: #vtb def bell(self, percent = 0, onerror = None): request.Bell(display = self.display, onerror = onerror, percent = percent)
#vtb def _validate_and_parse(self, batch_object): if not waffle.waffle().is_enabled(waffle.ENABLE_COMPLETION_TRACKING): raise ValidationError( _("BlockCompletion.objects.submit_batch_completion should not be called when the feature is disabled.") ) for key in self.REQUIRED_KEYS: if key not in batch_object: raise ValidationError(_("Key '{key}' not found.").format(key=key)) username = batch_object['username'] user = User.objects.get(username=username) course_key_obj = self._validate_and_parse_course_key(batch_object['course_key']) if not CourseEnrollment.is_enrolled(user, course_key_obj): raise ValidationError(_("User is not enrolled in course.")) blocks = batch_object['blocks'] block_objs = [] for block_key in blocks: block_key_obj = self._validate_and_parse_block_key(block_key, course_key_obj) completion = float(blocks[block_key]) block_objs.append((block_key_obj, completion)) return user, course_key_obj, block_objs
Performs validation on the batch object to make sure it is in the proper format. Parameters: * batch_object: The data provided to a POST. The expected format is the following: { "username": "username", "course_key": "course-key", "blocks": { "block_key1": 0.0, "block_key2": 1.0, "block_key3": 1.0, } } Return Value: * tuple: (User, CourseKey, List of tuples (UsageKey, completion_float) Raises: django.core.exceptions.ValidationError: If any aspect of validation fails a ValidationError is raised. ObjectDoesNotExist: If a database object cannot be found an ObjectDoesNotExist is raised.
### Input: Performs validation on the batch object to make sure it is in the proper format. Parameters: * batch_object: The data provided to a POST. The expected format is the following: { "username": "username", "course_key": "course-key", "blocks": { "block_key1": 0.0, "block_key2": 1.0, "block_key3": 1.0, } } Return Value: * tuple: (User, CourseKey, List of tuples (UsageKey, completion_float) Raises: django.core.exceptions.ValidationError: If any aspect of validation fails a ValidationError is raised. ObjectDoesNotExist: If a database object cannot be found an ObjectDoesNotExist is raised. ### Response: #vtb def _validate_and_parse(self, batch_object): if not waffle.waffle().is_enabled(waffle.ENABLE_COMPLETION_TRACKING): raise ValidationError( _("BlockCompletion.objects.submit_batch_completion should not be called when the feature is disabled.") ) for key in self.REQUIRED_KEYS: if key not in batch_object: raise ValidationError(_("Key '{key}' not found.").format(key=key)) username = batch_object['username'] user = User.objects.get(username=username) course_key_obj = self._validate_and_parse_course_key(batch_object['course_key']) if not CourseEnrollment.is_enrolled(user, course_key_obj): raise ValidationError(_("User is not enrolled in course.")) blocks = batch_object['blocks'] block_objs = [] for block_key in blocks: block_key_obj = self._validate_and_parse_block_key(block_key, course_key_obj) completion = float(blocks[block_key]) block_objs.append((block_key_obj, completion)) return user, course_key_obj, block_objs
#vtb def rgb_to_websafe(r, g=None, b=None, alt=False): if type(r) in [list,tuple]: r, g, b = r websafeComponent = _websafe_component return tuple((websafeComponent(v, alt) for v in (r, g, b)))
Convert the color from RGB to 'web safe' RGB Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] :alt: If True, use the alternative color instead of the nearest one. Can be used for dithering. Returns: The color as an (r, g, b) tuple in the range: the range: r[0...1], g[0...1], b[0...1] >>> '(%g, %g, %g)' % rgb_to_websafe(1, 0.55, 0.0) '(1, 0.6, 0)'
### Input: Convert the color from RGB to 'web safe' RGB Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] :alt: If True, use the alternative color instead of the nearest one. Can be used for dithering. Returns: The color as an (r, g, b) tuple in the range: the range: r[0...1], g[0...1], b[0...1] >>> '(%g, %g, %g)' % rgb_to_websafe(1, 0.55, 0.0) '(1, 0.6, 0)' ### Response: #vtb def rgb_to_websafe(r, g=None, b=None, alt=False): if type(r) in [list,tuple]: r, g, b = r websafeComponent = _websafe_component return tuple((websafeComponent(v, alt) for v in (r, g, b)))
#vtb def add_device_callback(self, devices, callback): if not devices: return False if not isinstance(devices, (tuple, list)): devices = [devices] for device in devices: device_id = device if isinstance(device, AbodeDevice): device_id = device.device_id if not self._abode.get_device(device_id): raise AbodeException((ERROR.EVENT_DEVICE_INVALID)) _LOGGER.debug( "Subscribing to updates for device_id: %s", device_id) self._device_callbacks[device_id].append((callback)) return True
Register a device callback.
### Input: Register a device callback. ### Response: #vtb def add_device_callback(self, devices, callback): if not devices: return False if not isinstance(devices, (tuple, list)): devices = [devices] for device in devices: device_id = device if isinstance(device, AbodeDevice): device_id = device.device_id if not self._abode.get_device(device_id): raise AbodeException((ERROR.EVENT_DEVICE_INVALID)) _LOGGER.debug( "Subscribing to updates for device_id: %s", device_id) self._device_callbacks[device_id].append((callback)) return True
#vtb def calf(self, spec): if not isinstance(spec, Spec): raise TypeError('spec must be of type Spec') if not spec.get(BUILD_DIR): tempdir = realpath(mkdtemp()) spec.advise(CLEANUP, shutil.rmtree, tempdir) build_dir = join(tempdir, 'build') mkdir(build_dir) spec[BUILD_DIR] = build_dir else: build_dir = self.realpath(spec, BUILD_DIR) if not isdir(build_dir): logger.error("build_dir is not a directory", build_dir) raise_os_error(errno.ENOTDIR, build_dir) self.realpath(spec, EXPORT_TARGET) spec.handle(SETUP) try: process = ('prepare', 'compile', 'assemble', 'link', 'finalize') for p in process: spec.handle('before_' + p) getattr(self, p)(spec) spec.handle('after_' + p) spec.handle(SUCCESS) except ToolchainCancel: pass finally: spec.handle(CLEANUP)
Typical safe usage is this, which sets everything that could be problematic up. Requires the filename which everything will be produced to.
### Input: Typical safe usage is this, which sets everything that could be problematic up. Requires the filename which everything will be produced to. ### Response: #vtb def calf(self, spec): if not isinstance(spec, Spec): raise TypeError('spec must be of type Spec') if not spec.get(BUILD_DIR): tempdir = realpath(mkdtemp()) spec.advise(CLEANUP, shutil.rmtree, tempdir) build_dir = join(tempdir, 'build') mkdir(build_dir) spec[BUILD_DIR] = build_dir else: build_dir = self.realpath(spec, BUILD_DIR) if not isdir(build_dir): logger.error("build_dir is not a directory", build_dir) raise_os_error(errno.ENOTDIR, build_dir) self.realpath(spec, EXPORT_TARGET) spec.handle(SETUP) try: process = ('prepare', 'compile', 'assemble', 'link', 'finalize') for p in process: spec.handle('before_' + p) getattr(self, p)(spec) spec.handle('after_' + p) spec.handle(SUCCESS) except ToolchainCancel: pass finally: spec.handle(CLEANUP)
#vtb def hops(node1, node2): if node1 == node2: return 0 elif set(node1.interfaces) & set(node2.interfaces): return 1 else: return 0
returns # of hops it takes to get from node1 to node2, 1 means they're on the same link
### Input: returns # of hops it takes to get from node1 to node2, 1 means they're on the same link ### Response: #vtb def hops(node1, node2): if node1 == node2: return 0 elif set(node1.interfaces) & set(node2.interfaces): return 1 else: return 0
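A tiny demo with stub nodes; only an interfaces attribute is required, and a shared interface object stands in for being on the same link.

class Node:
    def __init__(self, interfaces):
        self.interfaces = interfaces

link_a = object()
n1, n2 = Node([link_a]), Node([link_a, object()])

print(hops(n1, n1))  # 0: same node
print(hops(n1, n2))  # 1: they share link_a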
#vtb def qnh_estimate(self): alt_gps = self.master.field('GPS_RAW_INT', 'alt', 0) * 0.001 pressure2 = self.master.field('SCALED_PRESSURE', 'press_abs', 0) ground_temp = self.get_mav_param('GND_TEMP', 21) temp = ground_temp + 273.15 pressure1 = pressure2 / math.exp(math.log(1.0 - (alt_gps / (153.8462 * temp))) / 0.190259) return pressure1
estimate QNH pressure from GPS altitude and scaled pressure
### Input: estimate QNH pressure from GPS altitude and scaled pressure ### Response: #vtb def qnh_estimate(self): alt_gps = self.master.field('GPS_RAW_INT', 'alt', 0) * 0.001 pressure2 = self.master.field('SCALED_PRESSURE', 'press_abs', 0) ground_temp = self.get_mav_param('GND_TEMP', 21) temp = ground_temp + 273.15 pressure1 = pressure2 / math.exp(math.log(1.0 - (alt_gps / (153.8462 * temp))) / 0.190259) return pressure1
#vtb def device_statistics(fritz, args): stats = fritz.get_device_statistics(args.ain) print(stats)
Command that prints the device statistics.
### Input: Command that prints the device statistics. ### Response: #vtb def device_statistics(fritz, args): stats = fritz.get_device_statistics(args.ain) print(stats)
#vtb def get_role_by_code(role_code,**kwargs): try: role = db.DBSession.query(Role).filter(Role.code==role_code).one() return role except NoResultFound: raise ResourceNotFoundError("Role not found (role_code={})".format(role_code))
Get a role by its code
### Input: Get a role by its code ### Response: #vtb def get_role_by_code(role_code,**kwargs): try: role = db.DBSession.query(Role).filter(Role.code==role_code).one() return role except NoResultFound: raise ResourceNotFoundError("Role not found (role_code={})".format(role_code))
#vtb def is_text_visible(driver, text, selector, by=By.CSS_SELECTOR): try: element = driver.find_element(by=by, value=selector) return element.is_displayed() and text in element.text except Exception: return False
Returns whether the specified text is visible in the specified selector. @Params driver - the webdriver object (required) text - the text string to search for selector - the locator that is used (required) by - the method to search for the locator (Default: By.CSS_SELECTOR) @Returns Boolean (is text visible)
### Input: Returns whether the specified text is visible in the specified selector. @Params driver - the webdriver object (required) text - the text string to search for selector - the locator that is used (required) by - the method to search for the locator (Default: By.CSS_SELECTOR) @Returns Boolean (is text visible) ### Response: #vtb def is_text_visible(driver, text, selector, by=By.CSS_SELECTOR): try: element = driver.find_element(by=by, value=selector) return element.is_displayed() and text in element.text except Exception: return False
#vtb def get_active_for(self, user, user_agent=_MARK, ip_address=_MARK): conditions = [LoginSession.user == user] if user_agent is not _MARK: if user_agent is None: user_agent = request.environ.get("HTTP_USER_AGENT", "") conditions.append(LoginSession.user_agent == user_agent) if ip_address is not _MARK: if ip_address is None: ip_addresses = request.headers.getlist("X-Forwarded-For") ip_address = ip_addresses[0] if ip_addresses else request.remote_addr conditions.append(LoginSession.ip_address == ip_address) session = ( LoginSession.query.filter(*conditions) .order_by(LoginSession.id.desc()) .first() ) return session
Return last known session for given user. :param user: user session :type user: `abilian.core.models.subjects.User` :param user_agent: *exact* user agent string to lookup, or `None` to have user_agent extracted from request object. If not provided at all, no filtering on user_agent. :type user_agent: string or None, or absent :param ip_address: client IP, or `None` to have ip_address extracted from request object (requires header 'X-Forwarded-For'). If not provided at all, no filtering on ip_address. :type ip_address: string or None, or absent :rtype: `LoginSession` or `None` if no session is found.
### Input: Return last known session for given user. :param user: user session :type user: `abilian.core.models.subjects.User` :param user_agent: *exact* user agent string to lookup, or `None` to have user_agent extracted from request object. If not provided at all, no filtering on user_agent. :type user_agent: string or None, or absent :param ip_address: client IP, or `None` to have ip_address extracted from request object (requires header 'X-Forwarded-For'). If not provided at all, no filtering on ip_address. :type ip_address: string or None, or absent :rtype: `LoginSession` or `None` if no session is found. ### Response: #vtb def get_active_for(self, user, user_agent=_MARK, ip_address=_MARK): conditions = [LoginSession.user == user] if user_agent is not _MARK: if user_agent is None: user_agent = request.environ.get("HTTP_USER_AGENT", "") conditions.append(LoginSession.user_agent == user_agent) if ip_address is not _MARK: if ip_address is None: ip_addresses = request.headers.getlist("X-Forwarded-For") ip_address = ip_addresses[0] if ip_addresses else request.remote_addr conditions.append(LoginSession.ip_address == ip_address) session = ( LoginSession.query.filter(*conditions) .order_by(LoginSession.id.desc()) .first() ) return session
#vtb def clean_headers(headers): clean = {} try: for k, v in six.iteritems(headers): if not isinstance(k, six.binary_type): k = str(k) if not isinstance(v, six.binary_type): v = str(v) clean[_helpers._to_bytes(k)] = _helpers._to_bytes(v) except UnicodeEncodeError: from oauth2client.client import NonAsciiHeaderError raise NonAsciiHeaderError(k, ': ', v) return clean
Forces header keys and values to be strings, i.e. not unicode. The httplib module just concats the header keys and values in a way that may make the message header a unicode string, which, if it then tries to concatenate to a binary request body, may result in a unicode decode error. Args: headers: dict, A dictionary of headers. Returns: The same dictionary but with all the keys converted to strings.
### Input: Forces header keys and values to be strings, i.e. not unicode. The httplib module just concats the header keys and values in a way that may make the message header a unicode string, which, if it then tries to concatenate to a binary request body, may result in a unicode decode error. Args: headers: dict, A dictionary of headers. Returns: The same dictionary but with all the keys converted to strings. ### Response: #vtb def clean_headers(headers): clean = {} try: for k, v in six.iteritems(headers): if not isinstance(k, six.binary_type): k = str(k) if not isinstance(v, six.binary_type): v = str(v) clean[_helpers._to_bytes(k)] = _helpers._to_bytes(v) except UnicodeEncodeError: from oauth2client.client import NonAsciiHeaderError raise NonAsciiHeaderError(k, ': ', v) return clean
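A small round-trip sketch, assuming six and the oauth2client _helpers module are importable; non-string values are coerced before being converted to bytes.

headers = {u"Content-Type": u"application/json", "X-Retry": 3}
print(clean_headers(headers))
# {b'Content-Type': b'application/json', b'X-Retry': b'3'}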
#vtb def get_step(self, grad): if self._momentum is None: self._momentum = self.initial_accumulator_value * np.ones_like(grad) self._momentum += grad ** 2 return self.learning_rate * grad / np.sqrt(self._momentum)
Computes the 'step' to take for the next gradient descent update. Returns the step rather than performing the update so that parameters can be updated in place rather than overwritten. Examples -------- >>> gradient = # ... >>> optimizer = AdaGradOptimizer(0.01) >>> params -= optimizer.get_step(gradient) Parameters ---------- grad Returns ------- np.array Size matches `grad`.
### Input: Computes the 'step' to take for the next gradient descent update. Returns the step rather than performing the update so that parameters can be updated in place rather than overwritten. Examples -------- >>> gradient = # ... >>> optimizer = AdaGradOptimizer(0.01) >>> params -= optimizer.get_step(gradient) Parameters ---------- grad Returns ------- np.array Size matches `grad`. ### Response: #vtb def get_step(self, grad): if self._momentum is None: self._momentum = self.initial_accumulator_value * np.ones_like(grad) self._momentum += grad ** 2 return self.learning_rate * grad / np.sqrt(self._momentum)
#vtb def parse_args(self, args, scope): arguments = list(zip(args, [' '] * len(args))) if args and args[0] else None zl = itertools.zip_longest if sys.version_info[ 0] == 3 else itertools.izip_longest if self.args: parsed = [ v if hasattr(v, 'parse') else v for v in copy.copy(self.args) ] args = args if isinstance(args, list) else [args] vars = [ self._parse_arg(var, arg, scope) for arg, var in zl([a for a in args], parsed) ] for var in vars: if var: var.parse(scope) if not arguments: arguments = [v.value for v in vars if v] if not arguments: arguments = '' Variable(['@arguments', None, arguments]).parse(scope)
Parse arguments to mixin. Add them to scope as variables. Sets up the special variable @arguments as well. args: args (list): arguments scope (Scope): current scope raises: SyntaxError
### Input: Parse arguments to mixin. Add them to scope as variables. Sets up the special variable @arguments as well. args: args (list): arguments scope (Scope): current scope raises: SyntaxError ### Response: #vtb def parse_args(self, args, scope): arguments = list(zip(args, [' '] * len(args))) if args and args[0] else None zl = itertools.zip_longest if sys.version_info[ 0] == 3 else itertools.izip_longest if self.args: parsed = [ v if hasattr(v, 'parse') else v for v in copy.copy(self.args) ] args = args if isinstance(args, list) else [args] vars = [ self._parse_arg(var, arg, scope) for arg, var in zl([a for a in args], parsed) ] for var in vars: if var: var.parse(scope) if not arguments: arguments = [v.value for v in vars if v] if not arguments: arguments = '' Variable(['@arguments', None, arguments]).parse(scope)
#vtb def put_settings(self, body=None, params=None): return self.transport.perform_request('PUT', '/_cluster/settings', params=params, body=body)
Update cluster wide specific settings. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html>`_ :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). :arg flat_settings: Return settings in flat format (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout
### Input: Update cluster wide specific settings. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html>`_ :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). :arg flat_settings: Return settings in flat format (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout ### Response: #vtb def put_settings(self, body=None, params=None): return self.transport.perform_request('PUT', '/_cluster/settings', params=params, body=body)
#vtb def get_distribute_verbatim_metadata(self): metadata = dict(self._mdata['distribute_verbatim']) metadata.update({'existing_boolean_values': self._my_map['distributeVerbatim']}) return Metadata(**metadata)
Gets the metadata for the distribute verbatim rights flag. return: (osid.Metadata) - metadata for the distribution rights fields *compliance: mandatory -- This method must be implemented.*
### Input: Gets the metadata for the distribute verbatim rights flag. return: (osid.Metadata) - metadata for the distribution rights fields *compliance: mandatory -- This method must be implemented.* ### Response: #vtb def get_distribute_verbatim_metadata(self): metadata = dict(self._mdata['distribute_verbatim']) metadata.update({'existing_boolean_values': self._my_map['distributeVerbatim']}) return Metadata(**metadata)
#vtb def moveoutletstostrm(np, flowdir, streamRaster, outlet, modifiedOutlet, workingdir=None, mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None): fname = TauDEM.func_name('moveoutletstostrm') return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir), {'-p': flowdir, '-src': streamRaster, '-o': outlet}, workingdir, None, {'-om': modifiedOutlet}, {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np}, {'logfile': log_file, 'runtimefile': runtime_file})
Run move the given outlets to stream
### Input: Run move the given outlets to stream ### Response: #vtb def moveoutletstostrm(np, flowdir, streamRaster, outlet, modifiedOutlet, workingdir=None, mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None): fname = TauDEM.func_name('moveoutletstostrm') return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir), {'-p': flowdir, '-src': streamRaster, '-o': outlet}, workingdir, None, {'-om': modifiedOutlet}, {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np}, {'logfile': log_file, 'runtimefile': runtime_file})
#vtb def find_primitive(cell, symprec=1e-5): lattice, positions, numbers = spg.find_primitive(cell.totuple(), symprec) if lattice is None: return None else: return Atoms(numbers=numbers, scaled_positions=positions, cell=lattice, pbc=True)
A primitive cell is searched in the input cell. When a primitive cell is found, an object of Atoms class of the primitive cell is returned. When not, None is returned.
### Input: A primitive cell is searched in the input cell. When a primitive cell is found, an object of Atoms class of the primitive cell is returned. When not, None is returned. ### Response: #vtb def find_primitive(cell, symprec=1e-5): lattice, positions, numbers = spg.find_primitive(cell.totuple(), symprec) if lattice is None: return None else: return Atoms(numbers=numbers, scaled_positions=positions, cell=lattice, pbc=True)
#vtb def output(self, value): return super(Map, self).output(self.stream, value)
SPL output port assignment expression. Arguments: value(str): SPL expression used for an output assignment. This can be a string, a constant, or an :py:class:`Expression`. Returns: Expression: Output assignment expression that is valid as a the context of this operator.
### Input: SPL output port assignment expression. Arguments: value(str): SPL expression used for an output assignment. This can be a string, a constant, or an :py:class:`Expression`. Returns: Expression: Output assignment expression that is valid as a the context of this operator. ### Response: #vtb def output(self, value): return super(Map, self).output(self.stream, value)
#vtb def get(self, name, acc=None, default=None): if acc in self.data['accounts'] and name in self.data['accounts'][acc]: return self.data['accounts'][acc][name] if name in self.data: return self.data[name] return default
Return the named config for the given account. If an account is given, first checks the account space for the name. If no account given, or if the name not found in the account space, look for the name in the global config space. If still not found, return the default, if given, otherwise ``None``.
### Input: Return the named config for the given account. If an account is given, first checks the account space for the name. If no account given, or if the name not found in the account space, look for the name in the global config space. If still not found, return the default, if given, otherwise ``None``. ### Response: #vtb def get(self, name, acc=None, default=None): if acc in self.data['accounts'] and name in self.data['accounts'][acc]: return self.data['accounts'][acc][name] if name in self.data: return self.data[name] return default
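A sketch of the lookup order using a hand-built data dict; the 'accounts' sub-dict name is the one inferred above, so treat it as an assumption.

class Config:
    def __init__(self, data):
        self.data = data
    get = get  # reuse the function defined above as a method

cfg = Config({
    "timeout": 30,                          # global space
    "accounts": {"work": {"timeout": 5}},   # per-account space
})
print(cfg.get("timeout", acc="work"))  # 5: account space wins
print(cfg.get("timeout"))              # 30: global fallback
print(cfg.get("missing", default=-1))  # -1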
#vtb def shell_sqlalchemy(session: SqlalchemySession, backend: ShellBackend): namespace = {'session': session} namespace.update(backend.get_namespace()) embed(user_ns=namespace, header=backend.header)
This command includes SQLAlchemy DB Session
### Input: This command includes SQLAlchemy DB Session ### Response: #vtb def shell_sqlalchemy(session: SqlalchemySession, backend: ShellBackend): namespace = {'session': session} namespace.update(backend.get_namespace()) embed(user_ns=namespace, header=backend.header)
#vtb def count_leases_by_owner(self, leases): owners = [l.owner for l in leases] return dict(Counter(owners))
Returns a dictionary of leases by current owner.
### Input: Returns a dictionary of leases by current owner. ### Response: #vtb def count_leases_by_owner(self, leases): owners = [l.owner for l in leases] return dict(Counter(owners))
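A one-line demo; any object exposing an owner attribute works, and passing None for the unused self is just a shortcut for the sketch.

from collections import namedtuple

Lease = namedtuple("Lease", "owner")
leases = [Lease("alice"), Lease("bob"), Lease("alice")]
print(count_leases_by_owner(None, leases))  # {'alice': 2, 'bob': 1}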
#vtb def _get_network(project_id, network_name, service): return service.networks().get(project=project_id, network=network_name).execute()
Fetch network selfLink from network name.
### Input: Fetch network selfLink from network name. ### Response: #vtb def _get_network(project_id, network_name, service): return service.networks().get(project=project_id, network=network_name).execute()
#vtb def get_attrs(cls): ignore = dir(type('dummy', (object,), {})) + [] attrs = [ item for item in inspect.getmembers(cls) if item[0] not in ignore and not isinstance( item[1], ( types.FunctionType, types.MethodType, classmethod, staticmethod, property))] # '_order' is an assumed name for the definition-order counter attribute. attrs.sort(key=lambda attr: (getattr(attr[1], '_order', -1), attr[0])) return attrs
Get all class attributes ordered by definition
### Input: Get all class attributes ordered by definition ### Response: #vtb def get_attrs(cls): ignore = dir(type('dummy', (object,), {})) + [] attrs = [ item for item in inspect.getmembers(cls) if item[0] not in ignore and not isinstance( item[1], ( types.FunctionType, types.MethodType, classmethod, staticmethod, property))] # '_order' is an assumed name for the definition-order counter attribute. attrs.sort(key=lambda attr: (getattr(attr[1], '_order', -1), attr[0])) return attrs
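A throwaway-class demonstration; it assumes the '_order' counter attribute guessed at above (attributes without it sort first) and calls get_attrs as a plain function, since any classmethod decorator is not shown in the snippet.

import inspect
import types

class Field:
    _counter = 0
    def __init__(self):
        Field._counter += 1
        self._order = Field._counter

class Model:
    b = Field()
    a = Field()
    plain = 42

print([name for name, _ in get_attrs(Model)])  # ['plain', 'b', 'a']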
#vtb def update_dataset_marker(self): start_time = self.parent.overview.start_time markers = [] if self.parent.info.markers is not None: markers = self.parent.info.markers self.idx_marker.clearContents() self.idx_marker.setRowCount(len(markers)) for i, mrk in enumerate(markers): abs_time = (start_time + timedelta(seconds=mrk['start'])).strftime('%H:%M:%S') dur = timedelta(seconds=mrk['end'] - mrk['start']) duration = '{}.{:03d}'.format(dur.seconds, round(dur.microseconds / 1000)) item_time = QTableWidgetItem(abs_time) item_duration = QTableWidgetItem(duration) item_name = QTableWidgetItem(mrk['name']) color = self.parent.value('marker_color') item_time.setForeground(QColor(color)) item_duration.setForeground(QColor(color)) item_name.setForeground(QColor(color)) self.idx_marker.setItem(i, 0, item_time) self.idx_marker.setItem(i, 1, item_duration) self.idx_marker.setItem(i, 2, item_name) marker_start = [mrk['start'] for mrk in markers] marker_end = [mrk['end'] for mrk in markers] self.idx_marker.setProperty('start', marker_start) self.idx_marker.setProperty('end', marker_end) if self.parent.traces.data is not None: self.parent.traces.display() self.parent.overview.display_markers()
Update markers which are in the dataset. It always updates the list of events. Depending on the settings, it might add the markers to overview and traces.
### Input: Update markers which are in the dataset. It always updates the list of events. Depending on the settings, it might add the markers to overview and traces. ### Response: #vtb def update_dataset_marker(self): start_time = self.parent.overview.start_time markers = [] if self.parent.info.markers is not None: markers = self.parent.info.markers self.idx_marker.clearContents() self.idx_marker.setRowCount(len(markers)) for i, mrk in enumerate(markers): abs_time = (start_time + timedelta(seconds=mrk['start'])).strftime('%H:%M:%S') dur = timedelta(seconds=mrk['end'] - mrk['start']) duration = '{}.{:03d}'.format(dur.seconds, round(dur.microseconds / 1000)) item_time = QTableWidgetItem(abs_time) item_duration = QTableWidgetItem(duration) item_name = QTableWidgetItem(mrk['name']) color = self.parent.value('marker_color') item_time.setForeground(QColor(color)) item_duration.setForeground(QColor(color)) item_name.setForeground(QColor(color)) self.idx_marker.setItem(i, 0, item_time) self.idx_marker.setItem(i, 1, item_duration) self.idx_marker.setItem(i, 2, item_name) marker_start = [mrk['start'] for mrk in markers] marker_end = [mrk['end'] for mrk in markers] self.idx_marker.setProperty('start', marker_start) self.idx_marker.setProperty('end', marker_end) if self.parent.traces.data is not None: self.parent.traces.display() self.parent.overview.display_markers()
#vtb def from_string(contents): lines = [l.strip() for l in contents.split("\n")] link0_patt = re.compile(r"^(%.+)\s*=\s*(.+)") link0_dict = {} for i, l in enumerate(lines): if link0_patt.match(l): m = link0_patt.match(l) link0_dict[m.group(1).strip("=")] = m.group(2) route_patt = re.compile(r"^#[sSpPnN]*.*") route = "" route_index = None for i, l in enumerate(lines): if route_patt.match(l): route += " " + l route_index = i elif (l == "" or l.isspace()) and route_index: break functional, basis_set, route_paras, dieze_tag = read_route_line(route) ind = 2 title = [] while lines[route_index + ind].strip(): title.append(lines[route_index + ind].strip()) ind += 1 title = ' '.join(title) ind += 1 toks = re.split(r"[,\s]+", lines[route_index + ind]) charge = int(toks[0]) spin_mult = int(toks[1]) coord_lines = [] spaces = 0 input_paras = {} ind += 1 for i in range(route_index + ind, len(lines)): if lines[i].strip() == "": spaces += 1 if spaces >= 2: d = lines[i].split("=") if len(d) == 2: input_paras[d[0]] = d[1] else: coord_lines.append(lines[i].strip()) mol = GaussianInput._parse_coords(coord_lines) mol.set_charge_and_spin(charge, spin_mult) return GaussianInput(mol, charge=charge, spin_multiplicity=spin_mult, title=title, functional=functional, basis_set=basis_set, route_parameters=route_paras, input_parameters=input_paras, link0_parameters=link0_dict, dieze_tag=dieze_tag)
Creates GaussianInput from a string. Args: contents: String representing an Gaussian input file. Returns: GaussianInput object
### Input: Creates GaussianInput from a string. Args: contents: String representing an Gaussian input file. Returns: GaussianInput object ### Response: #vtb def from_string(contents): lines = [l.strip() for l in contents.split("\n")] link0_patt = re.compile(r"^(%.+)\s*=\s*(.+)") link0_dict = {} for i, l in enumerate(lines): if link0_patt.match(l): m = link0_patt.match(l) link0_dict[m.group(1).strip("=")] = m.group(2) route_patt = re.compile(r"^#[sSpPnN]*.*") route = "" route_index = None for i, l in enumerate(lines): if route_patt.match(l): route += " " + l route_index = i elif (l == "" or l.isspace()) and route_index: break functional, basis_set, route_paras, dieze_tag = read_route_line(route) ind = 2 title = [] while lines[route_index + ind].strip(): title.append(lines[route_index + ind].strip()) ind += 1 title = ' '.join(title) ind += 1 toks = re.split(r"[,\s]+", lines[route_index + ind]) charge = int(toks[0]) spin_mult = int(toks[1]) coord_lines = [] spaces = 0 input_paras = {} ind += 1 for i in range(route_index + ind, len(lines)): if lines[i].strip() == "": spaces += 1 if spaces >= 2: d = lines[i].split("=") if len(d) == 2: input_paras[d[0]] = d[1] else: coord_lines.append(lines[i].strip()) mol = GaussianInput._parse_coords(coord_lines) mol.set_charge_and_spin(charge, spin_mult) return GaussianInput(mol, charge=charge, spin_multiplicity=spin_mult, title=title, functional=functional, basis_set=basis_set, route_parameters=route_paras, input_parameters=input_paras, link0_parameters=link0_dict, dieze_tag=dieze_tag)
#vtb def sequence(context, data): number = data.get('number', context.params.get('start', 1)) stop = context.params.get('stop') step = context.params.get('step', 1) delay = context.params.get('delay') prefix = context.params.get('tag') while True: tag = None if prefix is None else '%s:%s' % (prefix, number) if tag is None or not context.check_tag(tag): context.emit(data={'number': number}) if tag is not None: context.set_tag(tag, True) number = number + step if step > 0 and number >= stop: break if step < 0 and number <= stop: break if delay is not None: context.recurse(data={'number': number}, delay=delay) break
Generate a sequence of numbers. It is the memorious equivalent of the xrange function, accepting the ``start``, ``stop`` and ``step`` parameters. This can run in two ways: * As a single function generating all numbers in the given range. * Recursively, generating numbers one by one with an optional ``delay``. The latter mode is useful in order to generate very large sequences without completely clogging up the user queue. If an optional ``tag`` is given, each number will be emitted only once across multiple runs of the crawler.
### Input: Generate a sequence of numbers. It is the memorious equivalent of the xrange function, accepting the ``start``, ``stop`` and ``step`` parameters. This can run in two ways: * As a single function generating all numbers in the given range. * Recursively, generating numbers one by one with an optional ``delay``. The latter mode is useful in order to generate very large sequences without completely clogging up the user queue. If an optional ``tag`` is given, each number will be emitted only once across multiple runs of the crawler. ### Response: #vtb def sequence(context, data): number = data.get('number', context.params.get('start', 1)) stop = context.params.get('stop') step = context.params.get('step', 1) delay = context.params.get('delay') prefix = context.params.get('tag') while True: tag = None if prefix is None else '%s:%s' % (prefix, number) if tag is None or not context.check_tag(tag): context.emit(data={'number': number}) if tag is not None: context.set_tag(tag, True) number = number + step if step > 0 and number >= stop: break if step < 0 and number <= stop: break if delay is not None: context.recurse(data={'number': number}, delay=delay) break
#vtb def _updateWordSet(self): self._wordSet = set(self._keywords) | set(self._customCompletions) start = time.time() for line in self._qpart.lines: for match in _wordRegExp.findall(line): self._wordSet.add(match) if time.time() - start > self._WORD_SET_UPDATE_MAX_TIME_SEC: break
Build the set of words to be offered as completions, from the text.
### Input: Build the set of words to be offered as completions, from the text. ### Response: #vtb def _updateWordSet(self):
    self._wordSet = set(self._keywords) | set(self._customCompletions)
    start = time.time()
    for line in self._qpart.lines:
        for match in _wordRegExp.findall(line):
            self._wordSet.add(match)
        if time.time() - start > self._WORD_SET_UPDATE_MAX_TIME_SEC:
            break
#vtb def rename_acquisition(self, plate_name, name, new_name):
    logger.info(
        'rename acquisition "%s" of experiment "%s", plate "%s"',
        name, self.experiment_name, plate_name
    )
    content = {'name': new_name}
    acquisition_id = self._get_acquisition_id(plate_name, name)
    # URL path reconstructed from the tmserver REST layout; the original
    # string literal was lost in extraction
    url = self._build_api_url(
        '/api/experiments/{experiment_id}/acquisitions/{acquisition_id}'.format(
            experiment_id=self._experiment_id,
            acquisition_id=acquisition_id
        )
    )
    res = self._session.put(url, json=content)
    res.raise_for_status()
Renames an acquisition. Parameters ---------- plate_name: str name of the parent plate name: str name of the acquisition that should be renamed new_name: str name that should be given to the acquisition See also -------- :func:`tmserver.api.acquisition.update_acquisition` :class:`tmlib.models.acquisition.Acquisition`
### Input: Renames an acquisition. Parameters ---------- plate_name: str name of the parent plate name: str name of the acquisition that should be renamed new_name: str name that should be given to the acquisition See also -------- :func:`tmserver.api.acquisition.update_acquisition` :class:`tmlib.models.acquisition.Acquisition` ### Response: #vtb def rename_acquisition(self, plate_name, name, new_name):
    logger.info(
        'rename acquisition "%s" of experiment "%s", plate "%s"',
        name, self.experiment_name, plate_name
    )
    content = {'name': new_name}
    acquisition_id = self._get_acquisition_id(plate_name, name)
    # URL path reconstructed from the tmserver REST layout; the original
    # string literal was lost in extraction
    url = self._build_api_url(
        '/api/experiments/{experiment_id}/acquisitions/{acquisition_id}'.format(
            experiment_id=self._experiment_id,
            acquisition_id=acquisition_id
        )
    )
    res = self._session.put(url, json=content)
    res.raise_for_status()
#vtb def backlink(node): seen = set() to_see = [node] while to_see: node = to_see.pop() seen.add(node) for succ in node.next: succ.prev.add(node) if succ not in seen: to_see.append(succ)
Given a CFG with outgoing links, create incoming links.
### Input: Given a CFG with outgoing links, create incoming links. ### Response: #vtb def backlink(node): seen = set() to_see = [node] while to_see: node = to_see.pop() seen.add(node) for succ in node.next: succ.prev.add(node) if succ not in seen: to_see.append(succ)
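The function only assumes nodes expose `next` (outgoing edges) and `prev` (a set to fill). A minimal node type, assumed here purely for illustration, shows the round trip:

class Node:
    """Minimal CFG node with the two attributes backlink() relies on."""
    def __init__(self, name):
        self.name = name
        self.next = []     # outgoing links
        self.prev = set()  # incoming links, filled by backlink()

a, b, c = Node("a"), Node("b"), Node("c")
a.next = [b, c]
b.next = [c]

backlink(a)
print(sorted(n.name for n in c.prev))  # ['a', 'b']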
#vtb def makeOrmValuesSubqueryCondition(ormSession, column, values: List[Union[int, str]]): if isPostGreSQLDialect(ormSession.bind): return column.in_(values) if not isMssqlDialect(ormSession.bind): raise NotImplementedError() sql = _createMssqlSqlText(values) sub_qry = ormSession.query(column) sub_qry = sub_qry.from_statement(sql) return column.in_(sub_qry)
Make Orm Values Subquery :param ormSession: The orm session instance :param column: The column from the Declarative table, eg TableItem.colName :param values: A list of string or int values
### Input: Make Orm Values Subquery :param ormSession: The orm session instance :param column: The column from the Declarative table, eg TableItem.colName :param values: A list of string or int values ### Response: #vtb def makeOrmValuesSubqueryCondition(ormSession, column, values: List[Union[int, str]]): if isPostGreSQLDialect(ormSession.bind): return column.in_(values) if not isMssqlDialect(ormSession.bind): raise NotImplementedError() sql = _createMssqlSqlText(values) sub_qry = ormSession.query(column) sub_qry = sub_qry.from_statement(sql) return column.in_(sub_qry)
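Presumably the VALUES subquery path exists because MSSQL caps the number of bound parameters a statement may carry, so long IN lists must be routed through a constructed VALUES table; on PostgreSQL a plain IN clause is emitted directly. A hedged usage sketch, with a hypothetical declarative model:

# TableItem / TableItem.id are hypothetical declarative ORM names
ids = [1, 5, 9, 12]
qry = ormSession.query(TableItem).filter(
    makeOrmValuesSubqueryCondition(ormSession, TableItem.id, ids)
)
rows = qry.all()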
#vtb def hide(self): self._hidden = True for artist in self.annotations.values(): artist.set_visible(False) for fig in self.figures: fig.canvas.draw() return self
Hides all annotation artists associated with the DataCursor. Returns self to allow "chaining". (e.g. ``datacursor.hide().disable()``)
### Input: Hides all annotation artists associated with the DataCursor. Returns self to allow "chaining". (e.g. ``datacursor.hide().disable()``) ### Response: #vtb def hide(self): self._hidden = True for artist in self.annotations.values(): artist.set_visible(False) for fig in self.figures: fig.canvas.draw() return self
#vtb def _straight_line_vertices(adjacency_mat, node_coords, directed=False):
    if not issparse(adjacency_mat):
        adjacency_mat = np.asarray(adjacency_mat, float)
    if (adjacency_mat.ndim != 2 or
            adjacency_mat.shape[0] != adjacency_mat.shape[1]):
        raise ValueError("Adjacency matrix should be square.")
    arrow_vertices = np.array([])
    edges = _get_edges(adjacency_mat)
    line_vertices = node_coords[edges.ravel()]
    if directed:
        arrows = np.array(list(_get_directed_edges(adjacency_mat)))
        arrow_vertices = node_coords[arrows.ravel()]
        # integer division: one (x1, y1, x2, y2) row per arrow
        arrow_vertices = arrow_vertices.reshape((len(arrow_vertices) // 2, 4))
    return line_vertices, arrow_vertices
Generate the vertices for straight lines between nodes. If it is a directed graph, it also generates the vertices which can be passed to an :class:`ArrowVisual`. Parameters ---------- adjacency_mat : array The adjacency matrix of the graph node_coords : array The current coordinates of all nodes in the graph directed : bool Whether the graph is directed. If this is true it will also generate the vertices for arrows which can be passed to :class:`ArrowVisual`. Returns ------- vertices : tuple Returns a tuple containing (`line_vertices`, `arrow_vertices`)
### Input: Generate the vertices for straight lines between nodes. If it is a directed graph, it also generates the vertices which can be passed to an :class:`ArrowVisual`. Parameters ---------- adjacency_mat : array The adjacency matrix of the graph node_coords : array The current coordinates of all nodes in the graph directed : bool Whether the graph is directed. If this is true it will also generate the vertices for arrows which can be passed to :class:`ArrowVisual`. Returns ------- vertices : tuple Returns a tuple containing (`line_vertices`, `arrow_vertices`) ### Response: #vtb def _straight_line_vertices(adjacency_mat, node_coords, directed=False):
    if not issparse(adjacency_mat):
        adjacency_mat = np.asarray(adjacency_mat, float)
    if (adjacency_mat.ndim != 2 or
            adjacency_mat.shape[0] != adjacency_mat.shape[1]):
        raise ValueError("Adjacency matrix should be square.")
    arrow_vertices = np.array([])
    edges = _get_edges(adjacency_mat)
    line_vertices = node_coords[edges.ravel()]
    if directed:
        arrows = np.array(list(_get_directed_edges(adjacency_mat)))
        arrow_vertices = node_coords[arrows.ravel()]
        # integer division: one (x1, y1, x2, y2) row per arrow
        arrow_vertices = arrow_vertices.reshape((len(arrow_vertices) // 2, 4))
    return line_vertices, arrow_vertices
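A self-contained sketch of the undirected path, with a stand-in for vispy's private `_get_edges` helper (assumed here to return an (n_edges, 2) index array for the upper triangle):

import numpy as np

def _get_edges(adjacency_mat):
    # stand-in: one (i, j) row per nonzero upper-triangle entry
    return np.stack(np.nonzero(np.triu(adjacency_mat)), axis=1)

adjacency = np.array([[0, 1, 0],
                      [1, 0, 1],
                      [0, 1, 0]], dtype=float)
coords = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0]])

edges = _get_edges(adjacency)          # [[0, 1], [1, 2]]
line_vertices = coords[edges.ravel()]  # endpoint pairs, ready for a LineVisual
print(line_vertices)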
#vtb def list(self,table, **kparams): result = self.table_api_get(table, **kparams) return self.to_records(result, table)
Get a collection of records by table name. Returns a dict (the JSON map) for Python 3.4.
### Input: Get a collection of records by table name. Returns a dict (the JSON map) for Python 3.4. ### Response: #vtb def list(self,table, **kparams):
    result = self.table_api_get(table, **kparams)
    return self.to_records(result, table)
#vtb def have_thumbnail(self, fitsimage, image):
    chname = self.fv.get_channel_name(fitsimage)
    # key lookups reconstructed; the original string literals were lost
    # in extraction
    idx = image.get('idx', None)
    path = image.get('path', None)
    if path is not None:
        path = os.path.abspath(path)
        name = iohelper.name_image_from_path(path, idx=idx)
    else:
        name = 'NoName'
    name = image.get('name', name)
    thumbkey = self.get_thumb_key(chname, name, path)
    with self.thmblock:
        return thumbkey in self.thumb_dict
Returns True if we already have a thumbnail version of this image cached, False otherwise.
### Input: Returns True if we already have a thumbnail version of this image cached, False otherwise. ### Response: #vtb def have_thumbnail(self, fitsimage, image):
    chname = self.fv.get_channel_name(fitsimage)
    # key lookups reconstructed; the original string literals were lost
    # in extraction
    idx = image.get('idx', None)
    path = image.get('path', None)
    if path is not None:
        path = os.path.abspath(path)
        name = iohelper.name_image_from_path(path, idx=idx)
    else:
        name = 'NoName'
    name = image.get('name', name)
    thumbkey = self.get_thumb_key(chname, name, path)
    with self.thmblock:
        return thumbkey in self.thumb_dict
#vtb def setupNodding(self):
    g = get_root(self).globals
    if not self.nod():
        # nodding switched off: re-enable the clear-mode box unless
        # drifting, wipe any stored pattern and re-validate
        if not self.isDrift():
            self.clear.enable()
        self.nodPattern = {}
        self.check()
        return
    # The guard and pattern-loading code between here and the assignment
    # below was lost in extraction; a hypothetical loader stands in for it.
    try:
        data = self._loadNodPattern()  # hypothetical helper, not the original
    except Exception:
        self.nod.set(False)
        self.nodPattern = {}
        return
    self.nodPattern = data
    self.clear.set(True)
    self.check()
Setup Nodding for GTC
### Input: Setup Nodding for GTC ### Response: #vtb def setupNodding(self):
    g = get_root(self).globals
    if not self.nod():
        # nodding switched off: re-enable the clear-mode box unless
        # drifting, wipe any stored pattern and re-validate
        if not self.isDrift():
            self.clear.enable()
        self.nodPattern = {}
        self.check()
        return
    # The guard and pattern-loading code between here and the assignment
    # below was lost in extraction; a hypothetical loader stands in for it.
    try:
        data = self._loadNodPattern()  # hypothetical helper, not the original
    except Exception:
        self.nod.set(False)
        self.nodPattern = {}
        return
    self.nodPattern = data
    self.clear.set(True)
    self.check()
#vtb def copy(self, key): copy = Set(key=key, db=self.db) copy.clear() copy |= self return copy
Copy the set to another key and return the new Set. WARNING: If the key exists, it overwrites it.
### Input: Copy the set to another key and return the new Set. WARNING: If the key exists, it overwrites it. ### Response: #vtb def copy(self, key): copy = Set(key=key, db=self.db) copy.clear() copy |= self return copy
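A hedged usage sketch; the set-like `add()` and iteration are assumed from the class this method belongs to, and `redis_conn` is a hypothetical connection object:

tags = Set(key="article:1:tags", db=redis_conn)
tags.add("python")
tags.add("redis")

# overwrites "article:1:tags:backup" if it already exists
backup = tags.copy("article:1:tags:backup")
print(set(backup) == set(tags))  # True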
#vtb def parameterize( self, country: Optional[str] = "South Sudan", state: Optional[str] = None, year: Optional[int] = None, month: Optional[int] = None, unit: Optional[str] = None, fallback_aggaxes: List[str] = ["year", "month"], aggfunc: Callable = np.mean, ): valid_axes = ("country", "state", "year", "month") if any(map(lambda axis: axis not in valid_axes, fallback_aggaxes)): raise ValueError( "All elements of the fallback_aggaxes set must be one of the " f"following: {valid_axes}" ) for n in self.nodes(data=True): for indicator in n[1]["indicators"].values(): indicator.mean, indicator.unit = get_indicator_value( indicator, country, state, year, month, unit, fallback_aggaxes, aggfunc, ) indicator.stdev = 0.1 * abs(indicator.mean)
Parameterize the analysis graph. Args: country state year month unit fallback_aggaxes: An iterable of strings denoting the axes upon which to perform fallback aggregation if the desired constraints cannot be met. aggfunc: The function that will be called to perform the aggregation if there are multiple matches.
### Input: Parameterize the analysis graph. Args: country state year month unit fallback_aggaxes: An iterable of strings denoting the axes upon which to perform fallback aggregation if the desired constraints cannot be met. aggfunc: The function that will be called to perform the aggregation if there are multiple matches. ### Response: #vtb def parameterize(
    self,
    country: Optional[str] = "South Sudan",
    state: Optional[str] = None,
    year: Optional[int] = None,
    month: Optional[int] = None,
    unit: Optional[str] = None,
    fallback_aggaxes: List[str] = ["year", "month"],
    aggfunc: Callable = np.mean,
):
    valid_axes = ("country", "state", "year", "month")
    if any(map(lambda axis: axis not in valid_axes, fallback_aggaxes)):
        raise ValueError(
            "All elements of the fallback_aggaxes set must be one of the "
            f"following: {valid_axes}"
        )
    for n in self.nodes(data=True):
        for indicator in n[1]["indicators"].values():
            indicator.mean, indicator.unit = get_indicator_value(
                indicator, country, state, year, month, unit,
                fallback_aggaxes, aggfunc,
            )
            indicator.stdev = 0.1 * abs(indicator.mean)
#vtb def kill_process(procname, scriptname):
    import os
    import signal
    import subprocess

    # scan the process table; the command strings were lost in extraction,
    # 'ps aux' assumed (PID is the second column of its output)
    p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
    out, err = p.communicate()
    for line in out.decode().splitlines():
        if procname in line and scriptname in line:
            pid = int(line.split()[1])
            # log message reconstructed; the original literal was lost
            info('Stopping %s %s %d' % (procname, scriptname, pid))
            os.kill(pid, signal.SIGKILL)
kill WSGI processes that may be running in development
### Input: kill WSGI processes that may be running in development ### Response: #vtb def kill_process(procname, scriptname):
    import os
    import signal
    import subprocess

    # scan the process table; the command strings were lost in extraction,
    # 'ps aux' assumed (PID is the second column of its output)
    p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
    out, err = p.communicate()
    for line in out.decode().splitlines():
        if procname in line and scriptname in line:
            pid = int(line.split()[1])
            # log message reconstructed; the original literal was lost
            info('Stopping %s %s %d' % (procname, scriptname, pid))
            os.kill(pid, signal.SIGKILL)
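The match is a plain substring test against each row of the process listing, so both names must appear somewhere on the command line; usage is a one-liner (names here are hypothetical):

# stop a development server started as `python run_app.py`
kill_process('python', 'run_app.py')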
#vtb def check_snmp(self):
    from glances.snmp import GlancesSNMPClient
    clientsnmp = GlancesSNMPClient(host=self.args.client,
                                   port=self.args.snmp_port,
                                   version=self.args.snmp_version,
                                   community=self.args.snmp_community,
                                   user=self.args.snmp_user,
                                   auth=self.args.snmp_auth)
    # sysName.0: any answer at all means SNMP is reachable
    ret = clientsnmp.get_by_oid("1.3.6.1.2.1.1.5.0") != {}
    if ret:
        # sysDescr.0 is used to guess the remote operating system
        oid_os_name = clientsnmp.get_by_oid("1.3.6.1.2.1.1.1.0")
        try:
            self.system_name = self.get_system_name(oid_os_name['1.3.6.1.2.1.1.1.0'])
            logger.info("SNMP system name detected: {}".format(self.system_name))
        except KeyError:
            self.system_name = None
            logger.warning("Cannot detect SNMP system name")
    return ret
Check if SNMP is available on the server.
### Input: Check if SNMP is available on the server. ### Response: #vtb def check_snmp(self):
    from glances.snmp import GlancesSNMPClient
    clientsnmp = GlancesSNMPClient(host=self.args.client,
                                   port=self.args.snmp_port,
                                   version=self.args.snmp_version,
                                   community=self.args.snmp_community,
                                   user=self.args.snmp_user,
                                   auth=self.args.snmp_auth)
    # sysName.0: any answer at all means SNMP is reachable
    ret = clientsnmp.get_by_oid("1.3.6.1.2.1.1.5.0") != {}
    if ret:
        # sysDescr.0 is used to guess the remote operating system
        oid_os_name = clientsnmp.get_by_oid("1.3.6.1.2.1.1.1.0")
        try:
            self.system_name = self.get_system_name(oid_os_name['1.3.6.1.2.1.1.1.0'])
            logger.info("SNMP system name detected: {}".format(self.system_name))
        except KeyError:
            self.system_name = None
            logger.warning("Cannot detect SNMP system name")
    return ret
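Both OIDs polled above are standard MIB-2 scalars: 1.3.6.1.2.1.1.5.0 is sysName.0 (the reachability probe) and 1.3.6.1.2.1.1.1.0 is sysDescr.0 (used for OS detection). A hedged sketch of the probe in isolation, reusing the client constructor shown above with illustrative connection values:

from glances.snmp import GlancesSNMPClient

client = GlancesSNMPClient(host="192.0.2.10", port=161,
                           version="2c", community="public",
                           user=None, auth=None)
print(client.get_by_oid("1.3.6.1.2.1.1.5.0"))  # {oid: sysName} if reachable
print(client.get_by_oid("1.3.6.1.2.1.1.1.0"))  # {oid: sysDescr}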
#vtb def next_page(self, max_=None): result = type(self)() result.after = After(self.last.value) result.max_ = max_ return result
Return a query set which requests the page after this response. :param max_: Maximum number of items to return. :type max_: :class:`int` or :data:`None` :rtype: :class:`ResultSetMetadata` :return: A new request set up to request the next page. Must be called on a result set which has :attr:`last` set.
### Input: Return a query set which requests the page after this response. :param max_: Maximum number of items to return. :type max_: :class:`int` or :data:`None` :rtype: :class:`ResultSetMetadata` :return: A new request set up to request the next page. Must be called on a result set which has :attr:`last` set. ### Response: #vtb def next_page(self, max_=None): result = type(self)() result.after = After(self.last.value) result.max_ = max_ return result
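A hedged paging sketch: `fetch` is a hypothetical coroutine performing one RSM-limited request and returning a result that carries `items` plus response metadata with `last` set, which is exactly what next_page() requires:

async def fetch_all(first_rsm, fetch, page_size=50):
    rsm = first_rsm
    items = []
    while True:
        result = await fetch(rsm)              # hypothetical transport call
        items.extend(result.items)
        if len(result.items) < page_size:      # short page: nothing follows
            break
        rsm = result.metadata.next_page(max_=page_size)
    return items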
#vtb def read(self):
    if self._is_initialized:
        return
    self._is_initialized = True

    if not isinstance(self._file_or_files, (tuple, list)):
        files_to_read = [self._file_or_files]
    else:
        files_to_read = list(self._file_or_files)
    seen = set(files_to_read)

    num_read_include_files = 0
    while files_to_read:
        file_path = files_to_read.pop(0)
        fp = file_path
        file_ok = False

        if hasattr(fp, "seek"):
            self._read(fp, fp.name)
        else:
            # assume a path if it is not a file-object
            try:
                with open(file_path, 'rb') as fp:
                    file_ok = True
                    self._read(fp, fp.name)
            except IOError:
                continue

        # Read includes and queue those we did not handle yet; the loop
        # header and relative-path handling below are reconstructed, as the
        # original lines were garbled in extraction.
        if self._has_includes():
            for _, include_path in self.items('include'):
                if include_path.startswith('~'):
                    include_path = osp.expanduser(include_path)
                if not osp.isabs(include_path):
                    if not file_ok:
                        continue
                    assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                    include_path = osp.join(osp.dirname(file_path), include_path)
                include_path = osp.normpath(include_path)
                if include_path in seen or not os.access(include_path, os.R_OK):
                    continue
                seen.add(include_path)
                files_to_read.insert(0, include_path)
                num_read_include_files += 1

    # If no file was included, we can safely write the configuration back
    # without altering its meaning.
    if num_read_include_files == 0:
        self._merge_includes = False
Reads the data stored in the files we have been initialized with. It will ignore files that cannot be read, possibly leaving an empty configuration :return: Nothing :raise IOError: if a file cannot be handled
### Input: Reads the data stored in the files we have been initialized with. It will ignore files that cannot be read, possibly leaving an empty configuration :return: Nothing :raise IOError: if a file cannot be handled ### Response: #vtb def read(self):
    if self._is_initialized:
        return
    self._is_initialized = True

    if not isinstance(self._file_or_files, (tuple, list)):
        files_to_read = [self._file_or_files]
    else:
        files_to_read = list(self._file_or_files)
    seen = set(files_to_read)

    num_read_include_files = 0
    while files_to_read:
        file_path = files_to_read.pop(0)
        fp = file_path
        file_ok = False

        if hasattr(fp, "seek"):
            self._read(fp, fp.name)
        else:
            # assume a path if it is not a file-object
            try:
                with open(file_path, 'rb') as fp:
                    file_ok = True
                    self._read(fp, fp.name)
            except IOError:
                continue

        # Read includes and queue those we did not handle yet; the loop
        # header and relative-path handling below are reconstructed, as the
        # original lines were garbled in extraction.
        if self._has_includes():
            for _, include_path in self.items('include'):
                if include_path.startswith('~'):
                    include_path = osp.expanduser(include_path)
                if not osp.isabs(include_path):
                    if not file_ok:
                        continue
                    assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                    include_path = osp.join(osp.dirname(file_path), include_path)
                include_path = osp.normpath(include_path)
                if include_path in seen or not os.access(include_path, os.R_OK):
                    continue
                seen.add(include_path)
                files_to_read.insert(0, include_path)
                num_read_include_files += 1

    # If no file was included, we can safely write the configuration back
    # without altering its meaning.
    if num_read_include_files == 0:
        self._merge_includes = False
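A standalone sketch of the loop's cycle guard: files are worked breadth-first off a queue, includes are pushed to the front so they are read before the remaining files, and `seen` ensures mutually-including files terminate. Both callables are hypothetical stand-ins.

def read_with_includes(root_path, read_file, resolve_includes):
    seen = {root_path}
    files_to_read = [root_path]
    while files_to_read:
        path = files_to_read.pop(0)
        read_file(path)
        for include in resolve_includes(path):
            if include in seen:
                continue               # cycle or duplicate: read once only
            seen.add(include)
            files_to_read.insert(0, include)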