text
stringlengths
75
104k
code_tokens
sequence
avg_line_len
float64
7.91
980
score
float64
0
0.18
def update_opdocs(self, checksum, opdocs, revision=None): """ Modifies the internal state based a change to the content and returns the sets of words added and removed. :Parameters: checksum : `hashable` A checksum generated from the text of a revision opdocs : `iterable` ( `dict` ) A sequence of operations that represent the diff of this new revision revision : `mixed` Revision metadata :Returns: A triple of lists: current_tokens : `list` ( :class:`~mwpersistence.Token` ) A sequence of Tokens representing the revision that was just processed. tokens_added : `list` ( :class:`~mwpersistence.Token` ) Tokens that were added while updating state. tokens_removed : `list` ( :class:`~mwpersistence.Token` ) Tokens that were removed while updating state. """ return self._update(checksum=checksum, opdocs=opdocs, revision=revision)
[ "def", "update_opdocs", "(", "self", ",", "checksum", ",", "opdocs", ",", "revision", "=", "None", ")", ":", "return", "self", ".", "_update", "(", "checksum", "=", "checksum", ",", "opdocs", "=", "opdocs", ",", "revision", "=", "revision", ")" ]
40.925926
0.001768
def get_default_mapping(self, z, cmapper): """Create dictionary containing default ColumnDataSource glyph to data mappings. """ map_annular = dict(x=self.max_radius, y=self.max_radius, inner_radius="inner_radius", outer_radius="outer_radius", start_angle="start_angle", end_angle="end_angle", fill_color={'field': z, 'transform': cmapper}) map_seg_label = dict(x="x", y="y", text="text", angle="angle", text_align="center") map_ann_label = dict(x="x", y="y", text="text", angle="angle", text_align="center", text_baseline="bottom") map_xmarks = dict(xs="xs", ys="ys") map_ymarks = dict(x= self.max_radius, y=self.max_radius, start_angle=0, end_angle=2*np.pi, radius="radius") return {'annular_wedge_1': map_annular, 'text_1': map_seg_label, 'text_2': map_ann_label, 'multi_line_1': map_xmarks, 'arc_1': map_ymarks}
[ "def", "get_default_mapping", "(", "self", ",", "z", ",", "cmapper", ")", ":", "map_annular", "=", "dict", "(", "x", "=", "self", ".", "max_radius", ",", "y", "=", "self", ".", "max_radius", ",", "inner_radius", "=", "\"inner_radius\"", ",", "outer_radius", "=", "\"outer_radius\"", ",", "start_angle", "=", "\"start_angle\"", ",", "end_angle", "=", "\"end_angle\"", ",", "fill_color", "=", "{", "'field'", ":", "z", ",", "'transform'", ":", "cmapper", "}", ")", "map_seg_label", "=", "dict", "(", "x", "=", "\"x\"", ",", "y", "=", "\"y\"", ",", "text", "=", "\"text\"", ",", "angle", "=", "\"angle\"", ",", "text_align", "=", "\"center\"", ")", "map_ann_label", "=", "dict", "(", "x", "=", "\"x\"", ",", "y", "=", "\"y\"", ",", "text", "=", "\"text\"", ",", "angle", "=", "\"angle\"", ",", "text_align", "=", "\"center\"", ",", "text_baseline", "=", "\"bottom\"", ")", "map_xmarks", "=", "dict", "(", "xs", "=", "\"xs\"", ",", "ys", "=", "\"ys\"", ")", "map_ymarks", "=", "dict", "(", "x", "=", "self", ".", "max_radius", ",", "y", "=", "self", ".", "max_radius", ",", "start_angle", "=", "0", ",", "end_angle", "=", "2", "*", "np", ".", "pi", ",", "radius", "=", "\"radius\"", ")", "return", "{", "'annular_wedge_1'", ":", "map_annular", ",", "'text_1'", ":", "map_seg_label", ",", "'text_2'", ":", "map_ann_label", ",", "'multi_line_1'", ":", "map_xmarks", ",", "'arc_1'", ":", "map_ymarks", "}" ]
39.2
0.00249
def _route(self, attr, args, kwargs, **fkwargs): """ Perform routing and return db_nums """ return self.cluster.hosts.keys()
[ "def", "_route", "(", "self", ",", "attr", ",", "args", ",", "kwargs", ",", "*", "*", "fkwargs", ")", ":", "return", "self", ".", "cluster", ".", "hosts", ".", "keys", "(", ")" ]
30.4
0.012821
def _make_policies(self): """ Convert the 'scalingPolicies' dictionary into AutoScalePolicy objects. """ self.policies = [AutoScalePolicy(self.manager, dct, self) for dct in self.scalingPolicies]
[ "def", "_make_policies", "(", "self", ")", ":", "self", ".", "policies", "=", "[", "AutoScalePolicy", "(", "self", ".", "manager", ",", "dct", ",", "self", ")", "for", "dct", "in", "self", ".", "scalingPolicies", "]" ]
39.666667
0.012346
def _roots_to_targets(self, build_graph, target_roots): """Populate the BuildGraph and target list from a set of input TargetRoots.""" with self._run_tracker.new_workunit(name='parse', labels=[WorkUnitLabel.SETUP]): return [ build_graph.get_target(address) for address in build_graph.inject_roots_closure(target_roots, self._fail_fast) ]
[ "def", "_roots_to_targets", "(", "self", ",", "build_graph", ",", "target_roots", ")", ":", "with", "self", ".", "_run_tracker", ".", "new_workunit", "(", "name", "=", "'parse'", ",", "labels", "=", "[", "WorkUnitLabel", ".", "SETUP", "]", ")", ":", "return", "[", "build_graph", ".", "get_target", "(", "address", ")", "for", "address", "in", "build_graph", ".", "inject_roots_closure", "(", "target_roots", ",", "self", ".", "_fail_fast", ")", "]" ]
46.75
0.010499
def getipmacarp(self): """ Function operates on the IMCDev object and updates the ipmacarp attribute :return: """ self.ipmacarp = get_ip_mac_arp_list(self.auth, self.url, devid = self.devid)
[ "def", "getipmacarp", "(", "self", ")", ":", "self", ".", "ipmacarp", "=", "get_ip_mac_arp_list", "(", "self", ".", "auth", ",", "self", ".", "url", ",", "devid", "=", "self", ".", "devid", ")" ]
37.5
0.026087
def check_class(self, id_, class_, lineno, scope=None, show_error=True): """ Check the id is either undefined or defined with the given class. - If the identifier (e.g. variable) does not exists means it's undeclared, and returns True (OK). - If the identifier exists, but its class_ attribute is unknown yet (None), returns also True. This means the identifier has been referenced in advanced and it's undeclared. Otherwise fails returning False. """ assert CLASS.is_valid(class_) entry = self.get_entry(id_, scope) if entry is None or entry.class_ == CLASS.unknown: # Undeclared yet return True if entry.class_ != class_: if show_error: if entry.class_ == CLASS.array: a1 = 'n' else: a1 = '' if class_ == CLASS.array: a2 = 'n' else: a2 = '' syntax_error(lineno, "identifier '%s' is a%s %s, not a%s %s" % (id_, a1, entry.class_, a2, class_)) return False return True
[ "def", "check_class", "(", "self", ",", "id_", ",", "class_", ",", "lineno", ",", "scope", "=", "None", ",", "show_error", "=", "True", ")", ":", "assert", "CLASS", ".", "is_valid", "(", "class_", ")", "entry", "=", "self", ".", "get_entry", "(", "id_", ",", "scope", ")", "if", "entry", "is", "None", "or", "entry", ".", "class_", "==", "CLASS", ".", "unknown", ":", "# Undeclared yet", "return", "True", "if", "entry", ".", "class_", "!=", "class_", ":", "if", "show_error", ":", "if", "entry", ".", "class_", "==", "CLASS", ".", "array", ":", "a1", "=", "'n'", "else", ":", "a1", "=", "''", "if", "class_", "==", "CLASS", ".", "array", ":", "a2", "=", "'n'", "else", ":", "a2", "=", "''", "syntax_error", "(", "lineno", ",", "\"identifier '%s' is a%s %s, not a%s %s\"", "%", "(", "id_", ",", "a1", ",", "entry", ".", "class_", ",", "a2", ",", "class_", ")", ")", "return", "False", "return", "True" ]
36.8125
0.001654
def can_run_c_extension(name=None): """ Determine whether the given Python C extension loads correctly. If ``name`` is ``None``, tests all Python C extensions, and return ``True`` if and only if all load correctly. :param string name: the name of the Python C extension to test :rtype: bool """ def can_run_cdtw(): """ Python C extension for computing DTW """ try: import aeneas.cdtw.cdtw return True except ImportError: return False def can_run_cmfcc(): """ Python C extension for computing MFCC """ try: import aeneas.cmfcc.cmfcc return True except ImportError: return False def can_run_cew(): """ Python C extension for synthesizing with eSpeak """ try: import aeneas.cew.cew return True except ImportError: return False def can_run_cfw(): """ Python C extension for synthesizing with Festival """ try: import aeneas.cfw.cfw return True except ImportError: return False if name == "cdtw": return can_run_cdtw() elif name == "cmfcc": return can_run_cmfcc() elif name == "cew": return can_run_cew() elif name == "cfw": return can_run_cfw() else: # NOTE cfw is still experimental! return can_run_cdtw() and can_run_cmfcc() and can_run_cew()
[ "def", "can_run_c_extension", "(", "name", "=", "None", ")", ":", "def", "can_run_cdtw", "(", ")", ":", "\"\"\" Python C extension for computing DTW \"\"\"", "try", ":", "import", "aeneas", ".", "cdtw", ".", "cdtw", "return", "True", "except", "ImportError", ":", "return", "False", "def", "can_run_cmfcc", "(", ")", ":", "\"\"\" Python C extension for computing MFCC \"\"\"", "try", ":", "import", "aeneas", ".", "cmfcc", ".", "cmfcc", "return", "True", "except", "ImportError", ":", "return", "False", "def", "can_run_cew", "(", ")", ":", "\"\"\" Python C extension for synthesizing with eSpeak \"\"\"", "try", ":", "import", "aeneas", ".", "cew", ".", "cew", "return", "True", "except", "ImportError", ":", "return", "False", "def", "can_run_cfw", "(", ")", ":", "\"\"\" Python C extension for synthesizing with Festival \"\"\"", "try", ":", "import", "aeneas", ".", "cfw", ".", "cfw", "return", "True", "except", "ImportError", ":", "return", "False", "if", "name", "==", "\"cdtw\"", ":", "return", "can_run_cdtw", "(", ")", "elif", "name", "==", "\"cmfcc\"", ":", "return", "can_run_cmfcc", "(", ")", "elif", "name", "==", "\"cew\"", ":", "return", "can_run_cew", "(", ")", "elif", "name", "==", "\"cfw\"", ":", "return", "can_run_cfw", "(", ")", "else", ":", "# NOTE cfw is still experimental!", "return", "can_run_cdtw", "(", ")", "and", "can_run_cmfcc", "(", ")", "and", "can_run_cew", "(", ")" ]
27.283019
0.000668
def create_oqhazardlib_source(self, tom, mesh_spacing, use_defaults=False): """ Returns an instance of the :class: `openquake.hazardlib.source.simple_fault.SimpleFaultSource` :param tom: Temporal occurrance model :param float mesh_spacing: Mesh spacing """ if not self.mfd: raise ValueError("Cannot write to hazardlib without MFD") return SimpleFaultSource( self.id, self.name, self.trt, self.mfd, mesh_spacing, conv.mag_scale_rel_to_hazardlib(self.mag_scale_rel, use_defaults), conv.render_aspect_ratio(self.rupt_aspect_ratio, use_defaults), tom, self.upper_depth, self.lower_depth, self.fault_trace, self.dip, self.rake)
[ "def", "create_oqhazardlib_source", "(", "self", ",", "tom", ",", "mesh_spacing", ",", "use_defaults", "=", "False", ")", ":", "if", "not", "self", ".", "mfd", ":", "raise", "ValueError", "(", "\"Cannot write to hazardlib without MFD\"", ")", "return", "SimpleFaultSource", "(", "self", ".", "id", ",", "self", ".", "name", ",", "self", ".", "trt", ",", "self", ".", "mfd", ",", "mesh_spacing", ",", "conv", ".", "mag_scale_rel_to_hazardlib", "(", "self", ".", "mag_scale_rel", ",", "use_defaults", ")", ",", "conv", ".", "render_aspect_ratio", "(", "self", ".", "rupt_aspect_ratio", ",", "use_defaults", ")", ",", "tom", ",", "self", ".", "upper_depth", ",", "self", ".", "lower_depth", ",", "self", ".", "fault_trace", ",", "self", ".", "dip", ",", "self", ".", "rake", ")" ]
32.923077
0.00227
def add_to_hash(self, filename, hasher): """Contribute `filename`'s data to the Md5Hash `hasher`.""" hasher.update(self.executed_lines(filename)) hasher.update(self.executed_arcs(filename))
[ "def", "add_to_hash", "(", "self", ",", "filename", ",", "hasher", ")", ":", "hasher", ".", "update", "(", "self", ".", "executed_lines", "(", "filename", ")", ")", "hasher", ".", "update", "(", "self", ".", "executed_arcs", "(", "filename", ")", ")" ]
52.5
0.00939
def template(page=None, layout=None, **kwargs): """ Decorator to change the view template and layout. It works on both View class and view methods on class only $layout is applied, everything else will be passed to the kwargs Using as first argument, it will be the layout. :first arg or $layout: The layout to use for that view :param layout: The layout to use for that view :param kwargs: get pass to the TEMPLATE_CONTEXT ** on method that return a dict page or layout are optional :param page: The html page :param layout: The layout to use for that view :param kwargs: get pass to the view as k/V ** on other methods that return other type, it doesn't apply :return: """ pkey = "_template_extends__" def decorator(f): if inspect.isclass(f): layout_ = layout or page extends = kwargs.pop("extends", None) if extends and hasattr(extends, pkey): items = getattr(extends, pkey).items() if "layout" in items: layout_ = items.pop("layout") for k, v in items: kwargs.setdefault(k, v) if not layout_: layout_ = "layout.html" kwargs.setdefault("brand_name", "") kwargs["layout"] = layout_ setattr(f, pkey, kwargs) setattr(f, "base_layout", kwargs.get("layout")) f.g(TEMPLATE_CONTEXT=kwargs) return f else: @functools.wraps(f) def wrap(*args2, **kwargs2): response = f(*args2, **kwargs2) if isinstance(response, dict) or response is None: response = response or {} if page: response.setdefault("template_", page) if layout: response.setdefault("layout_", layout) for k, v in kwargs.items(): response.setdefault(k, v) return response return wrap return decorator
[ "def", "template", "(", "page", "=", "None", ",", "layout", "=", "None", ",", "*", "*", "kwargs", ")", ":", "pkey", "=", "\"_template_extends__\"", "def", "decorator", "(", "f", ")", ":", "if", "inspect", ".", "isclass", "(", "f", ")", ":", "layout_", "=", "layout", "or", "page", "extends", "=", "kwargs", ".", "pop", "(", "\"extends\"", ",", "None", ")", "if", "extends", "and", "hasattr", "(", "extends", ",", "pkey", ")", ":", "items", "=", "getattr", "(", "extends", ",", "pkey", ")", ".", "items", "(", ")", "if", "\"layout\"", "in", "items", ":", "layout_", "=", "items", ".", "pop", "(", "\"layout\"", ")", "for", "k", ",", "v", "in", "items", ":", "kwargs", ".", "setdefault", "(", "k", ",", "v", ")", "if", "not", "layout_", ":", "layout_", "=", "\"layout.html\"", "kwargs", ".", "setdefault", "(", "\"brand_name\"", ",", "\"\"", ")", "kwargs", "[", "\"layout\"", "]", "=", "layout_", "setattr", "(", "f", ",", "pkey", ",", "kwargs", ")", "setattr", "(", "f", ",", "\"base_layout\"", ",", "kwargs", ".", "get", "(", "\"layout\"", ")", ")", "f", ".", "g", "(", "TEMPLATE_CONTEXT", "=", "kwargs", ")", "return", "f", "else", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrap", "(", "*", "args2", ",", "*", "*", "kwargs2", ")", ":", "response", "=", "f", "(", "*", "args2", ",", "*", "*", "kwargs2", ")", "if", "isinstance", "(", "response", ",", "dict", ")", "or", "response", "is", "None", ":", "response", "=", "response", "or", "{", "}", "if", "page", ":", "response", ".", "setdefault", "(", "\"template_\"", ",", "page", ")", "if", "layout", ":", "response", ".", "setdefault", "(", "\"layout_\"", ",", "layout", ")", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "response", ".", "setdefault", "(", "k", ",", "v", ")", "return", "response", "return", "wrap", "return", "decorator" ]
33.09375
0.000459
def filter_create(self, phrase, context, irreversible = False, whole_word = True, expires_in = None): """ Creates a new keyword filter. `phrase` is the phrase that should be filtered out, `context` specifies from where to filter the keywords. Valid contexts are 'home', 'notifications', 'public' and 'thread'. Set `irreversible` to True if you want the filter to just delete statuses server side. This works only for the 'home' and 'notifications' contexts. Set `whole_word` to False if you want to allow filter matches to start or end within a word, not only at word boundaries. Set `expires_in` to specify for how many seconds the filter should be kept around. Returns the `filter dict`_ of the newly created filter. """ params = self.__generate_params(locals()) for context_val in context: if not context_val in ['home', 'notifications', 'public', 'thread']: raise MastodonIllegalArgumentError('Invalid filter context.') return self.__api_request('POST', '/api/v1/filters', params)
[ "def", "filter_create", "(", "self", ",", "phrase", ",", "context", ",", "irreversible", "=", "False", ",", "whole_word", "=", "True", ",", "expires_in", "=", "None", ")", ":", "params", "=", "self", ".", "__generate_params", "(", "locals", "(", ")", ")", "for", "context_val", "in", "context", ":", "if", "not", "context_val", "in", "[", "'home'", ",", "'notifications'", ",", "'public'", ",", "'thread'", "]", ":", "raise", "MastodonIllegalArgumentError", "(", "'Invalid filter context.'", ")", "return", "self", ".", "__api_request", "(", "'POST'", ",", "'/api/v1/filters'", ",", "params", ")" ]
48.5
0.016849
def get_dual_rmetric( self, invert_h = False, mode_inv = 'svd' ): """ Compute the dual Riemannian Metric This is not satisfactory, because if mdimG<mdimY the shape of H will not be the same as the shape of G. TODO(maybe): return a (copied) smaller H with only the rows and columns in G. """ if self.H is None: self.H, self.G, self.Hvv, self.Hsvals, self.Gsvals = riemann_metric(self.Y, self.L, self.mdimG, invert_h = invert_h, mode_inv = mode_inv) if invert_h: return self.H, self.G else: return self.H
[ "def", "get_dual_rmetric", "(", "self", ",", "invert_h", "=", "False", ",", "mode_inv", "=", "'svd'", ")", ":", "if", "self", ".", "H", "is", "None", ":", "self", ".", "H", ",", "self", ".", "G", ",", "self", ".", "Hvv", ",", "self", ".", "Hsvals", ",", "self", ".", "Gsvals", "=", "riemann_metric", "(", "self", ".", "Y", ",", "self", ".", "L", ",", "self", ".", "mdimG", ",", "invert_h", "=", "invert_h", ",", "mode_inv", "=", "mode_inv", ")", "if", "invert_h", ":", "return", "self", ".", "H", ",", "self", ".", "G", "else", ":", "return", "self", ".", "H" ]
46
0.021311
def _load(db_data, db): """ Load :class:`mongomock.database.Database` from dict data. """ if db.name != db_data["name"]: raise ValueError("dbname doesn't matches! Maybe wrong database data.") db.__init__(client=db._client, name=db.name) for col_name, col_data in iteritems(db_data["_collections"]): collection = db.get_collection(col_name) collection._documents = col_data["_documents"] collection._uniques = col_data["_uniques"] db._collections[col_name] = collection return db
[ "def", "_load", "(", "db_data", ",", "db", ")", ":", "if", "db", ".", "name", "!=", "db_data", "[", "\"name\"", "]", ":", "raise", "ValueError", "(", "\"dbname doesn't matches! Maybe wrong database data.\"", ")", "db", ".", "__init__", "(", "client", "=", "db", ".", "_client", ",", "name", "=", "db", ".", "name", ")", "for", "col_name", ",", "col_data", "in", "iteritems", "(", "db_data", "[", "\"_collections\"", "]", ")", ":", "collection", "=", "db", ".", "get_collection", "(", "col_name", ")", "collection", ".", "_documents", "=", "col_data", "[", "\"_documents\"", "]", "collection", ".", "_uniques", "=", "col_data", "[", "\"_uniques\"", "]", "db", ".", "_collections", "[", "col_name", "]", "=", "collection", "return", "db" ]
35.6
0.001825
def _upload_resumable_all(self, upload_info, bitmap, number_of_units, unit_size): """Prepare and upload all resumable units and return upload_key upload_info -- UploadInfo object bitmap -- bitmap node of upload/check number_of_units -- number of units requested unit_size -- size of a single upload unit in bytes """ fd = upload_info.fd upload_key = None for unit_id in range(number_of_units): upload_status = decode_resumable_upload_bitmap( bitmap, number_of_units) if upload_status[unit_id]: logger.debug("Skipping unit %d/%d - already uploaded", unit_id + 1, number_of_units) continue logger.debug("Uploading unit %d/%d", unit_id + 1, number_of_units) offset = unit_id * unit_size with SubsetIO(fd, offset, unit_size) as unit_fd: unit_info = _UploadUnitInfo( upload_info=upload_info, hash_=upload_info.hash_info.units[unit_id], fd=unit_fd, uid=unit_id) upload_result = self._upload_resumable_unit(unit_info) # upload_key is needed for polling if upload_key is None: upload_key = upload_result['doupload']['key'] return upload_key
[ "def", "_upload_resumable_all", "(", "self", ",", "upload_info", ",", "bitmap", ",", "number_of_units", ",", "unit_size", ")", ":", "fd", "=", "upload_info", ".", "fd", "upload_key", "=", "None", "for", "unit_id", "in", "range", "(", "number_of_units", ")", ":", "upload_status", "=", "decode_resumable_upload_bitmap", "(", "bitmap", ",", "number_of_units", ")", "if", "upload_status", "[", "unit_id", "]", ":", "logger", ".", "debug", "(", "\"Skipping unit %d/%d - already uploaded\"", ",", "unit_id", "+", "1", ",", "number_of_units", ")", "continue", "logger", ".", "debug", "(", "\"Uploading unit %d/%d\"", ",", "unit_id", "+", "1", ",", "number_of_units", ")", "offset", "=", "unit_id", "*", "unit_size", "with", "SubsetIO", "(", "fd", ",", "offset", ",", "unit_size", ")", "as", "unit_fd", ":", "unit_info", "=", "_UploadUnitInfo", "(", "upload_info", "=", "upload_info", ",", "hash_", "=", "upload_info", ".", "hash_info", ".", "units", "[", "unit_id", "]", ",", "fd", "=", "unit_fd", ",", "uid", "=", "unit_id", ")", "upload_result", "=", "self", ".", "_upload_resumable_unit", "(", "unit_info", ")", "# upload_key is needed for polling", "if", "upload_key", "is", "None", ":", "upload_key", "=", "upload_result", "[", "'doupload'", "]", "[", "'key'", "]", "return", "upload_key" ]
33.44186
0.002027
def debug_inspect_node(self, node_msindex): """ Get info about the node. See pycut.inspect_node() for details. Processing is done in temporary shape. :param node_seed: :return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds """ return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex)
[ "def", "debug_inspect_node", "(", "self", ",", "node_msindex", ")", ":", "return", "inspect_node", "(", "self", ".", "nlinks", ",", "self", ".", "unariesalt2", ",", "self", ".", "msinds", ",", "node_msindex", ")" ]
42.111111
0.010336
def point_in_prism(tri1, tri2, pt): ''' point_in_prism(tri1, tri2, pt) yields True if the given point is inside the prism that stretches between triangle 1 and triangle 2. Will automatically thread over extended dimensions. If multiple triangles are given, then the vertices must be an earlier dimension than the coordinates; e.g., a 3 x 3 x n array will be assumed to organized such that element [0,1,k] is the y coordinate of the first vertex of the k'th triangle. ''' bcs = prism_barycentric_coordinates(tri1, tri2, pt) return np.logical_not(np.isclose(np.sum(bcs, axis=0), 0))
[ "def", "point_in_prism", "(", "tri1", ",", "tri2", ",", "pt", ")", ":", "bcs", "=", "prism_barycentric_coordinates", "(", "tri1", ",", "tri2", ",", "pt", ")", "return", "np", ".", "logical_not", "(", "np", ".", "isclose", "(", "np", ".", "sum", "(", "bcs", ",", "axis", "=", "0", ")", ",", "0", ")", ")" ]
61.5
0.008013
def smeft_toarray(wc_name, wc_dict): """Construct a numpy array with Wilson coefficient values from a dictionary of label-value pairs corresponding to the non-redundant elements.""" shape = smeftutil.C_keys_shape[wc_name] C = np.zeros(shape, dtype=complex) for k, v in wc_dict.items(): if k.split('_')[0] != wc_name: continue indices = k.split('_')[-1] # e.g. '1213' indices = tuple(int(s) - 1 for s in indices) # e.g. (1, 2, 1, 3) C[indices] = v C = smeftutil.symmetrize({wc_name: C})[wc_name] return C
[ "def", "smeft_toarray", "(", "wc_name", ",", "wc_dict", ")", ":", "shape", "=", "smeftutil", ".", "C_keys_shape", "[", "wc_name", "]", "C", "=", "np", ".", "zeros", "(", "shape", ",", "dtype", "=", "complex", ")", "for", "k", ",", "v", "in", "wc_dict", ".", "items", "(", ")", ":", "if", "k", ".", "split", "(", "'_'", ")", "[", "0", "]", "!=", "wc_name", ":", "continue", "indices", "=", "k", ".", "split", "(", "'_'", ")", "[", "-", "1", "]", "# e.g. '1213'", "indices", "=", "tuple", "(", "int", "(", "s", ")", "-", "1", "for", "s", "in", "indices", ")", "# e.g. (1, 2, 1, 3)", "C", "[", "indices", "]", "=", "v", "C", "=", "smeftutil", ".", "symmetrize", "(", "{", "wc_name", ":", "C", "}", ")", "[", "wc_name", "]", "return", "C" ]
40.571429
0.001721
def validate(bbllines:iter, *, profiling=False): """Yield lines of warnings and errors about input bbl lines. profiling -- yield also info lines about input bbl file. If bbllines is a valid file name, it will be read. Else, it should be an iterable of bubble file lines. """ if isinstance(bbllines, str): if os.path.exists(bbllines): # filename containing bubble bbllines = utils.file_lines(bbllines) elif '\n' not in bbllines or '\t' not in bbllines: # probably a bad file name: let's rise the proper error bbllines = utils.file_lines(bbllines) else: # bubble itself bbllines = bbllines.split('\n') bubble = tuple(bbllines) data = tuple(utils.line_data(line) for line in bubble) types = tuple(utils.line_type(line) for line in bubble) # launch profiling if profiling: ltype_counts = Counter(types) for ltype, count in ltype_counts.items(): yield 'INFO {} lines of type {}'.format(count, ltype) yield 'INFO {} lines of payload'.format( ltype_counts['EDGE'] + ltype_counts['IN'] + ltype_counts['NODE'] + ltype_counts['SET']) # launch validation for errline in (l for l, t in zip(bubble, types) if t == 'ERROR'): yield 'ERROR line is not bubble: "{}"'.format(errline) tree = BubbleTree.from_bubble_data(data) cc, subroots = tree.connected_components() # print('cc:', cc) # print('subroots:', subroots) if profiling: yield 'INFO {} top (power)nodes'.format(len(tree.roots)) yield 'INFO {} connected components'.format(len(cc)) yield 'INFO {} nodes are defined, {} are used'.format( ltype_counts['NODE'], len(tuple(tree.nodes()))) yield 'INFO {} powernodes are defined, {} are used'.format( ltype_counts['SET'], len(tuple(tree.powernodes()))) yield from inclusions_validation(tree) yield from mergeability_validation(tree)
[ "def", "validate", "(", "bbllines", ":", "iter", ",", "*", ",", "profiling", "=", "False", ")", ":", "if", "isinstance", "(", "bbllines", ",", "str", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "bbllines", ")", ":", "# filename containing bubble", "bbllines", "=", "utils", ".", "file_lines", "(", "bbllines", ")", "elif", "'\\n'", "not", "in", "bbllines", "or", "'\\t'", "not", "in", "bbllines", ":", "# probably a bad file name: let's rise the proper error", "bbllines", "=", "utils", ".", "file_lines", "(", "bbllines", ")", "else", ":", "# bubble itself", "bbllines", "=", "bbllines", ".", "split", "(", "'\\n'", ")", "bubble", "=", "tuple", "(", "bbllines", ")", "data", "=", "tuple", "(", "utils", ".", "line_data", "(", "line", ")", "for", "line", "in", "bubble", ")", "types", "=", "tuple", "(", "utils", ".", "line_type", "(", "line", ")", "for", "line", "in", "bubble", ")", "# launch profiling", "if", "profiling", ":", "ltype_counts", "=", "Counter", "(", "types", ")", "for", "ltype", ",", "count", "in", "ltype_counts", ".", "items", "(", ")", ":", "yield", "'INFO {} lines of type {}'", ".", "format", "(", "count", ",", "ltype", ")", "yield", "'INFO {} lines of payload'", ".", "format", "(", "ltype_counts", "[", "'EDGE'", "]", "+", "ltype_counts", "[", "'IN'", "]", "+", "ltype_counts", "[", "'NODE'", "]", "+", "ltype_counts", "[", "'SET'", "]", ")", "# launch validation", "for", "errline", "in", "(", "l", "for", "l", ",", "t", "in", "zip", "(", "bubble", ",", "types", ")", "if", "t", "==", "'ERROR'", ")", ":", "yield", "'ERROR line is not bubble: \"{}\"'", ".", "format", "(", "errline", ")", "tree", "=", "BubbleTree", ".", "from_bubble_data", "(", "data", ")", "cc", ",", "subroots", "=", "tree", ".", "connected_components", "(", ")", "# print('cc:', cc)", "# print('subroots:', subroots)", "if", "profiling", ":", "yield", "'INFO {} top (power)nodes'", ".", "format", "(", "len", "(", "tree", ".", "roots", ")", ")", "yield", "'INFO {} connected 
components'", ".", "format", "(", "len", "(", "cc", ")", ")", "yield", "'INFO {} nodes are defined, {} are used'", ".", "format", "(", "ltype_counts", "[", "'NODE'", "]", ",", "len", "(", "tuple", "(", "tree", ".", "nodes", "(", ")", ")", ")", ")", "yield", "'INFO {} powernodes are defined, {} are used'", ".", "format", "(", "ltype_counts", "[", "'SET'", "]", ",", "len", "(", "tuple", "(", "tree", ".", "powernodes", "(", ")", ")", ")", ")", "yield", "from", "inclusions_validation", "(", "tree", ")", "yield", "from", "mergeability_validation", "(", "tree", ")" ]
43.466667
0.0015
def get_total_ram(): """The total amount of system RAM in bytes. This is what is reported by the OS, and may be overcommitted when there are multiple containers hosted on the same machine. """ with open('/proc/meminfo', 'r') as f: for line in f.readlines(): if line: key, value, unit = line.split() if key == 'MemTotal:': assert unit == 'kB', 'Unknown unit' return int(value) * 1024 # Classic, not KiB. raise NotImplementedError()
[ "def", "get_total_ram", "(", ")", ":", "with", "open", "(", "'/proc/meminfo'", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ".", "readlines", "(", ")", ":", "if", "line", ":", "key", ",", "value", ",", "unit", "=", "line", ".", "split", "(", ")", "if", "key", "==", "'MemTotal:'", ":", "assert", "unit", "==", "'kB'", ",", "'Unknown unit'", "return", "int", "(", "value", ")", "*", "1024", "# Classic, not KiB.", "raise", "NotImplementedError", "(", ")" ]
38.5
0.001812
def _to_point(dims): """Convert (width, height) or size -> point.Point.""" assert dims if isinstance(dims, (tuple, list)): if len(dims) != 2: raise ValueError( "A two element tuple or list is expected here, got {}.".format(dims)) else: width = int(dims[0]) height = int(dims[1]) if width <= 0 or height <= 0: raise ValueError("Must specify +ve dims, got {}.".format(dims)) else: return point.Point(width, height) else: size = int(dims) if size <= 0: raise ValueError( "Must specify a +ve value for size, got {}.".format(dims)) else: return point.Point(size, size)
[ "def", "_to_point", "(", "dims", ")", ":", "assert", "dims", "if", "isinstance", "(", "dims", ",", "(", "tuple", ",", "list", ")", ")", ":", "if", "len", "(", "dims", ")", "!=", "2", ":", "raise", "ValueError", "(", "\"A two element tuple or list is expected here, got {}.\"", ".", "format", "(", "dims", ")", ")", "else", ":", "width", "=", "int", "(", "dims", "[", "0", "]", ")", "height", "=", "int", "(", "dims", "[", "1", "]", ")", "if", "width", "<=", "0", "or", "height", "<=", "0", ":", "raise", "ValueError", "(", "\"Must specify +ve dims, got {}.\"", ".", "format", "(", "dims", ")", ")", "else", ":", "return", "point", ".", "Point", "(", "width", ",", "height", ")", "else", ":", "size", "=", "int", "(", "dims", ")", "if", "size", "<=", "0", ":", "raise", "ValueError", "(", "\"Must specify a +ve value for size, got {}.\"", ".", "format", "(", "dims", ")", ")", "else", ":", "return", "point", ".", "Point", "(", "size", ",", "size", ")" ]
29.5
0.01791
def _Rforce(self,R,z,phi=0.,t=0.): """ NAME: _Rforce PURPOSE: evaluate the radial force for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t - time OUTPUT: the radial force HISTORY: 2013-06-26 - Written - Bovy (IAS) """ r= nu.sqrt(R*R+z*z) return -self._mass(r)*R/r**3.
[ "def", "_Rforce", "(", "self", ",", "R", ",", "z", ",", "phi", "=", "0.", ",", "t", "=", "0.", ")", ":", "r", "=", "nu", ".", "sqrt", "(", "R", "*", "R", "+", "z", "*", "z", ")", "return", "-", "self", ".", "_mass", "(", "r", ")", "*", "R", "/", "r", "**", "3." ]
25.5
0.014706
def FixmatFactory(fixmatfile, categories = None, var_name = 'fixmat', field_name='x'): """ Loads a single fixmat (fixmatfile). Parameters: fixmatfile : string The matlab fixmat that should be loaded. categories : instance of stimuli.Categories, optional Links data in categories to data in fixmat. """ try: data = loadmat(fixmatfile, struct_as_record = False) keys = list(data.keys()) data = data[var_name][0][0] except KeyError: raise RuntimeError('%s is not a field of the matlab structure. Possible'+ 'Keys are %s'%str(keys)) num_fix = data.__getattribute__(field_name).size # Get a list with fieldnames and a list with parameters fields = {} parameters = {} for field in data._fieldnames: if data.__getattribute__(field).size == num_fix: fields[field] = data.__getattribute__(field) else: parameters[field] = data.__getattribute__(field)[0].tolist() if len(parameters[field]) == 1: parameters[field] = parameters[field][0] # Generate FixMat fixmat = FixMat(categories = categories) fixmat._fields = list(fields.keys()) for (field, value) in list(fields.items()): fixmat.__dict__[field] = value.reshape(-1,) fixmat._parameters = parameters fixmat._subjects = None for (field, value) in list(parameters.items()): fixmat.__dict__[field] = value fixmat._num_fix = num_fix return fixmat
[ "def", "FixmatFactory", "(", "fixmatfile", ",", "categories", "=", "None", ",", "var_name", "=", "'fixmat'", ",", "field_name", "=", "'x'", ")", ":", "try", ":", "data", "=", "loadmat", "(", "fixmatfile", ",", "struct_as_record", "=", "False", ")", "keys", "=", "list", "(", "data", ".", "keys", "(", ")", ")", "data", "=", "data", "[", "var_name", "]", "[", "0", "]", "[", "0", "]", "except", "KeyError", ":", "raise", "RuntimeError", "(", "'%s is not a field of the matlab structure. Possible'", "+", "'Keys are %s'", "%", "str", "(", "keys", ")", ")", "num_fix", "=", "data", ".", "__getattribute__", "(", "field_name", ")", ".", "size", "# Get a list with fieldnames and a list with parameters", "fields", "=", "{", "}", "parameters", "=", "{", "}", "for", "field", "in", "data", ".", "_fieldnames", ":", "if", "data", ".", "__getattribute__", "(", "field", ")", ".", "size", "==", "num_fix", ":", "fields", "[", "field", "]", "=", "data", ".", "__getattribute__", "(", "field", ")", "else", ":", "parameters", "[", "field", "]", "=", "data", ".", "__getattribute__", "(", "field", ")", "[", "0", "]", ".", "tolist", "(", ")", "if", "len", "(", "parameters", "[", "field", "]", ")", "==", "1", ":", "parameters", "[", "field", "]", "=", "parameters", "[", "field", "]", "[", "0", "]", "# Generate FixMat", "fixmat", "=", "FixMat", "(", "categories", "=", "categories", ")", "fixmat", ".", "_fields", "=", "list", "(", "fields", ".", "keys", "(", ")", ")", "for", "(", "field", ",", "value", ")", "in", "list", "(", "fields", ".", "items", "(", ")", ")", ":", "fixmat", ".", "__dict__", "[", "field", "]", "=", "value", ".", "reshape", "(", "-", "1", ",", ")", "fixmat", ".", "_parameters", "=", "parameters", "fixmat", ".", "_subjects", "=", "None", "for", "(", "field", ",", "value", ")", "in", "list", "(", "parameters", ".", "items", "(", ")", ")", ":", "fixmat", ".", "__dict__", "[", "field", "]", "=", "value", "fixmat", ".", "_num_fix", "=", "num_fix", "return", "fixmat" ]
35.465116
0.012125
def ignore_path(path, ignore_list=None, whitelist=None): """ Returns a boolean indicating if a path should be ignored given an ignore_list and a whitelist of glob patterns. """ if ignore_list is None: return True should_ignore = matches_glob_list(path, ignore_list) if whitelist is None: return should_ignore return should_ignore and not matches_glob_list(path, whitelist)
[ "def", "ignore_path", "(", "path", ",", "ignore_list", "=", "None", ",", "whitelist", "=", "None", ")", ":", "if", "ignore_list", "is", "None", ":", "return", "True", "should_ignore", "=", "matches_glob_list", "(", "path", ",", "ignore_list", ")", "if", "whitelist", "is", "None", ":", "return", "should_ignore", "return", "should_ignore", "and", "not", "matches_glob_list", "(", "path", ",", "whitelist", ")" ]
31.538462
0.00237
def trigger(self, source): """ Triggers all actions meant to trigger on the board state from `source`. """ actions = self.evaluate(source) if actions: if not hasattr(actions, "__iter__"): actions = (actions, ) source.game.trigger_actions(source, actions)
[ "def", "trigger", "(", "self", ",", "source", ")", ":", "actions", "=", "self", ".", "evaluate", "(", "source", ")", "if", "actions", ":", "if", "not", "hasattr", "(", "actions", ",", "\"__iter__\"", ")", ":", "actions", "=", "(", "actions", ",", ")", "source", ".", "game", ".", "trigger_actions", "(", "source", ",", "actions", ")" ]
29.555556
0.036496
def plot_vectors(self, arrows=True): """ Plot vectors of positional transition of LISA values within quadrant in scatterplot in a polar plot. Parameters ---------- ax : Matplotlib Axes instance, optional If given, the figure will be created inside this axis. Default =None. arrows : boolean, optional If True show arrowheads of vectors. Default =True **kwargs : keyword arguments, optional Keywords used for creating and designing the plot. Note: 'c' and 'color' cannot be passed when attribute is not None Returns ------- fig : Matplotlib Figure instance Moran scatterplot figure ax : matplotlib Axes instance Axes in which the figure is plotted """ from splot.giddy import dynamic_lisa_vectors fig, ax = dynamic_lisa_vectors(self, arrows=arrows) return fig, ax
[ "def", "plot_vectors", "(", "self", ",", "arrows", "=", "True", ")", ":", "from", "splot", ".", "giddy", "import", "dynamic_lisa_vectors", "fig", ",", "ax", "=", "dynamic_lisa_vectors", "(", "self", ",", "arrows", "=", "arrows", ")", "return", "fig", ",", "ax" ]
32.793103
0.002043
def get_settings_from_client(client): """Pull out settings from a SoftLayer.BaseClient instance. :param client: SoftLayer.BaseClient instance """ settings = { 'username': '', 'api_key': '', 'timeout': '', 'endpoint_url': '', } try: settings['username'] = client.auth.username settings['api_key'] = client.auth.api_key except AttributeError: pass transport = _resolve_transport(client.transport) try: settings['timeout'] = transport.timeout settings['endpoint_url'] = transport.endpoint_url except AttributeError: pass return settings
[ "def", "get_settings_from_client", "(", "client", ")", ":", "settings", "=", "{", "'username'", ":", "''", ",", "'api_key'", ":", "''", ",", "'timeout'", ":", "''", ",", "'endpoint_url'", ":", "''", ",", "}", "try", ":", "settings", "[", "'username'", "]", "=", "client", ".", "auth", ".", "username", "settings", "[", "'api_key'", "]", "=", "client", ".", "auth", ".", "api_key", "except", "AttributeError", ":", "pass", "transport", "=", "_resolve_transport", "(", "client", ".", "transport", ")", "try", ":", "settings", "[", "'timeout'", "]", "=", "transport", ".", "timeout", "settings", "[", "'endpoint_url'", "]", "=", "transport", ".", "endpoint_url", "except", "AttributeError", ":", "pass", "return", "settings" ]
25.44
0.001515
def _init(): """Dynamically import engines that initialize successfully.""" import importlib import os import re filenames = os.listdir(os.path.dirname(__file__)) module_names = set() for filename in filenames: match = re.match(r'^(?P<name>[A-Z_a-z]\w*)\.py[co]?$', filename) if match: module_names.add(match.group('name')) for module_name in module_names: try: module = importlib.import_module('.' + module_name, __name__) except ImportError: continue for name, member in module.__dict__.items(): if not isinstance(member, type): # skip non-new-style classes continue if not issubclass(member, Engine): # skip non-subclasses of Engine continue if member is Engine: # skip "abstract" class Engine continue try: handle = member.handle except AttributeError: continue engines[handle] = member
[ "def", "_init", "(", ")", ":", "import", "importlib", "import", "os", "import", "re", "filenames", "=", "os", ".", "listdir", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "module_names", "=", "set", "(", ")", "for", "filename", "in", "filenames", ":", "match", "=", "re", ".", "match", "(", "r'^(?P<name>[A-Z_a-z]\\w*)\\.py[co]?$'", ",", "filename", ")", "if", "match", ":", "module_names", ".", "add", "(", "match", ".", "group", "(", "'name'", ")", ")", "for", "module_name", "in", "module_names", ":", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "'.'", "+", "module_name", ",", "__name__", ")", "except", "ImportError", ":", "continue", "for", "name", ",", "member", "in", "module", ".", "__dict__", ".", "items", "(", ")", ":", "if", "not", "isinstance", "(", "member", ",", "type", ")", ":", "# skip non-new-style classes", "continue", "if", "not", "issubclass", "(", "member", ",", "Engine", ")", ":", "# skip non-subclasses of Engine", "continue", "if", "member", "is", "Engine", ":", "# skip \"abstract\" class Engine", "continue", "try", ":", "handle", "=", "member", ".", "handle", "except", "AttributeError", ":", "continue", "engines", "[", "handle", "]", "=", "member" ]
28.864865
0.000906
def displayEmptyInputWarningBox(display=True, parent=None): """ Displays a warning box for the 'input' parameter. """ if sys.version_info[0] >= 3: from tkinter.messagebox import showwarning else: from tkMessageBox import showwarning if display: msg = 'No valid input files found! '+\ 'Please check the value for the "input" parameter.' showwarning(parent=parent,message=msg, title="No valid inputs!") return "yes"
[ "def", "displayEmptyInputWarningBox", "(", "display", "=", "True", ",", "parent", "=", "None", ")", ":", "if", "sys", ".", "version_info", "[", "0", "]", ">=", "3", ":", "from", "tkinter", ".", "messagebox", "import", "showwarning", "else", ":", "from", "tkMessageBox", "import", "showwarning", "if", "display", ":", "msg", "=", "'No valid input files found! '", "+", "'Please check the value for the \"input\" parameter.'", "showwarning", "(", "parent", "=", "parent", ",", "message", "=", "msg", ",", "title", "=", "\"No valid inputs!\"", ")", "return", "\"yes\"" ]
35.846154
0.008368
def _check_directory_arguments(self): """ Validates arguments for loading from directories, including static image and time series directories. """ if not os.path.isdir(self.datapath): raise (NotADirectoryError('Directory does not exist: %s' % self.datapath)) if self.time_delay: if self.time_delay < 1: raise ValueError('Time step argument must be greater than 0, but gave: %i' % self.time_delay) if not isinstance(self.time_delay, int): raise ValueError('Time step argument must be an integer, but gave: %s' % str(self.time_delay))
[ "def", "_check_directory_arguments", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "datapath", ")", ":", "raise", "(", "NotADirectoryError", "(", "'Directory does not exist: %s'", "%", "self", ".", "datapath", ")", ")", "if", "self", ".", "time_delay", ":", "if", "self", ".", "time_delay", "<", "1", ":", "raise", "ValueError", "(", "'Time step argument must be greater than 0, but gave: %i'", "%", "self", ".", "time_delay", ")", "if", "not", "isinstance", "(", "self", ".", "time_delay", ",", "int", ")", ":", "raise", "ValueError", "(", "'Time step argument must be an integer, but gave: %s'", "%", "str", "(", "self", ".", "time_delay", ")", ")" ]
57.363636
0.00936
def remove_invalid_fields(self, queryset, fields, view): """Remove invalid fields from an ordering. Overwrites the DRF default remove_invalid_fields method to return both the valid orderings and any invalid orderings. """ valid_orderings = [] invalid_orderings = [] # for each field sent down from the query param, # determine if its valid or invalid for term in fields: stripped_term = term.lstrip('-') # add back the '-' add the end if necessary reverse_sort_term = '' if len(stripped_term) is len(term) else '-' ordering = self.ordering_for(stripped_term, view) if ordering: valid_orderings.append(reverse_sort_term + ordering) else: invalid_orderings.append(term) return valid_orderings, invalid_orderings
[ "def", "remove_invalid_fields", "(", "self", ",", "queryset", ",", "fields", ",", "view", ")", ":", "valid_orderings", "=", "[", "]", "invalid_orderings", "=", "[", "]", "# for each field sent down from the query param,", "# determine if its valid or invalid", "for", "term", "in", "fields", ":", "stripped_term", "=", "term", ".", "lstrip", "(", "'-'", ")", "# add back the '-' add the end if necessary", "reverse_sort_term", "=", "''", "if", "len", "(", "stripped_term", ")", "is", "len", "(", "term", ")", "else", "'-'", "ordering", "=", "self", ".", "ordering_for", "(", "stripped_term", ",", "view", ")", "if", "ordering", ":", "valid_orderings", ".", "append", "(", "reverse_sort_term", "+", "ordering", ")", "else", ":", "invalid_orderings", ".", "append", "(", "term", ")", "return", "valid_orderings", ",", "invalid_orderings" ]
36.458333
0.002227
def is_null(*symbols): """ True if no nodes or all the given nodes are either None, NOP or empty blocks. For blocks this applies recursively """ from symbols.symbol_ import Symbol for sym in symbols: if sym is None: continue if not isinstance(sym, Symbol): return False if sym.token == 'NOP': continue if sym.token == 'BLOCK': if not is_null(*sym.children): return False continue return False return True
[ "def", "is_null", "(", "*", "symbols", ")", ":", "from", "symbols", ".", "symbol_", "import", "Symbol", "for", "sym", "in", "symbols", ":", "if", "sym", "is", "None", ":", "continue", "if", "not", "isinstance", "(", "sym", ",", "Symbol", ")", ":", "return", "False", "if", "sym", ".", "token", "==", "'NOP'", ":", "continue", "if", "sym", ".", "token", "==", "'BLOCK'", ":", "if", "not", "is_null", "(", "*", "sym", ".", "children", ")", ":", "return", "False", "continue", "return", "False", "return", "True" ]
27.736842
0.001835
def get_renderer(app, id): """Retrieve a renderer. :param app: :class:`~flask.Flask` application to look ``id`` up on :param id: Internal renderer id-string to look up """ renderer = app.extensions.get('nav_renderers', {})[id] if isinstance(renderer, tuple): mod_name, cls_name = renderer mod = import_module(mod_name) cls = mod for name in cls_name.split('.'): cls = getattr(cls, name) return cls return renderer
[ "def", "get_renderer", "(", "app", ",", "id", ")", ":", "renderer", "=", "app", ".", "extensions", ".", "get", "(", "'nav_renderers'", ",", "{", "}", ")", "[", "id", "]", "if", "isinstance", "(", "renderer", ",", "tuple", ")", ":", "mod_name", ",", "cls_name", "=", "renderer", "mod", "=", "import_module", "(", "mod_name", ")", "cls", "=", "mod", "for", "name", "in", "cls_name", ".", "split", "(", "'.'", ")", ":", "cls", "=", "getattr", "(", "cls", ",", "name", ")", "return", "cls", "return", "renderer" ]
25.263158
0.002008
def append_if_local_or_in_imports(self, definition): """Add definition to list. Handles local definitions and adds to project_definitions. """ if isinstance(definition, LocalModuleDefinition): self.definitions.append(definition) elif self.import_names == ["*"]: self.definitions.append(definition) elif self.import_names and definition.name in self.import_names: self.definitions.append(definition) elif (self.import_alias_mapping and definition.name in self.import_alias_mapping.values()): self.definitions.append(definition) if definition.parent_module_name: self.definitions.append(definition) if definition.node not in project_definitions: project_definitions[definition.node] = definition
[ "def", "append_if_local_or_in_imports", "(", "self", ",", "definition", ")", ":", "if", "isinstance", "(", "definition", ",", "LocalModuleDefinition", ")", ":", "self", ".", "definitions", ".", "append", "(", "definition", ")", "elif", "self", ".", "import_names", "==", "[", "\"*\"", "]", ":", "self", ".", "definitions", ".", "append", "(", "definition", ")", "elif", "self", ".", "import_names", "and", "definition", ".", "name", "in", "self", ".", "import_names", ":", "self", ".", "definitions", ".", "append", "(", "definition", ")", "elif", "(", "self", ".", "import_alias_mapping", "and", "definition", ".", "name", "in", "self", ".", "import_alias_mapping", ".", "values", "(", ")", ")", ":", "self", ".", "definitions", ".", "append", "(", "definition", ")", "if", "definition", ".", "parent_module_name", ":", "self", ".", "definitions", ".", "append", "(", "definition", ")", "if", "definition", ".", "node", "not", "in", "project_definitions", ":", "project_definitions", "[", "definition", ".", "node", "]", "=", "definition" ]
41.75
0.002342
def deepupdate( mapping: abc.MutableMapping, other: abc.Mapping, listextend=False ): """update one dictionary from another recursively. Only individual values will be overwritten--not entire branches of nested dictionaries. """ def inner(other, previouskeys): """previouskeys is a tuple that stores all the names of keys we've recursed into so far so it can they can be looked up recursively on the pimary mapping when a value needs updateing. """ for key, value in other.items(): if isinstance(value, abc.Mapping): inner(value, (*previouskeys, key)) else: node = mapping for previouskey in previouskeys: node = node.setdefault(previouskey, {}) target = node.get(key) if ( listextend and isinstance(target, abc.MutableSequence) and isinstance(value, abc.Sequence) ): target.extend(value) else: node[key] = value inner(other, ())
[ "def", "deepupdate", "(", "mapping", ":", "abc", ".", "MutableMapping", ",", "other", ":", "abc", ".", "Mapping", ",", "listextend", "=", "False", ")", ":", "def", "inner", "(", "other", ",", "previouskeys", ")", ":", "\"\"\"previouskeys is a tuple that stores all the names of keys\n we've recursed into so far so it can they can be looked up\n recursively on the pimary mapping when a value needs updateing.\n \"\"\"", "for", "key", ",", "value", "in", "other", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "abc", ".", "Mapping", ")", ":", "inner", "(", "value", ",", "(", "*", "previouskeys", ",", "key", ")", ")", "else", ":", "node", "=", "mapping", "for", "previouskey", "in", "previouskeys", ":", "node", "=", "node", ".", "setdefault", "(", "previouskey", ",", "{", "}", ")", "target", "=", "node", ".", "get", "(", "key", ")", "if", "(", "listextend", "and", "isinstance", "(", "target", ",", "abc", ".", "MutableSequence", ")", "and", "isinstance", "(", "value", ",", "abc", ".", "Sequence", ")", ")", ":", "target", ".", "extend", "(", "value", ")", "else", ":", "node", "[", "key", "]", "=", "value", "inner", "(", "other", ",", "(", ")", ")" ]
35.125
0.000866
def main(): """Start the bot.""" # Bale Bot Authorization Token updater = Updater("TOKEN") # Get the dispatcher to register handlers dp = updater.dispatcher # on different commands - answer in Bale dp.add_handler(CommandHandler("start", start)) dp.add_handler(CommandHandler("help", help)) # on noncommand i.e message - echo the message on Bale dp.add_handler(MessageHandler(DefaultFilter(), echo)) # log all errors dp.add_error_handler(error) # Start the Bot updater.run()
[ "def", "main", "(", ")", ":", "# Bale Bot Authorization Token", "updater", "=", "Updater", "(", "\"TOKEN\"", ")", "# Get the dispatcher to register handlers", "dp", "=", "updater", ".", "dispatcher", "# on different commands - answer in Bale", "dp", ".", "add_handler", "(", "CommandHandler", "(", "\"start\"", ",", "start", ")", ")", "dp", ".", "add_handler", "(", "CommandHandler", "(", "\"help\"", ",", "help", ")", ")", "# on noncommand i.e message - echo the message on Bale", "dp", ".", "add_handler", "(", "MessageHandler", "(", "DefaultFilter", "(", ")", ",", "echo", ")", ")", "# log all errors", "dp", ".", "add_error_handler", "(", "error", ")", "# Start the Bot", "updater", ".", "run", "(", ")" ]
25.75
0.001873
def setParts( self, parts ): """ Sets the path for this edit widget by providing the parts to the path. :param parts | [<str>, ..] """ self.setText(self.separator().join(map(str, parts)))
[ "def", "setParts", "(", "self", ",", "parts", ")", ":", "self", ".", "setText", "(", "self", ".", "separator", "(", ")", ".", "join", "(", "map", "(", "str", ",", "parts", ")", ")", ")" ]
33.571429
0.020747
def printDeadCells(self): """ Print statistics for the dead cells """ columnCasualties = numpy.zeros(self.numberOfColumns()) for cell in self.deadCells: col = self.columnForCell(cell) columnCasualties[col] += 1 for col in range(self.numberOfColumns()): print col, columnCasualties[col]
[ "def", "printDeadCells", "(", "self", ")", ":", "columnCasualties", "=", "numpy", ".", "zeros", "(", "self", ".", "numberOfColumns", "(", ")", ")", "for", "cell", "in", "self", ".", "deadCells", ":", "col", "=", "self", ".", "columnForCell", "(", "cell", ")", "columnCasualties", "[", "col", "]", "+=", "1", "for", "col", "in", "range", "(", "self", ".", "numberOfColumns", "(", ")", ")", ":", "print", "col", ",", "columnCasualties", "[", "col", "]" ]
31.8
0.012232
def scan_file(fullpath, relpath, assign_id): """ scan a file and put it into the index """ # pylint: disable=too-many-branches,too-many-statements,too-many-locals # Since a file has changed, the lrucache is invalid. load_message.cache_clear() try: entry = load_message(fullpath) except FileNotFoundError: # The file doesn't exist, so remove it from the index record = model.Entry.get(file_path=fullpath) if record: expire_record(record) return True entry_id = get_entry_id(entry, fullpath, assign_id) if entry_id is None: return False fixup_needed = False basename = os.path.basename(relpath) title = entry['title'] or guess_title(basename) values = { 'file_path': fullpath, 'category': entry.get('Category', utils.get_category(relpath)), 'status': model.PublishStatus[entry.get('Status', 'SCHEDULED').upper()].value, 'entry_type': entry.get('Entry-Type', ''), 'slug_text': make_slug(entry.get('Slug-Text', title)), 'redirect_url': entry.get('Redirect-To', ''), 'title': title, 'sort_title': entry.get('Sort-Title', title), 'entry_template': entry.get('Entry-Template', '') } entry_date = None if 'Date' in entry: try: entry_date = arrow.get(entry['Date'], tzinfo=config.timezone) except arrow.parser.ParserError: entry_date = None if entry_date is None: del entry['Date'] entry_date = arrow.get( os.stat(fullpath).st_ctime).to(config.timezone) entry['Date'] = entry_date.format() fixup_needed = True if 'Last-Modified' in entry: last_modified_str = entry['Last-Modified'] try: last_modified = arrow.get( last_modified_str, tzinfo=config.timezone) except arrow.parser.ParserError: last_modified = arrow.get() del entry['Last-Modified'] entry['Last-Modified'] = last_modified.format() fixup_needed = True values['display_date'] = entry_date.isoformat() values['utc_date'] = entry_date.to('utc').datetime values['local_date'] = entry_date.naive logger.debug("getting entry %s with id %d", fullpath, entry_id) record = model.Entry.get(id=entry_id) if record: logger.debug("Reusing existing entry %d", 
record.id) record.set(**values) else: record = model.Entry(id=entry_id, **values) # Update the entry ID if str(record.id) != entry['Entry-ID']: del entry['Entry-ID'] entry['Entry-ID'] = str(record.id) fixup_needed = True if 'UUID' not in entry: entry['UUID'] = str(uuid.uuid5( uuid.NAMESPACE_URL, 'file://' + fullpath)) fixup_needed = True # add other relationships to the index path_alias.remove_aliases(record) if record.visible: for alias in entry.get_all('Path-Alias', []): path_alias.set_alias(alias, entry=record) with orm.db_session: set_tags = { t.lower() for t in entry.get_all('Tag', []) + entry.get_all('Hidden-Tag', []) } for tag in record.tags: if tag.key in set_tags: set_tags.remove(tag.key) else: tag.delete() for tag in set_tags: model.EntryTag(entry=record, key=tag) orm.commit() if record.status == model.PublishStatus.DRAFT.value: logger.info("Not touching draft entry %s", fullpath) elif fixup_needed: logger.info("Fixing up entry %s", fullpath) save_file(fullpath, entry) return record
[ "def", "scan_file", "(", "fullpath", ",", "relpath", ",", "assign_id", ")", ":", "# pylint: disable=too-many-branches,too-many-statements,too-many-locals", "# Since a file has changed, the lrucache is invalid.", "load_message", ".", "cache_clear", "(", ")", "try", ":", "entry", "=", "load_message", "(", "fullpath", ")", "except", "FileNotFoundError", ":", "# The file doesn't exist, so remove it from the index", "record", "=", "model", ".", "Entry", ".", "get", "(", "file_path", "=", "fullpath", ")", "if", "record", ":", "expire_record", "(", "record", ")", "return", "True", "entry_id", "=", "get_entry_id", "(", "entry", ",", "fullpath", ",", "assign_id", ")", "if", "entry_id", "is", "None", ":", "return", "False", "fixup_needed", "=", "False", "basename", "=", "os", ".", "path", ".", "basename", "(", "relpath", ")", "title", "=", "entry", "[", "'title'", "]", "or", "guess_title", "(", "basename", ")", "values", "=", "{", "'file_path'", ":", "fullpath", ",", "'category'", ":", "entry", ".", "get", "(", "'Category'", ",", "utils", ".", "get_category", "(", "relpath", ")", ")", ",", "'status'", ":", "model", ".", "PublishStatus", "[", "entry", ".", "get", "(", "'Status'", ",", "'SCHEDULED'", ")", ".", "upper", "(", ")", "]", ".", "value", ",", "'entry_type'", ":", "entry", ".", "get", "(", "'Entry-Type'", ",", "''", ")", ",", "'slug_text'", ":", "make_slug", "(", "entry", ".", "get", "(", "'Slug-Text'", ",", "title", ")", ")", ",", "'redirect_url'", ":", "entry", ".", "get", "(", "'Redirect-To'", ",", "''", ")", ",", "'title'", ":", "title", ",", "'sort_title'", ":", "entry", ".", "get", "(", "'Sort-Title'", ",", "title", ")", ",", "'entry_template'", ":", "entry", ".", "get", "(", "'Entry-Template'", ",", "''", ")", "}", "entry_date", "=", "None", "if", "'Date'", "in", "entry", ":", "try", ":", "entry_date", "=", "arrow", ".", "get", "(", "entry", "[", "'Date'", "]", ",", "tzinfo", "=", "config", ".", "timezone", ")", "except", "arrow", ".", "parser", ".", 
"ParserError", ":", "entry_date", "=", "None", "if", "entry_date", "is", "None", ":", "del", "entry", "[", "'Date'", "]", "entry_date", "=", "arrow", ".", "get", "(", "os", ".", "stat", "(", "fullpath", ")", ".", "st_ctime", ")", ".", "to", "(", "config", ".", "timezone", ")", "entry", "[", "'Date'", "]", "=", "entry_date", ".", "format", "(", ")", "fixup_needed", "=", "True", "if", "'Last-Modified'", "in", "entry", ":", "last_modified_str", "=", "entry", "[", "'Last-Modified'", "]", "try", ":", "last_modified", "=", "arrow", ".", "get", "(", "last_modified_str", ",", "tzinfo", "=", "config", ".", "timezone", ")", "except", "arrow", ".", "parser", ".", "ParserError", ":", "last_modified", "=", "arrow", ".", "get", "(", ")", "del", "entry", "[", "'Last-Modified'", "]", "entry", "[", "'Last-Modified'", "]", "=", "last_modified", ".", "format", "(", ")", "fixup_needed", "=", "True", "values", "[", "'display_date'", "]", "=", "entry_date", ".", "isoformat", "(", ")", "values", "[", "'utc_date'", "]", "=", "entry_date", ".", "to", "(", "'utc'", ")", ".", "datetime", "values", "[", "'local_date'", "]", "=", "entry_date", ".", "naive", "logger", ".", "debug", "(", "\"getting entry %s with id %d\"", ",", "fullpath", ",", "entry_id", ")", "record", "=", "model", ".", "Entry", ".", "get", "(", "id", "=", "entry_id", ")", "if", "record", ":", "logger", ".", "debug", "(", "\"Reusing existing entry %d\"", ",", "record", ".", "id", ")", "record", ".", "set", "(", "*", "*", "values", ")", "else", ":", "record", "=", "model", ".", "Entry", "(", "id", "=", "entry_id", ",", "*", "*", "values", ")", "# Update the entry ID", "if", "str", "(", "record", ".", "id", ")", "!=", "entry", "[", "'Entry-ID'", "]", ":", "del", "entry", "[", "'Entry-ID'", "]", "entry", "[", "'Entry-ID'", "]", "=", "str", "(", "record", ".", "id", ")", "fixup_needed", "=", "True", "if", "'UUID'", "not", "in", "entry", ":", "entry", "[", "'UUID'", "]", "=", "str", "(", "uuid", ".", "uuid5", "(", "uuid", ".", 
"NAMESPACE_URL", ",", "'file://'", "+", "fullpath", ")", ")", "fixup_needed", "=", "True", "# add other relationships to the index", "path_alias", ".", "remove_aliases", "(", "record", ")", "if", "record", ".", "visible", ":", "for", "alias", "in", "entry", ".", "get_all", "(", "'Path-Alias'", ",", "[", "]", ")", ":", "path_alias", ".", "set_alias", "(", "alias", ",", "entry", "=", "record", ")", "with", "orm", ".", "db_session", ":", "set_tags", "=", "{", "t", ".", "lower", "(", ")", "for", "t", "in", "entry", ".", "get_all", "(", "'Tag'", ",", "[", "]", ")", "+", "entry", ".", "get_all", "(", "'Hidden-Tag'", ",", "[", "]", ")", "}", "for", "tag", "in", "record", ".", "tags", ":", "if", "tag", ".", "key", "in", "set_tags", ":", "set_tags", ".", "remove", "(", "tag", ".", "key", ")", "else", ":", "tag", ".", "delete", "(", ")", "for", "tag", "in", "set_tags", ":", "model", ".", "EntryTag", "(", "entry", "=", "record", ",", "key", "=", "tag", ")", "orm", ".", "commit", "(", ")", "if", "record", ".", "status", "==", "model", ".", "PublishStatus", ".", "DRAFT", ".", "value", ":", "logger", ".", "info", "(", "\"Not touching draft entry %s\"", ",", "fullpath", ")", "elif", "fixup_needed", ":", "logger", ".", "info", "(", "\"Fixing up entry %s\"", ",", "fullpath", ")", "save_file", "(", "fullpath", ",", "entry", ")", "return", "record" ]
32.088496
0.000535
def get_route_to(self, destination="", protocol=""): """ Only IPv4 supported, vrf aware, longer_prefixes parameter ready """ longer_pref = "" # longer_prefixes support, for future use vrf = "" ip_version = None try: ip_version = IPNetwork(destination).version except AddrFormatError: return "Please specify a valid destination!" if ip_version == 4: # process IPv4 routing table routes = {} if vrf: send_cmd = "show ip route vrf {vrf} {destination} {longer}".format( vrf=vrf, destination=destination, longer=longer_pref ).rstrip() else: send_cmd = "show ip route vrf all {destination} {longer}".format( destination=destination, longer=longer_pref ).rstrip() out_sh_ip_rou = self._send_command(send_cmd) # IP Route Table for VRF "TEST" for vrfsec in out_sh_ip_rou.split("IP Route Table for ")[1:]: if "Route not found" in vrfsec: continue vrffound = False preffound = False nh_list = [] cur_prefix = "" for line in vrfsec.split("\n"): if not vrffound: vrfstr = RE_RT_VRF_NAME.match(line) if vrfstr: curvrf = vrfstr.group(1) vrffound = True else: # 10.10.56.0/24, ubest/mbest: 2/0 prefstr = RE_RT_IPV4_ROUTE_PREF.match(line) if prefstr: if preffound: # precess previous prefix if cur_prefix not in routes: routes[cur_prefix] = [] for nh in nh_list: routes[cur_prefix].append(nh) nh_list = [] else: preffound = True cur_prefix = prefstr.group(1) continue # *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct # via 10.17.205.132, Po77.3602, [110/20], 1y18w, ospf-1000, # type-2, tag 2112 # *via 10.17.207.42, Eth3/7.212, [110/20], 02:19:36, ospf-1000, type-2, # tag 2121 # *via 10.17.207.73, [1/0], 1y18w, static # *via 10.17.209.132%vrf2, Po87.3606, [20/20], 1y25w, bgp-65000, # external, tag 65000 # *via Vlan596, [1/0], 1y18w, static viastr = RE_IP_ROUTE_VIA_REGEX.match(line) if viastr: nh_used = viastr.group("used") == "*" nh_ip = viastr.group("ip") or "" # when next hop is leaked from other vrf, for future use # nh_vrf = viastr.group('vrf') nh_int = viastr.group("int") nh_metric = 
viastr.group("metric") nh_age = bgp_time_conversion(viastr.group("age")) nh_source = viastr.group("source") # for future use # rest_of_line = viastr.group('rest') # use only routes from specified protocol if protocol and protocol != nh_source: continue # routing protocol process number, for future use # nh_source_proc_nr = viastr.group('procnr) if nh_int: nh_int_canon = helpers.canonical_interface_name(nh_int) else: nh_int_canon = "" route_entry = { "protocol": nh_source, "outgoing_interface": nh_int_canon, "age": nh_age, "current_active": nh_used, "routing_table": curvrf, "last_active": nh_used, "next_hop": nh_ip, "selected_next_hop": nh_used, "inactive_reason": "", "preference": int(nh_metric), } if nh_source == "bgp": route_entry[ "protocol_attributes" ] = self._get_bgp_route_attr(cur_prefix, curvrf, nh_ip) else: route_entry["protocol_attributes"] = {} nh_list.append(route_entry) # process last next hop entries if preffound: if cur_prefix not in routes: routes[cur_prefix] = [] for nh in nh_list: routes[cur_prefix].append(nh) return routes
[ "def", "get_route_to", "(", "self", ",", "destination", "=", "\"\"", ",", "protocol", "=", "\"\"", ")", ":", "longer_pref", "=", "\"\"", "# longer_prefixes support, for future use", "vrf", "=", "\"\"", "ip_version", "=", "None", "try", ":", "ip_version", "=", "IPNetwork", "(", "destination", ")", ".", "version", "except", "AddrFormatError", ":", "return", "\"Please specify a valid destination!\"", "if", "ip_version", "==", "4", ":", "# process IPv4 routing table", "routes", "=", "{", "}", "if", "vrf", ":", "send_cmd", "=", "\"show ip route vrf {vrf} {destination} {longer}\"", ".", "format", "(", "vrf", "=", "vrf", ",", "destination", "=", "destination", ",", "longer", "=", "longer_pref", ")", ".", "rstrip", "(", ")", "else", ":", "send_cmd", "=", "\"show ip route vrf all {destination} {longer}\"", ".", "format", "(", "destination", "=", "destination", ",", "longer", "=", "longer_pref", ")", ".", "rstrip", "(", ")", "out_sh_ip_rou", "=", "self", ".", "_send_command", "(", "send_cmd", ")", "# IP Route Table for VRF \"TEST\"", "for", "vrfsec", "in", "out_sh_ip_rou", ".", "split", "(", "\"IP Route Table for \"", ")", "[", "1", ":", "]", ":", "if", "\"Route not found\"", "in", "vrfsec", ":", "continue", "vrffound", "=", "False", "preffound", "=", "False", "nh_list", "=", "[", "]", "cur_prefix", "=", "\"\"", "for", "line", "in", "vrfsec", ".", "split", "(", "\"\\n\"", ")", ":", "if", "not", "vrffound", ":", "vrfstr", "=", "RE_RT_VRF_NAME", ".", "match", "(", "line", ")", "if", "vrfstr", ":", "curvrf", "=", "vrfstr", ".", "group", "(", "1", ")", "vrffound", "=", "True", "else", ":", "# 10.10.56.0/24, ubest/mbest: 2/0", "prefstr", "=", "RE_RT_IPV4_ROUTE_PREF", ".", "match", "(", "line", ")", "if", "prefstr", ":", "if", "preffound", ":", "# precess previous prefix", "if", "cur_prefix", "not", "in", "routes", ":", "routes", "[", "cur_prefix", "]", "=", "[", "]", "for", "nh", "in", "nh_list", ":", "routes", "[", "cur_prefix", "]", ".", "append", "(", "nh", ")", "nh_list", 
"=", "[", "]", "else", ":", "preffound", "=", "True", "cur_prefix", "=", "prefstr", ".", "group", "(", "1", ")", "continue", "# *via 10.2.49.60, Vlan3013, [0/0], 1y18w, direct", "# via 10.17.205.132, Po77.3602, [110/20], 1y18w, ospf-1000,", "# type-2, tag 2112", "# *via 10.17.207.42, Eth3/7.212, [110/20], 02:19:36, ospf-1000, type-2,", "# tag 2121", "# *via 10.17.207.73, [1/0], 1y18w, static", "# *via 10.17.209.132%vrf2, Po87.3606, [20/20], 1y25w, bgp-65000,", "# external, tag 65000", "# *via Vlan596, [1/0], 1y18w, static", "viastr", "=", "RE_IP_ROUTE_VIA_REGEX", ".", "match", "(", "line", ")", "if", "viastr", ":", "nh_used", "=", "viastr", ".", "group", "(", "\"used\"", ")", "==", "\"*\"", "nh_ip", "=", "viastr", ".", "group", "(", "\"ip\"", ")", "or", "\"\"", "# when next hop is leaked from other vrf, for future use", "# nh_vrf = viastr.group('vrf')", "nh_int", "=", "viastr", ".", "group", "(", "\"int\"", ")", "nh_metric", "=", "viastr", ".", "group", "(", "\"metric\"", ")", "nh_age", "=", "bgp_time_conversion", "(", "viastr", ".", "group", "(", "\"age\"", ")", ")", "nh_source", "=", "viastr", ".", "group", "(", "\"source\"", ")", "# for future use", "# rest_of_line = viastr.group('rest')", "# use only routes from specified protocol", "if", "protocol", "and", "protocol", "!=", "nh_source", ":", "continue", "# routing protocol process number, for future use", "# nh_source_proc_nr = viastr.group('procnr)", "if", "nh_int", ":", "nh_int_canon", "=", "helpers", ".", "canonical_interface_name", "(", "nh_int", ")", "else", ":", "nh_int_canon", "=", "\"\"", "route_entry", "=", "{", "\"protocol\"", ":", "nh_source", ",", "\"outgoing_interface\"", ":", "nh_int_canon", ",", "\"age\"", ":", "nh_age", ",", "\"current_active\"", ":", "nh_used", ",", "\"routing_table\"", ":", "curvrf", ",", "\"last_active\"", ":", "nh_used", ",", "\"next_hop\"", ":", "nh_ip", ",", "\"selected_next_hop\"", ":", "nh_used", ",", "\"inactive_reason\"", ":", "\"\"", ",", "\"preference\"", ":", 
"int", "(", "nh_metric", ")", ",", "}", "if", "nh_source", "==", "\"bgp\"", ":", "route_entry", "[", "\"protocol_attributes\"", "]", "=", "self", ".", "_get_bgp_route_attr", "(", "cur_prefix", ",", "curvrf", ",", "nh_ip", ")", "else", ":", "route_entry", "[", "\"protocol_attributes\"", "]", "=", "{", "}", "nh_list", ".", "append", "(", "route_entry", ")", "# process last next hop entries", "if", "preffound", ":", "if", "cur_prefix", "not", "in", "routes", ":", "routes", "[", "cur_prefix", "]", "=", "[", "]", "for", "nh", "in", "nh_list", ":", "routes", "[", "cur_prefix", "]", ".", "append", "(", "nh", ")", "return", "routes" ]
50.841121
0.001803
def load_state_dict(self, state_dict: Dict[str, Any]) -> None: """ Load the schedulers state. Parameters ---------- state_dict : ``Dict[str, Any]`` Scheduler state. Should be an object returned from a call to ``state_dict``. """ self.__dict__.update(state_dict)
[ "def", "load_state_dict", "(", "self", ",", "state_dict", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "None", ":", "self", ".", "__dict__", ".", "update", "(", "state_dict", ")" ]
32.1
0.009091
def _read_from_paths(): """ Try to read data from configuration paths ($HOME/_SETTINGS_PATH, /etc/_SETTINGS_PATH). """ home = os.environ.get("HOME", "") home_path = os.path.join(home, _SETTINGS_PATH) etc_path = os.path.join("/etc", _SETTINGS_PATH) env_path = os.environ.get("SETTINGS_PATH", "") read_path = None if env_path and os.path.exists(env_path): read_path = env_path elif home and os.path.exists(home_path): read_path = home_path elif os.path.exists(etc_path): read_path = etc_path if not read_path: return "{}" with open(read_path) as f: return f.read()
[ "def", "_read_from_paths", "(", ")", ":", "home", "=", "os", ".", "environ", ".", "get", "(", "\"HOME\"", ",", "\"\"", ")", "home_path", "=", "os", ".", "path", ".", "join", "(", "home", ",", "_SETTINGS_PATH", ")", "etc_path", "=", "os", ".", "path", ".", "join", "(", "\"/etc\"", ",", "_SETTINGS_PATH", ")", "env_path", "=", "os", ".", "environ", ".", "get", "(", "\"SETTINGS_PATH\"", ",", "\"\"", ")", "read_path", "=", "None", "if", "env_path", "and", "os", ".", "path", ".", "exists", "(", "env_path", ")", ":", "read_path", "=", "env_path", "elif", "home", "and", "os", ".", "path", ".", "exists", "(", "home_path", ")", ":", "read_path", "=", "home_path", "elif", "os", ".", "path", ".", "exists", "(", "etc_path", ")", ":", "read_path", "=", "etc_path", "if", "not", "read_path", ":", "return", "\"{}\"", "with", "open", "(", "read_path", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
27.782609
0.001513
def A(*a): """convert iterable object into numpy array""" return np.array(a[0]) if len(a)==1 else [np.array(o) for o in a]
[ "def", "A", "(", "*", "a", ")", ":", "return", "np", ".", "array", "(", "a", "[", "0", "]", ")", "if", "len", "(", "a", ")", "==", "1", "else", "[", "np", ".", "array", "(", "o", ")", "for", "o", "in", "a", "]" ]
42.666667
0.015385
def _gl_look_at(self, pos, target, up): """ The standard lookAt method :param pos: current position :param target: target position to look at :param up: direction up """ z = vector.normalise(pos - target) x = vector.normalise(vector3.cross(vector.normalise(up), z)) y = vector3.cross(z, x) translate = matrix44.create_identity() translate[3][0] = -pos.x translate[3][1] = -pos.y translate[3][2] = -pos.z rotate = matrix44.create_identity() rotate[0][0] = x[0] # -- X rotate[1][0] = x[1] rotate[2][0] = x[2] rotate[0][1] = y[0] # -- Y rotate[1][1] = y[1] rotate[2][1] = y[2] rotate[0][2] = z[0] # -- Z rotate[1][2] = z[1] rotate[2][2] = z[2] return matrix44.multiply(translate, rotate)
[ "def", "_gl_look_at", "(", "self", ",", "pos", ",", "target", ",", "up", ")", ":", "z", "=", "vector", ".", "normalise", "(", "pos", "-", "target", ")", "x", "=", "vector", ".", "normalise", "(", "vector3", ".", "cross", "(", "vector", ".", "normalise", "(", "up", ")", ",", "z", ")", ")", "y", "=", "vector3", ".", "cross", "(", "z", ",", "x", ")", "translate", "=", "matrix44", ".", "create_identity", "(", ")", "translate", "[", "3", "]", "[", "0", "]", "=", "-", "pos", ".", "x", "translate", "[", "3", "]", "[", "1", "]", "=", "-", "pos", ".", "y", "translate", "[", "3", "]", "[", "2", "]", "=", "-", "pos", ".", "z", "rotate", "=", "matrix44", ".", "create_identity", "(", ")", "rotate", "[", "0", "]", "[", "0", "]", "=", "x", "[", "0", "]", "# -- X", "rotate", "[", "1", "]", "[", "0", "]", "=", "x", "[", "1", "]", "rotate", "[", "2", "]", "[", "0", "]", "=", "x", "[", "2", "]", "rotate", "[", "0", "]", "[", "1", "]", "=", "y", "[", "0", "]", "# -- Y", "rotate", "[", "1", "]", "[", "1", "]", "=", "y", "[", "1", "]", "rotate", "[", "2", "]", "[", "1", "]", "=", "y", "[", "2", "]", "rotate", "[", "0", "]", "[", "2", "]", "=", "z", "[", "0", "]", "# -- Z", "rotate", "[", "1", "]", "[", "2", "]", "=", "z", "[", "1", "]", "rotate", "[", "2", "]", "[", "2", "]", "=", "z", "[", "2", "]", "return", "matrix44", ".", "multiply", "(", "translate", ",", "rotate", ")" ]
29.482759
0.002265
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Digest object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ tstream = BytearrayStream() self.hashing_algorithm.write(tstream, kmip_version=kmip_version) self.digest_value.write(tstream, kmip_version=kmip_version) self.key_format_type.write(tstream, kmip_version=kmip_version) self.length = tstream.length() super(Digest, self).write(ostream, kmip_version=kmip_version) ostream.write(tstream.buffer)
[ "def", "write", "(", "self", ",", "ostream", ",", "kmip_version", "=", "enums", ".", "KMIPVersion", ".", "KMIP_1_0", ")", ":", "tstream", "=", "BytearrayStream", "(", ")", "self", ".", "hashing_algorithm", ".", "write", "(", "tstream", ",", "kmip_version", "=", "kmip_version", ")", "self", ".", "digest_value", ".", "write", "(", "tstream", ",", "kmip_version", "=", "kmip_version", ")", "self", ".", "key_format_type", ".", "write", "(", "tstream", ",", "kmip_version", "=", "kmip_version", ")", "self", ".", "length", "=", "tstream", ".", "length", "(", ")", "super", "(", "Digest", ",", "self", ")", ".", "write", "(", "ostream", ",", "kmip_version", "=", "kmip_version", ")", "ostream", ".", "write", "(", "tstream", ".", "buffer", ")" ]
44.15
0.002217
def get_key(raw=False): """ Gets a single key from stdin """ file_descriptor = stdin.fileno() state = tcgetattr(file_descriptor) chars = [] try: setraw(stdin.fileno()) for i in range(3): char = stdin.read(1) ordinal = ord(char) chars.append(char) if i == 0 and ordinal != 27: break elif i == 1 and ordinal != 91: break elif i == 2 and ordinal != 51: break finally: tcsetattr(file_descriptor, TCSADRAIN, state) result = "".join(chars) return result if raw else codes.keys_flipped.get(result, result)
[ "def", "get_key", "(", "raw", "=", "False", ")", ":", "file_descriptor", "=", "stdin", ".", "fileno", "(", ")", "state", "=", "tcgetattr", "(", "file_descriptor", ")", "chars", "=", "[", "]", "try", ":", "setraw", "(", "stdin", ".", "fileno", "(", ")", ")", "for", "i", "in", "range", "(", "3", ")", ":", "char", "=", "stdin", ".", "read", "(", "1", ")", "ordinal", "=", "ord", "(", "char", ")", "chars", ".", "append", "(", "char", ")", "if", "i", "==", "0", "and", "ordinal", "!=", "27", ":", "break", "elif", "i", "==", "1", "and", "ordinal", "!=", "91", ":", "break", "elif", "i", "==", "2", "and", "ordinal", "!=", "51", ":", "break", "finally", ":", "tcsetattr", "(", "file_descriptor", ",", "TCSADRAIN", ",", "state", ")", "result", "=", "\"\"", ".", "join", "(", "chars", ")", "return", "result", "if", "raw", "else", "codes", ".", "keys_flipped", ".", "get", "(", "result", ",", "result", ")" ]
29.863636
0.001475
def make_view(robot): """ 为一个 BaseRoBot 生成 Bottle view。 Usage :: from werobot import WeRoBot robot = WeRoBot(token='token') @robot.handler def hello(message): return 'Hello World!' from bottle import Bottle from werobot.contrib.bottle import make_view app = Bottle() app.route( '/robot', # WeRoBot 挂载地址 ['GET', 'POST'], make_view(robot) ) :param robot: 一个 BaseRoBot 实例 :return: 一个标准的 Bottle view """ def werobot_view(*args, **kwargs): if not robot.check_signature( request.query.timestamp, request.query.nonce, request.query.signature ): return HTTPResponse( status=403, body=robot.make_error_page(html.escape(request.url)) ) if request.method == 'GET': return request.query.echostr else: body = request.body.read() message = robot.parse_message( body, timestamp=request.query.timestamp, nonce=request.query.nonce, msg_signature=request.query.msg_signature ) return robot.get_encrypted_reply(message) return werobot_view
[ "def", "make_view", "(", "robot", ")", ":", "def", "werobot_view", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "robot", ".", "check_signature", "(", "request", ".", "query", ".", "timestamp", ",", "request", ".", "query", ".", "nonce", ",", "request", ".", "query", ".", "signature", ")", ":", "return", "HTTPResponse", "(", "status", "=", "403", ",", "body", "=", "robot", ".", "make_error_page", "(", "html", ".", "escape", "(", "request", ".", "url", ")", ")", ")", "if", "request", ".", "method", "==", "'GET'", ":", "return", "request", ".", "query", ".", "echostr", "else", ":", "body", "=", "request", ".", "body", ".", "read", "(", ")", "message", "=", "robot", ".", "parse_message", "(", "body", ",", "timestamp", "=", "request", ".", "query", ".", "timestamp", ",", "nonce", "=", "request", ".", "query", ".", "nonce", ",", "msg_signature", "=", "request", ".", "query", ".", "msg_signature", ")", "return", "robot", ".", "get_encrypted_reply", "(", "message", ")", "return", "werobot_view" ]
24.403846
0.000758
def delete(self, **context): """ Removes this record from the database. If the dryRun \ flag is specified then the command will be logged and \ not executed. :note From version 0.6.0 on, this method now accepts a mutable keyword dictionary of values. You can supply any member value for either the <orb.LookupOptions> or <orb.Context>, as well as the keyword 'lookup' to an instance of <orb.LookupOptions> and 'options' for an instance of the <orb.Context> :return <int> """ if not self.isRecord(): return 0 event = orb.events.DeleteEvent(record=self, context=self.context(**context)) if self.processEvent(event): self.onDelete(event) if event.preventDefault: return 0 if self.__delayed: self.__delayed = False self.read() with WriteLocker(self.__dataLock): self.__loaded.clear() context = self.context(**context) conn = context.db.connection() _, count = conn.delete([self], context) # clear out the old values if count == 1: col = self.schema().column(self.schema().idColumn()) with WriteLocker(self.__dataLock): self.__values[col.name()] = (None, None) return count
[ "def", "delete", "(", "self", ",", "*", "*", "context", ")", ":", "if", "not", "self", ".", "isRecord", "(", ")", ":", "return", "0", "event", "=", "orb", ".", "events", ".", "DeleteEvent", "(", "record", "=", "self", ",", "context", "=", "self", ".", "context", "(", "*", "*", "context", ")", ")", "if", "self", ".", "processEvent", "(", "event", ")", ":", "self", ".", "onDelete", "(", "event", ")", "if", "event", ".", "preventDefault", ":", "return", "0", "if", "self", ".", "__delayed", ":", "self", ".", "__delayed", "=", "False", "self", ".", "read", "(", ")", "with", "WriteLocker", "(", "self", ".", "__dataLock", ")", ":", "self", ".", "__loaded", ".", "clear", "(", ")", "context", "=", "self", ".", "context", "(", "*", "*", "context", ")", "conn", "=", "context", ".", "db", ".", "connection", "(", ")", "_", ",", "count", "=", "conn", ".", "delete", "(", "[", "self", "]", ",", "context", ")", "# clear out the old values", "if", "count", "==", "1", ":", "col", "=", "self", ".", "schema", "(", ")", ".", "column", "(", "self", ".", "schema", "(", ")", ".", "idColumn", "(", ")", ")", "with", "WriteLocker", "(", "self", ".", "__dataLock", ")", ":", "self", ".", "__values", "[", "col", ".", "name", "(", ")", "]", "=", "(", "None", ",", "None", ")", "return", "count" ]
32.813953
0.002065
def find_common_root(elements): """ Find root which is common for all `elements`. Args: elements (list): List of double-linked HTMLElement objects. Returns: list: Vector of HTMLElement containing path to common root. """ if not elements: raise UserWarning("Can't find common root - no elements suplied.") root_path = el_to_path_vector(elements.pop()) for el in elements: el_path = el_to_path_vector(el) root_path = common_vector_root(root_path, el_path) if not root_path: raise UserWarning( "Vectors without common root:\n%s" % str(el_path) ) return root_path
[ "def", "find_common_root", "(", "elements", ")", ":", "if", "not", "elements", ":", "raise", "UserWarning", "(", "\"Can't find common root - no elements suplied.\"", ")", "root_path", "=", "el_to_path_vector", "(", "elements", ".", "pop", "(", ")", ")", "for", "el", "in", "elements", ":", "el_path", "=", "el_to_path_vector", "(", "el", ")", "root_path", "=", "common_vector_root", "(", "root_path", ",", "el_path", ")", "if", "not", "root_path", ":", "raise", "UserWarning", "(", "\"Vectors without common root:\\n%s\"", "%", "str", "(", "el_path", ")", ")", "return", "root_path" ]
25.576923
0.001449
def dependency_to_rpm(dep, runtime): """Converts a dependency got by pkg_resources.Requirement.parse() to RPM format. Args: dep - a dependency retrieved by pkg_resources.Requirement.parse() runtime - whether the returned dependency should be runtime (True) or build time (False) Returns: List of semi-SPECFILE dependencies (package names are not properly converted yet). For example: [['Requires', 'jinja2'], ['Conflicts', 'jinja2', '=', '2.0.1']] """ logger.debug('Dependencies provided: {0} runtime: {1}.'.format( dep, runtime)) converted = [] if not len(dep.specs): converted.append(['Requires', dep.project_name]) else: for ver_spec in dep.specs: if ver_spec[0] == '!=': converted.append( ['Conflicts', dep.project_name, '=', ver_spec[1]]) elif ver_spec[0] == '==': converted.append( ['Requires', dep.project_name, '=', ver_spec[1]]) else: converted.append( ['Requires', dep.project_name, ver_spec[0], ver_spec[1]]) if not runtime: for conv in converted: conv[0] = "Build" + conv[0] logger.debug('Converted dependencies: {0}.'.format(converted)) return converted
[ "def", "dependency_to_rpm", "(", "dep", ",", "runtime", ")", ":", "logger", ".", "debug", "(", "'Dependencies provided: {0} runtime: {1}.'", ".", "format", "(", "dep", ",", "runtime", ")", ")", "converted", "=", "[", "]", "if", "not", "len", "(", "dep", ".", "specs", ")", ":", "converted", ".", "append", "(", "[", "'Requires'", ",", "dep", ".", "project_name", "]", ")", "else", ":", "for", "ver_spec", "in", "dep", ".", "specs", ":", "if", "ver_spec", "[", "0", "]", "==", "'!='", ":", "converted", ".", "append", "(", "[", "'Conflicts'", ",", "dep", ".", "project_name", ",", "'='", ",", "ver_spec", "[", "1", "]", "]", ")", "elif", "ver_spec", "[", "0", "]", "==", "'=='", ":", "converted", ".", "append", "(", "[", "'Requires'", ",", "dep", ".", "project_name", ",", "'='", ",", "ver_spec", "[", "1", "]", "]", ")", "else", ":", "converted", ".", "append", "(", "[", "'Requires'", ",", "dep", ".", "project_name", ",", "ver_spec", "[", "0", "]", ",", "ver_spec", "[", "1", "]", "]", ")", "if", "not", "runtime", ":", "for", "conv", "in", "converted", ":", "conv", "[", "0", "]", "=", "\"Build\"", "+", "conv", "[", "0", "]", "logger", ".", "debug", "(", "'Converted dependencies: {0}.'", ".", "format", "(", "converted", ")", ")", "return", "converted" ]
37.194444
0.000728
def execute_nonstop_tasks(self, tasks_cls): """ Just a wrapper to the execute_batch_tasks method """ self.execute_batch_tasks(tasks_cls, self.conf['sortinghat']['sleep_for'], self.conf['general']['min_update_delay'], False)
[ "def", "execute_nonstop_tasks", "(", "self", ",", "tasks_cls", ")", ":", "self", ".", "execute_batch_tasks", "(", "tasks_cls", ",", "self", ".", "conf", "[", "'sortinghat'", "]", "[", "'sleep_for'", "]", ",", "self", ".", "conf", "[", "'general'", "]", "[", "'min_update_delay'", "]", ",", "False", ")" ]
45.571429
0.009231
def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" return self.get_json(self.USER_INFO_URL, method="POST", headers=self._get_headers(access_token))
[ "def", "user_data", "(", "self", ",", "access_token", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "get_json", "(", "self", ".", "USER_INFO_URL", ",", "method", "=", "\"POST\"", ",", "headers", "=", "self", ".", "_get_headers", "(", "access_token", ")", ")" ]
49.25
0.015
def run_initialization_experiment(seed, num_neurons = 50, dim = 40, num_bins = 10, num_samples = 50*600, neuron_size = 10000, num_dendrites = 400, dendrite_length = 25, power = 10, ): """ Runs an experiment testing classifying a binary dataset, based on Poirazi & Mel's original experiment. Learning is using our modified variant of their rule, and positive and negative neurons compete to classify a datapoint. Performance has historically been poor, noticeably worse than what is achieved with only a single neuron using an HTM-style learning rule on datasets of similar size. It is suspected that the simplifications made to the P&M learning rule are having a negative effect. Furthermore, P&M report that they are willing to train for an exceptional amount of time, up to 96,000 iterations per neuron. We have never even begun to approach this long a training time, so it is possible that our performance would converge with theirs given more time. This experiment does not correspond to a figure in the paper, but we report our results across an average of 50 trials, using the settings above. 
""" numpy.random.seed(seed) nonlinearity = power_nonlinearity(power) pos_neurons = [Neuron(size = neuron_size, num_dendrites = num_dendrites, dendrite_length = dendrite_length, nonlinearity = nonlinearity, dim = dim*num_bins) for i in range(num_neurons/2)] neg_neurons = [Neuron(size = neuron_size, num_dendrites = num_dendrites, dendrite_length = dendrite_length, nonlinearity = nonlinearity, dim = dim*num_bins) for i in range(num_neurons/2)] #pos, neg = generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2), generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2) pos, neg = generate_data(dim = dim, num_bins = num_bins, num_samples = num_samples, sparse = True) if (pos.nRows() > num_dendrites*len(pos_neurons)): print "Too much data to have unique dendrites for positive neurons, clustering" pos = pos.toDense() model = KMeans(n_clusters = len(pos_neurons), n_jobs=1) clusters = model.fit_predict(pos) neuron_data = [SM32() for i in range(len(pos_neurons))] for datapoint, cluster in zip(pos, clusters): neuron_data[cluster].append(SM32([datapoint])) for i, neuron in enumerate(pos_neurons): neuron.HTM_style_initialize_on_data(neuron_data[i], [1 for i in range(neuron_data[i].nRows())]) pos = SM32(pos) else: print "Directly initializing positive neurons with unique dendrites" neuron_data = split_sparse_matrix(pos, len(pos_neurons)) for neuron, data in zip(pos_neurons, neuron_data): neuron.HTM_style_initialize_on_data(data, [1 for i in range(data.nRows())]) if (neg.nRows() > num_dendrites*len(neg_neurons)): print "Too much data to have unique dendrites for negative neurons, clustering" neg = neg.toDense() model = KMeans(n_clusters = len(neg_neurons), n_jobs=1) clusters = model.fit_predict(neg) neuron_data = [SM32() for i in range(len(neg_neurons))] for datapoint, cluster in zip(neg, clusters): neuron_data[cluster].append(SM32([datapoint])) for i, neuron in enumerate(neg_neurons): 
neuron.HTM_style_initialize_on_data(neuron_data[i], [1 for i in range(neuron_data[i].nRows())]) neg = SM32(neg) else: print "Directly initializing negative neurons with unique dendrites" neuron_data = split_sparse_matrix(neg, len(neg_neurons)) for neuron, data in zip(neg_neurons, neuron_data): neuron.HTM_style_initialize_on_data(data, [1 for i in range(data.nRows())]) print "Calculating error" labels = [1 for i in range(pos.nRows())] + [-1 for i in range(neg.nRows())] data = pos data.append(neg) error, fp, fn = get_error(data, labels, pos_neurons, neg_neurons) print "Error at initialization is {}, with {} false positives and {} false negatives".format(error, fp, fn) return error
[ "def", "run_initialization_experiment", "(", "seed", ",", "num_neurons", "=", "50", ",", "dim", "=", "40", ",", "num_bins", "=", "10", ",", "num_samples", "=", "50", "*", "600", ",", "neuron_size", "=", "10000", ",", "num_dendrites", "=", "400", ",", "dendrite_length", "=", "25", ",", "power", "=", "10", ",", ")", ":", "numpy", ".", "random", ".", "seed", "(", "seed", ")", "nonlinearity", "=", "power_nonlinearity", "(", "power", ")", "pos_neurons", "=", "[", "Neuron", "(", "size", "=", "neuron_size", ",", "num_dendrites", "=", "num_dendrites", ",", "dendrite_length", "=", "dendrite_length", ",", "nonlinearity", "=", "nonlinearity", ",", "dim", "=", "dim", "*", "num_bins", ")", "for", "i", "in", "range", "(", "num_neurons", "/", "2", ")", "]", "neg_neurons", "=", "[", "Neuron", "(", "size", "=", "neuron_size", ",", "num_dendrites", "=", "num_dendrites", ",", "dendrite_length", "=", "dendrite_length", ",", "nonlinearity", "=", "nonlinearity", ",", "dim", "=", "dim", "*", "num_bins", ")", "for", "i", "in", "range", "(", "num_neurons", "/", "2", ")", "]", "#pos, neg = generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2), generate_evenly_distributed_data_sparse(dim = 400, num_active = 40, num_samples = num_samples/2)", "pos", ",", "neg", "=", "generate_data", "(", "dim", "=", "dim", ",", "num_bins", "=", "num_bins", ",", "num_samples", "=", "num_samples", ",", "sparse", "=", "True", ")", "if", "(", "pos", ".", "nRows", "(", ")", ">", "num_dendrites", "*", "len", "(", "pos_neurons", ")", ")", ":", "print", "\"Too much data to have unique dendrites for positive neurons, clustering\"", "pos", "=", "pos", ".", "toDense", "(", ")", "model", "=", "KMeans", "(", "n_clusters", "=", "len", "(", "pos_neurons", ")", ",", "n_jobs", "=", "1", ")", "clusters", "=", "model", ".", "fit_predict", "(", "pos", ")", "neuron_data", "=", "[", "SM32", "(", ")", "for", "i", "in", "range", "(", "len", "(", "pos_neurons", ")", ")", "]", "for", 
"datapoint", ",", "cluster", "in", "zip", "(", "pos", ",", "clusters", ")", ":", "neuron_data", "[", "cluster", "]", ".", "append", "(", "SM32", "(", "[", "datapoint", "]", ")", ")", "for", "i", ",", "neuron", "in", "enumerate", "(", "pos_neurons", ")", ":", "neuron", ".", "HTM_style_initialize_on_data", "(", "neuron_data", "[", "i", "]", ",", "[", "1", "for", "i", "in", "range", "(", "neuron_data", "[", "i", "]", ".", "nRows", "(", ")", ")", "]", ")", "pos", "=", "SM32", "(", "pos", ")", "else", ":", "print", "\"Directly initializing positive neurons with unique dendrites\"", "neuron_data", "=", "split_sparse_matrix", "(", "pos", ",", "len", "(", "pos_neurons", ")", ")", "for", "neuron", ",", "data", "in", "zip", "(", "pos_neurons", ",", "neuron_data", ")", ":", "neuron", ".", "HTM_style_initialize_on_data", "(", "data", ",", "[", "1", "for", "i", "in", "range", "(", "data", ".", "nRows", "(", ")", ")", "]", ")", "if", "(", "neg", ".", "nRows", "(", ")", ">", "num_dendrites", "*", "len", "(", "neg_neurons", ")", ")", ":", "print", "\"Too much data to have unique dendrites for negative neurons, clustering\"", "neg", "=", "neg", ".", "toDense", "(", ")", "model", "=", "KMeans", "(", "n_clusters", "=", "len", "(", "neg_neurons", ")", ",", "n_jobs", "=", "1", ")", "clusters", "=", "model", ".", "fit_predict", "(", "neg", ")", "neuron_data", "=", "[", "SM32", "(", ")", "for", "i", "in", "range", "(", "len", "(", "neg_neurons", ")", ")", "]", "for", "datapoint", ",", "cluster", "in", "zip", "(", "neg", ",", "clusters", ")", ":", "neuron_data", "[", "cluster", "]", ".", "append", "(", "SM32", "(", "[", "datapoint", "]", ")", ")", "for", "i", ",", "neuron", "in", "enumerate", "(", "neg_neurons", ")", ":", "neuron", ".", "HTM_style_initialize_on_data", "(", "neuron_data", "[", "i", "]", ",", "[", "1", "for", "i", "in", "range", "(", "neuron_data", "[", "i", "]", ".", "nRows", "(", ")", ")", "]", ")", "neg", "=", "SM32", "(", "neg", ")", "else", ":", "print", "\"Directly 
initializing negative neurons with unique dendrites\"", "neuron_data", "=", "split_sparse_matrix", "(", "neg", ",", "len", "(", "neg_neurons", ")", ")", "for", "neuron", ",", "data", "in", "zip", "(", "neg_neurons", ",", "neuron_data", ")", ":", "neuron", ".", "HTM_style_initialize_on_data", "(", "data", ",", "[", "1", "for", "i", "in", "range", "(", "data", ".", "nRows", "(", ")", ")", "]", ")", "print", "\"Calculating error\"", "labels", "=", "[", "1", "for", "i", "in", "range", "(", "pos", ".", "nRows", "(", ")", ")", "]", "+", "[", "-", "1", "for", "i", "in", "range", "(", "neg", ".", "nRows", "(", ")", ")", "]", "data", "=", "pos", "data", ".", "append", "(", "neg", ")", "error", ",", "fp", ",", "fn", "=", "get_error", "(", "data", ",", "labels", ",", "pos_neurons", ",", "neg_neurons", ")", "print", "\"Error at initialization is {}, with {} false positives and {} false negatives\"", ".", "format", "(", "error", ",", "fp", ",", "fn", ")", "return", "error" ]
51.195122
0.020332
def poisson(grid, spacing=None, dtype=float, format=None, type='FD'): """Return a sparse matrix for the N-dimensional Poisson problem. The matrix represents a finite Difference approximation to the Poisson problem on a regular n-dimensional grid with unit grid spacing and Dirichlet boundary conditions. Parameters ---------- grid : tuple of integers grid dimensions e.g. (100,100) Notes ----- The matrix is symmetric and positive definite (SPD). Examples -------- >>> from pyamg.gallery import poisson >>> # 4 nodes in one dimension >>> poisson( (4,) ).todense() matrix([[ 2., -1., 0., 0.], [-1., 2., -1., 0.], [ 0., -1., 2., -1.], [ 0., 0., -1., 2.]]) >>> # rectangular two dimensional grid >>> poisson( (2,3) ).todense() matrix([[ 4., -1., 0., -1., 0., 0.], [-1., 4., -1., 0., -1., 0.], [ 0., -1., 4., 0., 0., -1.], [-1., 0., 0., 4., -1., 0.], [ 0., -1., 0., -1., 4., -1.], [ 0., 0., -1., 0., -1., 4.]]) """ grid = tuple(grid) N = len(grid) # grid dimension if N < 1 or min(grid) < 1: raise ValueError('invalid grid shape: %s' % str(grid)) # create N-dimension Laplacian stencil if type == 'FD': stencil = np.zeros((3,) * N, dtype=dtype) for i in range(N): stencil[(1,)*i + (0,) + (1,)*(N-i-1)] = -1 stencil[(1,)*i + (2,) + (1,)*(N-i-1)] = -1 stencil[(1,)*N] = 2*N if type == 'FE': stencil = -np.ones((3,) * N, dtype=dtype) stencil[(1,)*N] = 3**N - 1 return stencil_grid(stencil, grid, format=format)
[ "def", "poisson", "(", "grid", ",", "spacing", "=", "None", ",", "dtype", "=", "float", ",", "format", "=", "None", ",", "type", "=", "'FD'", ")", ":", "grid", "=", "tuple", "(", "grid", ")", "N", "=", "len", "(", "grid", ")", "# grid dimension", "if", "N", "<", "1", "or", "min", "(", "grid", ")", "<", "1", ":", "raise", "ValueError", "(", "'invalid grid shape: %s'", "%", "str", "(", "grid", ")", ")", "# create N-dimension Laplacian stencil", "if", "type", "==", "'FD'", ":", "stencil", "=", "np", ".", "zeros", "(", "(", "3", ",", ")", "*", "N", ",", "dtype", "=", "dtype", ")", "for", "i", "in", "range", "(", "N", ")", ":", "stencil", "[", "(", "1", ",", ")", "*", "i", "+", "(", "0", ",", ")", "+", "(", "1", ",", ")", "*", "(", "N", "-", "i", "-", "1", ")", "]", "=", "-", "1", "stencil", "[", "(", "1", ",", ")", "*", "i", "+", "(", "2", ",", ")", "+", "(", "1", ",", ")", "*", "(", "N", "-", "i", "-", "1", ")", "]", "=", "-", "1", "stencil", "[", "(", "1", ",", ")", "*", "N", "]", "=", "2", "*", "N", "if", "type", "==", "'FE'", ":", "stencil", "=", "-", "np", ".", "ones", "(", "(", "3", ",", ")", "*", "N", ",", "dtype", "=", "dtype", ")", "stencil", "[", "(", "1", ",", ")", "*", "N", "]", "=", "3", "**", "N", "-", "1", "return", "stencil_grid", "(", "stencil", ",", "grid", ",", "format", "=", "format", ")" ]
29.803571
0.00058
def summarize_video_metrics(hook_args): """Computes video metrics summaries using the decoder output.""" problem_name = hook_args.problem.name current_problem = hook_args.problem hparams = hook_args.hparams output_dirs = hook_args.output_dirs predictions = hook_args.predictions frame_shape = [ current_problem.frame_height, current_problem.frame_width, current_problem.num_channels ] metrics_graph = tf.Graph() with metrics_graph.as_default(): if predictions: metrics_results, _ = video_metrics.compute_video_metrics_from_predictions( predictions, decode_hparams=hook_args.decode_hparams) else: metrics_results, _ = video_metrics.compute_video_metrics_from_png_files( output_dirs, problem_name, hparams.video_num_target_frames, frame_shape) summary_values = [] for name, array in six.iteritems(metrics_results): for ind, val in enumerate(array): tag = "metric_{}/{}".format(name, ind) summary_values.append(tf.Summary.Value(tag=tag, simple_value=val)) return summary_values
[ "def", "summarize_video_metrics", "(", "hook_args", ")", ":", "problem_name", "=", "hook_args", ".", "problem", ".", "name", "current_problem", "=", "hook_args", ".", "problem", "hparams", "=", "hook_args", ".", "hparams", "output_dirs", "=", "hook_args", ".", "output_dirs", "predictions", "=", "hook_args", ".", "predictions", "frame_shape", "=", "[", "current_problem", ".", "frame_height", ",", "current_problem", ".", "frame_width", ",", "current_problem", ".", "num_channels", "]", "metrics_graph", "=", "tf", ".", "Graph", "(", ")", "with", "metrics_graph", ".", "as_default", "(", ")", ":", "if", "predictions", ":", "metrics_results", ",", "_", "=", "video_metrics", ".", "compute_video_metrics_from_predictions", "(", "predictions", ",", "decode_hparams", "=", "hook_args", ".", "decode_hparams", ")", "else", ":", "metrics_results", ",", "_", "=", "video_metrics", ".", "compute_video_metrics_from_png_files", "(", "output_dirs", ",", "problem_name", ",", "hparams", ".", "video_num_target_frames", ",", "frame_shape", ")", "summary_values", "=", "[", "]", "for", "name", ",", "array", "in", "six", ".", "iteritems", "(", "metrics_results", ")", ":", "for", "ind", ",", "val", "in", "enumerate", "(", "array", ")", ":", "tag", "=", "\"metric_{}/{}\"", ".", "format", "(", "name", ",", "ind", ")", "summary_values", ".", "append", "(", "tf", ".", "Summary", ".", "Value", "(", "tag", "=", "tag", ",", "simple_value", "=", "val", ")", ")", "return", "summary_values" ]
39
0.016682
def parallel_runners(name, runners, **kwargs): # pylint: disable=unused-argument ''' Executes multiple runner modules on the master in parallel. .. versionadded:: 2017.x.0 (Nitrogen) A separate thread is spawned for each runner. This state is intended to be used with the orchestrate runner in place of the ``saltmod.runner`` state when different tasks should be run in parallel. In general, Salt states are not safe when used concurrently, so ensure that they are used in a safe way (e.g. by only targeting separate minions in parallel tasks). name: name identifying this state. The name is provided as part of the output, but not used for anything else. runners: list of runners that should be run in parallel. Each element of the list has to be a dictionary. This dictionary's name entry stores the name of the runner function that shall be invoked. The optional kwarg entry stores a dictionary of named arguments that are passed to the runner function. .. code-block:: yaml parallel-state: salt.parallel_runners: - runners: my_runner_1: - name: state.orchestrate - kwarg: mods: orchestrate_state_1 my_runner_2: - name: state.orchestrate - kwarg: mods: orchestrate_state_2 ''' # For the sake of consistency, we treat a single string in the same way as # a key without a value. This allows something like # salt.parallel_runners: # - runners: # state.orchestrate # Obviously, this will only work if the specified runner does not need any # arguments. if isinstance(runners, six.string_types): runners = {runners: [{name: runners}]} # If the runners argument is not a string, it must be a dict. Everything # else is considered an error. if not isinstance(runners, dict): return { 'name': name, 'result': False, 'changes': {}, 'comment': 'The runners parameter must be a string or dict.' } # The configuration for each runner is given as a list of key-value pairs. # This is not very useful for what we want to do, but it is the typical # style used in Salt. 
For further processing, we convert each of these # lists to a dict. This also makes it easier to check whether a name has # been specified explicitly. for runner_id, runner_config in six.iteritems(runners): if runner_config is None: runner_config = {} else: runner_config = salt.utils.data.repack_dictlist(runner_config) if 'name' not in runner_config: runner_config['name'] = runner_id runners[runner_id] = runner_config try: jid = __orchestration_jid__ except NameError: log.debug( 'Unable to fire args event due to missing __orchestration_jid__') jid = None def call_runner(runner_config): return __salt__['saltutil.runner'](runner_config['name'], __orchestration_jid__=jid, __env__=__env__, full_return=True, **(runner_config.get('kwarg', {}))) try: outputs = _parallel_map(call_runner, list(six.itervalues(runners))) except salt.exceptions.SaltException as exc: return { 'name': name, 'result': False, 'success': False, 'changes': {}, 'comment': 'One of the runners raised an exception: {0}'.format( exc) } # We bundle the results of the runners with the IDs of the runners so that # we can easily identify which output belongs to which runner. At the same # time we exctract the actual return value of the runner (saltutil.runner # adds some extra information that is not interesting to us). outputs = { runner_id: out['return']for runner_id, out in six.moves.zip(six.iterkeys(runners), outputs) } # If each of the runners returned its output in the format compatible with # the 'highstate' outputter, we can leverage this fact when merging the # outputs. highstate_output = all( [out.get('outputter', '') == 'highstate' and 'data' in out for out in six.itervalues(outputs)] ) # The following helper function is used to extract changes from highstate # output. 
def extract_changes(obj): if not isinstance(obj, dict): return {} elif 'changes' in obj: if (isinstance(obj['changes'], dict) and obj['changes'].get('out', '') == 'highstate' and 'ret' in obj['changes']): return obj['changes']['ret'] else: return obj['changes'] else: found_changes = {} for key, value in six.iteritems(obj): change = extract_changes(value) if change: found_changes[key] = change return found_changes if highstate_output: failed_runners = [runner_id for runner_id, out in six.iteritems(outputs) if out['data'].get('retcode', 0) != 0] all_successful = not failed_runners if all_successful: comment = 'All runner functions executed successfully.' else: runner_comments = [ 'Runner {0} failed with return value:\n{1}'.format( runner_id, salt.output.out_format(outputs[runner_id], 'nested', __opts__, nested_indent=2) ) for runner_id in failed_runners ] comment = '\n'.join(runner_comments) changes = {} for runner_id, out in six.iteritems(outputs): runner_changes = extract_changes(out['data']) if runner_changes: changes[runner_id] = runner_changes else: failed_runners = [runner_id for runner_id, out in six.iteritems(outputs) if out.get('exit_code', 0) != 0] all_successful = not failed_runners if all_successful: comment = 'All runner functions executed successfully.' else: if len(failed_runners) == 1: comment = 'Runner {0} failed.'.format(failed_runners[0]) else: comment =\ 'Runners {0} failed.'.format(', '.join(failed_runners)) changes = {'ret': { runner_id: out for runner_id, out in six.iteritems(outputs) }} ret = { 'name': name, 'result': all_successful, 'changes': changes, 'comment': comment } # The 'runner' function includes out['jid'] as '__jid__' in the returned # dict, but we cannot do this here because we have more than one JID if # we have more than one runner. return ret
[ "def", "parallel_runners", "(", "name", ",", "runners", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=unused-argument", "# For the sake of consistency, we treat a single string in the same way as", "# a key without a value. This allows something like", "# salt.parallel_runners:", "# - runners:", "# state.orchestrate", "# Obviously, this will only work if the specified runner does not need any", "# arguments.", "if", "isinstance", "(", "runners", ",", "six", ".", "string_types", ")", ":", "runners", "=", "{", "runners", ":", "[", "{", "name", ":", "runners", "}", "]", "}", "# If the runners argument is not a string, it must be a dict. Everything", "# else is considered an error.", "if", "not", "isinstance", "(", "runners", ",", "dict", ")", ":", "return", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "'The runners parameter must be a string or dict.'", "}", "# The configuration for each runner is given as a list of key-value pairs.", "# This is not very useful for what we want to do, but it is the typical", "# style used in Salt. For further processing, we convert each of these", "# lists to a dict. 
This also makes it easier to check whether a name has", "# been specified explicitly.", "for", "runner_id", ",", "runner_config", "in", "six", ".", "iteritems", "(", "runners", ")", ":", "if", "runner_config", "is", "None", ":", "runner_config", "=", "{", "}", "else", ":", "runner_config", "=", "salt", ".", "utils", ".", "data", ".", "repack_dictlist", "(", "runner_config", ")", "if", "'name'", "not", "in", "runner_config", ":", "runner_config", "[", "'name'", "]", "=", "runner_id", "runners", "[", "runner_id", "]", "=", "runner_config", "try", ":", "jid", "=", "__orchestration_jid__", "except", "NameError", ":", "log", ".", "debug", "(", "'Unable to fire args event due to missing __orchestration_jid__'", ")", "jid", "=", "None", "def", "call_runner", "(", "runner_config", ")", ":", "return", "__salt__", "[", "'saltutil.runner'", "]", "(", "runner_config", "[", "'name'", "]", ",", "__orchestration_jid__", "=", "jid", ",", "__env__", "=", "__env__", ",", "full_return", "=", "True", ",", "*", "*", "(", "runner_config", ".", "get", "(", "'kwarg'", ",", "{", "}", ")", ")", ")", "try", ":", "outputs", "=", "_parallel_map", "(", "call_runner", ",", "list", "(", "six", ".", "itervalues", "(", "runners", ")", ")", ")", "except", "salt", ".", "exceptions", ".", "SaltException", "as", "exc", ":", "return", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'success'", ":", "False", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "'One of the runners raised an exception: {0}'", ".", "format", "(", "exc", ")", "}", "# We bundle the results of the runners with the IDs of the runners so that", "# we can easily identify which output belongs to which runner. 
At the same", "# time we exctract the actual return value of the runner (saltutil.runner", "# adds some extra information that is not interesting to us).", "outputs", "=", "{", "runner_id", ":", "out", "[", "'return'", "]", "for", "runner_id", ",", "out", "in", "six", ".", "moves", ".", "zip", "(", "six", ".", "iterkeys", "(", "runners", ")", ",", "outputs", ")", "}", "# If each of the runners returned its output in the format compatible with", "# the 'highstate' outputter, we can leverage this fact when merging the", "# outputs.", "highstate_output", "=", "all", "(", "[", "out", ".", "get", "(", "'outputter'", ",", "''", ")", "==", "'highstate'", "and", "'data'", "in", "out", "for", "out", "in", "six", ".", "itervalues", "(", "outputs", ")", "]", ")", "# The following helper function is used to extract changes from highstate", "# output.", "def", "extract_changes", "(", "obj", ")", ":", "if", "not", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "{", "}", "elif", "'changes'", "in", "obj", ":", "if", "(", "isinstance", "(", "obj", "[", "'changes'", "]", ",", "dict", ")", "and", "obj", "[", "'changes'", "]", ".", "get", "(", "'out'", ",", "''", ")", "==", "'highstate'", "and", "'ret'", "in", "obj", "[", "'changes'", "]", ")", ":", "return", "obj", "[", "'changes'", "]", "[", "'ret'", "]", "else", ":", "return", "obj", "[", "'changes'", "]", "else", ":", "found_changes", "=", "{", "}", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "obj", ")", ":", "change", "=", "extract_changes", "(", "value", ")", "if", "change", ":", "found_changes", "[", "key", "]", "=", "change", "return", "found_changes", "if", "highstate_output", ":", "failed_runners", "=", "[", "runner_id", "for", "runner_id", ",", "out", "in", "six", ".", "iteritems", "(", "outputs", ")", "if", "out", "[", "'data'", "]", ".", "get", "(", "'retcode'", ",", "0", ")", "!=", "0", "]", "all_successful", "=", "not", "failed_runners", "if", "all_successful", ":", "comment", "=", "'All 
runner functions executed successfully.'", "else", ":", "runner_comments", "=", "[", "'Runner {0} failed with return value:\\n{1}'", ".", "format", "(", "runner_id", ",", "salt", ".", "output", ".", "out_format", "(", "outputs", "[", "runner_id", "]", ",", "'nested'", ",", "__opts__", ",", "nested_indent", "=", "2", ")", ")", "for", "runner_id", "in", "failed_runners", "]", "comment", "=", "'\\n'", ".", "join", "(", "runner_comments", ")", "changes", "=", "{", "}", "for", "runner_id", ",", "out", "in", "six", ".", "iteritems", "(", "outputs", ")", ":", "runner_changes", "=", "extract_changes", "(", "out", "[", "'data'", "]", ")", "if", "runner_changes", ":", "changes", "[", "runner_id", "]", "=", "runner_changes", "else", ":", "failed_runners", "=", "[", "runner_id", "for", "runner_id", ",", "out", "in", "six", ".", "iteritems", "(", "outputs", ")", "if", "out", ".", "get", "(", "'exit_code'", ",", "0", ")", "!=", "0", "]", "all_successful", "=", "not", "failed_runners", "if", "all_successful", ":", "comment", "=", "'All runner functions executed successfully.'", "else", ":", "if", "len", "(", "failed_runners", ")", "==", "1", ":", "comment", "=", "'Runner {0} failed.'", ".", "format", "(", "failed_runners", "[", "0", "]", ")", "else", ":", "comment", "=", "'Runners {0} failed.'", ".", "format", "(", "', '", ".", "join", "(", "failed_runners", ")", ")", "changes", "=", "{", "'ret'", ":", "{", "runner_id", ":", "out", "for", "runner_id", ",", "out", "in", "six", ".", "iteritems", "(", "outputs", ")", "}", "}", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "all_successful", ",", "'changes'", ":", "changes", ",", "'comment'", ":", "comment", "}", "# The 'runner' function includes out['jid'] as '__jid__' in the returned", "# dict, but we cannot do this here because we have more than one JID if", "# we have more than one runner.", "return", "ret" ]
39.494505
0.000271
def save(sources, targets, masked=False): """ Save the numeric results of each source into its corresponding target. Parameters ---------- sources: list The list of source arrays for saving from; limited to length 1. targets: list The list of target arrays for saving to; limited to length 1. masked: boolean Uses a masked array from sources if True. """ # TODO: Remove restriction assert len(sources) == 1 and len(targets) == 1 array = sources[0] target = targets[0] # Request bitesize pieces of the source and assign them to the # target. # NB. This algorithm does not use the minimal number of chunks. # e.g. If the second dimension could be sliced as 0:99, 99:100 # then clearly the first dimension would have to be single # slices for the 0:99 case, but could be bigger slices for the # 99:100 case. # It's not yet clear if this really matters. all_slices = _all_slices(array) for index in np.ndindex(*[len(slices) for slices in all_slices]): keys = tuple(slices[i] for slices, i in zip(all_slices, index)) if masked: target[keys] = array[keys].masked_array() else: target[keys] = array[keys].ndarray()
[ "def", "save", "(", "sources", ",", "targets", ",", "masked", "=", "False", ")", ":", "# TODO: Remove restriction", "assert", "len", "(", "sources", ")", "==", "1", "and", "len", "(", "targets", ")", "==", "1", "array", "=", "sources", "[", "0", "]", "target", "=", "targets", "[", "0", "]", "# Request bitesize pieces of the source and assign them to the", "# target.", "# NB. This algorithm does not use the minimal number of chunks.", "# e.g. If the second dimension could be sliced as 0:99, 99:100", "# then clearly the first dimension would have to be single", "# slices for the 0:99 case, but could be bigger slices for the", "# 99:100 case.", "# It's not yet clear if this really matters.", "all_slices", "=", "_all_slices", "(", "array", ")", "for", "index", "in", "np", ".", "ndindex", "(", "*", "[", "len", "(", "slices", ")", "for", "slices", "in", "all_slices", "]", ")", ":", "keys", "=", "tuple", "(", "slices", "[", "i", "]", "for", "slices", ",", "i", "in", "zip", "(", "all_slices", ",", "index", ")", ")", "if", "masked", ":", "target", "[", "keys", "]", "=", "array", "[", "keys", "]", ".", "masked_array", "(", ")", "else", ":", "target", "[", "keys", "]", "=", "array", "[", "keys", "]", ".", "ndarray", "(", ")" ]
36.970588
0.000775
def get_season_code_from_name(self, season_name) -> int: """ Args: season_name: season name Returns: season code """ self.validator_season_name.validate(season_name, 'get_season_code_from_name') return self.seasons_enum[season_name]
[ "def", "get_season_code_from_name", "(", "self", ",", "season_name", ")", "->", "int", ":", "self", ".", "validator_season_name", ".", "validate", "(", "season_name", ",", "'get_season_code_from_name'", ")", "return", "self", ".", "seasons_enum", "[", "season_name", "]" ]
31.666667
0.010239
def check_arguments(c: typing.Callable, hints: typing.Mapping[str, typing.Optional[type]], *args, **kwargs) -> None: """Check arguments type, raise :class:`TypeError` if argument type is not expected type. :param c: callable object want to check types :param hints: assumed type of given ``c`` result of :func:`typing.get_type_hints` """ signature = inspect.signature(c) bound = signature.bind(*args, **kwargs) for argument_name, value in bound.arguments.items(): try: type_hint = hints[argument_name] except KeyError: continue actual_type, correct = check_type(value, type_hint) if not correct: raise TypeError( 'Incorrect type `{}`, expected `{}` for `{}`'.format( actual_type, type_hint, argument_name ) )
[ "def", "check_arguments", "(", "c", ":", "typing", ".", "Callable", ",", "hints", ":", "typing", ".", "Mapping", "[", "str", ",", "typing", ".", "Optional", "[", "type", "]", "]", ",", "*", "args", ",", "*", "*", "kwargs", ")", "->", "None", ":", "signature", "=", "inspect", ".", "signature", "(", "c", ")", "bound", "=", "signature", ".", "bind", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "argument_name", ",", "value", "in", "bound", ".", "arguments", ".", "items", "(", ")", ":", "try", ":", "type_hint", "=", "hints", "[", "argument_name", "]", "except", "KeyError", ":", "continue", "actual_type", ",", "correct", "=", "check_type", "(", "value", ",", "type_hint", ")", "if", "not", "correct", ":", "raise", "TypeError", "(", "'Incorrect type `{}`, expected `{}` for `{}`'", ".", "format", "(", "actual_type", ",", "type_hint", ",", "argument_name", ")", ")" ]
36.32
0.001073
def _reconnect(self, errorState): """ Attempt to reconnect. If the current back-off delay is 0, L{connect} is called. Otherwise, it will cause a transition to the C{'waiting'} state, ultimately causing a call to L{connect} when the delay expires. """ def connect(): if self.noisy: log.msg("Reconnecting now.") self.connect() backOff = self.backOffs[errorState] if self._errorState != errorState or self._delay is None: self._errorState = errorState self._delay = backOff['initial'] else: self._delay = min(backOff['max'], self._delay * backOff['factor']) if self._delay == 0: connect() else: self._reconnectDelayedCall = self.reactor.callLater(self._delay, connect) self._toState('waiting')
[ "def", "_reconnect", "(", "self", ",", "errorState", ")", ":", "def", "connect", "(", ")", ":", "if", "self", ".", "noisy", ":", "log", ".", "msg", "(", "\"Reconnecting now.\"", ")", "self", ".", "connect", "(", ")", "backOff", "=", "self", ".", "backOffs", "[", "errorState", "]", "if", "self", ".", "_errorState", "!=", "errorState", "or", "self", ".", "_delay", "is", "None", ":", "self", ".", "_errorState", "=", "errorState", "self", ".", "_delay", "=", "backOff", "[", "'initial'", "]", "else", ":", "self", ".", "_delay", "=", "min", "(", "backOff", "[", "'max'", "]", ",", "self", ".", "_delay", "*", "backOff", "[", "'factor'", "]", ")", "if", "self", ".", "_delay", "==", "0", ":", "connect", "(", ")", "else", ":", "self", ".", "_reconnectDelayedCall", "=", "self", ".", "reactor", ".", "callLater", "(", "self", ".", "_delay", ",", "connect", ")", "self", ".", "_toState", "(", "'waiting'", ")" ]
34.814815
0.00207
def read_dictionary_file(dictionary_path): """Return all words in dictionary file as set.""" try: return _user_dictionary_cache[dictionary_path] except KeyError: if dictionary_path and os.path.exists(dictionary_path): with open(dictionary_path, "rt") as dict_f: words = set(re.findall(r"(\w[\w']*\w|\w)", " ".join(dict_f.read().splitlines()))) return words return set()
[ "def", "read_dictionary_file", "(", "dictionary_path", ")", ":", "try", ":", "return", "_user_dictionary_cache", "[", "dictionary_path", "]", "except", "KeyError", ":", "if", "dictionary_path", "and", "os", ".", "path", ".", "exists", "(", "dictionary_path", ")", ":", "with", "open", "(", "dictionary_path", ",", "\"rt\"", ")", "as", "dict_f", ":", "words", "=", "set", "(", "re", ".", "findall", "(", "r\"(\\w[\\w']*\\w|\\w)\"", ",", "\" \"", ".", "join", "(", "dict_f", ".", "read", "(", ")", ".", "splitlines", "(", ")", ")", ")", ")", "return", "words", "return", "set", "(", ")" ]
39.833333
0.002045
def resize(image, x, y, stretch=False, top=None, left=None, mode='RGB', resample=None): """Return an image resized.""" if x <= 0: raise ValueError('x must be greater than zero') if y <= 0: raise ValueError('y must be greater than zero') from PIL import Image resample = Image.ANTIALIAS if resample is None else resample if not isinstance(resample, numbers.Number): try: resample = getattr(Image, resample.upper()) except: raise ValueError("(1) Didn't understand resample=%s" % resample) if not isinstance(resample, numbers.Number): raise ValueError("(2) Didn't understand resample=%s" % resample) size = x, y if stretch: return image.resize(size, resample=resample) result = Image.new(mode, size) ratios = [d1 / d2 for d1, d2 in zip(size, image.size)] if ratios[0] < ratios[1]: new_size = (size[0], int(image.size[1] * ratios[0])) else: new_size = (int(image.size[0] * ratios[1]), size[1]) image = image.resize(new_size, resample=resample) if left is None: box_x = int((x - new_size[0]) / 2) elif left: box_x = 0 else: box_x = x - new_size[0] if top is None: box_y = int((y - new_size[1]) / 2) elif top: box_y = 0 else: box_y = y - new_size[1] result.paste(image, box=(box_x, box_y)) return result
[ "def", "resize", "(", "image", ",", "x", ",", "y", ",", "stretch", "=", "False", ",", "top", "=", "None", ",", "left", "=", "None", ",", "mode", "=", "'RGB'", ",", "resample", "=", "None", ")", ":", "if", "x", "<=", "0", ":", "raise", "ValueError", "(", "'x must be greater than zero'", ")", "if", "y", "<=", "0", ":", "raise", "ValueError", "(", "'y must be greater than zero'", ")", "from", "PIL", "import", "Image", "resample", "=", "Image", ".", "ANTIALIAS", "if", "resample", "is", "None", "else", "resample", "if", "not", "isinstance", "(", "resample", ",", "numbers", ".", "Number", ")", ":", "try", ":", "resample", "=", "getattr", "(", "Image", ",", "resample", ".", "upper", "(", ")", ")", "except", ":", "raise", "ValueError", "(", "\"(1) Didn't understand resample=%s\"", "%", "resample", ")", "if", "not", "isinstance", "(", "resample", ",", "numbers", ".", "Number", ")", ":", "raise", "ValueError", "(", "\"(2) Didn't understand resample=%s\"", "%", "resample", ")", "size", "=", "x", ",", "y", "if", "stretch", ":", "return", "image", ".", "resize", "(", "size", ",", "resample", "=", "resample", ")", "result", "=", "Image", ".", "new", "(", "mode", ",", "size", ")", "ratios", "=", "[", "d1", "/", "d2", "for", "d1", ",", "d2", "in", "zip", "(", "size", ",", "image", ".", "size", ")", "]", "if", "ratios", "[", "0", "]", "<", "ratios", "[", "1", "]", ":", "new_size", "=", "(", "size", "[", "0", "]", ",", "int", "(", "image", ".", "size", "[", "1", "]", "*", "ratios", "[", "0", "]", ")", ")", "else", ":", "new_size", "=", "(", "int", "(", "image", ".", "size", "[", "0", "]", "*", "ratios", "[", "1", "]", ")", ",", "size", "[", "1", "]", ")", "image", "=", "image", ".", "resize", "(", "new_size", ",", "resample", "=", "resample", ")", "if", "left", "is", "None", ":", "box_x", "=", "int", "(", "(", "x", "-", "new_size", "[", "0", "]", ")", "/", "2", ")", "elif", "left", ":", "box_x", "=", "0", "else", ":", "box_x", "=", "x", "-", "new_size", "[", "0", "]", "if", "top", 
"is", "None", ":", "box_y", "=", "int", "(", "(", "y", "-", "new_size", "[", "1", "]", ")", "/", "2", ")", "elif", "top", ":", "box_y", "=", "0", "else", ":", "box_y", "=", "y", "-", "new_size", "[", "1", "]", "result", ".", "paste", "(", "image", ",", "box", "=", "(", "box_x", ",", "box_y", ")", ")", "return", "result" ]
29.808511
0.001382
def p_expr_shl_expr(p): """ expr : expr SHL expr """ if p[1] is None or p[3] is None: p[0] = None return if p[1].type_ in (TYPE.float_, TYPE.fixed): p[1] = make_typecast(TYPE.ulong, p[1], p.lineno(2)) p[0] = make_binary(p.lineno(2), 'SHL', p[1], make_typecast(TYPE.ubyte, p[3], p.lineno(2)), lambda x, y: x << y)
[ "def", "p_expr_shl_expr", "(", "p", ")", ":", "if", "p", "[", "1", "]", "is", "None", "or", "p", "[", "3", "]", "is", "None", ":", "p", "[", "0", "]", "=", "None", "return", "if", "p", "[", "1", "]", ".", "type_", "in", "(", "TYPE", ".", "float_", ",", "TYPE", ".", "fixed", ")", ":", "p", "[", "1", "]", "=", "make_typecast", "(", "TYPE", ".", "ulong", ",", "p", "[", "1", "]", ",", "p", ".", "lineno", "(", "2", ")", ")", "p", "[", "0", "]", "=", "make_binary", "(", "p", ".", "lineno", "(", "2", ")", ",", "'SHL'", ",", "p", "[", "1", "]", ",", "make_typecast", "(", "TYPE", ".", "ubyte", ",", "p", "[", "3", "]", ",", "p", ".", "lineno", "(", "2", ")", ")", ",", "lambda", "x", ",", "y", ":", "x", "<<", "y", ")" ]
30.153846
0.002475
def render(self): """Render this page and return the rendition. Converts the markdown content to html, and then renders the (mako) template specified in the config, using that html. The task of writing of the rendition to a real file is responsibility of the generate method. """ (pthemedir, ptemplatefname) = self._theme_and_template_fp() mylookup = TemplateLookup(directories=[self.site.dirs['s2'], pthemedir], input_encoding='utf-8', output_encoding='utf-8') makotemplate = Template(filename=ptemplatefname, lookup=mylookup, module_directory=self.site._makodir) # I don't really need to use the meta extension here, because I render self._content (has no metadata) #page_html = markdown.markdown(self._content) md = markdown.Markdown(extensions=['meta','fenced_code', 'codehilite'],output_format="html5") page_html = md.convert(self._content) # need to trigger the conversion to obtain md.Meta # We assume that the page is always in a dir one level below www themepath = "../themes/" + os.path.split(pthemedir)[1] + '/' commonpath = "../common/" # HERE I'll pass the config variable to the mako template, so I can use the title etc. #buf = StringIO() #ctx = Context(buf, dict(pageContent=page_html, isFrontPage=False, themePath=themepath, pageTitle='pedo', # commonPath=commonpath)) #makotemplate.render_context(ctx) #rendition = buf.getvalue() # IS THERE `PIWIK CODE? # IS THERE DISQUS CODE? # READ from s2 if there's disqus_code.html.tpl and piwik_code.html.tpl # if there's piwik, just define the variable piwik_code with its contents # if there's disqus... nested render? # HERE I NEED TO DIRECTLY INCLUDE A TEMPLATE IN ANOTHER TEMPLATE!!! MAKO! 
#d_sn = self.site.site_config['disqus_shortname'] #if d_sn: # the site uses disqus piwik_code = None disqus_code, disqus_shortname, disqus_identifier, disqus_title, disqus_url= None, None, None, None, None piwik_code_tpl = os.path.join(self.site.dirs['s2'],'piwik_code.html.tpl') if os.path.isfile(piwik_code_tpl): piwik_code = '/piwik_code.html.tpl' disqus_code_tpl = os.path.join(self.site.dirs['s2'],'disqus_code.html.tpl') if os.path.isfile(disqus_code_tpl): disqus_code = '/disqus_code.html.tpl' disqus_shortname = self.site.site_config['disqus_shortname'] disqus_identifier = self._config['page_id'][0] disqus_title = self.title disqus_url = os.path.join(self.site.site_config['site_url'],self._slug) rendition = makotemplate.render(pageContent=page_html,isFrontPage=False, themePath=themepath, commonPath=commonpath, pageTitle=self.title, piwik_code=piwik_code, disqus_code=disqus_code, disqus_shortname = disqus_shortname, disqus_identifier = disqus_identifier, disqus_url = disqus_url, disqus_title= disqus_title) return rendition
[ "def", "render", "(", "self", ")", ":", "(", "pthemedir", ",", "ptemplatefname", ")", "=", "self", ".", "_theme_and_template_fp", "(", ")", "mylookup", "=", "TemplateLookup", "(", "directories", "=", "[", "self", ".", "site", ".", "dirs", "[", "'s2'", "]", ",", "pthemedir", "]", ",", "input_encoding", "=", "'utf-8'", ",", "output_encoding", "=", "'utf-8'", ")", "makotemplate", "=", "Template", "(", "filename", "=", "ptemplatefname", ",", "lookup", "=", "mylookup", ",", "module_directory", "=", "self", ".", "site", ".", "_makodir", ")", "# I don't really need to use the meta extension here, because I render self._content (has no metadata)", "#page_html = markdown.markdown(self._content)", "md", "=", "markdown", ".", "Markdown", "(", "extensions", "=", "[", "'meta'", ",", "'fenced_code'", ",", "'codehilite'", "]", ",", "output_format", "=", "\"html5\"", ")", "page_html", "=", "md", ".", "convert", "(", "self", ".", "_content", ")", "# need to trigger the conversion to obtain md.Meta", "# We assume that the page is always in a dir one level below www", "themepath", "=", "\"../themes/\"", "+", "os", ".", "path", ".", "split", "(", "pthemedir", ")", "[", "1", "]", "+", "'/'", "commonpath", "=", "\"../common/\"", "# HERE I'll pass the config variable to the mako template, so I can use the title etc.", "#buf = StringIO()", "#ctx = Context(buf, dict(pageContent=page_html, isFrontPage=False, themePath=themepath, pageTitle='pedo',", "# commonPath=commonpath))", "#makotemplate.render_context(ctx)", "#rendition = buf.getvalue()", "# IS THERE `PIWIK CODE?", "# IS THERE DISQUS CODE?", "# READ from s2 if there's disqus_code.html.tpl and piwik_code.html.tpl", "# if there's piwik, just define the variable piwik_code with its contents", "# if there's disqus... nested render?", "# HERE I NEED TO DIRECTLY INCLUDE A TEMPLATE IN ANOTHER TEMPLATE!!! 
MAKO!", "#d_sn = self.site.site_config['disqus_shortname']", "#if d_sn: # the site uses disqus", "piwik_code", "=", "None", "disqus_code", ",", "disqus_shortname", ",", "disqus_identifier", ",", "disqus_title", ",", "disqus_url", "=", "None", ",", "None", ",", "None", ",", "None", ",", "None", "piwik_code_tpl", "=", "os", ".", "path", ".", "join", "(", "self", ".", "site", ".", "dirs", "[", "'s2'", "]", ",", "'piwik_code.html.tpl'", ")", "if", "os", ".", "path", ".", "isfile", "(", "piwik_code_tpl", ")", ":", "piwik_code", "=", "'/piwik_code.html.tpl'", "disqus_code_tpl", "=", "os", ".", "path", ".", "join", "(", "self", ".", "site", ".", "dirs", "[", "'s2'", "]", ",", "'disqus_code.html.tpl'", ")", "if", "os", ".", "path", ".", "isfile", "(", "disqus_code_tpl", ")", ":", "disqus_code", "=", "'/disqus_code.html.tpl'", "disqus_shortname", "=", "self", ".", "site", ".", "site_config", "[", "'disqus_shortname'", "]", "disqus_identifier", "=", "self", ".", "_config", "[", "'page_id'", "]", "[", "0", "]", "disqus_title", "=", "self", ".", "title", "disqus_url", "=", "os", ".", "path", ".", "join", "(", "self", ".", "site", ".", "site_config", "[", "'site_url'", "]", ",", "self", ".", "_slug", ")", "rendition", "=", "makotemplate", ".", "render", "(", "pageContent", "=", "page_html", ",", "isFrontPage", "=", "False", ",", "themePath", "=", "themepath", ",", "commonPath", "=", "commonpath", ",", "pageTitle", "=", "self", ".", "title", ",", "piwik_code", "=", "piwik_code", ",", "disqus_code", "=", "disqus_code", ",", "disqus_shortname", "=", "disqus_shortname", ",", "disqus_identifier", "=", "disqus_identifier", ",", "disqus_url", "=", "disqus_url", ",", "disqus_title", "=", "disqus_title", ")", "return", "rendition" ]
49.753623
0.010568
def voronoi_neighbors_from_pixels_and_ridge_points(pixels, ridge_points): """Compute the neighbors of every pixel as a list of the pixel index's each pixel shares a vertex with. The ridge points of the Voronoi grid are used to derive this. Parameters ---------- ridge_points : scipy.spatial.Voronoi.ridge_points Each Voronoi-ridge (two indexes representing a pixel mapping_matrix). """ pixel_neighbors_size = np.zeros(shape=(pixels)) for ridge_index in range(ridge_points.shape[0]): pair0 = ridge_points[ridge_index, 0] pair1 = ridge_points[ridge_index, 1] pixel_neighbors_size[pair0] += 1 pixel_neighbors_size[pair1] += 1 pixel_neighbors_index = np.zeros(shape=(pixels)) pixel_neighbors = -1 * np.ones(shape=(pixels, int(np.max(pixel_neighbors_size)))) for ridge_index in range(ridge_points.shape[0]): pair0 = ridge_points[ridge_index, 0] pair1 = ridge_points[ridge_index, 1] pixel_neighbors[pair0, int(pixel_neighbors_index[pair0])] = pair1 pixel_neighbors[pair1, int(pixel_neighbors_index[pair1])] = pair0 pixel_neighbors_index[pair0] += 1 pixel_neighbors_index[pair1] += 1 return pixel_neighbors, pixel_neighbors_size
[ "def", "voronoi_neighbors_from_pixels_and_ridge_points", "(", "pixels", ",", "ridge_points", ")", ":", "pixel_neighbors_size", "=", "np", ".", "zeros", "(", "shape", "=", "(", "pixels", ")", ")", "for", "ridge_index", "in", "range", "(", "ridge_points", ".", "shape", "[", "0", "]", ")", ":", "pair0", "=", "ridge_points", "[", "ridge_index", ",", "0", "]", "pair1", "=", "ridge_points", "[", "ridge_index", ",", "1", "]", "pixel_neighbors_size", "[", "pair0", "]", "+=", "1", "pixel_neighbors_size", "[", "pair1", "]", "+=", "1", "pixel_neighbors_index", "=", "np", ".", "zeros", "(", "shape", "=", "(", "pixels", ")", ")", "pixel_neighbors", "=", "-", "1", "*", "np", ".", "ones", "(", "shape", "=", "(", "pixels", ",", "int", "(", "np", ".", "max", "(", "pixel_neighbors_size", ")", ")", ")", ")", "for", "ridge_index", "in", "range", "(", "ridge_points", ".", "shape", "[", "0", "]", ")", ":", "pair0", "=", "ridge_points", "[", "ridge_index", ",", "0", "]", "pair1", "=", "ridge_points", "[", "ridge_index", ",", "1", "]", "pixel_neighbors", "[", "pair0", ",", "int", "(", "pixel_neighbors_index", "[", "pair0", "]", ")", "]", "=", "pair1", "pixel_neighbors", "[", "pair1", ",", "int", "(", "pixel_neighbors_index", "[", "pair1", "]", ")", "]", "=", "pair0", "pixel_neighbors_index", "[", "pair0", "]", "+=", "1", "pixel_neighbors_index", "[", "pair1", "]", "+=", "1", "return", "pixel_neighbors", ",", "pixel_neighbors_size" ]
39.806452
0.002373
def format_hyperlink( val, hlx, hxl, xhl ): """ Formats an html hyperlink into other forms. @hlx, hxl, xhl: values returned by set_output_format """ if '<a href="' in str(val) and hlx != '<a href="': val = val.replace('<a href="', hlx).replace('">', hxl, 1).replace('</a>', xhl) return val
[ "def", "format_hyperlink", "(", "val", ",", "hlx", ",", "hxl", ",", "xhl", ")", ":", "if", "'<a href=\"'", "in", "str", "(", "val", ")", "and", "hlx", "!=", "'<a href=\"'", ":", "val", "=", "val", ".", "replace", "(", "'<a href=\"'", ",", "hlx", ")", ".", "replace", "(", "'\">'", ",", "hxl", ",", "1", ")", ".", "replace", "(", "'</a>'", ",", "xhl", ")", "return", "val" ]
31.5
0.015432
def fit_apply(fit_result,vec_array): '''fit_apply(fir_result,vec_array) -> vec_array Applies a fit result to an array of vectors ''' return map( lambda x,t1=fit_result[0],mt2=negate(fit_result[1]), m=fit_result[2]: add(t1,transform(m,add(mt2,x))),vec_array)
[ "def", "fit_apply", "(", "fit_result", ",", "vec_array", ")", ":", "return", "map", "(", "lambda", "x", ",", "t1", "=", "fit_result", "[", "0", "]", ",", "mt2", "=", "negate", "(", "fit_result", "[", "1", "]", ")", ",", "m", "=", "fit_result", "[", "2", "]", ":", "add", "(", "t1", ",", "transform", "(", "m", ",", "add", "(", "mt2", ",", "x", ")", ")", ")", ",", "vec_array", ")" ]
35
0.038328
def visualRect(self, index): """The rectangle for the bounds of the item at *index*. :qtdoc:`Re-implemented<QAbstractItemView.visualRect>` :param index: index for the rect you want :type index: :qtdoc:`QModelIndex` :returns: :qtdoc:`QRect` -- rectangle of the borders of the item """ if len(self._rects[index.row()]) -1 < index.column() or index.row() == -1: #Er, so I don't know why this was getting called with index -1 return QtCore.QRect() return self.visualRectRC(index.row(),index.column())
[ "def", "visualRect", "(", "self", ",", "index", ")", ":", "if", "len", "(", "self", ".", "_rects", "[", "index", ".", "row", "(", ")", "]", ")", "-", "1", "<", "index", ".", "column", "(", ")", "or", "index", ".", "row", "(", ")", "==", "-", "1", ":", "#Er, so I don't know why this was getting called with index -1", "return", "QtCore", ".", "QRect", "(", ")", "return", "self", ".", "visualRectRC", "(", "index", ".", "row", "(", ")", ",", "index", ".", "column", "(", ")", ")" ]
47.583333
0.013746
def graph_impl(self, run, tag, is_conceptual, limit_attr_size=None, large_attrs_key=None): """Result of the form `(body, mime_type)`, or `None` if no graph exists.""" if is_conceptual: tensor_events = self._multiplexer.Tensors(run, tag) # Take the first event if there are multiple events written from different # steps. keras_model_config = json.loads(tensor_events[0].tensor_proto.string_val[0]) graph = keras_util.keras_model_to_graph_def(keras_model_config) elif tag: tensor_events = self._multiplexer.Tensors(run, tag) # Take the first event if there are multiple events written from different # steps. run_metadata = config_pb2.RunMetadata.FromString( tensor_events[0].tensor_proto.string_val[0]) graph = graph_pb2.GraphDef() for func_graph in run_metadata.function_graphs: graph_util.combine_graph_defs(graph, func_graph.pre_optimization_graph) else: graph = self._multiplexer.Graph(run) # This next line might raise a ValueError if the limit parameters # are invalid (size is negative, size present but key absent, etc.). process_graph.prepare_graph_for_ui(graph, limit_attr_size, large_attrs_key) return (str(graph), 'text/x-protobuf')
[ "def", "graph_impl", "(", "self", ",", "run", ",", "tag", ",", "is_conceptual", ",", "limit_attr_size", "=", "None", ",", "large_attrs_key", "=", "None", ")", ":", "if", "is_conceptual", ":", "tensor_events", "=", "self", ".", "_multiplexer", ".", "Tensors", "(", "run", ",", "tag", ")", "# Take the first event if there are multiple events written from different", "# steps.", "keras_model_config", "=", "json", ".", "loads", "(", "tensor_events", "[", "0", "]", ".", "tensor_proto", ".", "string_val", "[", "0", "]", ")", "graph", "=", "keras_util", ".", "keras_model_to_graph_def", "(", "keras_model_config", ")", "elif", "tag", ":", "tensor_events", "=", "self", ".", "_multiplexer", ".", "Tensors", "(", "run", ",", "tag", ")", "# Take the first event if there are multiple events written from different", "# steps.", "run_metadata", "=", "config_pb2", ".", "RunMetadata", ".", "FromString", "(", "tensor_events", "[", "0", "]", ".", "tensor_proto", ".", "string_val", "[", "0", "]", ")", "graph", "=", "graph_pb2", ".", "GraphDef", "(", ")", "for", "func_graph", "in", "run_metadata", ".", "function_graphs", ":", "graph_util", ".", "combine_graph_defs", "(", "graph", ",", "func_graph", ".", "pre_optimization_graph", ")", "else", ":", "graph", "=", "self", ".", "_multiplexer", ".", "Graph", "(", "run", ")", "# This next line might raise a ValueError if the limit parameters", "# are invalid (size is negative, size present but key absent, etc.).", "process_graph", ".", "prepare_graph_for_ui", "(", "graph", ",", "limit_attr_size", ",", "large_attrs_key", ")", "return", "(", "str", "(", "graph", ")", ",", "'text/x-protobuf'", ")" ]
49.76
0.013407
def readin_rho(filename, rhofile=True, aniso=False): """Read in the values of the resistivity in Ohmm. The format is variable: rho-file or mag-file. """ if aniso: a = [[0, 1, 2], [2, 3, 4]] else: a = [0, 2] if rhofile: if filename is None: filename = 'rho/rho.dat' with open(filename, 'r') as fid: mag = np.loadtxt(fid, skiprows=1, usecols=(a[0])) else: if filename is None: filename = read_iter() with open(filename, 'r') as fid: mag = np.power(10, np.loadtxt(fid, skiprows=1, usecols=(a[1]))) return mag
[ "def", "readin_rho", "(", "filename", ",", "rhofile", "=", "True", ",", "aniso", "=", "False", ")", ":", "if", "aniso", ":", "a", "=", "[", "[", "0", ",", "1", ",", "2", "]", ",", "[", "2", ",", "3", ",", "4", "]", "]", "else", ":", "a", "=", "[", "0", ",", "2", "]", "if", "rhofile", ":", "if", "filename", "is", "None", ":", "filename", "=", "'rho/rho.dat'", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fid", ":", "mag", "=", "np", ".", "loadtxt", "(", "fid", ",", "skiprows", "=", "1", ",", "usecols", "=", "(", "a", "[", "0", "]", ")", ")", "else", ":", "if", "filename", "is", "None", ":", "filename", "=", "read_iter", "(", ")", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fid", ":", "mag", "=", "np", ".", "power", "(", "10", ",", "np", ".", "loadtxt", "(", "fid", ",", "skiprows", "=", "1", ",", "usecols", "=", "(", "a", "[", "1", "]", ")", ")", ")", "return", "mag" ]
29.285714
0.001575
def plot_world(world, **kwargs): """ Addes a heat-map representing the data in world (an EnvironmentFile object) to the current plot. kwargs: palette - a seaborn palette (list of RGB values) indicating how to color values. Will be converted to a continuous colormap if necessary denom - the maximum value of numbers in the grid (only used if the grid actually contains numbers). This is used to normalize values and use the full dynamic range of the color pallete. """ denom, palette = get_kwargs(world, kwargs, False) world = color_grid(world, palette, denom, True) plt.tick_params(labelbottom="off", labeltop="off", labelleft="off", labelright="off", bottom="off", top="off", left="off", right="off") # plt.tight_layout() plt.imshow(world, interpolation="none", zorder=1) axes = plt.gca() axes.autoscale(False)
[ "def", "plot_world", "(", "world", ",", "*", "*", "kwargs", ")", ":", "denom", ",", "palette", "=", "get_kwargs", "(", "world", ",", "kwargs", ",", "False", ")", "world", "=", "color_grid", "(", "world", ",", "palette", ",", "denom", ",", "True", ")", "plt", ".", "tick_params", "(", "labelbottom", "=", "\"off\"", ",", "labeltop", "=", "\"off\"", ",", "labelleft", "=", "\"off\"", ",", "labelright", "=", "\"off\"", ",", "bottom", "=", "\"off\"", ",", "top", "=", "\"off\"", ",", "left", "=", "\"off\"", ",", "right", "=", "\"off\"", ")", "# plt.tight_layout()", "plt", ".", "imshow", "(", "world", ",", "interpolation", "=", "\"none\"", ",", "zorder", "=", "1", ")", "axes", "=", "plt", ".", "gca", "(", ")", "axes", ".", "autoscale", "(", "False", ")" ]
45.043478
0.000945
def list_chunks(l, n): """ Return a list of chunks :param l: List :param n: int The number of items per chunk :return: List """ if n < 1: n = 1 return [l[i:i + n] for i in range(0, len(l), n)]
[ "def", "list_chunks", "(", "l", ",", "n", ")", ":", "if", "n", "<", "1", ":", "n", "=", "1", "return", "[", "l", "[", "i", ":", "i", "+", "n", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "l", ")", ",", "n", ")", "]" ]
22.3
0.008621
def update(self, a, b, c, d): """ Update contingency table with new values without creating a new object. """ self.table.ravel()[:] = [a, b, c, d] self.N = self.table.sum()
[ "def", "update", "(", "self", ",", "a", ",", "b", ",", "c", ",", "d", ")", ":", "self", ".", "table", ".", "ravel", "(", ")", "[", ":", "]", "=", "[", "a", ",", "b", ",", "c", ",", "d", "]", "self", ".", "N", "=", "self", ".", "table", ".", "sum", "(", ")" ]
34.5
0.009434
def check_auth(self, all_credentials): """Update this socket's authentication. Log in or out to bring this socket's credentials up to date with those provided. Can raise ConnectionFailure or OperationFailure. :Parameters: - `all_credentials`: dict, maps auth source to MongoCredential. """ if all_credentials or self.authset: cached = set(itervalues(all_credentials)) authset = self.authset.copy() # Logout any credentials that no longer exist in the cache. for credentials in authset - cached: auth.logout(credentials.source, self) self.authset.discard(credentials) for credentials in cached - authset: auth.authenticate(credentials, self) self.authset.add(credentials)
[ "def", "check_auth", "(", "self", ",", "all_credentials", ")", ":", "if", "all_credentials", "or", "self", ".", "authset", ":", "cached", "=", "set", "(", "itervalues", "(", "all_credentials", ")", ")", "authset", "=", "self", ".", "authset", ".", "copy", "(", ")", "# Logout any credentials that no longer exist in the cache.", "for", "credentials", "in", "authset", "-", "cached", ":", "auth", ".", "logout", "(", "credentials", ".", "source", ",", "self", ")", "self", ".", "authset", ".", "discard", "(", "credentials", ")", "for", "credentials", "in", "cached", "-", "authset", ":", "auth", ".", "authenticate", "(", "credentials", ",", "self", ")", "self", ".", "authset", ".", "add", "(", "credentials", ")" ]
39.809524
0.002336
def from_semiaxes(cls,axes): """ Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic """ ax = list(1/N.array(axes)**2) #ax[-1] *= -1 # Not sure what is going on here... arr = N.diag(ax + [-1]) return arr.view(cls)
[ "def", "from_semiaxes", "(", "cls", ",", "axes", ")", ":", "ax", "=", "list", "(", "1", "/", "N", ".", "array", "(", "axes", ")", "**", "2", ")", "#ax[-1] *= -1 # Not sure what is going on here...", "arr", "=", "N", ".", "diag", "(", "ax", "+", "[", "-", "1", "]", ")", "return", "arr", ".", "view", "(", "cls", ")" ]
37.111111
0.011696
def validate(ref_time, ref_freqs, est_time, est_freqs): """Checks that the time and frequency inputs are well-formed. Parameters ---------- ref_time : np.ndarray reference time stamps in seconds ref_freqs : list of np.ndarray reference frequencies in Hz est_time : np.ndarray estimate time stamps in seconds est_freqs : list of np.ndarray estimated frequencies in Hz """ util.validate_events(ref_time, max_time=MAX_TIME) util.validate_events(est_time, max_time=MAX_TIME) if ref_time.size == 0: warnings.warn("Reference times are empty.") if ref_time.ndim != 1: raise ValueError("Reference times have invalid dimension") if len(ref_freqs) == 0: warnings.warn("Reference frequencies are empty.") if est_time.size == 0: warnings.warn("Estimated times are empty.") if est_time.ndim != 1: raise ValueError("Estimated times have invalid dimension") if len(est_freqs) == 0: warnings.warn("Estimated frequencies are empty.") if ref_time.size != len(ref_freqs): raise ValueError('Reference times and frequencies have unequal ' 'lengths.') if est_time.size != len(est_freqs): raise ValueError('Estimate times and frequencies have unequal ' 'lengths.') for freq in ref_freqs: util.validate_frequencies(freq, max_freq=MAX_FREQ, min_freq=MIN_FREQ, allow_negatives=False) for freq in est_freqs: util.validate_frequencies(freq, max_freq=MAX_FREQ, min_freq=MIN_FREQ, allow_negatives=False)
[ "def", "validate", "(", "ref_time", ",", "ref_freqs", ",", "est_time", ",", "est_freqs", ")", ":", "util", ".", "validate_events", "(", "ref_time", ",", "max_time", "=", "MAX_TIME", ")", "util", ".", "validate_events", "(", "est_time", ",", "max_time", "=", "MAX_TIME", ")", "if", "ref_time", ".", "size", "==", "0", ":", "warnings", ".", "warn", "(", "\"Reference times are empty.\"", ")", "if", "ref_time", ".", "ndim", "!=", "1", ":", "raise", "ValueError", "(", "\"Reference times have invalid dimension\"", ")", "if", "len", "(", "ref_freqs", ")", "==", "0", ":", "warnings", ".", "warn", "(", "\"Reference frequencies are empty.\"", ")", "if", "est_time", ".", "size", "==", "0", ":", "warnings", ".", "warn", "(", "\"Estimated times are empty.\"", ")", "if", "est_time", ".", "ndim", "!=", "1", ":", "raise", "ValueError", "(", "\"Estimated times have invalid dimension\"", ")", "if", "len", "(", "est_freqs", ")", "==", "0", ":", "warnings", ".", "warn", "(", "\"Estimated frequencies are empty.\"", ")", "if", "ref_time", ".", "size", "!=", "len", "(", "ref_freqs", ")", ":", "raise", "ValueError", "(", "'Reference times and frequencies have unequal '", "'lengths.'", ")", "if", "est_time", ".", "size", "!=", "len", "(", "est_freqs", ")", ":", "raise", "ValueError", "(", "'Estimate times and frequencies have unequal '", "'lengths.'", ")", "for", "freq", "in", "ref_freqs", ":", "util", ".", "validate_frequencies", "(", "freq", ",", "max_freq", "=", "MAX_FREQ", ",", "min_freq", "=", "MIN_FREQ", ",", "allow_negatives", "=", "False", ")", "for", "freq", "in", "est_freqs", ":", "util", ".", "validate_frequencies", "(", "freq", ",", "max_freq", "=", "MAX_FREQ", ",", "min_freq", "=", "MIN_FREQ", ",", "allow_negatives", "=", "False", ")" ]
36.555556
0.000592
def generate_value_processor(type_, collectionFormat=None, items=None, **kwargs): """ Create a callable that will take the string value of a header and cast it to the appropriate type. This can involve: - splitting a header of type 'array' by its delimeters. - type casting the internal elements of the array. """ processors = [] if is_non_string_iterable(type_): assert False, "This should not be possible" else: if type_ == ARRAY and collectionFormat: if collectionFormat in DELIMETERS: delimeter = DELIMETERS[collectionFormat] # split the string based on the delimeter specified by the # `collectionFormat` processors.append(operator.methodcaller('split', delimeter)) else: if collectionFormat != MULTI: raise TypeError("collectionFormat not implemented") processors.append(add_string_into_list) # remove any Falsy values like empty strings. processors.append(functools.partial(filter, bool)) # strip off any whitespace processors.append(functools.partial(map, operator.methodcaller('strip'))) if items is not None: if isinstance(items, collections.Mapping): items_processors = itertools.repeat( generate_value_processor(**items) ) elif isinstance(items, collections.Sequence): items_processors = itertools.chain( (generate_value_processor(**item) for item in items), itertools.repeat(lambda v: v), ) elif isinstance(items, six.string_types): raise NotImplementedError("Not implemented") else: assert False, "Should not be possible" # 1. zip the processor and the array items together # 2. apply the processor to each array item. # 3. cast the starmap generator to a list. processors.append( chain_reduce_partial( functools.partial(zip, items_processors), functools.partial(itertools.starmap, lambda fn, v: fn(v)), list, ) ) else: processors.append( functools.partial(cast_value_to_type, type_=type_) ) def processor(value, **kwargs): try: return chain_reduce_partial(*processors)(value) except (ValueError, TypeError): return value return processor
[ "def", "generate_value_processor", "(", "type_", ",", "collectionFormat", "=", "None", ",", "items", "=", "None", ",", "*", "*", "kwargs", ")", ":", "processors", "=", "[", "]", "if", "is_non_string_iterable", "(", "type_", ")", ":", "assert", "False", ",", "\"This should not be possible\"", "else", ":", "if", "type_", "==", "ARRAY", "and", "collectionFormat", ":", "if", "collectionFormat", "in", "DELIMETERS", ":", "delimeter", "=", "DELIMETERS", "[", "collectionFormat", "]", "# split the string based on the delimeter specified by the", "# `collectionFormat`", "processors", ".", "append", "(", "operator", ".", "methodcaller", "(", "'split'", ",", "delimeter", ")", ")", "else", ":", "if", "collectionFormat", "!=", "MULTI", ":", "raise", "TypeError", "(", "\"collectionFormat not implemented\"", ")", "processors", ".", "append", "(", "add_string_into_list", ")", "# remove any Falsy values like empty strings.", "processors", ".", "append", "(", "functools", ".", "partial", "(", "filter", ",", "bool", ")", ")", "# strip off any whitespace", "processors", ".", "append", "(", "functools", ".", "partial", "(", "map", ",", "operator", ".", "methodcaller", "(", "'strip'", ")", ")", ")", "if", "items", "is", "not", "None", ":", "if", "isinstance", "(", "items", ",", "collections", ".", "Mapping", ")", ":", "items_processors", "=", "itertools", ".", "repeat", "(", "generate_value_processor", "(", "*", "*", "items", ")", ")", "elif", "isinstance", "(", "items", ",", "collections", ".", "Sequence", ")", ":", "items_processors", "=", "itertools", ".", "chain", "(", "(", "generate_value_processor", "(", "*", "*", "item", ")", "for", "item", "in", "items", ")", ",", "itertools", ".", "repeat", "(", "lambda", "v", ":", "v", ")", ",", ")", "elif", "isinstance", "(", "items", ",", "six", ".", "string_types", ")", ":", "raise", "NotImplementedError", "(", "\"Not implemented\"", ")", "else", ":", "assert", "False", ",", "\"Should not be possible\"", "# 1. 
zip the processor and the array items together", "# 2. apply the processor to each array item.", "# 3. cast the starmap generator to a list.", "processors", ".", "append", "(", "chain_reduce_partial", "(", "functools", ".", "partial", "(", "zip", ",", "items_processors", ")", ",", "functools", ".", "partial", "(", "itertools", ".", "starmap", ",", "lambda", "fn", ",", "v", ":", "fn", "(", "v", ")", ")", ",", "list", ",", ")", ")", "else", ":", "processors", ".", "append", "(", "functools", ".", "partial", "(", "cast_value_to_type", ",", "type_", "=", "type_", ")", ")", "def", "processor", "(", "value", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "chain_reduce_partial", "(", "*", "processors", ")", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "value", "return", "processor" ]
43.403226
0.001453
def gettext(*args, **kwargs): """ Return the localized translation of message, based on the language, and locale directory of the domain specified in the translation key (or the current global domain). This function is usually aliased as ``_``. """ key = args[0] key_match = TRANSLATION_KEY_RE.match(key) translation = _gettext(*args, **kwargs) if not key_match or translation != key: return translation return _get_domain(key_match).gettext(*args, **kwargs)
[ "def", "gettext", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "key", "=", "args", "[", "0", "]", "key_match", "=", "TRANSLATION_KEY_RE", ".", "match", "(", "key", ")", "translation", "=", "_gettext", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "key_match", "or", "translation", "!=", "key", ":", "return", "translation", "return", "_get_domain", "(", "key_match", ")", ".", "gettext", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
38.076923
0.001972
def rl_marks(x): """ Replace +-, (c), (tm), (r), (p), etc by its typographic eqivalents """ # простые замены, можно без регулярок replacements = ( (u'(r)', u'\u00ae'), # ® (u'(R)', u'\u00ae'), # ® (u'(p)', u'\u00a7'), # § (u'(P)', u'\u00a7'), # § (u'(tm)', u'\u2122'), # ™ (u'(TM)', u'\u2122'), # ™ ) patterns = ( # копирайт ставится до года: © 2008 Юрий Юревич (re.compile(u'\\([cCсС]\\)\\s*(\\d+)', re.UNICODE), u'\u00a9\u202f\\1'), (r'([^+])(\+\-|\-\+)', u'\\1\u00b1'), # ± # градусы с минусом (u'\\-(\\d+)[\\s]*([FCС][^\\w])', u'\u2212\\1\202f\u00b0\\2'), # −12 °C, −53 °F # градусы без минуса (u'(\\d+)[\\s]*([FCС][^\\w])', u'\\1\u202f\u00b0\\2'), # 12 °C, 53 °F # ® и ™ приклеиваются к предыдущему слову, без пробела (re.compile(u'([A-Za-zА-Яа-я\\!\\?])\\s*(\xae|\u2122)', re.UNICODE), u'\\1\\2'), # No5 -> № 5 (re.compile(u'(\\s)(No|no|NO|\u2116)[\\s\u2009]*(\\d+)', re.UNICODE), u'\\1\u2116\u2009\\3'), ) for what, to in replacements: x = x.replace(what, to) return _sub_patterns(patterns, x)
[ "def", "rl_marks", "(", "x", ")", ":", "# простые замены, можно без регулярок", "replacements", "=", "(", "(", "u'(r)'", ",", "u'\\u00ae'", ")", ",", "# ®", "(", "u'(R)'", ",", "u'\\u00ae'", ")", ",", "# ®", "(", "u'(p)'", ",", "u'\\u00a7'", ")", ",", "# §", "(", "u'(P)'", ",", "u'\\u00a7'", ")", ",", "# §", "(", "u'(tm)'", ",", "u'\\u2122'", ")", ",", "# ™", "(", "u'(TM)'", ",", "u'\\u2122'", ")", ",", "# ™", ")", "patterns", "=", "(", "# копирайт ставится до года: © 2008 Юрий Юревич", "(", "re", ".", "compile", "(", "u'\\\\([cCсС]\\\\)\\\\s*(\\\\d+)', ", "r", ".U", "N", "ICODE),", " ", "u", "\\u00a9\\u202f\\\\1'),", "", "", "(", "r'([^+])(\\+\\-|\\-\\+)'", ",", "u'\\\\1\\u00b1'", ")", ",", "# ±", "# градусы с минусом", "(", "u'\\\\-(\\\\d+)[\\\\s]*([FCС][^\\\\w])',", " ", "'\\u2212\\\\1\\202f\\u00b0\\\\2')", ",", " ", " −12 °C, −53 °F", "# градусы без минуса", "(", "u'(\\\\d+)[\\\\s]*([FCС][^\\\\w])',", " ", "'\\\\1\\u202f\\u00b0\\\\2')", ",", " ", " 12 °C, 53 °F", "# ® и ™ приклеиваются к предыдущему слову, без пробела", "(", "re", ".", "compile", "(", "u'([A-Za-zА-Яа-я\\\\!\\\\?])\\\\s*(\\xae|\\u2122)', re", ".", "NI", "C", "ODE), u", "'", "\\", "1\\\\2'),", "", "", "# No5 -> № 5", "(", "re", ".", "compile", "(", "u'(\\\\s)(No|no|NO|\\u2116)[\\\\s\\u2009]*(\\\\d+)'", ",", "re", ".", "UNICODE", ")", ",", "u'\\\\1\\u2116\\u2009\\\\3'", ")", ",", ")", "for", "what", ",", "to", "in", "replacements", ":", "x", "=", "x", ".", "replace", "(", "what", ",", "to", ")", "return", "_sub_patterns", "(", "patterns", ",", "x", ")" ]
38.533333
0.011814
def _activate_stream(self, idx): '''Randomly select and create a stream. StochasticMux adds mode handling to _activate_stream, making it so that if we're not sampling "with_replacement", the distribution for this chosen streamer is set to 0, causing the streamer not to be available until it is exhausted. Parameters ---------- idx : int, [0:n_streams - 1] The stream index to replace ''' # Get the number of samples for this streamer. n_samples_to_stream = None if self.rate is not None: n_samples_to_stream = 1 + self.rng.poisson(lam=self.rate) # instantiate a new streamer streamer = self.streamers[idx].iterate(max_iter=n_samples_to_stream) weight = self.weights[idx] # If we're sampling without replacement, zero this one out # This effectively disables this stream as soon as it is chosen, # preventing it from being chosen again (unless it is revived) # if not self.with_replacement: if self.mode != "with_replacement": self.distribution_[idx] = 0.0 # Correct the distribution if (self.distribution_ > 0).any(): self.distribution_[:] /= np.sum(self.distribution_) return streamer, weight
[ "def", "_activate_stream", "(", "self", ",", "idx", ")", ":", "# Get the number of samples for this streamer.", "n_samples_to_stream", "=", "None", "if", "self", ".", "rate", "is", "not", "None", ":", "n_samples_to_stream", "=", "1", "+", "self", ".", "rng", ".", "poisson", "(", "lam", "=", "self", ".", "rate", ")", "# instantiate a new streamer", "streamer", "=", "self", ".", "streamers", "[", "idx", "]", ".", "iterate", "(", "max_iter", "=", "n_samples_to_stream", ")", "weight", "=", "self", ".", "weights", "[", "idx", "]", "# If we're sampling without replacement, zero this one out", "# This effectively disables this stream as soon as it is chosen,", "# preventing it from being chosen again (unless it is revived)", "# if not self.with_replacement:", "if", "self", ".", "mode", "!=", "\"with_replacement\"", ":", "self", ".", "distribution_", "[", "idx", "]", "=", "0.0", "# Correct the distribution", "if", "(", "self", ".", "distribution_", ">", "0", ")", ".", "any", "(", ")", ":", "self", ".", "distribution_", "[", ":", "]", "/=", "np", ".", "sum", "(", "self", ".", "distribution_", ")", "return", "streamer", ",", "weight" ]
38.558824
0.001488
def expand_entries(entries, ignore_xs=None):
    """Expand the Xs of routing table entries into explicit ``0``/``1`` bits.

    Every X (don't-care) bit not masked off by `ignore_xs` is expanded
    into both of its concrete values, so each input entry may yield
    several fully-specified entries. If `ignore_xs` is None, the Xs
    common to *all* entries are kept as Xs rather than expanded.

    Duplicate keys are emitted only once: the input table is assumed to
    be orthogonal (no two entries match the same key), and any entry
    whose expanded key was already produced is dropped with a warning.
    Consequently the yielded table is guaranteed to be orthogonal.

    Parameters
    ----------
    entries : [:py:class:`~rig.routing_table.RoutingTableEntry`...] or similar
        The entries to expand.

    Other Parameters
    ----------------
    ignore_xs : int
        Mask of bits in which Xs should not be expanded. Defaults to the
        Xs shared by every entry.

    Yields
    ------
    :py:class:`~rig.routing_table.RoutingTableEntry`
        The original entries with all non-ignored Xs made explicit.
    """
    # Default: leave alone any X bits that every entry shares.
    if ignore_xs is None:
        ignore_xs = get_common_xs(entries)

    emitted = set()  # keys already yielded

    for original in entries:
        for expanded in expand_entry(original, ignore_xs):
            key = expanded.key
            if key not in emitted:
                emitted.add(key)
                yield expanded
            else:
                # Covered (unreachable) entry: drop it, but tell the user
                # the input table was not orthogonal.
                warnings.warn("Table is not orthogonal: Key {:#010x} matches "
                              "multiple entries.".format(key))
[ "def", "expand_entries", "(", "entries", ",", "ignore_xs", "=", "None", ")", ":", "# Find the common Xs for the entries", "if", "ignore_xs", "is", "None", ":", "ignore_xs", "=", "get_common_xs", "(", "entries", ")", "# Keep a track of keys that we've seen", "seen_keys", "=", "set", "(", "{", "}", ")", "# Expand each entry in turn", "for", "entry", "in", "entries", ":", "for", "new_entry", "in", "expand_entry", "(", "entry", ",", "ignore_xs", ")", ":", "if", "new_entry", ".", "key", "in", "seen_keys", ":", "# We've already used this key, warn that the table is", "# over-complete.", "warnings", ".", "warn", "(", "\"Table is not orthogonal: Key {:#010x} matches \"", "\"multiple entries.\"", ".", "format", "(", "new_entry", ".", "key", ")", ")", "else", ":", "# Mark the key as seen and yield the new entry", "seen_keys", ".", "add", "(", "new_entry", ".", "key", ")", "yield", "new_entry" ]
39.914634
0.000298
def gts7(Input, flags, output):
    '''
    /* Thermospheric portion of NRLMSISE-00
    * See GTD7 for more extensive comments
    * alt > 72.5 km!
    */

    Computes neutral temperatures and species number densities for the
    thermospheric part of the NRLMSISE-00 model, writing results into
    ``output.d`` (densities) and ``output.t`` (temperatures).

    Parameters (NOTE(review): types inferred from usage — confirm against
    the nrlmsise_input/nrlmsise_flags/nrlmsise_output definitions):
    - Input: provides .alt, .g_lat, .doy, .f107A
    - flags: provides .sw switch array selecting model variations
    - output: .d[0..8] species/total densities, .t[0..1] exospheric and
      altitude temperatures

    Relies on module-level coefficient tables (pt, ps, pd, ptm, pdm, pdl,
    ptl, pma) and shared state (meso_tn1, meso_tgn1, dm04..dm40), and
    mutates several of them via ``global``.
    '''
    # Temperature-profile breakpoint altitudes (km) and their count.
    zn1 = [120.0, 110.0, 100.0, 90.0, 72.5]
    mn1 = 5
    dgtr=1.74533E-2;  # degrees -> radians
    dr=1.72142E-2;    # day-of-year -> radians (2*pi/365)
    # Thermal diffusion coefficients per species (He, O, N2, O2, Ar, total, H, N, anomalous O).
    alpha = [-0.38, 0.0, 0.0, 0.0, 0.17, 0.0, -0.38, 0.0, 0.0]
    # Per-species altitude limits below which mixing corrections apply.
    altl = [200.0, 300.0, 160.0, 250.0, 240.0, 450.0, 320.0, 450.0]
    za = pdl[1][15];
    zn1[0] = za;
    for j in range(9):
        output.d[j]=0;

    #/* TINF VARIATIONS NOT IMPORTANT BELOW ZA OR ZN1(1) */
    if (Input.alt>zn1[0]):
        tinf = ptm[0]*pt[0] * \
            (1.0+flags.sw[16]*globe7(pt,Input,flags));
    else:
        tinf = ptm[0]*pt[0];
    output.t[0]=tinf;

    #/* GRADIENT VARIATIONS NOT IMPORTANT BELOW ZN1(5) */
    if (Input.alt>zn1[4]):
        g0 = ptm[3]*ps[0] * \
            (1.0+flags.sw[19]*globe7(ps,Input,flags));
    else:
        g0 = ptm[3]*ps[0];
    tlb = ptm[1] * (1.0 + flags.sw[17]*globe7(pd[3],Input,flags))*pd[3][0];
    s = g0 / (tinf - tlb);

    #/* Lower thermosphere temp variations not significant for
    # * density above 300 km */
    if (Input.alt<300.0):
        meso_tn1[1]=ptm[6]*ptl[0][0]/(1.0-flags.sw[18]*glob7s(ptl[0], Input, flags));
        meso_tn1[2]=ptm[2]*ptl[1][0]/(1.0-flags.sw[18]*glob7s(ptl[1], Input, flags));
        meso_tn1[3]=ptm[7]*ptl[2][0]/(1.0-flags.sw[18]*glob7s(ptl[2], Input, flags));
        meso_tn1[4]=ptm[4]*ptl[3][0]/(1.0-flags.sw[18]*flags.sw[20]*glob7s(ptl[3], Input, flags));
        meso_tgn1[1]=ptm[8]*pma[8][0]*(1.0+flags.sw[18]*flags.sw[20]*glob7s(pma[8], Input, flags))*meso_tn1[4]*meso_tn1[4]/(pow((ptm[4]*ptl[3][0]),2.0));
    else:
        meso_tn1[1]=ptm[6]*ptl[0][0];
        meso_tn1[2]=ptm[2]*ptl[1][0];
        meso_tn1[3]=ptm[7]*ptl[2][0];
        meso_tn1[4]=ptm[4]*ptl[3][0];
        meso_tgn1[1]=ptm[8]*pma[8][0]*meso_tn1[4]*meso_tn1[4]/(pow((ptm[4]*ptl[3][0]),2.0));

    z0 = zn1[3];
    t0 = meso_tn1[3];
    tr12 = 1.0;

    #/* N2 variation factor at Zlb */
    g28=flags.sw[21]*globe7(pd[2], Input, flags);

    #/* VARIATION OF TURBOPAUSE HEIGHT */
    zhf=pdl[1][24]*(1.0+flags.sw[5]*pdl[0][24]*sin(dgtr*Input.g_lat)*cos(dr*(Input.doy-pt[13])));
    output.t[0]=tinf;
    xmm = pdm[2][4];
    z = Input.alt;

    #/**** N2 DENSITY ****/
    #/* Diffusive density at Zlb */
    db28 = pdm[2][0]*exp(g28)*pd[2][0];
    #/* Diffusive density at Alt */
    # densu returns the temperature through its mutable one-element list
    # argument; RandomVariable shuttles output.t[1] in and back out.
    RandomVariable = [output.t[1]]
    output.d[2]=densu(z,db28,tinf,tlb,28.0,alpha[2],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[2];
    #/* Turbopause */
    zh28=pdm[2][2]*zhf;
    zhm28=pdm[2][3]*pdl[1][5];
    xmd=28.0-xmm;
    #/* Mixed density at Zlb */
    tz = [0]
    b28=densu(zh28,db28,tinf,tlb,xmd,(alpha[2]-1.0),tz,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
    if ((flags.sw[15]) and (z<=altl[2])):
        #/* Mixed density at Alt */
        global dm28
        dm28=densu(z,b28,tinf,tlb,xmm,alpha[2],tz,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        #/* Net density at Alt */
        output.d[2]=dnet(output.d[2],dm28,zhm28,xmm,28.0);

    #/**** HE DENSITY ****/
    #/* Density variation factor at Zlb */
    g4 = flags.sw[21]*globe7(pd[0], Input, flags);
    #/* Diffusive density at Zlb */
    db04 = pdm[0][0]*exp(g4)*pd[0][0];
    #/* Diffusive density at Alt */
    RandomVariable = [output.t[1]]
    output.d[0]=densu(z,db04,tinf,tlb, 4.,alpha[0],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[0];
    if ((flags.sw[15]) and (z<altl[0])):
        #/* Turbopause */
        zh04=pdm[0][2];
        #/* Mixed density at Zlb */
        RandomVariable = [output.t[1]]
        b04=densu(zh04,db04,tinf,tlb,4.-xmm,alpha[0]-1.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        #/* Mixed density at Alt */
        RandomVariable = [output.t[1]]
        global dm04
        dm04=densu(z,b04,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        zhm04=zhm28;
        #/* Net density at Alt */
        output.d[0]=dnet(output.d[0],dm04,zhm04,xmm,4.);
        #/* Correction to specified mixing ratio at ground */
        rl=log(b28*pdm[0][1]/b04);
        zc04=pdm[0][4]*pdl[1][0];
        hc04=pdm[0][5]*pdl[1][1];
        #/* Net density corrected at Alt */
        output.d[0]=output.d[0]*ccor(z,rl,hc04,zc04);

    #/**** O DENSITY ****/
    #/* Density variation factor at Zlb */
    g16= flags.sw[21]*globe7(pd[1],Input,flags);
    #/* Diffusive density at Zlb */
    db16 = pdm[1][0]*exp(g16)*pd[1][0];
    #/* Diffusive density at Alt */
    RandomVariable = [output.t[1]]
    output.d[1]=densu(z,db16,tinf,tlb, 16.,alpha[1],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[1];
    if ((flags.sw[15]) and (z<=altl[1])):
        #/* Turbopause */
        zh16=pdm[1][2];
        #/* Mixed density at Zlb */
        RandomVariable = [output.t[1]]
        b16=densu(zh16,db16,tinf,tlb,16.0-xmm,(alpha[1]-1.0), RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        #/* Mixed density at Alt */
        RandomVariable = [output.t[1]]
        global dm16
        dm16=densu(z,b16,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        zhm16=zhm28;
        #/* Net density at Alt */
        output.d[1]=dnet(output.d[1],dm16,zhm16,xmm,16.);
        rl=pdm[1][1]*pdl[1][16]*(1.0+flags.sw[1]*pdl[0][23]*(Input.f107A-150.0));
        hc16=pdm[1][5]*pdl[1][3];
        zc16=pdm[1][4]*pdl[1][2];
        hc216=pdm[1][5]*pdl[1][4];
        output.d[1]=output.d[1]*ccor2(z,rl,hc16,zc16,hc216);
        #/* Chemistry correction */
        hcc16=pdm[1][7]*pdl[1][13];
        zcc16=pdm[1][6]*pdl[1][12];
        rc16=pdm[1][3]*pdl[1][14];
        #/* Net density corrected at Alt */
        output.d[1]=output.d[1]*ccor(z,rc16,hcc16,zcc16);

    #/**** O2 DENSITY ****/
    #/* Density variation factor at Zlb */
    g32= flags.sw[21]*globe7(pd[4], Input, flags);
    #/* Diffusive density at Zlb */
    db32 = pdm[3][0]*exp(g32)*pd[4][0];
    #/* Diffusive density at Alt */
    RandomVariable = [output.t[1]]
    output.d[3]=densu(z,db32,tinf,tlb, 32.,alpha[3],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[3];
    if (flags.sw[15]):
        if (z<=altl[3]):
            #/* Turbopause */
            zh32=pdm[3][2];
            #/* Mixed density at Zlb */
            RandomVariable = [output.t[1]]
            b32=densu(zh32,db32,tinf,tlb,32.-xmm,alpha[3]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
            output.t[1] = RandomVariable[0]
            #/* Mixed density at Alt */
            RandomVariable = [output.t[1]]
            global dm32
            dm32=densu(z,b32,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
            output.t[1] = RandomVariable[0]
            zhm32=zhm28;
            #/* Net density at Alt */
            output.d[3]=dnet(output.d[3],dm32,zhm32,xmm,32.);
            #/* Correction to specified mixing ratio at ground */
            rl=log(b28*pdm[3][1]/b32);
            hc32=pdm[3][5]*pdl[1][7];
            zc32=pdm[3][4]*pdl[1][6];
            output.d[3]=output.d[3]*ccor(z,rl,hc32,zc32);
        #/* Correction for general departure from diffusive equilibrium above Zlb */
        hcc32=pdm[3][7]*pdl[1][22];
        hcc232=pdm[3][7]*pdl[0][22];
        zcc32=pdm[3][6]*pdl[1][21];
        rc32=pdm[3][3]*pdl[1][23]*(1.+flags.sw[1]*pdl[0][23]*(Input.f107A-150.));
        #/* Net density corrected at Alt */
        output.d[3]=output.d[3]*ccor2(z,rc32,hcc32,zcc32,hcc232);

    #/**** AR DENSITY ****/
    #/* Density variation factor at Zlb */
    g40= flags.sw[21]*globe7(pd[5],Input,flags);
    #/* Diffusive density at Zlb */
    db40 = pdm[4][0]*exp(g40)*pd[5][0];
    #/* Diffusive density at Alt */
    RandomVariable = [output.t[1]]
    output.d[4]=densu(z,db40,tinf,tlb, 40.,alpha[4],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[4];
    if ((flags.sw[15]) and (z<=altl[4])):
        #/* Turbopause */
        zh40=pdm[4][2];
        #/* Mixed density at Zlb */
        RandomVariable = [output.t[1]]
        b40=densu(zh40,db40,tinf,tlb,40.-xmm,alpha[4]-1.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        #/* Mixed density at Alt */
        RandomVariable = [output.t[1]]
        global dm40
        dm40=densu(z,b40,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        zhm40=zhm28;
        #/* Net density at Alt */
        output.d[4]=dnet(output.d[4],dm40,zhm40,xmm,40.);
        #/* Correction to specified mixing ratio at ground */
        rl=log(b28*pdm[4][1]/b40);
        hc40=pdm[4][5]*pdl[1][9];
        zc40=pdm[4][4]*pdl[1][8];
        #/* Net density corrected at Alt */
        output.d[4]=output.d[4]*ccor(z,rl,hc40,zc40);

    #/**** HYDROGEN DENSITY ****/
    #/* Density variation factor at Zlb */
    g1 = flags.sw[21]*globe7(pd[6], Input, flags);
    #/* Diffusive density at Zlb */
    db01 = pdm[5][0]*exp(g1)*pd[6][0];
    #/* Diffusive density at Alt */
    RandomVariable = [output.t[1]]
    output.d[6]=densu(z,db01,tinf,tlb,1.,alpha[6],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[6];
    if ((flags.sw[15]) and (z<=altl[6])):
        #/* Turbopause */
        zh01=pdm[5][2];
        #/* Mixed density at Zlb */
        RandomVariable = [output.t[1]]
        b01=densu(zh01,db01,tinf,tlb,1.-xmm,alpha[6]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        #/* Mixed density at Alt */
        RandomVariable = [output.t[1]]
        global dm01
        dm01=densu(z,b01,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        zhm01=zhm28;
        #/* Net density at Alt */
        output.d[6]=dnet(output.d[6],dm01,zhm01,xmm,1.);
        #/* Correction to specified mixing ratio at ground */
        rl=log(b28*pdm[5][1]*sqrt(pdl[1][17]*pdl[1][17])/b01);
        hc01=pdm[5][5]*pdl[1][11];
        zc01=pdm[5][4]*pdl[1][10];
        output.d[6]=output.d[6]*ccor(z,rl,hc01,zc01);
        #/* Chemistry correction */
        hcc01=pdm[5][7]*pdl[1][19];
        zcc01=pdm[5][6]*pdl[1][18];
        rc01=pdm[5][3]*pdl[1][20];
        #/* Net density corrected at Alt */
        output.d[6]=output.d[6]*ccor(z,rc01,hcc01,zcc01);

    #/**** ATOMIC NITROGEN DENSITY ****/
    #/* Density variation factor at Zlb */
    g14 = flags.sw[21]*globe7(pd[7],Input,flags);
    #/* Diffusive density at Zlb */
    db14 = pdm[6][0]*exp(g14)*pd[7][0];
    #/* Diffusive density at Alt */
    RandomVariable = [output.t[1]]
    output.d[7]=densu(z,db14,tinf,tlb,14.,alpha[7],RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    dd=output.d[7];
    if ((flags.sw[15]) and (z<=altl[7])):
        #/* Turbopause */
        zh14=pdm[6][2];
        #/* Mixed density at Zlb */
        RandomVariable = [output.t[1]]
        b14=densu(zh14,db14,tinf,tlb,14.-xmm,alpha[7]-1., RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        #/* Mixed density at Alt */
        RandomVariable = [output.t[1]]
        global dm14
        dm14=densu(z,b14,tinf,tlb,xmm,0.,RandomVariable,ptm[5],s,mn1,zn1,meso_tn1,meso_tgn1);
        output.t[1] = RandomVariable[0]
        zhm14=zhm28;
        #/* Net density at Alt */
        output.d[7]=dnet(output.d[7],dm14,zhm14,xmm,14.);
        #/* Correction to specified mixing ratio at ground */
        rl=log(b28*pdm[6][1]*sqrt(pdl[0][2]*pdl[0][2])/b14);
        hc14=pdm[6][5]*pdl[0][1];
        zc14=pdm[6][4]*pdl[0][0];
        output.d[7]=output.d[7]*ccor(z,rl,hc14,zc14);
        #/* Chemistry correction */
        hcc14=pdm[6][7]*pdl[0][4];
        zcc14=pdm[6][6]*pdl[0][3];
        rc14=pdm[6][3]*pdl[0][5];
        #/* Net density corrected at Alt */
        output.d[7]=output.d[7]*ccor(z,rc14,hcc14,zcc14);

    #/**** Anomalous OXYGEN DENSITY ****/
    g16h = flags.sw[21]*globe7(pd[8],Input,flags);
    db16h = pdm[7][0]*exp(g16h)*pd[8][0];
    tho = pdm[7][9]*pdl[0][6];
    RandomVariable = [output.t[1]]
    dd=densu(z,db16h,tho,tho,16.,alpha[8],RandomVariable,ptm[5],s,mn1, zn1,meso_tn1,meso_tgn1);
    output.t[1] = RandomVariable[0]
    zsht=pdm[7][5];
    zmho=pdm[7][4];
    zsho=scalh(zmho,16.0,tho);
    output.d[8]=dd*exp(-zsht/zsho*(exp(-(z-zmho)/zsht)-1.));

    #/* total mass density */
    output.d[5] = 1.66E-24*(4.0*output.d[0]+16.0*output.d[1]+28.0*output.d[2]+32.0*output.d[3]+40.0*output.d[4]+ output.d[6]+14.0*output.d[7]);
    db48=1.66E-24*(4.0*db04+16.0*db16+28.0*db28+32.0*db32+40.0*db40+db01+14.0*db14);

    #/* temperature */
    z = sqrt(Input.alt*Input.alt);
    RandomVariable = [output.t[1]]
    ddum = densu(z,1.0, tinf, tlb, 0.0, 0.0, RandomVariable, ptm[5], s, mn1, zn1, meso_tn1, meso_tgn1);
    output.t[1] = RandomVariable[0]
    # Convert from cm^-3 / g*cm^-3 to m^-3 / kg*m^-3 when sw[0] requests it.
    if (flags.sw[0]): # pragma: no cover
        for i in range(9):
            output.d[i]=output.d[i]*1.0E6;
        output.d[5]=output.d[5]/1000;
    return
[ "def", "gts7", "(", "Input", ",", "flags", ",", "output", ")", ":", "zn1", "=", "[", "120.0", ",", "110.0", ",", "100.0", ",", "90.0", ",", "72.5", "]", "mn1", "=", "5", "dgtr", "=", "1.74533E-2", "dr", "=", "1.72142E-2", "alpha", "=", "[", "-", "0.38", ",", "0.0", ",", "0.0", ",", "0.0", ",", "0.17", ",", "0.0", ",", "-", "0.38", ",", "0.0", ",", "0.0", "]", "altl", "=", "[", "200.0", ",", "300.0", ",", "160.0", ",", "250.0", ",", "240.0", ",", "450.0", ",", "320.0", ",", "450.0", "]", "za", "=", "pdl", "[", "1", "]", "[", "15", "]", "zn1", "[", "0", "]", "=", "za", "for", "j", "in", "range", "(", "9", ")", ":", "output", ".", "d", "[", "j", "]", "=", "0", "#/* TINF VARIATIONS NOT IMPORTANT BELOW ZA OR ZN1(1) */", "if", "(", "Input", ".", "alt", ">", "zn1", "[", "0", "]", ")", ":", "tinf", "=", "ptm", "[", "0", "]", "*", "pt", "[", "0", "]", "*", "(", "1.0", "+", "flags", ".", "sw", "[", "16", "]", "*", "globe7", "(", "pt", ",", "Input", ",", "flags", ")", ")", "else", ":", "tinf", "=", "ptm", "[", "0", "]", "*", "pt", "[", "0", "]", "output", ".", "t", "[", "0", "]", "=", "tinf", "#/* GRADIENT VARIATIONS NOT IMPORTANT BELOW ZN1(5) */", "if", "(", "Input", ".", "alt", ">", "zn1", "[", "4", "]", ")", ":", "g0", "=", "ptm", "[", "3", "]", "*", "ps", "[", "0", "]", "*", "(", "1.0", "+", "flags", ".", "sw", "[", "19", "]", "*", "globe7", "(", "ps", ",", "Input", ",", "flags", ")", ")", "else", ":", "g0", "=", "ptm", "[", "3", "]", "*", "ps", "[", "0", "]", "tlb", "=", "ptm", "[", "1", "]", "*", "(", "1.0", "+", "flags", ".", "sw", "[", "17", "]", "*", "globe7", "(", "pd", "[", "3", "]", ",", "Input", ",", "flags", ")", ")", "*", "pd", "[", "3", "]", "[", "0", "]", "s", "=", "g0", "/", "(", "tinf", "-", "tlb", ")", "#/* Lower thermosphere temp variations not significant for", "# * density above 300 km */", "if", "(", "Input", ".", "alt", "<", "300.0", ")", ":", "meso_tn1", "[", "1", "]", "=", "ptm", "[", "6", "]", "*", "ptl", "[", "0", "]", "[", "0", "]", "/", "(", 
"1.0", "-", "flags", ".", "sw", "[", "18", "]", "*", "glob7s", "(", "ptl", "[", "0", "]", ",", "Input", ",", "flags", ")", ")", "meso_tn1", "[", "2", "]", "=", "ptm", "[", "2", "]", "*", "ptl", "[", "1", "]", "[", "0", "]", "/", "(", "1.0", "-", "flags", ".", "sw", "[", "18", "]", "*", "glob7s", "(", "ptl", "[", "1", "]", ",", "Input", ",", "flags", ")", ")", "meso_tn1", "[", "3", "]", "=", "ptm", "[", "7", "]", "*", "ptl", "[", "2", "]", "[", "0", "]", "/", "(", "1.0", "-", "flags", ".", "sw", "[", "18", "]", "*", "glob7s", "(", "ptl", "[", "2", "]", ",", "Input", ",", "flags", ")", ")", "meso_tn1", "[", "4", "]", "=", "ptm", "[", "4", "]", "*", "ptl", "[", "3", "]", "[", "0", "]", "/", "(", "1.0", "-", "flags", ".", "sw", "[", "18", "]", "*", "flags", ".", "sw", "[", "20", "]", "*", "glob7s", "(", "ptl", "[", "3", "]", ",", "Input", ",", "flags", ")", ")", "meso_tgn1", "[", "1", "]", "=", "ptm", "[", "8", "]", "*", "pma", "[", "8", "]", "[", "0", "]", "*", "(", "1.0", "+", "flags", ".", "sw", "[", "18", "]", "*", "flags", ".", "sw", "[", "20", "]", "*", "glob7s", "(", "pma", "[", "8", "]", ",", "Input", ",", "flags", ")", ")", "*", "meso_tn1", "[", "4", "]", "*", "meso_tn1", "[", "4", "]", "/", "(", "pow", "(", "(", "ptm", "[", "4", "]", "*", "ptl", "[", "3", "]", "[", "0", "]", ")", ",", "2.0", ")", ")", "else", ":", "meso_tn1", "[", "1", "]", "=", "ptm", "[", "6", "]", "*", "ptl", "[", "0", "]", "[", "0", "]", "meso_tn1", "[", "2", "]", "=", "ptm", "[", "2", "]", "*", "ptl", "[", "1", "]", "[", "0", "]", "meso_tn1", "[", "3", "]", "=", "ptm", "[", "7", "]", "*", "ptl", "[", "2", "]", "[", "0", "]", "meso_tn1", "[", "4", "]", "=", "ptm", "[", "4", "]", "*", "ptl", "[", "3", "]", "[", "0", "]", "meso_tgn1", "[", "1", "]", "=", "ptm", "[", "8", "]", "*", "pma", "[", "8", "]", "[", "0", "]", "*", "meso_tn1", "[", "4", "]", "*", "meso_tn1", "[", "4", "]", "/", "(", "pow", "(", "(", "ptm", "[", "4", "]", "*", "ptl", "[", "3", "]", "[", "0", "]", ")", ",", "2.0", ")", 
")", "z0", "=", "zn1", "[", "3", "]", "t0", "=", "meso_tn1", "[", "3", "]", "tr12", "=", "1.0", "#/* N2 variation factor at Zlb */", "g28", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "2", "]", ",", "Input", ",", "flags", ")", "#/* VARIATION OF TURBOPAUSE HEIGHT */", "zhf", "=", "pdl", "[", "1", "]", "[", "24", "]", "*", "(", "1.0", "+", "flags", ".", "sw", "[", "5", "]", "*", "pdl", "[", "0", "]", "[", "24", "]", "*", "sin", "(", "dgtr", "*", "Input", ".", "g_lat", ")", "*", "cos", "(", "dr", "*", "(", "Input", ".", "doy", "-", "pt", "[", "13", "]", ")", ")", ")", "output", ".", "t", "[", "0", "]", "=", "tinf", "xmm", "=", "pdm", "[", "2", "]", "[", "4", "]", "z", "=", "Input", ".", "alt", "#/**** N2 DENSITY ****/", "#/* Diffusive density at Zlb */", "db28", "=", "pdm", "[", "2", "]", "[", "0", "]", "*", "exp", "(", "g28", ")", "*", "pd", "[", "2", "]", "[", "0", "]", "#/* Diffusive density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "2", "]", "=", "densu", "(", "z", ",", "db28", ",", "tinf", ",", "tlb", ",", "28.0", ",", "alpha", "[", "2", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "2", "]", "#/* Turbopause */", "zh28", "=", "pdm", "[", "2", "]", "[", "2", "]", "*", "zhf", "zhm28", "=", "pdm", "[", "2", "]", "[", "3", "]", "*", "pdl", "[", "1", "]", "[", "5", "]", "xmd", "=", "28.0", "-", "xmm", "#/* Mixed density at Zlb */", "tz", "=", "[", "0", "]", "b28", "=", "densu", "(", "zh28", ",", "db28", ",", "tinf", ",", "tlb", ",", "xmd", ",", "(", "alpha", "[", "2", "]", "-", "1.0", ")", ",", "tz", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "if", "(", "(", "flags", ".", "sw", "[", "15", "]", ")", "and", "(", "z", "<=", "altl", "[", 
"2", "]", ")", ")", ":", "#/* Mixed density at Alt */", "global", "dm28", "dm28", "=", "densu", "(", "z", ",", "b28", ",", "tinf", ",", "tlb", ",", "xmm", ",", "alpha", "[", "2", "]", ",", "tz", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "#/* Net density at Alt */", "output", ".", "d", "[", "2", "]", "=", "dnet", "(", "output", ".", "d", "[", "2", "]", ",", "dm28", ",", "zhm28", ",", "xmm", ",", "28.0", ")", "#/**** HE DENSITY ****/", "#/* Density variation factor at Zlb */", "g4", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "0", "]", ",", "Input", ",", "flags", ")", "#/* Diffusive density at Zlb */", "db04", "=", "pdm", "[", "0", "]", "[", "0", "]", "*", "exp", "(", "g4", ")", "*", "pd", "[", "0", "]", "[", "0", "]", "#/* Diffusive density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "0", "]", "=", "densu", "(", "z", ",", "db04", ",", "tinf", ",", "tlb", ",", "4.", ",", "alpha", "[", "0", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "0", "]", "if", "(", "(", "flags", ".", "sw", "[", "15", "]", ")", "and", "(", "z", "<", "altl", "[", "0", "]", ")", ")", ":", "#/* Turbopause */", "zh04", "=", "pdm", "[", "0", "]", "[", "2", "]", "#/* Mixed density at Zlb */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "b04", "=", "densu", "(", "zh04", ",", "db04", ",", "tinf", ",", "tlb", ",", "4.", "-", "xmm", ",", "alpha", "[", "0", "]", "-", "1.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "#/* Mixed density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", 
"1", "]", "]", "global", "dm04", "dm04", "=", "densu", "(", "z", ",", "b04", ",", "tinf", ",", "tlb", ",", "xmm", ",", "0.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "zhm04", "=", "zhm28", "#/* Net density at Alt */", "output", ".", "d", "[", "0", "]", "=", "dnet", "(", "output", ".", "d", "[", "0", "]", ",", "dm04", ",", "zhm04", ",", "xmm", ",", "4.", ")", "#/* Correction to specified mixing ratio at ground */", "rl", "=", "log", "(", "b28", "*", "pdm", "[", "0", "]", "[", "1", "]", "/", "b04", ")", "zc04", "=", "pdm", "[", "0", "]", "[", "4", "]", "*", "pdl", "[", "1", "]", "[", "0", "]", "hc04", "=", "pdm", "[", "0", "]", "[", "5", "]", "*", "pdl", "[", "1", "]", "[", "1", "]", "#/* Net density corrected at Alt */", "output", ".", "d", "[", "0", "]", "=", "output", ".", "d", "[", "0", "]", "*", "ccor", "(", "z", ",", "rl", ",", "hc04", ",", "zc04", ")", "#/**** O DENSITY ****/", "#/* Density variation factor at Zlb */", "g16", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "1", "]", ",", "Input", ",", "flags", ")", "#/* Diffusive density at Zlb */", "db16", "=", "pdm", "[", "1", "]", "[", "0", "]", "*", "exp", "(", "g16", ")", "*", "pd", "[", "1", "]", "[", "0", "]", "#/* Diffusive density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "1", "]", "=", "densu", "(", "z", ",", "db16", ",", "tinf", ",", "tlb", ",", "16.", ",", "alpha", "[", "1", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "1", "]", "if", "(", "(", "flags", ".", "sw", "[", "15", "]", ")", "and", "(", "z", "<=", "altl", "[", "1", "]", ")", ")", ":", "#/* Turbopause */", 
"zh16", "=", "pdm", "[", "1", "]", "[", "2", "]", "#/* Mixed density at Zlb */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "b16", "=", "densu", "(", "zh16", ",", "db16", ",", "tinf", ",", "tlb", ",", "16.0", "-", "xmm", ",", "(", "alpha", "[", "1", "]", "-", "1.0", ")", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "#/* Mixed density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "global", "dm16", "dm16", "=", "densu", "(", "z", ",", "b16", ",", "tinf", ",", "tlb", ",", "xmm", ",", "0.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "zhm16", "=", "zhm28", "#/* Net density at Alt */", "output", ".", "d", "[", "1", "]", "=", "dnet", "(", "output", ".", "d", "[", "1", "]", ",", "dm16", ",", "zhm16", ",", "xmm", ",", "16.", ")", "rl", "=", "pdm", "[", "1", "]", "[", "1", "]", "*", "pdl", "[", "1", "]", "[", "16", "]", "*", "(", "1.0", "+", "flags", ".", "sw", "[", "1", "]", "*", "pdl", "[", "0", "]", "[", "23", "]", "*", "(", "Input", ".", "f107A", "-", "150.0", ")", ")", "hc16", "=", "pdm", "[", "1", "]", "[", "5", "]", "*", "pdl", "[", "1", "]", "[", "3", "]", "zc16", "=", "pdm", "[", "1", "]", "[", "4", "]", "*", "pdl", "[", "1", "]", "[", "2", "]", "hc216", "=", "pdm", "[", "1", "]", "[", "5", "]", "*", "pdl", "[", "1", "]", "[", "4", "]", "output", ".", "d", "[", "1", "]", "=", "output", ".", "d", "[", "1", "]", "*", "ccor2", "(", "z", ",", "rl", ",", "hc16", ",", "zc16", ",", "hc216", ")", "#/* Chemistry correction */", "hcc16", "=", "pdm", "[", "1", "]", "[", "7", "]", "*", "pdl", "[", "1", "]", "[", "13", "]", "zcc16", "=", "pdm", "[", "1", "]", "[", "6", "]", "*", "pdl", "[", "1", "]", "[", "12", "]", 
"rc16", "=", "pdm", "[", "1", "]", "[", "3", "]", "*", "pdl", "[", "1", "]", "[", "14", "]", "#/* Net density corrected at Alt */", "output", ".", "d", "[", "1", "]", "=", "output", ".", "d", "[", "1", "]", "*", "ccor", "(", "z", ",", "rc16", ",", "hcc16", ",", "zcc16", ")", "#/**** O2 DENSITY ****/", "#/* Density variation factor at Zlb */", "g32", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "4", "]", ",", "Input", ",", "flags", ")", "#/* Diffusive density at Zlb */", "db32", "=", "pdm", "[", "3", "]", "[", "0", "]", "*", "exp", "(", "g32", ")", "*", "pd", "[", "4", "]", "[", "0", "]", "#/* Diffusive density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "3", "]", "=", "densu", "(", "z", ",", "db32", ",", "tinf", ",", "tlb", ",", "32.", ",", "alpha", "[", "3", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "3", "]", "if", "(", "flags", ".", "sw", "[", "15", "]", ")", ":", "if", "(", "z", "<=", "altl", "[", "3", "]", ")", ":", "#/* Turbopause */", "zh32", "=", "pdm", "[", "3", "]", "[", "2", "]", "#/* Mixed density at Zlb */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "b32", "=", "densu", "(", "zh32", ",", "db32", ",", "tinf", ",", "tlb", ",", "32.", "-", "xmm", ",", "alpha", "[", "3", "]", "-", "1.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "#/* Mixed density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "global", "dm32", "dm32", "=", "densu", "(", "z", ",", "b32", ",", "tinf", ",", "tlb", ",", "xmm", ",", "0.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", 
"mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "zhm32", "=", "zhm28", "#/* Net density at Alt */", "output", ".", "d", "[", "3", "]", "=", "dnet", "(", "output", ".", "d", "[", "3", "]", ",", "dm32", ",", "zhm32", ",", "xmm", ",", "32.", ")", "#/* Correction to specified mixing ratio at ground */", "rl", "=", "log", "(", "b28", "*", "pdm", "[", "3", "]", "[", "1", "]", "/", "b32", ")", "hc32", "=", "pdm", "[", "3", "]", "[", "5", "]", "*", "pdl", "[", "1", "]", "[", "7", "]", "zc32", "=", "pdm", "[", "3", "]", "[", "4", "]", "*", "pdl", "[", "1", "]", "[", "6", "]", "output", ".", "d", "[", "3", "]", "=", "output", ".", "d", "[", "3", "]", "*", "ccor", "(", "z", ",", "rl", ",", "hc32", ",", "zc32", ")", "#/* Correction for general departure from diffusive equilibrium above Zlb */", "hcc32", "=", "pdm", "[", "3", "]", "[", "7", "]", "*", "pdl", "[", "1", "]", "[", "22", "]", "hcc232", "=", "pdm", "[", "3", "]", "[", "7", "]", "*", "pdl", "[", "0", "]", "[", "22", "]", "zcc32", "=", "pdm", "[", "3", "]", "[", "6", "]", "*", "pdl", "[", "1", "]", "[", "21", "]", "rc32", "=", "pdm", "[", "3", "]", "[", "3", "]", "*", "pdl", "[", "1", "]", "[", "23", "]", "*", "(", "1.", "+", "flags", ".", "sw", "[", "1", "]", "*", "pdl", "[", "0", "]", "[", "23", "]", "*", "(", "Input", ".", "f107A", "-", "150.", ")", ")", "#/* Net density corrected at Alt */", "output", ".", "d", "[", "3", "]", "=", "output", ".", "d", "[", "3", "]", "*", "ccor2", "(", "z", ",", "rc32", ",", "hcc32", ",", "zcc32", ",", "hcc232", ")", "#/**** AR DENSITY ****/", "#/* Density variation factor at Zlb */", "g40", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "5", "]", ",", "Input", ",", "flags", ")", "#/* Diffusive density at Zlb */", "db40", "=", "pdm", "[", "4", "]", "[", "0", "]", "*", "exp", "(", "g40", ")", "*", "pd", "[", "5", "]", "[", "0", "]", "#/* Diffusive density at Alt */", 
"RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "4", "]", "=", "densu", "(", "z", ",", "db40", ",", "tinf", ",", "tlb", ",", "40.", ",", "alpha", "[", "4", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "4", "]", "if", "(", "(", "flags", ".", "sw", "[", "15", "]", ")", "and", "(", "z", "<=", "altl", "[", "4", "]", ")", ")", ":", "#/* Turbopause */", "zh40", "=", "pdm", "[", "4", "]", "[", "2", "]", "#/* Mixed density at Zlb */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "b40", "=", "densu", "(", "zh40", ",", "db40", ",", "tinf", ",", "tlb", ",", "40.", "-", "xmm", ",", "alpha", "[", "4", "]", "-", "1.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "#/* Mixed density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "global", "dm40", "dm40", "=", "densu", "(", "z", ",", "b40", ",", "tinf", ",", "tlb", ",", "xmm", ",", "0.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "zhm40", "=", "zhm28", "#/* Net density at Alt */", "output", ".", "d", "[", "4", "]", "=", "dnet", "(", "output", ".", "d", "[", "4", "]", ",", "dm40", ",", "zhm40", ",", "xmm", ",", "40.", ")", "#/* Correction to specified mixing ratio at ground */", "rl", "=", "log", "(", "b28", "*", "pdm", "[", "4", "]", "[", "1", "]", "/", "b40", ")", "hc40", "=", "pdm", "[", "4", "]", "[", "5", "]", "*", "pdl", "[", "1", "]", "[", "9", "]", "zc40", "=", "pdm", "[", "4", "]", "[", "4", "]", "*", "pdl", "[", "1", "]", "[", "8", 
"]", "#/* Net density corrected at Alt */", "output", ".", "d", "[", "4", "]", "=", "output", ".", "d", "[", "4", "]", "*", "ccor", "(", "z", ",", "rl", ",", "hc40", ",", "zc40", ")", "#/**** HYDROGEN DENSITY ****/", "#/* Density variation factor at Zlb */", "g1", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "6", "]", ",", "Input", ",", "flags", ")", "#/* Diffusive density at Zlb */", "db01", "=", "pdm", "[", "5", "]", "[", "0", "]", "*", "exp", "(", "g1", ")", "*", "pd", "[", "6", "]", "[", "0", "]", "#/* Diffusive density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "6", "]", "=", "densu", "(", "z", ",", "db01", ",", "tinf", ",", "tlb", ",", "1.", ",", "alpha", "[", "6", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "6", "]", "if", "(", "(", "flags", ".", "sw", "[", "15", "]", ")", "and", "(", "z", "<=", "altl", "[", "6", "]", ")", ")", ":", "#/* Turbopause */", "zh01", "=", "pdm", "[", "5", "]", "[", "2", "]", "#/* Mixed density at Zlb */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "b01", "=", "densu", "(", "zh01", ",", "db01", ",", "tinf", ",", "tlb", ",", "1.", "-", "xmm", ",", "alpha", "[", "6", "]", "-", "1.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "#/* Mixed density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "global", "dm01", "dm01", "=", "densu", "(", "z", ",", "b01", ",", "tinf", ",", "tlb", ",", "xmm", ",", "0.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", 
"]", "=", "RandomVariable", "[", "0", "]", "zhm01", "=", "zhm28", "#/* Net density at Alt */", "output", ".", "d", "[", "6", "]", "=", "dnet", "(", "output", ".", "d", "[", "6", "]", ",", "dm01", ",", "zhm01", ",", "xmm", ",", "1.", ")", "#/* Correction to specified mixing ratio at ground */", "rl", "=", "log", "(", "b28", "*", "pdm", "[", "5", "]", "[", "1", "]", "*", "sqrt", "(", "pdl", "[", "1", "]", "[", "17", "]", "*", "pdl", "[", "1", "]", "[", "17", "]", ")", "/", "b01", ")", "hc01", "=", "pdm", "[", "5", "]", "[", "5", "]", "*", "pdl", "[", "1", "]", "[", "11", "]", "zc01", "=", "pdm", "[", "5", "]", "[", "4", "]", "*", "pdl", "[", "1", "]", "[", "10", "]", "output", ".", "d", "[", "6", "]", "=", "output", ".", "d", "[", "6", "]", "*", "ccor", "(", "z", ",", "rl", ",", "hc01", ",", "zc01", ")", "#/* Chemistry correction */", "hcc01", "=", "pdm", "[", "5", "]", "[", "7", "]", "*", "pdl", "[", "1", "]", "[", "19", "]", "zcc01", "=", "pdm", "[", "5", "]", "[", "6", "]", "*", "pdl", "[", "1", "]", "[", "18", "]", "rc01", "=", "pdm", "[", "5", "]", "[", "3", "]", "*", "pdl", "[", "1", "]", "[", "20", "]", "#/* Net density corrected at Alt */", "output", ".", "d", "[", "6", "]", "=", "output", ".", "d", "[", "6", "]", "*", "ccor", "(", "z", ",", "rc01", ",", "hcc01", ",", "zcc01", ")", "#/**** ATOMIC NITROGEN DENSITY ****/", "#/* Density variation factor at Zlb */", "g14", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "7", "]", ",", "Input", ",", "flags", ")", "#/* Diffusive density at Zlb */", "db14", "=", "pdm", "[", "6", "]", "[", "0", "]", "*", "exp", "(", "g14", ")", "*", "pd", "[", "7", "]", "[", "0", "]", "#/* Diffusive density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "output", ".", "d", "[", "7", "]", "=", "densu", "(", "z", ",", "db14", ",", "tinf", ",", "tlb", ",", "14.", ",", "alpha", "[", "7", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", 
"meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "dd", "=", "output", ".", "d", "[", "7", "]", "if", "(", "(", "flags", ".", "sw", "[", "15", "]", ")", "and", "(", "z", "<=", "altl", "[", "7", "]", ")", ")", ":", "#/* Turbopause */", "zh14", "=", "pdm", "[", "6", "]", "[", "2", "]", "#/* Mixed density at Zlb */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "b14", "=", "densu", "(", "zh14", ",", "db14", ",", "tinf", ",", "tlb", ",", "14.", "-", "xmm", ",", "alpha", "[", "7", "]", "-", "1.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "#/* Mixed density at Alt */", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "global", "dm14", "dm14", "=", "densu", "(", "z", ",", "b14", ",", "tinf", ",", "tlb", ",", "xmm", ",", "0.", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "zhm14", "=", "zhm28", "#/* Net density at Alt */", "output", ".", "d", "[", "7", "]", "=", "dnet", "(", "output", ".", "d", "[", "7", "]", ",", "dm14", ",", "zhm14", ",", "xmm", ",", "14.", ")", "#/* Correction to specified mixing ratio at ground */", "rl", "=", "log", "(", "b28", "*", "pdm", "[", "6", "]", "[", "1", "]", "*", "sqrt", "(", "pdl", "[", "0", "]", "[", "2", "]", "*", "pdl", "[", "0", "]", "[", "2", "]", ")", "/", "b14", ")", "hc14", "=", "pdm", "[", "6", "]", "[", "5", "]", "*", "pdl", "[", "0", "]", "[", "1", "]", "zc14", "=", "pdm", "[", "6", "]", "[", "4", "]", "*", "pdl", "[", "0", "]", "[", "0", "]", "output", ".", "d", "[", "7", "]", "=", "output", ".", "d", "[", "7", "]", "*", "ccor", "(", "z", ",", "rl", ",", "hc14", ",", "zc14", ")", "#/* Chemistry correction */", "hcc14", "=", 
"pdm", "[", "6", "]", "[", "7", "]", "*", "pdl", "[", "0", "]", "[", "4", "]", "zcc14", "=", "pdm", "[", "6", "]", "[", "6", "]", "*", "pdl", "[", "0", "]", "[", "3", "]", "rc14", "=", "pdm", "[", "6", "]", "[", "3", "]", "*", "pdl", "[", "0", "]", "[", "5", "]", "#/* Net density corrected at Alt */", "output", ".", "d", "[", "7", "]", "=", "output", ".", "d", "[", "7", "]", "*", "ccor", "(", "z", ",", "rc14", ",", "hcc14", ",", "zcc14", ")", "#/**** Anomalous OXYGEN DENSITY ****/", "g16h", "=", "flags", ".", "sw", "[", "21", "]", "*", "globe7", "(", "pd", "[", "8", "]", ",", "Input", ",", "flags", ")", "db16h", "=", "pdm", "[", "7", "]", "[", "0", "]", "*", "exp", "(", "g16h", ")", "*", "pd", "[", "8", "]", "[", "0", "]", "tho", "=", "pdm", "[", "7", "]", "[", "9", "]", "*", "pdl", "[", "0", "]", "[", "6", "]", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "dd", "=", "densu", "(", "z", ",", "db16h", ",", "tho", ",", "tho", ",", "16.", ",", "alpha", "[", "8", "]", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "zsht", "=", "pdm", "[", "7", "]", "[", "5", "]", "zmho", "=", "pdm", "[", "7", "]", "[", "4", "]", "zsho", "=", "scalh", "(", "zmho", ",", "16.0", ",", "tho", ")", "output", ".", "d", "[", "8", "]", "=", "dd", "*", "exp", "(", "-", "zsht", "/", "zsho", "*", "(", "exp", "(", "-", "(", "z", "-", "zmho", ")", "/", "zsht", ")", "-", "1.", ")", ")", "#/* total mass density */", "output", ".", "d", "[", "5", "]", "=", "1.66E-24", "*", "(", "4.0", "*", "output", ".", "d", "[", "0", "]", "+", "16.0", "*", "output", ".", "d", "[", "1", "]", "+", "28.0", "*", "output", ".", "d", "[", "2", "]", "+", "32.0", "*", "output", ".", "d", "[", "3", "]", "+", "40.0", "*", "output", ".", "d", "[", "4", "]", "+", "output", ".", "d", "[", "6", "]", "+", "14.0", "*", "output", ".", "d", "[", "7", "]", ")", 
"db48", "=", "1.66E-24", "*", "(", "4.0", "*", "db04", "+", "16.0", "*", "db16", "+", "28.0", "*", "db28", "+", "32.0", "*", "db32", "+", "40.0", "*", "db40", "+", "db01", "+", "14.0", "*", "db14", ")", "#/* temperature */", "z", "=", "sqrt", "(", "Input", ".", "alt", "*", "Input", ".", "alt", ")", "RandomVariable", "=", "[", "output", ".", "t", "[", "1", "]", "]", "ddum", "=", "densu", "(", "z", ",", "1.0", ",", "tinf", ",", "tlb", ",", "0.0", ",", "0.0", ",", "RandomVariable", ",", "ptm", "[", "5", "]", ",", "s", ",", "mn1", ",", "zn1", ",", "meso_tn1", ",", "meso_tgn1", ")", "output", ".", "t", "[", "1", "]", "=", "RandomVariable", "[", "0", "]", "if", "(", "flags", ".", "sw", "[", "0", "]", ")", ":", "# pragma: no cover", "for", "i", "in", "range", "(", "9", ")", ":", "output", ".", "d", "[", "i", "]", "=", "output", ".", "d", "[", "i", "]", "*", "1.0E6", "output", ".", "d", "[", "5", "]", "=", "output", ".", "d", "[", "5", "]", "/", "1000", "return" ]
37.477401
0.055213
def creation_ordered(class_to_decorate): """ Class decorator that ensures that instances will be ordered after creation order when sorted. :type class_to_decorate: class :rtype: class """ next_index = functools.partial(next, itertools.count()) __init__orig = class_to_decorate.__init__ @functools.wraps(__init__orig, assigned=['__doc__']) def __init__(self, *args, **kwargs): object.__setattr__(self, '_index', next_index()) __init__orig(self, *args, **kwargs) setattr(class_to_decorate, '__init__', __init__) # noinspection PyProtectedMember def __lt__(self, other): return self._index < other._index # pragma: no mutate setattr(class_to_decorate, '__lt__', __lt__) class_to_decorate = functools.total_ordering(class_to_decorate) return class_to_decorate
[ "def", "creation_ordered", "(", "class_to_decorate", ")", ":", "next_index", "=", "functools", ".", "partial", "(", "next", ",", "itertools", ".", "count", "(", ")", ")", "__init__orig", "=", "class_to_decorate", ".", "__init__", "@", "functools", ".", "wraps", "(", "__init__orig", ",", "assigned", "=", "[", "'__doc__'", "]", ")", "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "object", ".", "__setattr__", "(", "self", ",", "'_index'", ",", "next_index", "(", ")", ")", "__init__orig", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "setattr", "(", "class_to_decorate", ",", "'__init__'", ",", "__init__", ")", "# noinspection PyProtectedMember", "def", "__lt__", "(", "self", ",", "other", ")", ":", "return", "self", ".", "_index", "<", "other", ".", "_index", "# pragma: no mutate", "setattr", "(", "class_to_decorate", ",", "'__lt__'", ",", "__lt__", ")", "class_to_decorate", "=", "functools", ".", "total_ordering", "(", "class_to_decorate", ")", "return", "class_to_decorate" ]
29.785714
0.002323
def radar_xsect(scatterer, h_pol=True): """Radar cross section for the current setup. Args: scatterer: a Scatterer instance. h_pol: If True (default), use horizontal polarization. If False, use vertical polarization. Returns: The radar cross section. """ Z = scatterer.get_Z() if h_pol: return 2 * np.pi * \ (Z[0,0] - Z[0,1] - Z[1,0] + Z[1,1]) else: return 2 * np.pi * \ (Z[0,0] + Z[0,1] + Z[1,0] + Z[1,1])
[ "def", "radar_xsect", "(", "scatterer", ",", "h_pol", "=", "True", ")", ":", "Z", "=", "scatterer", ".", "get_Z", "(", ")", "if", "h_pol", ":", "return", "2", "*", "np", ".", "pi", "*", "(", "Z", "[", "0", ",", "0", "]", "-", "Z", "[", "0", ",", "1", "]", "-", "Z", "[", "1", ",", "0", "]", "+", "Z", "[", "1", ",", "1", "]", ")", "else", ":", "return", "2", "*", "np", ".", "pi", "*", "(", "Z", "[", "0", ",", "0", "]", "+", "Z", "[", "0", ",", "1", "]", "+", "Z", "[", "1", ",", "0", "]", "+", "Z", "[", "1", ",", "1", "]", ")" ]
27.5
0.019531
def list(self, per_page=None, page=None, status=None, service='facebook'): """ Get a list of Pylon tasks :param per_page: How many tasks to display per page :type per_page: int :param page: Which page of tasks to display :type page: int :param status: The status of the tasks to list :type page: string :param service: The PYLON service (facebook) :type service: str :return: dict of REST API output with headers attached :rtype: :class:`~datasift.request.DictResponse` :raises: :class:`~datasift.exceptions.DataSiftApiException`, :class:`requests.exceptions.HTTPError` """ params = {} if per_page is not None: params['per_page'] = per_page if page is not None: params['page'] = page if status: params['status'] = status return self.request.get(service + '/task', params)
[ "def", "list", "(", "self", ",", "per_page", "=", "None", ",", "page", "=", "None", ",", "status", "=", "None", ",", "service", "=", "'facebook'", ")", ":", "params", "=", "{", "}", "if", "per_page", "is", "not", "None", ":", "params", "[", "'per_page'", "]", "=", "per_page", "if", "page", "is", "not", "None", ":", "params", "[", "'page'", "]", "=", "page", "if", "status", ":", "params", "[", "'status'", "]", "=", "status", "return", "self", ".", "request", ".", "get", "(", "service", "+", "'/task'", ",", "params", ")" ]
36.62963
0.00197
def incrby(self, fmt, offset, increment, overflow=None): """ Increment a bitfield by a given amount. :param fmt: format-string for the bitfield being updated, e.g. u8 for an unsigned 8-bit integer. :param int offset: offset (in number of bits). :param int increment: value to increment the bitfield by. :param str overflow: overflow algorithm. Defaults to WRAP, but other acceptable values are SAT and FAIL. See the Redis docs for descriptions of these algorithms. :returns: a :py:class:`BitFieldOperation` instance. """ if overflow is not None and overflow != self._last_overflow: self._last_overflow = overflow self.operations.append(('OVERFLOW', overflow)) self.operations.append(('INCRBY', fmt, offset, increment)) return self
[ "def", "incrby", "(", "self", ",", "fmt", ",", "offset", ",", "increment", ",", "overflow", "=", "None", ")", ":", "if", "overflow", "is", "not", "None", "and", "overflow", "!=", "self", ".", "_last_overflow", ":", "self", ".", "_last_overflow", "=", "overflow", "self", ".", "operations", ".", "append", "(", "(", "'OVERFLOW'", ",", "overflow", ")", ")", "self", ".", "operations", ".", "append", "(", "(", "'INCRBY'", ",", "fmt", ",", "offset", ",", "increment", ")", ")", "return", "self" ]
45.368421
0.002273
def run( project: 'projects.Project', step: 'projects.ProjectStep' ) -> dict: """ Runs the markdown file and renders the contents to the notebook display :param project: :param step: :return: A run response dictionary containing """ with open(step.source_path, 'r') as f: code = f.read() try: cauldron.display.markdown(code, **project.shared.fetch(None)) return {'success': True} except Exception as err: return dict( success=False, html_message=templating.render_template( 'markdown-error.html', error=err ) )
[ "def", "run", "(", "project", ":", "'projects.Project'", ",", "step", ":", "'projects.ProjectStep'", ")", "->", "dict", ":", "with", "open", "(", "step", ".", "source_path", ",", "'r'", ")", "as", "f", ":", "code", "=", "f", ".", "read", "(", ")", "try", ":", "cauldron", ".", "display", ".", "markdown", "(", "code", ",", "*", "*", "project", ".", "shared", ".", "fetch", "(", "None", ")", ")", "return", "{", "'success'", ":", "True", "}", "except", "Exception", "as", "err", ":", "return", "dict", "(", "success", "=", "False", ",", "html_message", "=", "templating", ".", "render_template", "(", "'markdown-error.html'", ",", "error", "=", "err", ")", ")" ]
24.259259
0.001468
def append_dynamics(self, t, dynamics, canvas=0, separate=False, color='blue'): """! @brief Append several dynamics to canvas or canvases (defined by 'canvas' and 'separate' arguments). @param[in] t (list): Time points that corresponds to dynamic values and considered on a X axis. @param[in] dynamics (list): Dynamics where each of them is considered on Y axis. @param[in] canvas (uint): Index of canvas where dynamic should be displayed, in case of 'separate' representation this argument is considered as a first canvas from that displaying should be done. @param[in] separate (bool|list): If 'True' then each dynamic is displayed on separate canvas, if it is defined by list, for example, [ [1, 2], [3, 4] ], then the first and the second dynamics are displayed on the canvas with index 'canvas' and the third and forth are displayed on the next 'canvas + 1' canvas. @param[in] color (string): Color that is used to display output dynamic(s). """ description = dynamic_descr(canvas, t, dynamics, separate, color); self.__dynamic_storage.append(description); self.__update_canvas_xlim(description.time, description.separate);
[ "def", "append_dynamics", "(", "self", ",", "t", ",", "dynamics", ",", "canvas", "=", "0", ",", "separate", "=", "False", ",", "color", "=", "'blue'", ")", ":", "description", "=", "dynamic_descr", "(", "canvas", ",", "t", ",", "dynamics", ",", "separate", ",", "color", ")", "self", ".", "__dynamic_storage", ".", "append", "(", "description", ")", "self", ".", "__update_canvas_xlim", "(", "description", ".", "time", ",", "description", ".", "separate", ")" ]
72.111111
0.010646
def resize(att_mat, max_length=None): """Normalize attention matrices and reshape as necessary.""" for i, att in enumerate(att_mat): # Add extra batch dim for viz code to work. if att.ndim == 3: att = np.expand_dims(att, axis=0) if max_length is not None: # Sum across different attention values for each token. att = att[:, :, :max_length, :max_length] row_sums = np.sum(att, axis=2) # Normalize att /= row_sums[:, :, np.newaxis] att_mat[i] = att return att_mat
[ "def", "resize", "(", "att_mat", ",", "max_length", "=", "None", ")", ":", "for", "i", ",", "att", "in", "enumerate", "(", "att_mat", ")", ":", "# Add extra batch dim for viz code to work.", "if", "att", ".", "ndim", "==", "3", ":", "att", "=", "np", ".", "expand_dims", "(", "att", ",", "axis", "=", "0", ")", "if", "max_length", "is", "not", "None", ":", "# Sum across different attention values for each token.", "att", "=", "att", "[", ":", ",", ":", ",", ":", "max_length", ",", ":", "max_length", "]", "row_sums", "=", "np", ".", "sum", "(", "att", ",", "axis", "=", "2", ")", "# Normalize", "att", "/=", "row_sums", "[", ":", ",", ":", ",", "np", ".", "newaxis", "]", "att_mat", "[", "i", "]", "=", "att", "return", "att_mat" ]
36.214286
0.019231
def dvcircdR(self,R,phi=None): """ NAME: dvcircdR PURPOSE: calculate the derivative of the circular velocity at R wrt R in this potential INPUT: R - Galactocentric radius (can be Quantity) phi= (None) azimuth to use for non-axisymmetric potentials OUTPUT: derivative of the circular rotation velocity wrt R HISTORY: 2013-01-08 - Written - Bovy (IAS) 2016-06-28 - Added phi= keyword for non-axisymmetric potential - Bovy (UofT) """ return 0.5*(-self.Rforce(R,0.,phi=phi,use_physical=False)\ +R*self.R2deriv(R,0.,phi=phi,use_physical=False))\ /self.vcirc(R,phi=phi,use_physical=False)
[ "def", "dvcircdR", "(", "self", ",", "R", ",", "phi", "=", "None", ")", ":", "return", "0.5", "*", "(", "-", "self", ".", "Rforce", "(", "R", ",", "0.", ",", "phi", "=", "phi", ",", "use_physical", "=", "False", ")", "+", "R", "*", "self", ".", "R2deriv", "(", "R", ",", "0.", ",", "phi", "=", "phi", ",", "use_physical", "=", "False", ")", ")", "/", "self", ".", "vcirc", "(", "R", ",", "phi", "=", "phi", ",", "use_physical", "=", "False", ")" ]
26.6875
0.031638
def make_router(): """Return a WSGI application that searches requests to controllers """ global router routings = [ ('GET', '^/$', index), ('GET', '^/api/?$', index), ('POST', '^/api/1/calculate/?$', calculate.api1_calculate), ('GET', '^/api/2/entities/?$', entities.api2_entities), ('GET', '^/api/1/field/?$', field.api1_field), ('GET', '^/api/1/formula/(?P<name>[^/]+)/?$', formula.api1_formula), ('GET', '^/api/2/formula/(?:(?P<period>[A-Za-z0-9:-]*)/)?(?P<names>[A-Za-z0-9_+-]+)/?$', formula.api2_formula), ('GET', '^/api/1/parameters/?$', parameters.api1_parameters), ('GET', '^/api/1/reforms/?$', reforms.api1_reforms), ('POST', '^/api/1/simulate/?$', simulate.api1_simulate), ('GET', '^/api/1/swagger$', swagger.api1_swagger), ('GET', '^/api/1/variables/?$', variables.api1_variables), ] router = urls.make_router(*routings) return router
[ "def", "make_router", "(", ")", ":", "global", "router", "routings", "=", "[", "(", "'GET'", ",", "'^/$'", ",", "index", ")", ",", "(", "'GET'", ",", "'^/api/?$'", ",", "index", ")", ",", "(", "'POST'", ",", "'^/api/1/calculate/?$'", ",", "calculate", ".", "api1_calculate", ")", ",", "(", "'GET'", ",", "'^/api/2/entities/?$'", ",", "entities", ".", "api2_entities", ")", ",", "(", "'GET'", ",", "'^/api/1/field/?$'", ",", "field", ".", "api1_field", ")", ",", "(", "'GET'", ",", "'^/api/1/formula/(?P<name>[^/]+)/?$'", ",", "formula", ".", "api1_formula", ")", ",", "(", "'GET'", ",", "'^/api/2/formula/(?:(?P<period>[A-Za-z0-9:-]*)/)?(?P<names>[A-Za-z0-9_+-]+)/?$'", ",", "formula", ".", "api2_formula", ")", ",", "(", "'GET'", ",", "'^/api/1/parameters/?$'", ",", "parameters", ".", "api1_parameters", ")", ",", "(", "'GET'", ",", "'^/api/1/reforms/?$'", ",", "reforms", ".", "api1_reforms", ")", ",", "(", "'POST'", ",", "'^/api/1/simulate/?$'", ",", "simulate", ".", "api1_simulate", ")", ",", "(", "'GET'", ",", "'^/api/1/swagger$'", ",", "swagger", ".", "api1_swagger", ")", ",", "(", "'GET'", ",", "'^/api/1/variables/?$'", ",", "variables", ".", "api1_variables", ")", ",", "]", "router", "=", "urls", ".", "make_router", "(", "*", "routings", ")", "return", "router" ]
50.105263
0.002062
def pretty_print(d, ind='', verbosity=0): """Pretty print a data dictionary from the bridge client """ assert isinstance(d, dict) for k, v in sorted(d.items()): str_base = '{} - [{}] {}'.format(ind, type(v).__name__, k) if isinstance(v, dict): print(str_base.replace('-', '+', 1)) pretty_print(v, ind=ind+' ', verbosity=verbosity) continue elif isinstance(v, np.ndarray): node = '{}, {}, {}'.format(str_base, v.dtype, v.shape) if verbosity >= 2: node += '\n{}'.format(v) elif isinstance(v, Sequence): if v and isinstance(v, (list, tuple)): itemtype = ' of ' + type(v[0]).__name__ pos = str_base.find(']') str_base = str_base[:pos] + itemtype + str_base[pos:] node = '{}, {}'.format(str_base, v) if verbosity < 1 and len(node) > 80: node = node[:77] + '...' else: node = '{}, {}'.format(str_base, v) print(node)
[ "def", "pretty_print", "(", "d", ",", "ind", "=", "''", ",", "verbosity", "=", "0", ")", ":", "assert", "isinstance", "(", "d", ",", "dict", ")", "for", "k", ",", "v", "in", "sorted", "(", "d", ".", "items", "(", ")", ")", ":", "str_base", "=", "'{} - [{}] {}'", ".", "format", "(", "ind", ",", "type", "(", "v", ")", ".", "__name__", ",", "k", ")", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "print", "(", "str_base", ".", "replace", "(", "'-'", ",", "'+'", ",", "1", ")", ")", "pretty_print", "(", "v", ",", "ind", "=", "ind", "+", "' '", ",", "verbosity", "=", "verbosity", ")", "continue", "elif", "isinstance", "(", "v", ",", "np", ".", "ndarray", ")", ":", "node", "=", "'{}, {}, {}'", ".", "format", "(", "str_base", ",", "v", ".", "dtype", ",", "v", ".", "shape", ")", "if", "verbosity", ">=", "2", ":", "node", "+=", "'\\n{}'", ".", "format", "(", "v", ")", "elif", "isinstance", "(", "v", ",", "Sequence", ")", ":", "if", "v", "and", "isinstance", "(", "v", ",", "(", "list", ",", "tuple", ")", ")", ":", "itemtype", "=", "' of '", "+", "type", "(", "v", "[", "0", "]", ")", ".", "__name__", "pos", "=", "str_base", ".", "find", "(", "']'", ")", "str_base", "=", "str_base", "[", ":", "pos", "]", "+", "itemtype", "+", "str_base", "[", "pos", ":", "]", "node", "=", "'{}, {}'", ".", "format", "(", "str_base", ",", "v", ")", "if", "verbosity", "<", "1", "and", "len", "(", "node", ")", ">", "80", ":", "node", "=", "node", "[", ":", "77", "]", "+", "'...'", "else", ":", "node", "=", "'{}, {}'", ".", "format", "(", "str_base", ",", "v", ")", "print", "(", "node", ")" ]
39.961538
0.00094
def cmd_hasher(f, algorithm): """Compute various hashes for the input data, that can be a file or a stream. Example: \b $ habu.hasher README.rst md5 992a833cd162047daaa6a236b8ac15ae README.rst ripemd160 0566f9141e65e57cae93e0e3b70d1d8c2ccb0623 README.rst sha1 d7dbfd2c5e2828eb22f776550c826e4166526253 README.rst sha256 6bb22d927e1b6307ced616821a1877b6cc35e... README.rst sha512 8743f3eb12a11cf3edcc16e400fb14d599b4a... README.rst whirlpool 96bcc083242e796992c0f3462f330811f9e8c... README.rst You can also specify which algorithm to use. In such case, the output is only the value of the calculated hash: \b $ habu.hasher -a md5 README.rst 992a833cd162047daaa6a236b8ac15ae README.rst """ data = f.read() if not data: print("Empty file or string!") return 1 if algorithm: print(hasher(data, algorithm)[algorithm], f.name) else: for algo, result in hasher(data).items(): print("{:<12} {} {}".format(algo, result, f.name))
[ "def", "cmd_hasher", "(", "f", ",", "algorithm", ")", ":", "data", "=", "f", ".", "read", "(", ")", "if", "not", "data", ":", "print", "(", "\"Empty file or string!\"", ")", "return", "1", "if", "algorithm", ":", "print", "(", "hasher", "(", "data", ",", "algorithm", ")", "[", "algorithm", "]", ",", "f", ".", "name", ")", "else", ":", "for", "algo", ",", "result", "in", "hasher", "(", "data", ")", ".", "items", "(", ")", ":", "print", "(", "\"{:<12} {} {}\"", ".", "format", "(", "algo", ",", "result", ",", "f", ".", "name", ")", ")" ]
31.878788
0.001845
def get_identities(self, identity=None, attrs=None): """ Get identities matching name and attrs of the user, as a list :param: zobjects.Identity or identity name (string) :param: attrs dict of attributes to return only identities matching :returns: list of zobjects.Identity """ resp = self.request('GetIdentities') if 'identity' in resp: identities = resp['identity'] if type(identities) != list: identities = [identities] if identity or attrs: wanted_identities = [] for u_identity in [ zobjects.Identity.from_dict(i) for i in identities]: if identity: if isinstance(identity, zobjects.Identity): if u_identity.name == identity.name: return [u_identity] else: if u_identity.name == identity: return [u_identity] elif attrs: for attr, value in attrs.items(): if (attr in u_identity._a_tags and u_identity._a_tags[attr] == value): wanted_identities.append(u_identity) return wanted_identities else: return [zobjects.Identity.from_dict(i) for i in identities] else: return []
[ "def", "get_identities", "(", "self", ",", "identity", "=", "None", ",", "attrs", "=", "None", ")", ":", "resp", "=", "self", ".", "request", "(", "'GetIdentities'", ")", "if", "'identity'", "in", "resp", ":", "identities", "=", "resp", "[", "'identity'", "]", "if", "type", "(", "identities", ")", "!=", "list", ":", "identities", "=", "[", "identities", "]", "if", "identity", "or", "attrs", ":", "wanted_identities", "=", "[", "]", "for", "u_identity", "in", "[", "zobjects", ".", "Identity", ".", "from_dict", "(", "i", ")", "for", "i", "in", "identities", "]", ":", "if", "identity", ":", "if", "isinstance", "(", "identity", ",", "zobjects", ".", "Identity", ")", ":", "if", "u_identity", ".", "name", "==", "identity", ".", "name", ":", "return", "[", "u_identity", "]", "else", ":", "if", "u_identity", ".", "name", "==", "identity", ":", "return", "[", "u_identity", "]", "elif", "attrs", ":", "for", "attr", ",", "value", "in", "attrs", ".", "items", "(", ")", ":", "if", "(", "attr", "in", "u_identity", ".", "_a_tags", "and", "u_identity", ".", "_a_tags", "[", "attr", "]", "==", "value", ")", ":", "wanted_identities", ".", "append", "(", "u_identity", ")", "return", "wanted_identities", "else", ":", "return", "[", "zobjects", ".", "Identity", ".", "from_dict", "(", "i", ")", "for", "i", "in", "identities", "]", "else", ":", "return", "[", "]" ]
39.631579
0.001296
def add_operator(self, operator): """Add an ``Operator`` to the ``Expression``. The ``Operator`` may result in a new ``Expression`` if an ``Operator`` already exists and is of a different precedence. There are three possibilities when adding an ``Operator`` to an ``Expression`` depending on whether or not an ``Operator`` already exists: - No ``Operator`` on the working ``Expression``; Simply set the ``Operator`` and return ``self``. - ``Operator`` already exists and is higher in precedence; The ``Operator`` and last ``Constraint`` belong in a sub-expression of the working ``Expression``. - ``Operator`` already exists and is lower in precedence; The ``Operator`` belongs to the parent of the working ``Expression`` whether one currently exists or not. To remain in the context of the top ``Expression``, this method will return the parent here rather than ``self``. Args: operator (Operator): What we are adding. Returns: Expression: ``self`` or related ``Expression``. Raises: FiqlObjectExpression: Operator is not a valid ``Operator``. """ if not isinstance(operator, Operator): raise FiqlObjectException("%s is not a valid element type" % ( operator.__class__)) if not self._working_fragment.operator: self._working_fragment.operator = operator elif operator > self._working_fragment.operator: last_constraint = self._working_fragment.elements.pop() self._working_fragment = self._working_fragment \ .create_nested_expression() self._working_fragment.add_element(last_constraint) self._working_fragment.add_operator(operator) elif operator < self._working_fragment.operator: if self._working_fragment.parent: return self._working_fragment.parent.add_operator(operator) else: return Expression().add_element(self._working_fragment) \ .add_operator(operator) return self
[ "def", "add_operator", "(", "self", ",", "operator", ")", ":", "if", "not", "isinstance", "(", "operator", ",", "Operator", ")", ":", "raise", "FiqlObjectException", "(", "\"%s is not a valid element type\"", "%", "(", "operator", ".", "__class__", ")", ")", "if", "not", "self", ".", "_working_fragment", ".", "operator", ":", "self", ".", "_working_fragment", ".", "operator", "=", "operator", "elif", "operator", ">", "self", ".", "_working_fragment", ".", "operator", ":", "last_constraint", "=", "self", ".", "_working_fragment", ".", "elements", ".", "pop", "(", ")", "self", ".", "_working_fragment", "=", "self", ".", "_working_fragment", ".", "create_nested_expression", "(", ")", "self", ".", "_working_fragment", ".", "add_element", "(", "last_constraint", ")", "self", ".", "_working_fragment", ".", "add_operator", "(", "operator", ")", "elif", "operator", "<", "self", ".", "_working_fragment", ".", "operator", ":", "if", "self", ".", "_working_fragment", ".", "parent", ":", "return", "self", ".", "_working_fragment", ".", "parent", ".", "add_operator", "(", "operator", ")", "else", ":", "return", "Expression", "(", ")", ".", "add_element", "(", "self", ".", "_working_fragment", ")", ".", "add_operator", "(", "operator", ")", "return", "self" ]
44.795918
0.001337
def get_git_changeset():
    """Return a numeric identifier of the latest git changeset.

    The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS
    format. This value isn't guaranteed to be unique, but collisions are
    very unlikely, so it's sufficient for generating development version
    numbers.

    Returns:
        str or None: The formatted timestamp, or ``None`` when it cannot
        be determined (not a git checkout, ``git`` unavailable, or the
        command produced no usable output).
    """
    repo_dir = os.path.dirname(os.path.abspath(__file__))
    try:
        # Argument list + shell=False avoids any shell interpretation and
        # is the idiomatic replacement for Popen/communicate.
        git_log = subprocess.run(
            ['git', 'log', '--pretty=format:%ct', '--quiet', '-1', 'HEAD'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            cwd=repo_dir, universal_newlines=True)
        # A failed command yields empty stdout, which int() rejects.
        timestamp = datetime.datetime.utcfromtimestamp(int(git_log.stdout))
    except OSError:
        # git binary missing entirely.
        return None
    except ValueError:
        # stdout was empty or not an integer timestamp.
        return None
    return timestamp.strftime('%Y%m%d%H%M%S')
[ "def", "get_git_changeset", "(", ")", ":", "repo_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "git_log", "=", "subprocess", ".", "Popen", "(", "'git log --pretty=format:%ct --quiet -1 HEAD'", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ",", "cwd", "=", "repo_dir", ",", "universal_newlines", "=", "True", ")", "timestamp", "=", "git_log", ".", "communicate", "(", ")", "[", "0", "]", "try", ":", "timestamp", "=", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "int", "(", "timestamp", ")", ")", "return", "timestamp", ".", "strftime", "(", "'%Y%m%d%H%M%S'", ")", "except", "ValueError", ":", "return", "None" ]
48.411765
0.002384
def post_event(api_key=None,
               app_key=None,
               title=None,
               text=None,
               date_happened=None,
               priority=None,
               host=None,
               tags=None,
               alert_type=None,
               aggregation_key=None,
               source_type_name=None):
    '''
    Post an event to the Datadog stream.

    CLI Example

    .. code-block:: bash

        salt-call datadog.post_event api_key='0123456789' \\
                                     app_key='9876543210' \\
                                     title='Salt Highstate' \\
                                     text="Salt highstate was run on $(salt-call grains.get id)" \\
                                     tags='["service:salt", "event:highstate"]'

    Required arguments

    :param title:   The event title. Limited to 100 characters.
    :param text:    The body of the event. Limited to 4000 characters. The text
                    supports markdown.

    Optional arguments

    :param date_happened:       POSIX timestamp of the event.
    :param priority:            The priority of the event ('normal' or 'low').
    :param host:                Host name to associate with the event.
    :param tags:                A list of tags to apply to the event.
    :param alert_type:          "error", "warning", "info" or "success".
    :param aggregation_key:     An arbitrary string to use for aggregation,
                                max length of 100 characters.
    :param source_type_name:    The type of event being posted.
    '''
    _initialize_connection(api_key, app_key)
    if title is None:
        raise SaltInvocationError('title must be specified')
    if text is None:
        raise SaltInvocationError('text must be specified')
    if alert_type not in [None, 'error', 'warning', 'info', 'success']:
        # Datadog only supports these alert types but the API doesn't return an
        # error for an incorrect alert_type, so we can do it here for now.
        # https://github.com/DataDog/datadogpy/issues/215
        message = ('alert_type must be one of "error", "warning", "info", or '
                   '"success"')
        raise SaltInvocationError(message)

    ret = {'result': False,
           'response': None,
           'comment': ''}

    try:
        response = datadog.api.Event.create(title=title,
                                            text=text,
                                            date_happened=date_happened,
                                            priority=priority,
                                            host=host,
                                            tags=tags,
                                            alert_type=alert_type,
                                            aggregation_key=aggregation_key,
                                            source_type_name=source_type_name
                                            )
    except ValueError:
        comment = ('Unexpected exception in Datadog Post Event API '
                   'call. Are your keys correct?')
        ret['comment'] = comment
        return ret

    ret['response'] = response
    # Membership testing goes directly against the dict; building the
    # .keys() view first was redundant.
    if 'status' in response:
        ret['result'] = True
        ret['comment'] = 'Successfully sent event'
    else:
        ret['comment'] = 'Error in posting event.'
    return ret
[ "def", "post_event", "(", "api_key", "=", "None", ",", "app_key", "=", "None", ",", "title", "=", "None", ",", "text", "=", "None", ",", "date_happened", "=", "None", ",", "priority", "=", "None", ",", "host", "=", "None", ",", "tags", "=", "None", ",", "alert_type", "=", "None", ",", "aggregation_key", "=", "None", ",", "source_type_name", "=", "None", ")", ":", "_initialize_connection", "(", "api_key", ",", "app_key", ")", "if", "title", "is", "None", ":", "raise", "SaltInvocationError", "(", "'title must be specified'", ")", "if", "text", "is", "None", ":", "raise", "SaltInvocationError", "(", "'text must be specified'", ")", "if", "alert_type", "not", "in", "[", "None", ",", "'error'", ",", "'warning'", ",", "'info'", ",", "'success'", "]", ":", "# Datadog only supports these alert types but the API doesn't return an", "# error for an incorrect alert_type, so we can do it here for now.", "# https://github.com/DataDog/datadogpy/issues/215", "message", "=", "(", "'alert_type must be one of \"error\", \"warning\", \"info\", or '", "'\"success\"'", ")", "raise", "SaltInvocationError", "(", "message", ")", "ret", "=", "{", "'result'", ":", "False", ",", "'response'", ":", "None", ",", "'comment'", ":", "''", "}", "try", ":", "response", "=", "datadog", ".", "api", ".", "Event", ".", "create", "(", "title", "=", "title", ",", "text", "=", "text", ",", "date_happened", "=", "date_happened", ",", "priority", "=", "priority", ",", "host", "=", "host", ",", "tags", "=", "tags", ",", "alert_type", "=", "alert_type", ",", "aggregation_key", "=", "aggregation_key", ",", "source_type_name", "=", "source_type_name", ")", "except", "ValueError", ":", "comment", "=", "(", "'Unexpected exception in Datadog Post Event API '", "'call. 
Are your keys correct?'", ")", "ret", "[", "'comment'", "]", "=", "comment", "return", "ret", "ret", "[", "'response'", "]", "=", "response", "if", "'status'", "in", "response", ".", "keys", "(", ")", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Successfully sent event'", "else", ":", "ret", "[", "'comment'", "]", "=", "'Error in posting event.'", "return", "ret" ]
40.195122
0.000888
def __init(self):
    """Fetch the service definition as JSON and load it onto this object."""
    query_params = {"f": "json"}
    # Request the service description; _get handles auth and proxying.
    response = self._get(self._url,
                         query_params,
                         securityHandler=self._securityHandler,
                         proxy_port=self._proxy_port,
                         proxy_url=self._proxy_url)
    # Cache both the raw JSON string and the parsed dict before
    # populating the instance attributes from it.
    self._json = json.dumps(response)
    self._json_dict = response
    self.loadAttributes(json_dict=response)
[ "def", "__init", "(", "self", ")", ":", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "}", "json_dict", "=", "self", ".", "_get", "(", "self", ".", "_url", ",", "params", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_port", "=", "self", ".", "_proxy_port", ",", "proxy_url", "=", "self", ".", "_proxy_url", ")", "self", ".", "_json", "=", "json", ".", "dumps", "(", "json_dict", ")", "self", ".", "_json_dict", "=", "json_dict", "self", ".", "loadAttributes", "(", "json_dict", "=", "json_dict", ")" ]
39.333333
0.012422
def info(self, message, *args, **kwargs):
    """Record *message* at the INFO level (the default for print and save)."""
    level = logging.INFO
    self._log(level, message, *args, **kwargs)
[ "def", "info", "(", "self", ",", "message", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_log", "(", "logging", ".", "INFO", ",", "message", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
42.25
0.011628
def haversine_distance(origin, destination):
    """
    Calculate the Haversine distance.

    Parameters
    ----------
    origin : tuple of float
        (lat, long)
    destination : tuple of float
        (lat, long)

    Returns
    -------
    distance_in_km : float

    Raises
    ------
    ValueError
        If a latitude is outside [-90, +90] or a longitude is outside
        [-180, +180].

    Examples
    --------
    >>> munich = (48.1372, 11.5756)
    >>> berlin = (52.5186, 13.4083)
    >>> round(haversine_distance(munich, berlin), 1)
    504.2

    >>> new_york_city = (40.712777777778, -74.005833333333)  # NYC
    >>> round(haversine_distance(berlin, new_york_city), 1)
    6385.3
    """
    lat1, lon1 = origin
    lat2, lon2 = destination

    if not (-90.0 <= lat1 <= 90):
        raise ValueError('lat1={:2.2f}, but must be in [-90,+90]'.format(lat1))
    if not (-90.0 <= lat2 <= 90):
        raise ValueError('lat2={:2.2f}, but must be in [-90,+90]'.format(lat2))
    # BUGFIX: the longitude messages previously interpolated lat1 and the
    # second one was even labelled 'lon1'; report the offending value.
    if not (-180.0 <= lon1 <= 180):
        raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
                         .format(lon1))
    if not (-180.0 <= lon2 <= 180):
        raise ValueError('lon2={:2.2f}, but must be in [-180,+180]'
                         .format(lon2))
    radius = 6371  # mean Earth radius in km

    dlat = math_stl.radians(lat2 - lat1)
    dlon = math_stl.radians(lon2 - lon1)
    # Haversine formula: a is the square of half the chord length.
    a = (math_stl.sin(dlat / 2) * math_stl.sin(dlat / 2) +
         math_stl.cos(math_stl.radians(lat1)) *
         math_stl.cos(math_stl.radians(lat2)) *
         math_stl.sin(dlon / 2) * math_stl.sin(dlon / 2))
    c = 2 * math_stl.atan2(math_stl.sqrt(a), math_stl.sqrt(1 - a))
    d = radius * c
    return d
[ "def", "haversine_distance", "(", "origin", ",", "destination", ")", ":", "lat1", ",", "lon1", "=", "origin", "lat2", ",", "lon2", "=", "destination", "if", "not", "(", "-", "90.0", "<=", "lat1", "<=", "90", ")", ":", "raise", "ValueError", "(", "'lat1={:2.2f}, but must be in [-90,+90]'", ".", "format", "(", "lat1", ")", ")", "if", "not", "(", "-", "90.0", "<=", "lat2", "<=", "90", ")", ":", "raise", "ValueError", "(", "'lat2={:2.2f}, but must be in [-90,+90]'", ".", "format", "(", "lat2", ")", ")", "if", "not", "(", "-", "180.0", "<=", "lon1", "<=", "180", ")", ":", "raise", "ValueError", "(", "'lon1={:2.2f}, but must be in [-180,+180]'", ".", "format", "(", "lat1", ")", ")", "if", "not", "(", "-", "180.0", "<=", "lon2", "<=", "180", ")", ":", "raise", "ValueError", "(", "'lon1={:2.2f}, but must be in [-180,+180]'", ".", "format", "(", "lat1", ")", ")", "radius", "=", "6371", "# km", "dlat", "=", "math_stl", ".", "radians", "(", "lat2", "-", "lat1", ")", "dlon", "=", "math_stl", ".", "radians", "(", "lon2", "-", "lon1", ")", "a", "=", "(", "math_stl", ".", "sin", "(", "dlat", "/", "2", ")", "*", "math_stl", ".", "sin", "(", "dlat", "/", "2", ")", "+", "math_stl", ".", "cos", "(", "math_stl", ".", "radians", "(", "lat1", ")", ")", "*", "math_stl", ".", "cos", "(", "math_stl", ".", "radians", "(", "lat2", ")", ")", "*", "math_stl", ".", "sin", "(", "dlon", "/", "2", ")", "*", "math_stl", ".", "sin", "(", "dlon", "/", "2", ")", ")", "c", "=", "2", "*", "math_stl", ".", "atan2", "(", "math_stl", ".", "sqrt", "(", "a", ")", ",", "math_stl", ".", "sqrt", "(", "1", "-", "a", ")", ")", "d", "=", "radius", "*", "c", "return", "d" ]
30.46
0.000636