Dataset columns (name, type, length/value range):

    query      string    lengths 5 to 1.23k
    positive   string    lengths 53 to 15.2k
    id_        int64     values 0 to 252k
    task_name  string    lengths 87 to 242
    negative   sequence  lengths 20 to 553
Takes a list of dicts, each of which has an '@id' key, and sorts the list in place by that key.
def _inplace_sort_by_id(unsorted_list):
    if not isinstance(unsorted_list, list):
        return
    sorted_list = [(i.get('@id'), i) for i in unsorted_list]
    sorted_list.sort()
    del unsorted_list[:]
    unsorted_list.extend([i[1] for i in sorted_list])
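A minimal usage sketch for the in-place sort above; the sample '@id' values are hypothetical:

# Hypothetical data; _inplace_sort_by_id is defined above.
items = [{'@id': 'otu3'}, {'@id': 'otu1'}, {'@id': 'otu2'}]
_inplace_sort_by_id(items)
print([d['@id'] for d in items])  # ['otu1', 'otu2', 'otu3']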
500
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/__init__.py#L736-L746
[ "def", "_undedicate_device", "(", "self", ",", "userid", ",", "vaddr", ")", ":", "action", "=", "'undedicate'", "rd", "=", "(", "'changevm %(uid)s %(act)s %(va)s'", "%", "{", "'uid'", ":", "userid", ",", "'act'", ":", "action", ",", "'va'", ":", "vaddr", "}", ")", "action", "=", "\"undedicate device from userid '%s'\"", "%", "userid", "with", "zvmutils", ".", "log_and_reraise_smt_request_failed", "(", "action", ")", ":", "self", ".", "_request", "(", "rd", ")" ]
Modifies nexson and returns it in version 1.2.1 with any tree that does not match the ID removed.
def cull_nonmatching_trees(nexson, tree_id, curr_version=None):
    if curr_version is None:
        curr_version = detect_nexson_version(nexson)
    if not _is_by_id_hbf(curr_version):
        nexson = convert_nexson_format(nexson, BY_ID_HONEY_BADGERFISH)
    nexml_el = get_nexml_el(nexson)
    tree_groups = nexml_el['treesById']
    tree_groups_to_del = []
    for tgi, tree_group in tree_groups.items():
        tbi = tree_group['treeById']
        if tree_id in tbi:
            trees_to_del = [i for i in tbi.keys() if i != tree_id]
            for tid in trees_to_del:
                tree_group['^ot:treeElementOrder'].remove(tid)
                del tbi[tid]
        else:
            tree_groups_to_del.append(tgi)
    for tgid in tree_groups_to_del:
        nexml_el['^ot:treesElementOrder'].remove(tgid)
        del tree_groups[tgid]
    return nexson
501
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/__init__.py#L1156-L1184
[ "def", "start_stress", "(", "self", ",", "stress_cmd", ")", ":", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "dev_null", ":", "try", ":", "stress_proc", "=", "subprocess", ".", "Popen", "(", "stress_cmd", ",", "stdout", "=", "dev_null", ",", "stderr", "=", "dev_null", ")", "self", ".", "set_stress_process", "(", "psutil", ".", "Process", "(", "stress_proc", ".", "pid", ")", ")", "except", "OSError", ":", "logging", ".", "debug", "(", "\"Unable to start stress\"", ")" ]
Returns URL and param dict for a GET call to phylesystem_api
def phylesystem_api_url(self, base_url, study_id):
    p = self._phylesystem_api_params()
    e = self._phylesystem_api_ext()
    if self.content == 'study':
        return '{d}/study/{i}{e}'.format(d=base_url, i=study_id, e=e), p
    elif self.content == 'tree':
        if self.content_id is None:
            return '{d}/study/{i}/tree{e}'.format(d=base_url, i=study_id, e=e), p
        return '{d}/study/{i}/tree/{t}{e}'.format(d=base_url, i=study_id, t=self.content_id, e=e), p
    elif self.content == 'subtree':
        assert self.content_id is not None
        t, n = self.content_id
        p['subtree_id'] = n
        return '{d}/study/{i}/subtree/{t}{e}'.format(d=base_url, i=study_id, t=t, e=e), p
    elif self.content == 'meta':
        return '{d}/study/{i}/meta{e}'.format(d=base_url, i=study_id, e=e), p
    elif self.content == 'otus':
        if self.content_id is None:
            return '{d}/study/{i}/otus{e}'.format(d=base_url, i=study_id, e=e), p
        return '{d}/study/{i}/otus/{t}{e}'.format(d=base_url, i=study_id, t=self.content_id, e=e), p
    elif self.content == 'otu':
        if self.content_id is None:
            return '{d}/study/{i}/otu{e}'.format(d=base_url, i=study_id, e=e), p
        return '{d}/study/{i}/otu/{t}{e}'.format(d=base_url, i=study_id, t=self.content_id, e=e), p
    elif self.content == 'otumap':
        return '{d}/otumap/{i}{e}'.format(d=base_url, i=study_id, e=e), p
    else:
        assert False
502
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/__init__.py#L400-L429
[ "def", "cat_trials", "(", "x3d", ")", ":", "x3d", "=", "atleast_3d", "(", "x3d", ")", "t", "=", "x3d", ".", "shape", "[", "0", "]", "return", "np", ".", "concatenate", "(", "np", ".", "split", "(", "x3d", ",", "t", ",", "0", ")", ",", "axis", "=", "2", ")", ".", "squeeze", "(", "0", ")" ]
Determine whether the current contents are valid
def _is_valid(self, log: Optional[Logger] = None) -> bool:
    return self._validate(self, log)[0]
503
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/jsg_array.py#L33-L35
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Determine whether val is a valid instance of this array
def _validate(self, val: list, log: Optional[Logger] = None) -> Tuple[bool, List[str]]:
    errors = []
    if not isinstance(val, list):
        errors.append(f"{self._variable_name}: {repr(val)} is not an array")
    else:
        for i in range(0, len(val)):
            v = val[i]
            if not conforms(v, self._type, self._context.NAMESPACE):
                errors.append(f"{self._variable_name} element {i}: {v} is not a {self._type.__name__}")
        if len(val) < self._min:
            errors.append(f"{self._variable_name}: at least {self._min} value{'s' if self._min > 1 else ''} required - "
                          f"element has {len(val) if len(val) else 'none'}")
        if self._max is not None and len(val) > self._max:
            errors.append(f"{self._variable_name}: no more than {self._max} values permitted - element has {len(val)}")
    if log:
        for error in errors:
            log.log(error)
    return not bool(errors), errors
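The checks above, element-type conformance plus minimum and maximum cardinality, can be exercised standalone. This sketch substitutes isinstance for pyjsg's conforms and uses illustrative names throughout:

from typing import List, Optional, Tuple, Type

def validate_array(val, typ: Type, min_: int, max_: Optional[int]) -> Tuple[bool, List[str]]:
    errors: List[str] = []
    if not isinstance(val, list):
        errors.append(f"{val!r} is not an array")
    else:
        for i, v in enumerate(val):
            if not isinstance(v, typ):  # stand-in for pyjsg's conforms()
                errors.append(f"element {i}: {v!r} is not a {typ.__name__}")
        if len(val) < min_:
            errors.append(f"at least {min_} value(s) required - element has {len(val)}")
        if max_ is not None and len(val) > max_:
            errors.append(f"no more than {max_} values permitted - element has {len(val)}")
    return not errors, errors

print(validate_array([1, 2, "x"], int, 1, 2))
# (False, ["element 2: 'x' is not a int", 'no more than 2 values permitted - element has 3'])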
504
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/jsg_array.py#L37-L61
[ "def", "_connect", "(", "self", ")", ":", "try", ":", "# Open Connection", "self", ".", "influx", "=", "InfluxDBClient", "(", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "username", ",", "self", ".", "password", ",", "self", ".", "database", ",", "self", ".", "ssl", ")", "# Log", "self", ".", "log", ".", "debug", "(", "\"InfluxdbHandler: Established connection to \"", "\"%s:%d/%s.\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ")", "except", "Exception", "as", "ex", ":", "# Log Error", "self", ".", "_throttle_error", "(", "\"InfluxdbHandler: Failed to connect to \"", "\"%s:%d/%s. %s\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ",", "ex", ")", "# Close Socket", "self", ".", "_close", "(", ")", "return" ]
Iterates over NexsonTreeProxy objects in order determined by the nexson blob
def tree_iter_nexson_proxy(nexson_proxy):
    nexml_el = nexson_proxy._nexml_el
    tg_order = nexml_el['^ot:treesElementOrder']
    tgd = nexml_el['treesById']
    for tg_id in tg_order:
        tg = tgd[tg_id]
        tree_order = tg['^ot:treeElementOrder']
        tbid = tg['treeById']
        otus = tg['@otus']
        for k in tree_order:
            v = tbid[k]
            yield nexson_proxy._create_tree_proxy(tree_id=k, tree=v, otus=otus)
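The iteration order comes from the explicit ^ot:...ElementOrder lists rather than from dict ordering. A toy sketch of the same pattern, with entirely hypothetical data:

# Hypothetical nexml blob; only the keys used by the iteration pattern are present.
nexml = {
    '^ot:treesElementOrder': ['tg2', 'tg1'],
    'treesById': {
        'tg1': {'^ot:treeElementOrder': ['t2'], 'treeById': {'t2': {}}},
        'tg2': {'^ot:treeElementOrder': ['t1'], 'treeById': {'t1': {}}},
    },
}

def iter_tree_ids(nexml_el):
    for tg_id in nexml_el['^ot:treesElementOrder']:
        tg = nexml_el['treesById'][tg_id]
        for tree_id in tg['^ot:treeElementOrder']:
            yield tree_id

print(list(iter_tree_ids(nexml)))  # ['t1', 't2'] -- order driven by the lists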
505
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_proxy.py#L52-L64
[ "def", "getstatus", "(", ")", ":", "if", "exists", "(", "RUNFILE", ")", ":", "mtime", "=", "os", ".", "stat", "(", "RUNFILE", ")", ".", "st_mtime", "with", "open", "(", "SECTIONFILE", ")", "as", "f", ":", "section", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "# what section?", "return", "UpdateStatus", ".", "RUNNING", ",", "mtime", ",", "section", "if", "exists", "(", "PAUSEFILE", ")", ":", "return", "UpdateStatus", ".", "PAUSED", ",", "None", ",", "None", "mtime", "=", "None", "if", "exists", "(", "TIMEFILE", ")", ":", "mtime", "=", "os", ".", "stat", "(", "TIMEFILE", ")", ".", "st_mtime", "if", "exists", "(", "FAILFILE", ")", ":", "if", "not", "mtime", ":", "mtime", "=", "os", ".", "stat", "(", "FAILFILE", ")", ".", "st_mtime", "# TODO: return a different error code when the error was inability to", "# contact one or more remote servers", "with", "open", "(", "FAILFILE", ")", "as", "f", ":", "content", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "content", "==", "UpdateStatus", ".", "NOCONN", ":", "return", "UpdateStatus", ".", "NOCONN", ",", "mtime", ",", "None", "elif", "content", "==", "UpdateStatus", ".", "DIRTY", ":", "return", "UpdateStatus", ".", "DIRTY", ",", "mtime", ",", "None", "return", "UpdateStatus", ".", "FAILED", ",", "mtime", ",", "None", "if", "mtime", "is", "None", ":", "return", "UpdateStatus", ".", "NEVER", ",", "None", ",", "None", "return", "UpdateStatus", ".", "OK", ",", "mtime", ",", "None" ]
Get status from APC NIS and print output on stdout.
def main():
    # No need to use "proper" names on such simple code.
    # pylint: disable=invalid-name
    p = argparse.ArgumentParser()
    p.add_argument("--host", default="localhost")
    p.add_argument("--port", type=int, default=3551)
    p.add_argument("--strip-units", action="store_true", default=False)
    args = p.parse_args()
    status.print_status(status.get(args.host, args.port), strip_units=args.strip_units)
506
https://github.com/flyte/apcaccess/blob/0c8a5d5e4ba1c07110e411b4ffea4ddccef4829a/apcaccess/__main__.py#L12-L24
[ "def", "bucket", "(", "self", ",", "experiment", ",", "user_id", ",", "bucketing_id", ")", ":", "if", "not", "experiment", ":", "return", "None", "# Determine if experiment is in a mutually exclusive group", "if", "experiment", ".", "groupPolicy", "in", "GROUP_POLICIES", ":", "group", "=", "self", ".", "config", ".", "get_group", "(", "experiment", ".", "groupId", ")", "if", "not", "group", ":", "return", "None", "user_experiment_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "groupId", ",", "group", ".", "trafficAllocation", ")", "if", "not", "user_experiment_id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no experiment.'", "%", "user_id", ")", "return", "None", "if", "user_experiment_id", "!=", "experiment", ".", "id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is not in experiment \"%s\" of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "return", "None", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in experiment %s of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "# Bucket user if not in white-list and in group (if any)", "variation_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "id", ",", "experiment", ".", "trafficAllocation", ")", "if", "variation_id", ":", "variation", "=", "self", ".", "config", ".", "get_variation_from_id", "(", "experiment", ".", "key", ",", "variation_id", ")", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in variation \"%s\" of experiment %s.'", "%", "(", "user_id", ",", "variation", ".", "key", ",", "experiment", ".", "key", ")", ")", "return", "variation", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no variation.'", "%", "user_id", ")", "return", "None" ]
A basic WSGI app
def wsgi_app(self, environ, start_response):
    @_LOCAL_MANAGER.middleware
    def _wrapped_app(environ, start_response):
        request = Request(environ)
        setattr(_local, _CURRENT_REQUEST_KEY, request)
        response = self._dispatch_request(request)
        return response(environ, start_response)
    return _wrapped_app(environ, start_response)
507
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/server.py#L101-L109
[ "def", "_set_typeattr", "(", "typeattr", ",", "existing_ta", "=", "None", ")", ":", "if", "existing_ta", "is", "None", ":", "ta", "=", "TypeAttr", "(", "attr_id", "=", "typeattr", ".", "attr_id", ")", "else", ":", "ta", "=", "existing_ta", "ta", ".", "unit_id", "=", "typeattr", ".", "unit_id", "ta", ".", "type_id", "=", "typeattr", ".", "type_id", "ta", ".", "data_type", "=", "typeattr", ".", "data_type", "if", "hasattr", "(", "typeattr", ",", "'default_dataset_id'", ")", "and", "typeattr", ".", "default_dataset_id", "is", "not", "None", ":", "ta", ".", "default_dataset_id", "=", "typeattr", ".", "default_dataset_id", "ta", ".", "description", "=", "typeattr", ".", "description", "ta", ".", "properties", "=", "typeattr", ".", "get_properties", "(", ")", "ta", ".", "attr_is_var", "=", "typeattr", ".", "is_var", "if", "typeattr", ".", "is_var", "is", "not", "None", "else", "'N'", "ta", ".", "data_restriction", "=", "_parse_data_restriction", "(", "typeattr", ".", "data_restriction", ")", "if", "typeattr", ".", "dimension_id", "is", "None", ":", "# All right. Check passed", "pass", "else", ":", "if", "typeattr", ".", "attr_id", "is", "not", "None", "and", "typeattr", ".", "attr_id", ">", "0", ":", "# Getting the passed attribute, so we need to check consistency between attr dimension id and typeattr dimension id", "attr", "=", "ta", ".", "attr", "if", "attr", "is", "not", "None", "and", "attr", ".", "dimension_id", "is", "not", "None", "and", "attr", ".", "dimension_id", "!=", "typeattr", ".", "dimension_id", "or", "attr", "is", "not", "None", "and", "attr", ".", "dimension_id", "is", "not", "None", ":", "# In this case there is an inconsistency between attr.dimension_id and typeattr.dimension_id", "raise", "HydraError", "(", "\"Cannot set a dimension on type attribute which \"", "\"does not match its attribute. Create a new attribute if \"", "\"you want to use attribute %s with dimension_id %s\"", "%", "(", "attr", ".", "name", ",", "typeattr", ".", "dimension_id", ")", ")", "elif", "typeattr", ".", "attr_id", "is", "None", "and", "typeattr", ".", "name", "is", "not", "None", ":", "# Getting/creating the attribute by typeattr dimension id and typeattr name", "# In this case the dimension_id \"null\"/\"not null\" status is ininfluent", "attr", "=", "_get_attr_by_name_and_dimension", "(", "typeattr", ".", "name", ",", "typeattr", ".", "dimension_id", ")", "ta", ".", "attr_id", "=", "attr", ".", "id", "ta", ".", "attr", "=", "attr", "_check_dimension", "(", "ta", ")", "if", "existing_ta", "is", "None", ":", "log", ".", "debug", "(", "\"Adding ta to DB\"", ")", "db", ".", "DBSession", ".", "add", "(", "ta", ")", "return", "ta" ]
For debugging purposes you can run this as a standalone server.
def run(self, host, port, **options):
    self.registry.debug = True
    debugged = DebuggedJsonRpcApplication(self, evalex=True)
    run_simple(host, port, debugged, use_reloader=True, **options)
508
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/server.py#L114-L127
[ "def", "_match_to_morph_parents", "(", "self", ",", "type", ",", "results", ")", ":", "for", "result", "in", "results", ":", "if", "result", ".", "get_key", "(", ")", "in", "self", ".", "_dictionary", ".", "get", "(", "type", ",", "[", "]", ")", ":", "for", "model", "in", "self", ".", "_dictionary", "[", "type", "]", "[", "result", ".", "get_key", "(", ")", "]", ":", "model", ".", "set_relation", "(", "self", ".", "_relation", ",", "Result", "(", "result", ",", "self", ",", "model", ",", "related", "=", "result", ")", ")" ]
Runs each function from self.before_first_request_funcs once and only once.
def _try_trigger_before_first_request_funcs(self):  # pylint: disable=C0103
    if self._after_first_request_handled:
        return
    else:
        with self._before_first_request_lock:
            if self._after_first_request_handled:
                return
            for func in self._before_first_request_funcs:
                func()
            self._after_first_request_handled = True
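The method uses double-checked locking: an unlocked fast path, then a re-check under the lock so the functions run exactly once even under concurrent requests. A standalone sketch of the same pattern, with illustrative names:

import threading

class OneTimeRunner:
    def __init__(self, funcs):
        self._funcs = funcs
        self._done = False
        self._lock = threading.Lock()

    def run_once(self):
        if self._done:        # fast path, no lock taken
            return
        with self._lock:      # serialize the first callers
            if self._done:    # re-check under the lock
                return
            for func in self._funcs:
                func()
            self._done = True

runner = OneTimeRunner([lambda: print("init")])
runner.run_once()
runner.run_once()  # prints "init" only once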
509
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/server.py#L129-L139
[ "def", "get_exif_data", "(", "self", ",", "image", ")", ":", "exif_data", "=", "{", "}", "info", "=", "image", ".", "_getexif", "(", ")", "if", "info", ":", "for", "tag", ",", "value", "in", "info", ".", "items", "(", ")", ":", "decoded", "=", "TAGS", ".", "get", "(", "tag", ",", "tag", ")", "if", "decoded", "==", "\"GPSInfo\"", ":", "gps_data", "=", "{", "}", "for", "t", "in", "value", ":", "sub_decoded", "=", "GPSTAGS", ".", "get", "(", "t", ",", "t", ")", "gps_data", "[", "sub_decoded", "]", "=", "value", "[", "t", "]", "exif_data", "[", "decoded", "]", "=", "gps_data", "else", ":", "exif_data", "[", "decoded", "]", "=", "value", "return", "exif_data" ]
Run the application and preserve the traceback frames.
def debug_application(self, environ, start_response):
    adapter = self._debug_map.bind_to_environ(environ)
    if adapter.test():
        _, args = adapter.match()
        return self.handle_debug(environ, start_response, args["traceback_id"])
    else:
        return super(DebuggedJsonRpcApplication, self).debug_application(environ, start_response)
510
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/server.py#L177-L194
[ "def", "is_registration_possible", "(", "self", ",", "user_info", ")", ":", "return", "self", ".", "get_accessibility", "(", ")", ".", "is_open", "(", ")", "and", "self", ".", "_registration", ".", "is_open", "(", ")", "and", "self", ".", "is_user_accepted_by_access_control", "(", "user_info", ")" ]
Handles the debug endpoint for inspecting previous errors.
def handle_debug(self, environ, start_response, traceback_id):
    if traceback_id not in self.app.registry.tracebacks:
        abort(404)
    self._copy_over_traceback(traceback_id)
    traceback = self.tracebacks[traceback_id]
    rendered = traceback.render_full(evalex=self.evalex, secret=self.secret)
    response = Response(rendered.encode('utf-8', 'replace'),
                        headers=[('Content-Type', 'text/html; charset=utf-8'),
                                 ('X-XSS-Protection', '0')])
    return response(environ, start_response)
511
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/server.py#L196-L216
[ "def", "create_gtk_grid", "(", "self", ",", "row_spacing", "=", "6", ",", "col_spacing", "=", "6", ",", "row_homogenous", "=", "False", ",", "col_homogenous", "=", "True", ")", ":", "grid_lang", "=", "Gtk", ".", "Grid", "(", ")", "grid_lang", ".", "set_column_spacing", "(", "row_spacing", ")", "grid_lang", ".", "set_row_spacing", "(", "col_spacing", ")", "grid_lang", ".", "set_border_width", "(", "12", ")", "grid_lang", ".", "set_row_homogeneous", "(", "row_homogenous", ")", "grid_lang", ".", "set_column_homogeneous", "(", "col_homogenous", ")", "return", "grid_lang" ]
Register the signals.
def register_signals(self, app):
    before_record_index.connect(inject_provisional_community)
    if app.config['COMMUNITIES_OAI_ENABLED']:
        listen(Community, 'after_insert', create_oaipmh_set)
        listen(Community, 'after_delete', destroy_oaipmh_set)
    inclusion_request_created.connect(new_request)
512
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/ext.py#L59-L65
[ "def", "get_access_token", "(", "self", ")", ":", "if", "(", "self", ".", "token", "is", "None", ")", "or", "(", "datetime", ".", "utcnow", "(", ")", ">", "self", ".", "reuse_token_until", ")", ":", "headers", "=", "{", "'Ocp-Apim-Subscription-Key'", ":", "self", ".", "client_secret", "}", "response", "=", "requests", ".", "post", "(", "self", ".", "base_url", ",", "headers", "=", "headers", ")", "response", ".", "raise_for_status", "(", ")", "self", ".", "token", "=", "response", ".", "content", "self", ".", "reuse_token_until", "=", "datetime", ".", "utcnow", "(", ")", "+", "timedelta", "(", "minutes", "=", "5", ")", "return", "self", ".", "token", ".", "decode", "(", "'utf-8'", ")" ]
Create a command line parser
def genargs() -> ArgumentParser:
    parser = ArgumentParser()
    parser.add_argument("spec", help="JSG specification - can be file name, URI or string")
    parser.add_argument("-o", "--outfile", help="Output python file - if omitted, python is not saved")
    parser.add_argument("-p", "--print", help="Print python file to stdout")
    parser.add_argument("-id", "--inputdir", help="Input directory with JSON files")
    parser.add_argument("-i", "--json", help="URL, file name or json text", nargs='*')
    return parser
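A hypothetical invocation of the parser above, assuming genargs and its ArgumentParser import are in scope; the file names are illustrative:

parser = genargs()
args = parser.parse_args(["spec.jsg", "-o", "spec.py"])  # illustrative arguments
print(args.spec, args.outfile)  # spec.jsg spec.py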
513
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/validate_json.py#L88-L100
[ "def", "changes", "(", "request", ",", "slug", ",", "template_name", "=", "'wakawaka/changes.html'", ",", "extra_context", "=", "None", ")", ":", "rev_a_id", "=", "request", ".", "GET", ".", "get", "(", "'a'", ",", "None", ")", "rev_b_id", "=", "request", ".", "GET", ".", "get", "(", "'b'", ",", "None", ")", "# Some stinky fingers manipulated the url", "if", "not", "rev_a_id", "or", "not", "rev_b_id", ":", "return", "HttpResponseBadRequest", "(", "'Bad Request'", ")", "try", ":", "revision_queryset", "=", "Revision", ".", "objects", ".", "all", "(", ")", "wikipage_queryset", "=", "WikiPage", ".", "objects", ".", "all", "(", ")", "rev_a", "=", "revision_queryset", ".", "get", "(", "pk", "=", "rev_a_id", ")", "rev_b", "=", "revision_queryset", ".", "get", "(", "pk", "=", "rev_b_id", ")", "page", "=", "wikipage_queryset", ".", "get", "(", "slug", "=", "slug", ")", "except", "ObjectDoesNotExist", ":", "raise", "Http404", "if", "rev_a", ".", "content", "!=", "rev_b", ".", "content", ":", "d", "=", "difflib", ".", "unified_diff", "(", "rev_b", ".", "content", ".", "splitlines", "(", ")", ",", "rev_a", ".", "content", ".", "splitlines", "(", ")", ",", "'Original'", ",", "'Current'", ",", "lineterm", "=", "''", ",", ")", "difftext", "=", "'\\n'", ".", "join", "(", "d", ")", "else", ":", "difftext", "=", "_", "(", "u'No changes were made between this two files.'", ")", "template_context", "=", "{", "'page'", ":", "page", ",", "'diff'", ":", "difftext", ",", "'rev_a'", ":", "rev_a", ",", "'rev_b'", ":", "rev_b", ",", "}", "template_context", ".", "update", "(", "extra_context", "or", "{", "}", ")", "return", "render", "(", "request", ",", "template_name", ",", "template_context", ")" ]
Convert a URL or file name to a string.
def _to_string(inp: str) -> str:
    if '://' in inp:
        req = requests.get(inp)
        if not req.ok:
            raise ValueError(f"Unable to read {inp}")
        return req.text
    else:
        with open(inp) as infile:
            return infile.read()
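A sketch of the local-file branch, assuming _to_string and its requests import are in scope; the file content is arbitrary:

import os
import tempfile

# Write a throwaway file to demonstrate the non-URL branch.
with tempfile.NamedTemporaryFile("w", suffix=".jsg", delete=False) as f:
    f.write("doc { a: @string }")
    path = f.name
print(_to_string(path))  # echoes the file content
os.unlink(path)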
514
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/validate_json.py#L57-L66
[ "def", "group_experiments_greedy", "(", "tomo_expt", ":", "TomographyExperiment", ")", ":", "diag_sets", "=", "_max_tpb_overlap", "(", "tomo_expt", ")", "grouped_expt_settings_list", "=", "list", "(", "diag_sets", ".", "values", "(", ")", ")", "grouped_tomo_expt", "=", "TomographyExperiment", "(", "grouped_expt_settings_list", ",", "program", "=", "tomo_expt", ".", "program", ")", "return", "grouped_tomo_expt" ]
Determine whether json conforms with the JSG specification
def conforms(self, json: str, name: str = "", verbose: bool = False) -> ValidationResult:
    json = self._to_string(json) if not self.is_json(json) else json
    try:
        self.json_obj = loads(json, self.module)
    except ValueError as v:
        return ValidationResult(False, str(v), name, None)
    logfile = StringIO()
    logger = Logger(cast(TextIO, logfile))  # cast because of bug in ide
    if not is_valid(self.json_obj, logger):
        return ValidationResult(False, logfile.getvalue().strip('\n'), name, None)
    return ValidationResult(True, "", name, type(self.json_obj).__name__)
515
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/validate_json.py#L68-L85
[ "def", "clear_data", "(", "self", ",", "request", ")", ":", "for", "key", "in", "request", ".", "session", ".", "keys", "(", ")", ":", "if", "key", ".", "startswith", "(", "constants", ".", "SESSION_KEY", ")", ":", "del", "request", ".", "session", "[", "key", "]" ]
Create revoc registry if need be for input revocation registry identifier; open and cache tails file reader.
async def _sync_revoc(self, rr_id: str, rr_size: int = None) -> None:
    LOGGER.debug('Issuer._sync_revoc >>> rr_id: %s, rr_size: %s', rr_id, rr_size)
    (cd_id, tag) = rev_reg_id2cred_def_id__tag(rr_id)
    try:
        await self.get_cred_def(cd_id)
    except AbsentCredDef:
        LOGGER.debug(
            'Issuer._sync_revoc: <!< tails tree %s may be for another ledger; no cred def found on %s',
            self._dir_tails,
            cd_id)
        raise AbsentCredDef('Tails tree {} may be for another ledger; no cred def found on {}'.format(
            self._dir_tails,
            cd_id))
    with REVO_CACHE.lock:
        revo_cache_entry = REVO_CACHE.get(rr_id, None)
        tails = None if revo_cache_entry is None else revo_cache_entry.tails
        if tails is None:  # it's a new revocation registry, or not yet set in cache
            try:
                tails = await Tails(self._dir_tails, cd_id, tag).open()
            except AbsentTails:
                await self._create_rev_reg(rr_id, rr_size)  # it's a new revocation registry
                tails = await Tails(self._dir_tails, cd_id, tag).open()  # symlink should exist now
            if revo_cache_entry is None:
                REVO_CACHE[rr_id] = RevoCacheEntry(None, tails)
            else:
                REVO_CACHE[rr_id].tails = tails
    LOGGER.debug('Issuer._sync_revoc <<<')
516
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/issuer.py#L139-L178
[ "def", "saveSettings", "(", "self", ")", ":", "try", ":", "self", ".", "saveProfile", "(", ")", "except", "Exception", "as", "ex", ":", "# Continue, even if saving the settings fails.", "logger", ".", "warn", "(", "ex", ")", "if", "DEBUGGING", ":", "raise", "finally", ":", "self", ".", "_settingsSaved", "=", "True" ]
Format a value for display as an XML text node.
def quote_xml(text):
    text = _coerce_unicode(text)
    # If it's a CDATA block, return the text as is.
    if text.startswith(CDATA_START):
        return text
    # If it's not a CDATA block, escape the XML and return the character
    # encoded string.
    return saxutils.escape(text)
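A quick standalone illustration of the escaping behavior, assuming CDATA_START is the conventional '<![CDATA[' prefix (an assumption; mixbox's actual constant is not shown here):

from xml.sax import saxutils

CDATA_START = "<![CDATA["  # assumed value, for illustration only

def quote_xml_demo(text):
    if text.startswith(CDATA_START):
        return text  # CDATA passes through untouched
    return saxutils.escape(text)

print(quote_xml_demo("a < b & c"))          # a &lt; b &amp; c
print(quote_xml_demo("<![CDATA[a < b]]>"))  # returned unchanged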
517
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/binding_utils.py#L299-L313
[ "def", "flush_region", "(", "self", ",", "region", ",", "mips", "=", "None", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "path", ")", ":", "return", "if", "type", "(", "region", ")", "in", "(", "list", ",", "tuple", ")", ":", "region", "=", "generate_slices", "(", "region", ",", "self", ".", "vol", ".", "bounds", ".", "minpt", ",", "self", ".", "vol", ".", "bounds", ".", "maxpt", ",", "bounded", "=", "False", ")", "region", "=", "Bbox", ".", "from_slices", "(", "region", ")", "mips", "=", "self", ".", "vol", ".", "mip", "if", "mips", "==", "None", "else", "mips", "if", "type", "(", "mips", ")", "==", "int", ":", "mips", "=", "(", "mips", ",", ")", "for", "mip", "in", "mips", ":", "mip_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "self", ".", "vol", ".", "mip_key", "(", "mip", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "mip_path", ")", ":", "continue", "region_mip", "=", "self", ".", "vol", ".", "slices_from_global_coords", "(", "region", ")", "region_mip", "=", "Bbox", ".", "from_slices", "(", "region_mip", ")", "for", "filename", "in", "os", ".", "listdir", "(", "mip_path", ")", ":", "bbox", "=", "Bbox", ".", "from_filename", "(", "filename", ")", "if", "not", "Bbox", ".", "intersects", "(", "region", ",", "bbox", ")", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "mip_path", ",", "filename", ")", ")" ]
Initialize this instance from a namespace URI and optional prefix and schema location URI.
def __construct_from_components(self, ns_uri, prefix=None, schema_location=None):
    assert ns_uri  # other fields are optional
    self.uri = ns_uri
    self.schema_location = schema_location or None
    self.prefixes = OrderedSet()
    if prefix:
        self.prefixes.add(prefix)
    self.preferred_prefix = prefix or None
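Note the `x or None` idiom used twice above: it normalizes empty strings (and other falsy values) to None so later truth tests behave uniformly:

print("" or None)    # None -- empty string normalized away
print("xs" or None)  # 'xs'
print(None or None)  # None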
518
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L145-L157
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Get the namespace the given prefix maps to.
def namespace_for_prefix(self, prefix):
    try:
        ni = self.__lookup_prefix(prefix)
    except PrefixNotFoundError:
        return None
    else:
        return ni.uri
519
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L271-L286
[ "def", "retry_on_bad_auth", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "retry_version", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "while", "True", ":", "try", ":", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "trolly", ".", "ResourceUnavailable", ":", "sys", ".", "stderr", ".", "write", "(", "'bad request (refresh board id)\\n'", ")", "self", ".", "_board_id", "=", "None", "self", ".", "save_key", "(", "'board_id'", ",", "None", ")", "except", "trolly", ".", "Unauthorised", ":", "sys", ".", "stderr", ".", "write", "(", "'bad permissions (refresh token)\\n'", ")", "self", ".", "_client", "=", "None", "self", ".", "_token", "=", "None", "self", ".", "save_key", "(", "'token'", ",", "None", ")", "return", "retry_version" ]
Sets the preferred prefix for ns_uri. If add_if_not_exist is True, the prefix is added if it's not already registered; otherwise, setting an unknown prefix as preferred is an error. The default is False. Setting to None always works and indicates a preference to use the namespace as a default. The given namespace must already be in this set.
def set_preferred_prefix_for_namespace(self, ns_uri, prefix, add_if_not_exist=False):
    ni = self.__lookup_uri(ns_uri)
    if not prefix:
        ni.preferred_prefix = None
    elif prefix in ni.prefixes:
        ni.preferred_prefix = prefix
    elif add_if_not_exist:
        self.add_prefix(ns_uri, prefix, set_as_preferred=True)
    else:
        raise PrefixNotFoundError(prefix)
520
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L294-L322
[ "def", "FlushCache", "(", "self", ")", ":", "self", ".", "_cache", "=", "b''", "self", ".", "_cache_start_offset", "=", "None", "self", ".", "_cache_end_offset", "=", "None", "self", ".", "_ResetDecompressorState", "(", ")" ]
Merge incoming_schemaloc into the given _NamespaceInfo ni. If we don't have one yet and the incoming value is non-None, update ours with theirs. This modifies ni.
def __merge_schema_locations(self, ni, incoming_schemaloc):
    if ni.schema_location == incoming_schemaloc:  # TODO (bworrell): empty strings?
        return
    elif not ni.schema_location:
        ni.schema_location = incoming_schemaloc or None
    elif not incoming_schemaloc:
        return
    else:
        raise ConflictingSchemaLocationError(ni.uri, ni.schema_location, incoming_schemaloc)
521
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L324-L336
[ "def", "desc", "(", "self", ",", "description", ")", ":", "return", "self", "|", "Parser", "(", "lambda", "_", ",", "index", ":", "Value", ".", "failure", "(", "index", ",", "description", ")", ")" ]
Adds a new namespace to this set, optionally with a prefix and schema location URI.
def add_namespace_uri(self, ns_uri, prefix=None, schema_location=None):
    assert ns_uri
    if ns_uri in self.__ns_uri_map:
        # We have a _NamespaceInfo object for this URI already. So this
        # is a merge operation.
        #
        # We modify a copy of the real _NamespaceInfo so that we are
        # exception-safe: if something goes wrong, we don't end up with a
        # half-changed NamespaceSet.
        ni = self.__lookup_uri(ns_uri)
        new_ni = copy.deepcopy(ni)

        # Reconcile prefixes
        if prefix:
            self.__check_prefix_conflict(ni, prefix)
            new_ni.prefixes.add(prefix)

        self.__merge_schema_locations(new_ni, schema_location)

        # At this point, we have a legit new_ni object. Now we update
        # the set, ensuring our invariants. This should replace
        # all instances of the old ni in this set.
        for p in new_ni.prefixes:
            self.__prefix_map[p] = new_ni
        self.__ns_uri_map[new_ni.uri] = new_ni
    else:
        # A brand new namespace. The incoming prefix should not exist at
        # all in the prefix map.
        if prefix:
            self.__check_prefix_conflict(ns_uri, prefix)
        ni = _NamespaceInfo(ns_uri, prefix, schema_location)
        self.__add_namespaceinfo(ni)
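The merge branch works on a deepcopy so that a failed conflict check cannot leave the set half-modified. The same exception-safety pattern in miniature, with illustrative names:

import copy

def safe_merge(record, updates, check):
    """Validate against a copy so a failure leaves `record` untouched."""
    candidate = copy.deepcopy(record)
    candidate.update(updates)
    check(candidate)          # may raise; original stays intact
    record.clear()
    record.update(candidate)  # commit only after the check passed

data = {"a": 1}
safe_merge(data, {"b": 2}, lambda d: None)
print(data)  # {'a': 1, 'b': 2}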
522
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L344-L405
[ "def", "exit", "(", "self", ",", "code", "=", "None", ",", "msg", "=", "None", ")", ":", "if", "code", "is", "None", ":", "code", "=", "self", ".", "tcex", ".", "exit_code", "if", "code", "==", "3", ":", "self", ".", "tcex", ".", "log", ".", "info", "(", "u'Changing exit code from 3 to 0.'", ")", "code", "=", "0", "# playbooks doesn't support partial failure", "elif", "code", "not", "in", "[", "0", ",", "1", "]", ":", "code", "=", "1", "self", ".", "tcex", ".", "exit", "(", "code", ",", "msg", ")" ]
Removes the indicated namespace from this set.
def remove_namespace(self, ns_uri):
    if not self.contains_namespace(ns_uri):
        return
    ni = self.__ns_uri_map.pop(ns_uri)
    for prefix in ni.prefixes:
        del self.__prefix_map[prefix]
523
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L407-L414
[ "def", "_CreateIndexIfNotExists", "(", "self", ",", "index_name", ",", "mappings", ")", ":", "try", ":", "if", "not", "self", ".", "_client", ".", "indices", ".", "exists", "(", "index_name", ")", ":", "self", ".", "_client", ".", "indices", ".", "create", "(", "body", "=", "{", "'mappings'", ":", "mappings", "}", ",", "index", "=", "index_name", ")", "except", "elasticsearch", ".", "exceptions", ".", "ConnectionError", "as", "exception", ":", "raise", "RuntimeError", "(", "'Unable to create Elasticsearch index with error: {0!s}'", ".", "format", "(", "exception", ")", ")" ]
Adds prefix for the given namespace URI. The namespace must already exist in this set. If set_as_preferred is True, also set this prefix as the namespace's preferred one.
def add_prefix(self, ns_uri, prefix, set_as_preferred=False):
    assert prefix
    ni = self.__lookup_uri(ns_uri)
    self.__check_prefix_conflict(ni, prefix)
    ni.prefixes.add(prefix)
    self.__prefix_map[prefix] = ni
    if set_as_preferred:
        ni.preferred_prefix = prefix
524
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L416-L441
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Gets an iterator over the prefixes for the given namespace.
def prefix_iter(self, ns_uri):
    ni = self.__lookup_uri(ns_uri)
    return iter(ni.prefixes)
525
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L448-L451
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Removes prefix from this set. This is a no-op if the prefix doesn't exist in it.
def remove_prefix(self, prefix):
    if prefix not in self.__prefix_map:
        return
    ni = self.__lookup_prefix(prefix)
    ni.prefixes.discard(prefix)
    del self.__prefix_map[prefix]
    # If we removed the preferred prefix, find a new one.
    if ni.preferred_prefix == prefix:
        ni.preferred_prefix = next(iter(ni.prefixes), None)
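The next(iter(s), None) idiom in the last line picks an arbitrary remaining prefix, or falls back to None when the set is empty:

prefixes = {"xs", "xsd"}
prefixes.discard("xs")
print(next(iter(prefixes), None))  # 'xsd'
prefixes.clear()
print(next(iter(prefixes), None))  # None -- the default kicks in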
526
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L453-L466
[ "def", "_ReadMemberFooter", "(", "self", ",", "file_object", ")", ":", "file_offset", "=", "file_object", ".", "get_offset", "(", ")", "member_footer", "=", "self", ".", "_ReadStructure", "(", "file_object", ",", "file_offset", ",", "self", ".", "_MEMBER_FOOTER_SIZE", ",", "self", ".", "_MEMBER_FOOTER", ",", "'member footer'", ")", "self", ".", "uncompressed_data_size", "=", "member_footer", ".", "uncompressed_data_size" ]
Sets the schema location of the given namespace.
def set_schema_location(self, ns_uri, schema_location, replace=False):
    ni = self.__lookup_uri(ns_uri)
    if ni.schema_location == schema_location:
        return
    elif replace or ni.schema_location is None:
        ni.schema_location = schema_location
    elif schema_location is None:
        ni.schema_location = None  # Not considered "replacement".
    else:
        raise ConflictingSchemaLocationError(ns_uri, ni.schema_location, schema_location)
527
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L483-L512
[ "def", "_bubbleP", "(", "cls", ",", "T", ")", ":", "c", "=", "cls", ".", "_blend", "[", "\"bubble\"", "]", "Tj", "=", "cls", ".", "_blend", "[", "\"Tj\"", "]", "Pj", "=", "cls", ".", "_blend", "[", "\"Pj\"", "]", "Tita", "=", "1", "-", "T", "/", "Tj", "suma", "=", "0", "for", "i", ",", "n", "in", "zip", "(", "c", "[", "\"i\"", "]", ",", "c", "[", "\"n\"", "]", ")", ":", "suma", "+=", "n", "*", "Tita", "**", "(", "i", "/", "2.", ")", "P", "=", "Pj", "*", "exp", "(", "Tj", "/", "T", "*", "suma", ")", "return", "P" ]
Constructs and returns a schemalocation attribute. If no namespaces in this set have any schema locations defined, returns an empty string.
def get_schemaloc_string(self, ns_uris=None, sort=False, delim="\n"):
    if not ns_uris:
        ns_uris = six.iterkeys(self.__ns_uri_map)
    if sort:
        ns_uris = sorted(ns_uris)
    schemalocs = []
    for ns_uri in ns_uris:
        ni = self.__lookup_uri(ns_uri)
        if ni.schema_location:
            schemalocs.append("{0.uri} {0.schema_location}".format(ni))
    if not schemalocs:
        return ""
    return 'xsi:schemaLocation="{0}"'.format(delim.join(schemalocs))
528
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L593-L627
[ "def", "_bubbleP", "(", "cls", ",", "T", ")", ":", "c", "=", "cls", ".", "_blend", "[", "\"bubble\"", "]", "Tj", "=", "cls", ".", "_blend", "[", "\"Tj\"", "]", "Pj", "=", "cls", ".", "_blend", "[", "\"Pj\"", "]", "Tita", "=", "1", "-", "T", "/", "Tj", "suma", "=", "0", "for", "i", ",", "n", "in", "zip", "(", "c", "[", "\"i\"", "]", ",", "c", "[", "\"n\"", "]", ")", ":", "suma", "+=", "n", "*", "Tita", "**", "(", "i", "/", "2.", ")", "P", "=", "Pj", "*", "exp", "(", "Tj", "/", "T", "*", "suma", ")", "return", "P" ]
Constructs and returns a map from namespace URI to prefix, representing all namespaces in this set. The prefix chosen for each namespace is its preferred prefix if it's not None. If the preferred prefix is None, one is chosen from the set of registered prefixes. In the latter situation, if no prefixes are registered, an exception is raised.
def get_uri_prefix_map(self):
    mapping = {}
    for ni in six.itervalues(self.__ns_uri_map):
        if ni.preferred_prefix:
            mapping[ni.uri] = ni.preferred_prefix
        elif len(ni.prefixes) > 0:
            mapping[ni.uri] = next(iter(ni.prefixes))
        else:
            # The reason I don't let any namespace map to None here is that
            # I don't think generateDS supports it. It requires prefixes
            # for all namespaces.
            raise NoPrefixesError(ni.uri)
    return mapping
529
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L629-L650
[ "def", "set_user", "(", "self", ",", "user", ")", ":", "super", "(", "Segment", ",", "self", ")", ".", "_check_ended", "(", ")", "self", ".", "user", "=", "user" ]
Constructs and returns a map from namespace URI to schema location URI. Namespaces without schema locations are excluded.
def get_uri_schemaloc_map(self):
    mapping = {}
    for ni in six.itervalues(self.__ns_uri_map):
        if ni.schema_location:
            mapping[ni.uri] = ni.schema_location
    return mapping
530
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L673-L682
[ "def", "user_agent", "(", "self", ",", "text", ",", "*", "*", "kwargs", ")", ":", "indicator_obj", "=", "UserAgent", "(", "text", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_indicator", "(", "indicator_obj", ")" ]
Return a subset of this NamespaceSet containing only data for the given namespaces.
def subset(self, ns_uris):
    sub_ns = NamespaceSet()
    for ns_uri in ns_uris:
        ni = self.__lookup_uri(ns_uri)
        new_ni = copy.deepcopy(ni)
        # We should be able to reach into details of our own
        # implementation on another obj, right?? This makes the subset
        # operation faster. We can set up the innards directly from a
        # cloned _NamespaceInfo.
        sub_ns._NamespaceSet__add_namespaceinfo(new_ni)
    return sub_ns
531
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L690-L717
[ "def", "_retry_failed_log", "(", "failed_trigger_log", ")", ":", "model", "=", "type", "(", "failed_trigger_log", ")", "try", ":", "failed_trigger_log", "=", "(", "model", ".", "objects", ".", "select_for_update", "(", ")", ".", "get", "(", "id", "=", "failed_trigger_log", ".", "id", ",", "state", "=", "TRIGGER_LOG_STATE", "[", "'FAILED'", "]", ",", ")", ")", "except", "model", ".", "DoesNotExist", ":", "return", "False", "failed_trigger_log", ".", "redo", "(", ")", "return", "True" ]
Imports namespaces into this set from other_ns.
def import_from(self, other_ns, replace=False):
    for other_ns_uri in other_ns.namespace_uris:
        ni = self.__ns_uri_map.get(other_ns_uri)
        if ni is None:
            other_ni = other_ns._NamespaceSet__ns_uri_map[other_ns_uri]
            # Gotta make sure that the other set isn't mapping its prefixes
            # incompatibly with respect to this set.
            for other_prefix in other_ni.prefixes:
                self.__check_prefix_conflict(other_ns_uri, other_prefix)
            cloned_ni = copy.deepcopy(other_ni)
            self.__add_namespaceinfo(cloned_ni)
        elif replace:
            other_ni = other_ns._NamespaceSet__ns_uri_map[other_ns_uri]
            for other_prefix in other_ni.prefixes:
                self.__check_prefix_conflict(ni, other_prefix)
            cloned_ni = copy.deepcopy(other_ni)
            self.remove_namespace(other_ns_uri)
            self.__add_namespaceinfo(cloned_ni)
        else:
            continue
532
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/namespaces.py#L719-L756
[ "def", "long_path_formatter", "(", "line", ",", "max_width", "=", "pd", ".", "get_option", "(", "'max_colwidth'", ")", ")", ":", "if", "len", "(", "line", ")", ">", "max_width", ":", "tokens", "=", "line", ".", "split", "(", "\".\"", ")", "trial1", "=", "\"%s...%s\"", "%", "(", "tokens", "[", "0", "]", ",", "tokens", "[", "-", "1", "]", ")", "if", "len", "(", "trial1", ")", ">", "max_width", ":", "return", "\"...%s\"", "%", "(", "tokens", "[", "-", "1", "]", "[", "-", "1", ":", "-", "(", "max_width", "-", "3", ")", "]", ")", "else", ":", "return", "trial1", "else", ":", "return", "line" ]
Return the version of the root element passed in.
def _get_version(self, root):
    # Note: STIX and MAEC use a "version" attribute. To support CybOX, a
    # subclass will need to combine "cybox_major_version",
    # "cybox_minor_version", and "cybox_update_version".
    version = self.get_version(root)
    if version:
        return StrictVersion(version)
    raise UnknownVersionError(
        "Unable to determine the version of the input document. No "
        "version information found on the root element.")
533
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/parser.py#L55-L77
[ "def", "activation_key_expired", "(", "self", ")", ":", "expiration_days", "=", "datetime", ".", "timedelta", "(", "days", "=", "defaults", ".", "ACCOUNTS_ACTIVATION_DAYS", ")", "expiration_date", "=", "self", ".", "date_joined", "+", "expiration_days", "if", "self", ".", "activation_key", "==", "defaults", ".", "ACCOUNTS_ACTIVATED", ":", "return", "True", "if", "get_datetime_now", "(", ")", ">=", "expiration_date", ":", "return", "True", "return", "False" ]
Ensure the root element is a supported version.
def _check_version(self, root):
    version = self._get_version(root)
    supported = [StrictVersion(x) for x in self.supported_versions(root.tag)]
    if version in supported:
        return
    error = "Document version ({0}) not in supported versions ({1})"
    raise UnsupportedVersionError(
        message=error.format(version, supported),
        expected=supported,
        found=version)
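StrictVersion comparisons drive both version checks above; a standalone illustration (note that distutils is deprecated and removed in Python 3.12+, so treat this as a sketch for older interpreters):

from distutils.version import StrictVersion

supported = [StrictVersion(v) for v in ("1.1.1", "1.2")]
print(StrictVersion("1.2") in supported)  # True -- membership uses version equality
print(StrictVersion("1.0") in supported)  # False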
534
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/parser.py#L79-L100
[ "def", "next_flightmode_colour", "(", "self", ")", ":", "if", "self", ".", "flightmode_colour_index", ">", "len", "(", "flightmode_colours", ")", ":", "print", "(", "\"Out of colours; reusing\"", ")", "self", ".", "flightmode_colour_index", "=", "0", "ret", "=", "flightmode_colours", "[", "self", ".", "flightmode_colour_index", "]", "self", ".", "flightmode_colour_index", "+=", "1", "return", "ret" ]
Check that the XML element tree has a supported root element.
def _check_root_tag(self, root):
    supported = self.supported_tags()
    if root.tag in supported:
        return
    error = "Document root element ({0}) not one of ({1})"
    raise UnsupportedRootElementError(
        message=error.format(root.tag, supported),
        expected=supported,
        found=root.tag,
    )
535
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/parser.py#L102-L120
[ "def", "set_rainbow", "(", "self", ",", "duration", ")", ":", "for", "i", "in", "range", "(", "0", ",", "359", ")", ":", "self", ".", "set_color_hsv", "(", "i", ",", "100", ",", "100", ")", "time", ".", "sleep", "(", "duration", "/", "359", ")" ]
Creates a STIX binding object from the supplied xml file.
def parse_xml_to_obj(self, xml_file, check_version=True, check_root=True, encoding=None):
    root = get_etree_root(xml_file, encoding=encoding)
    if check_root:
        self._check_root_tag(root)
    if check_version:
        self._check_version(root)
    entity_class = self.get_entity_class(root.tag)
    entity_obj = entity_class._binding_class.factory()
    entity_obj.build(root)
    return entity_obj
536
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/parser.py#L122-L154
[ "def", "get_creation_date_tags", "(", "url", ",", "domain", ",", "as_dicts", "=", "False", ")", ":", "creation_date_tags", "=", "[", "mementoweb_api_tags", "(", "url", ")", ",", "get_whois_tags", "(", "domain", ")", ",", "]", "creation_date_tags", "=", "sorted", "(", "sum", "(", "creation_date_tags", ",", "[", "]", ")", ",", "key", "=", "lambda", "x", ":", "x", ".", "date", ")", "if", "not", "as_dicts", ":", "return", "creation_date_tags", "return", "[", "item", ".", "_as_dict", "(", ")", "for", "item", "in", "creation_date_tags", "]" ]
Creates a python-stix STIXPackage object from the supplied xml_file.
def parse_xml(self, xml_file, check_version=True, check_root=True, encoding=None):
    xml_etree = get_etree(xml_file, encoding=encoding)
    entity_obj = self.parse_xml_to_obj(
        xml_file=xml_etree,
        check_version=check_version,
        check_root=check_root)
    xml_root_node = xml_etree.getroot()
    entity = self.get_entity_class(xml_root_node.tag).from_obj(entity_obj)
    # Save the parsed nsmap and schemalocations onto the parsed Entity
    entity.__input_namespaces__ = dict(iteritems(xml_root_node.nsmap))
    with ignored(KeyError):
        pairs = get_schemaloc_pairs(xml_root_node)
        entity.__input_schemalocations__ = dict(pairs)
    return entity
537
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/parser.py#L156-L195
[ "def", "get_default_config_help", "(", "self", ")", ":", "config_help", "=", "super", "(", "UsersCollector", ",", "self", ")", ".", "get_default_config_help", "(", ")", "config_help", ".", "update", "(", "{", "}", ")", "return", "config_help" ]
Get the community logo URL.
def get_logo_url(self, obj):
    if current_app and obj.logo_url:
        return u'{site_url}{path}'.format(
            site_url=current_app.config.get('THEME_SITEURL'),
            path=obj.logo_url,
        )
538
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/serializers/schemas/community.py#L50-L56
[ "def", "_get_all_data", "(", "self", ",", "start_date", ",", "end_date", ")", ":", "return", "[", "self", ".", "_get_input_data", "(", "var", ",", "start_date", ",", "end_date", ")", "for", "var", "in", "_replace_pressure", "(", "self", ".", "variables", ",", "self", ".", "dtype_in_vert", ")", "]" ]
Add the links for each community.
def item_links_addition(self, data):
    links_item_factory = self.context.get('links_item_factory', default_links_item_factory)
    data['links'] = links_item_factory(data)
    return data
539
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/serializers/schemas/community.py#L59-L64
[ "def", "_read_body_by_chunk", "(", "self", ",", "response", ",", "file", ",", "raw", "=", "False", ")", ":", "reader", "=", "ChunkedTransferReader", "(", "self", ".", "_connection", ")", "file_is_async", "=", "hasattr", "(", "file", ",", "'drain'", ")", "while", "True", ":", "chunk_size", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_header", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "if", "not", "chunk_size", ":", "break", "while", "True", ":", "content", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_body", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "not", "content", ":", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "break", "content", "=", "self", ".", "_decompress_data", "(", "content", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "content", "=", "self", ".", "_flush_decompressor", "(", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "trailer_data", "=", "yield", "from", "reader", ".", "read_trailer", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "trailer_data", ")", "if", "file", "and", "raw", ":", "file", ".", "write", "(", "trailer_data", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "response", ".", "fields", ".", "parse", "(", "trailer_data", ")" ]
Wrap result in envelope .
def envelope ( self , data , many ) : if not many : return data result = dict ( hits = dict ( hits = data , total = self . context . get ( 'total' , len ( data ) ) ) ) page = self . context . get ( 'page' ) if page : links_pagination_factory = self . context . get ( 'links_pagination_factory' , default_links_pagination_factory ) urlkwargs = self . context . get ( 'urlkwargs' , { } ) result [ 'links' ] = links_pagination_factory ( page , urlkwargs ) return result
540
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/serializers/schemas/community.py#L67-L90
[ "def", "aux", "(", "self", ",", "aux", ")", ":", "if", "aux", "==", "self", ".", "_aux", ":", "return", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "if", "aux", "is", "not", "None", ":", "self", ".", "_aux", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "aux", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: aux port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "aux", ")", ")" ]
Attempts to parse value into an instance of datetime . datetime . If value is None , this function will return None .
def parse_datetime ( value ) : if not value : return None elif isinstance ( value , datetime . datetime ) : return value return dateutil . parser . parse ( value )
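A minimal usage sketch for the helper above, assuming mixbox is installed and parse_datetime is importable from mixbox.dates (as the source path suggests):

from mixbox.dates import parse_datetime

# ISO 8601 strings are handled by dateutil under the hood
print(parse_datetime("2021-03-01T12:00:00"))  # 2021-03-01 12:00:00
print(parse_datetime(None))                   # None -- falsy input short-circuits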
541
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/dates.py#L13-L25
[ "def", "to_even_columns", "(", "data", ",", "headers", "=", "None", ")", ":", "result", "=", "''", "col_width", "=", "max", "(", "len", "(", "word", ")", "for", "row", "in", "data", "for", "word", "in", "row", ")", "+", "2", "# padding", "if", "headers", ":", "header_width", "=", "max", "(", "len", "(", "word", ")", "for", "row", "in", "headers", "for", "word", "in", "row", ")", "+", "2", "if", "header_width", ">", "col_width", ":", "col_width", "=", "header_width", "result", "+=", "\"\"", ".", "join", "(", "word", ".", "ljust", "(", "col_width", ")", "for", "word", "in", "headers", ")", "+", "\"\\n\"", "result", "+=", "'-'", "*", "col_width", "*", "len", "(", "headers", ")", "+", "\"\\n\"", "for", "row", "in", "data", ":", "result", "+=", "\"\"", ".", "join", "(", "word", ".", "ljust", "(", "col_width", ")", "for", "word", "in", "row", ")", "+", "\"\\n\"", "return", "result" ]
Attempts to parse value into an instance of datetime . date . If value is None , this function will return None .
def parse_date ( value ) : if not value : return None if isinstance ( value , datetime . date ) : return value return parse_datetime ( value ) . date ( )
542
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/dates.py#L44-L59
[ "def", "setStimReps", "(", "self", ")", ":", "reps", "=", "self", ".", "ui", ".", "nrepsSpnbx", ".", "value", "(", ")", "self", ".", "stimModel", ".", "setRepCount", "(", "reps", ")" ]
Finds all valid one and two letter corrections for word_string , returning the word with the highest relative probability as type str .
def correct_word ( word_string ) : if word_string is None : return "" elif isinstance ( word_string , str ) : return max ( find_candidates ( word_string ) , key = find_word_prob ) else : raise InputError ( "string or none type variable not passed as argument to correct_word" )
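A usage sketch, assuming the preprocessing package is installed and correct_word is importable from preprocessing.spellcheck (per the source path); the exact correction depends on the module's word distribution:

from preprocessing.spellcheck import correct_word

print(correct_word("speling"))  # the most probable candidate, e.g. "spelling"
print(correct_word(None))       # "" -- None maps to the empty string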
543
https://github.com/SpotlightData/preprocessing/blob/180c6472bc2642afbd7a1ece08d0b0d14968a708/preprocessing/spellcheck.py#L18-L28
[ "def", "streams", "(", "self", ")", ":", "if", "self", ".", "_streams", "is", "None", ":", "self", ".", "_streams", "=", "list", "(", "self", ".", "_stream_df", "[", "\"STREAM\"", "]", ".", "values", ")", "return", "self", ".", "_streams" ]
Finds all potential words word_string could have been intended to mean . If the word is not incorrectly spelled it will be returned first ; otherwise it will look for one letter edits that are correct . If there are no valid one letter edits it will perform a two letter edit search .
def find_candidates ( word_string ) : if word_string is None : return { } elif isinstance ( word_string , str ) : return ( validate_words ( [ word_string ] ) or validate_words ( list ( find_one_letter_edits ( word_string ) ) ) or validate_words ( list ( find_two_letter_edits ( word_string ) ) ) or set ( [ word_string ] ) ) else : raise InputError ( "string or none type variable not passed as argument to find_candidates" )
544
https://github.com/SpotlightData/preprocessing/blob/180c6472bc2642afbd7a1ece08d0b0d14968a708/preprocessing/spellcheck.py#L30-L45
[ "def", "with_headers", "(", "self", ",", "headers", ")", ":", "for", "key", ",", "value", "in", "headers", ".", "items", "(", ")", ":", "self", ".", "with_header", "(", "key", ",", "value", ")", "return", "self" ]
Finds the relative probability of the word appearing given the context of a base corpus . Returns this probability value as a float instance .
def find_word_prob ( word_string , word_total = sum ( WORD_DISTRIBUTION . values ( ) ) ) : if word_string is None : return 0 elif isinstance ( word_string , str ) : return WORD_DISTRIBUTION [ word_string ] / word_total else : raise InputError ( "string or none type variable not passed as argument to find_word_prob" )
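A self-contained sketch of the relative-frequency idea above; WORD_DISTRIBUTION here is a toy stand-in for the module's real corpus counts:

WORD_DISTRIBUTION = {"the": 500, "cat": 20, "sat": 10}
WORD_TOTAL = sum(WORD_DISTRIBUTION.values())  # 530

def find_word_prob(word_string, word_total=WORD_TOTAL):
    # relative frequency of the word within the corpus
    return WORD_DISTRIBUTION[word_string] / word_total

print(find_word_prob("cat"))  # 20 / 530 ~= 0.0377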
545
https://github.com/SpotlightData/preprocessing/blob/180c6472bc2642afbd7a1ece08d0b0d14968a708/preprocessing/spellcheck.py#L91-L101
[ "def", "_prune_invalid_time_reductions", "(", "spec", ")", ":", "valid_reductions", "=", "[", "]", "if", "not", "spec", "[", "'var'", "]", ".", "def_time", "and", "spec", "[", "'dtype_out_time'", "]", "is", "not", "None", ":", "for", "reduction", "in", "spec", "[", "'dtype_out_time'", "]", ":", "if", "reduction", "not", "in", "_TIME_DEFINED_REDUCTIONS", ":", "valid_reductions", ".", "append", "(", "reduction", ")", "else", ":", "msg", "=", "(", "\"Var {0} has no time dimension \"", "\"for the given time reduction \"", "\"{1} so this calculation will \"", "\"be skipped\"", ".", "format", "(", "spec", "[", "'var'", "]", ".", "name", ",", "reduction", ")", ")", "logging", ".", "info", "(", "msg", ")", "else", ":", "valid_reductions", "=", "spec", "[", "'dtype_out_time'", "]", "return", "valid_reductions" ]
Checks for each edited word in word_list whether that word is a valid English word . Returns all validated words as a set instance .
def validate_words ( word_list ) : if word_list is None : return { } elif isinstance ( word_list , list ) : if not word_list : return { } else : return set ( word for word in word_list if word in WORD_DISTRIBUTION ) else : raise InputError ( "list variable not passed as argument to validate_words" )
546
https://github.com/SpotlightData/preprocessing/blob/180c6472bc2642afbd7a1ece08d0b0d14968a708/preprocessing/spellcheck.py#L103-L116
[ "def", "parse_xml_node", "(", "self", ",", "node", ")", ":", "self", ".", "id", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'id'", ")", "self", ".", "kind", "=", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'kind'", ")", "if", "node", ".", "hasAttributeNS", "(", "RTS_NS", ",", "'rate'", ")", ":", "self", ".", "rate", "=", "float", "(", "node", ".", "getAttributeNS", "(", "RTS_NS", ",", "'rate'", ")", ")", "else", ":", "self", ".", "rate", "=", "0.0", "self", ".", "_participants", "=", "[", "]", "for", "c", "in", "node", ".", "getElementsByTagNameNS", "(", "RTS_NS", ",", "'Participants'", ")", ":", "self", ".", "_participants", ".", "append", "(", "TargetComponent", "(", ")", ".", "parse_xml_node", "(", "c", ")", ")", "for", "c", "in", "get_direct_child_elements_xml", "(", "node", ",", "prefix", "=", "RTS_EXT_NS", ",", "local_name", "=", "'Properties'", ")", ":", "name", ",", "value", "=", "parse_properties_xml", "(", "c", ")", "self", ".", "_properties", "[", "name", "]", "=", "value", "return", "self" ]
It is also possible to query the stars by label ; here is an example of querying for the star labeled Sun .
def search_star ( star ) : base_url = "http://star-api.herokuapp.com/api/v1/stars/" if not isinstance ( star , str ) : raise ValueError ( "The star arg you provided is not the type of str" ) else : base_url += star return dispatch_http_get ( base_url )
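A usage sketch, assuming bowshock is installed; search_star appends the label to the star-api endpoint and returns whatever dispatch_http_get yields for it:

from bowshock import star

resp = star.search_star("Sun")  # GET .../api/v1/stars/Sun
# star.search_star(42) would raise ValueError: the label must be a str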
547
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/star.py#L16-L30
[ "def", "mainloop", "(", "self", ")", ":", "while", "self", ".", "keep_going", ":", "with", "self", ".", "lock", ":", "if", "self", ".", "on_connect", "and", "not", "self", ".", "readable", "(", "2", ")", ":", "self", ".", "on_connect", "(", ")", "self", ".", "on_connect", "=", "None", "if", "not", "self", ".", "keep_going", ":", "break", "self", ".", "process_once", "(", ")" ]
It is also possible to query the exoplanets by label ; here is an example of querying for the exoplanet labeled 11 Com .
def search_exoplanet ( exoplanet ) : base_url = "http://star-api.herokuapp.com/api/v1/exo_planets/" if not isinstance ( exoplanet , str ) : raise ValueError ( "The exoplanet arg you provided is not the type of str" ) else : base_url += exoplanet return dispatch_http_get ( base_url )
548
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/star.py#L43-L58
[ "def", "handle_input", "(", "self", ")", ":", "difference", "=", "self", ".", "check_state", "(", ")", "if", "not", "difference", ":", "return", "self", ".", "events", "=", "[", "]", "self", ".", "handle_new_events", "(", "difference", ")", "self", ".", "update_timeval", "(", ")", "self", ".", "events", ".", "append", "(", "self", ".", "sync_marker", "(", "self", ".", "timeval", ")", ")", "self", ".", "write_to_pipe", "(", "self", ".", "events", ")" ]
It is also possible to query the local galaxies by label ; here is an example of querying for the local galaxy labeled IC 10 .
def search_local_galaxies ( galaxy ) : base_url = "http://star-api.herokuapp.com/api/v1/local_groups/" if not isinstance ( galaxy , str ) : raise ValueError ( "The galaxy arg you provided is not the type of str" ) else : base_url += galaxy return dispatch_http_get ( base_url )
549
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/star.py#L71-L85
[ "def", "timeout", "(", "self", ",", "duration", "=", "3600", ")", ":", "self", ".", "room", ".", "check_owner", "(", ")", "self", ".", "conn", ".", "make_call", "(", "\"timeoutFile\"", ",", "self", ".", "fid", ",", "duration", ")" ]
It is also possible to query the star clusters by label ; here is an example of querying for the star cluster labeled Berkeley 59 .
def search_star_cluster ( cluster ) : base_url = "http://star-api.herokuapp.com/api/v1/open_cluster/" if not isinstance ( cluster , str ) : raise ValueError ( "The cluster arg you provided is not the type of str" ) else : base_url += cluster return dispatch_http_get ( base_url )
550
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/star.py#L98-L112
[ "def", "upload_profiler_report", "(", "url", ",", "filename", ",", "config", ")", ":", "try", ":", "logger", ".", "debug", "(", "\"Uploading profiler report to IOpipe\"", ")", "with", "open", "(", "filename", ",", "\"rb\"", ")", "as", "data", ":", "response", "=", "requests", ".", "put", "(", "url", ",", "data", "=", "data", ",", "timeout", "=", "config", "[", "\"network_timeout\"", "]", ")", "response", ".", "raise_for_status", "(", ")", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "\"Error while uploading profiler report: %s\"", ",", "e", ")", "if", "hasattr", "(", "e", ",", "\"response\"", ")", ":", "logger", ".", "debug", "(", "e", ".", "response", ".", "content", ")", "else", ":", "logger", ".", "debug", "(", "\"Profiler report uploaded successfully\"", ")", "finally", ":", "if", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "os", ".", "remove", "(", "filename", ")" ]
Return the Python representation .
def as_python ( self , name : str ) -> str : if self . _ruleTokens : pattern = "jsg.JSGPattern(r'{}'.format({}))" . format ( self . _rulePattern , ', ' . join ( [ '{v}={v}.pattern' . format ( v = v ) for v in sorted ( self . _ruleTokens ) ] ) ) else : pattern = "jsg.JSGPattern(r'{}')" . format ( self . _rulePattern ) base_type = self . _jsontype . signature_type ( ) if self . _jsontype else "jsg.JSGString" return python_template . format ( name = name , base_type = base_type , pattern = pattern )
551
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_lexerruleblock_parser.py#L59-L67
[ "def", "get_l2vpnfs_table", "(", "self", ")", ":", "l2vpnfs_table", "=", "self", ".", "_global_tables", ".", "get", "(", "RF_L2VPN_FLOWSPEC", ")", "# Lazy initialization of the table.", "if", "not", "l2vpnfs_table", ":", "l2vpnfs_table", "=", "L2VPNFlowSpecTable", "(", "self", ".", "_core_service", ",", "self", ".", "_signal_bus", ")", "self", ".", "_global_tables", "[", "RF_L2VPN_FLOWSPEC", "]", "=", "l2vpnfs_table", "self", ".", "_tables", "[", "(", "None", ",", "RF_L2VPN_FLOWSPEC", ")", "]", "=", "l2vpnfs_table", "return", "l2vpnfs_table" ]
Generate the next slug in a series .
def increment_slug ( s ) : slug_parts = s . split ( '-' ) # advance (or add) the serial counter on the end of this slug # noinspection PyBroadException try : # if it's an integer, increment it slug_parts [ - 1 ] = str ( 1 + int ( slug_parts [ - 1 ] ) ) except : # there's no counter! add one now slug_parts . append ( '2' ) return '-' . join ( slug_parts )
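The helper above is self-contained, so its behavior is easy to check; a sketch, assuming it is importable from peyotl.utility.str_util (per the source path):

from peyotl.utility.str_util import increment_slug

print(increment_slug("my-study"))    # "my-study-2" -- no counter yet, so one is added
print(increment_slug("my-study-2"))  # "my-study-3" -- the trailing integer is incremented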
552
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/str_util.py#L82-L101
[ "def", "_remove_clublog_xml_header", "(", "self", ",", "cty_xml_filename", ")", ":", "import", "tempfile", "try", ":", "with", "open", "(", "cty_xml_filename", ",", "\"r\"", ")", "as", "f", ":", "content", "=", "f", ".", "readlines", "(", ")", "cty_dir", "=", "tempfile", ".", "gettempdir", "(", ")", "cty_name", "=", "os", ".", "path", ".", "split", "(", "cty_xml_filename", ")", "[", "1", "]", "cty_xml_filename_no_header", "=", "os", ".", "path", ".", "join", "(", "cty_dir", ",", "\"NoHeader_\"", "+", "cty_name", ")", "with", "open", "(", "cty_xml_filename_no_header", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "writelines", "(", "\"<clublog>\\n\\r\"", ")", "f", ".", "writelines", "(", "content", "[", "1", ":", "]", ")", "self", ".", "_logger", ".", "debug", "(", "\"Header successfully modified for XML Parsing\"", ")", "return", "cty_xml_filename_no_header", "except", "Exception", "as", "e", ":", "self", ".", "_logger", ".", "error", "(", "\"Clublog CTY could not be opened / modified\"", ")", "self", ".", "_logger", ".", "error", "(", "\"Error Message: \"", "+", "str", "(", "e", ")", ")", "return" ]
Converts ott_id to ottId .
def underscored2camel_case ( v ) : vlist = v . split ( '_' ) c = [ ] for n , el in enumerate ( vlist ) : if el : if n == 0 : c . append ( el ) else : c . extend ( [ el [ 0 ] . upper ( ) , el [ 1 : ] ] ) return '' . join ( c )
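A quick check of the conversion, again assuming the function is importable from peyotl.utility.str_util:

from peyotl.utility.str_util import underscored2camel_case

print(underscored2camel_case("ott_id"))              # "ottId"
print(underscored2camel_case("tree_element_order"))  # "treeElementOrder"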
553
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/str_util.py#L104-L114
[ "def", "_get_graph", "(", "self", ",", "ctx", ",", "bundle", ",", "extensions", ",", "caller", "=", "None", ")", ":", "request", "=", "ctx", ".", "get", "(", "'request'", ")", "if", "request", "is", "None", ":", "request", "=", "get_current_request", "(", ")", "if", "':'", "in", "bundle", ":", "config_name", ",", "bundle", "=", "bundle", ".", "split", "(", "':'", ")", "else", ":", "config_name", "=", "'DEFAULT'", "webpack", "=", "request", ".", "webpack", "(", "config_name", ")", "assets", "=", "(", "caller", "(", "a", ")", "for", "a", "in", "webpack", ".", "get_bundle", "(", "bundle", ",", "extensions", ")", ")", "return", "''", ".", "join", "(", "assets", ")" ]
Return True if the pair name should be ignored .
def unvalidated_parm ( self , parm : str ) -> bool : return parm . startswith ( "_" ) or parm == self . TYPE or parm in self . IGNORE or ( self . JSON_LD and parm . startswith ( '@' ) )
554
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/jsg_context.py#L24-L31
[ "def", "save_scatter_table", "(", "self", ",", "fn", ",", "description", "=", "\"\"", ")", ":", "data", "=", "{", "\"description\"", ":", "description", ",", "\"time\"", ":", "datetime", ".", "now", "(", ")", ",", "\"psd_scatter\"", ":", "(", "self", ".", "num_points", ",", "self", ".", "D_max", ",", "self", ".", "_psd_D", ",", "self", ".", "_S_table", ",", "self", ".", "_Z_table", ",", "self", ".", "_angular_table", ",", "self", ".", "_m_table", ",", "self", ".", "geometries", ")", ",", "\"version\"", ":", "tmatrix_aux", ".", "VERSION", "}", "pickle", ".", "dump", "(", "data", ",", "file", "(", "fn", ",", "'w'", ")", ",", "pickle", ".", "HIGHEST_PROTOCOL", ")" ]
Takes a request and dispatches its data to a jsonrpc method .
def dispatch ( self , request ) : def _wrapped ( ) : messages = self . _get_request_messages ( request ) results = [ self . _dispatch_and_handle_errors ( message ) for message in messages ] non_notification_results = [ x for x in results if x is not None ] if len ( non_notification_results ) == 0 : return None elif len ( messages ) == 1 : return non_notification_results [ 0 ] else : return non_notification_results result , _ = self . _handle_exceptions ( _wrapped ) if result is not None : return self . _encode_complete_result ( result )
555
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/registry.py#L95-L118
[ "def", "secret_file", "(", "filename", ")", ":", "filestat", "=", "os", ".", "stat", "(", "abspath", "(", "filename", ")", ")", "if", "stat", ".", "S_ISREG", "(", "filestat", ".", "st_mode", ")", "==", "0", "and", "stat", ".", "S_ISLNK", "(", "filestat", ".", "st_mode", ")", "==", "0", ":", "e_msg", "=", "\"Secret file %s must be a real file or symlink\"", "%", "filename", "raise", "aomi", ".", "exceptions", ".", "AomiFile", "(", "e_msg", ")", "if", "platform", ".", "system", "(", ")", "!=", "\"Windows\"", ":", "if", "filestat", ".", "st_mode", "&", "stat", ".", "S_IROTH", "or", "filestat", ".", "st_mode", "&", "stat", ".", "S_IWOTH", "or", "filestat", ".", "st_mode", "&", "stat", ".", "S_IWGRP", ":", "e_msg", "=", "\"Secret file %s has too loose permissions\"", "%", "filename", "raise", "aomi", ".", "exceptions", ".", "AomiFile", "(", "e_msg", ")" ]
Registers a method with a given name and signature .
def register ( self , name , method , method_signature = None ) : if inspect . ismethod ( method ) : raise Exception ( "typedjsonrpc does not support making class methods into endpoints" ) self . _name_to_method_info [ name ] = MethodInfo ( name , method , method_signature )
556
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/registry.py#L218-L232
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Syntactic sugar for registering a method .
def method ( self , returns , * * parameter_types ) : @ wrapt . decorator def type_check_wrapper ( method , instance , args , kwargs ) : """Wraps a method so that it is type-checked. :param method: The method to wrap :type method: (T) -> U :return: The result of calling the method with the given parameters :rtype: U """ if instance is not None : raise Exception ( "Instance shouldn't be set." ) parameter_names = inspect . getargspec ( method ) . args # pylint: disable=deprecated-method defaults = inspect . getargspec ( method ) . defaults # pylint: disable=deprecated-method parameters = self . _collect_parameters ( parameter_names , args , kwargs , defaults ) parameter_checker . check_types ( parameters , parameter_types , self . _strict_floats ) result = method ( * args , * * kwargs ) parameter_checker . check_return_type ( result , returns , self . _strict_floats ) return result def register_method ( method ) : """Registers a method with its fully qualified name. :param method: The method to register :type method: function :return: The original method wrapped into a type-checker :rtype: function """ parameter_names = inspect . getargspec ( method ) . args # pylint: disable=deprecated-method parameter_checker . check_type_declaration ( parameter_names , parameter_types ) wrapped_method = type_check_wrapper ( method , None , None , None ) fully_qualified_name = "{}.{}" . format ( method . __module__ , method . __name__ ) self . register ( fully_qualified_name , wrapped_method , MethodSignature . create ( parameter_names , parameter_types , returns ) ) return wrapped_method return register_method
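A usage sketch following typedjsonrpc's documented pattern (and requiring a Python version where inspect.getargspec still exists, as the code above relies on it): create a Registry, then declare a type-checked endpoint with the decorator; the endpoint is registered as '<module>.<function>':

from typedjsonrpc.registry import Registry

registry = Registry()

@registry.method(returns=int, x=int, y=int)
def add(x, y):
    return x + y

print(add(1, 2))  # 3 -- direct calls run the type checks too
# add("1", 2) would raise: "1" is not an int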
557
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/registry.py#L234-L291
[ "def", "data_from_dates", "(", "path", ",", "dates", ")", ":", "if", "path", "[", "-", "1", "]", "!=", "os", ".", "path", ".", "sep", ":", "path", "+=", "os", ".", "path", ".", "sep", "if", "not", "isinstance", "(", "dates", ",", "list", ")", ":", "dates", "=", "[", "dates", "]", "data", "=", "[", "]", "for", "d", "in", "dates", ":", "filepath", "=", "path", "+", "'datalog '", "+", "d", "+", "'.xls'", "data", ".", "append", "(", "remove_notes", "(", "pd", ".", "read_csv", "(", "filepath", ",", "delimiter", "=", "'\\t'", ")", ")", ")", "return", "data" ]
Creates a dictionary mapping parameter names to their values in the method call .
def _collect_parameters ( parameter_names , args , kwargs , defaults ) : parameters = { } if defaults is not None : zipped_defaults = zip ( reversed ( parameter_names ) , reversed ( defaults ) ) for name , default in zipped_defaults : parameters [ name ] = default for name , value in zip ( parameter_names , args ) : parameters [ name ] = value for name , value in kwargs . items ( ) : parameters [ name ] = value return parameters
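A self-contained illustration of the collection order above: right-aligned defaults first, then positional arguments, with keyword arguments overriding both:

def collect(parameter_names, args, kwargs, defaults):
    parameters = {}
    if defaults is not None:
        # defaults align with the *last* parameter names
        for name, default in zip(reversed(parameter_names), reversed(defaults)):
            parameters[name] = default
    for name, value in zip(parameter_names, args):
        parameters[name] = value
    parameters.update(kwargs)
    return parameters

print(collect(["a", "b", "c"], (1,), {"c": 3}, (9, 10)))  # {'c': 3, 'b': 9, 'a': 1}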
558
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/registry.py#L294-L317
[ "def", "seconds_left", "(", "self", ")", ":", "return", "int", "(", "(", "self", ".", "_ENDDATE", ".", "datetime", "-", "Date", "(", "self", ")", ".", "datetime", ")", ".", "total_seconds", "(", ")", ")" ]
Parses the request as a JSON message .
def _get_request_messages ( self , request ) : data = request . get_data ( as_text = True ) try : msg = self . json_decoder . decode ( data ) except Exception : raise ParseError ( "Could not parse request data '{}'" . format ( data ) ) if isinstance ( msg , list ) : return msg else : return [ msg ]
559
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/registry.py#L332-L348
[ "def", "endswith", "(", "self", ",", "pat", ")", ":", "check_type", "(", "pat", ",", "str", ")", "return", "_series_bool_result", "(", "self", ",", "weld_str_endswith", ",", "pat", "=", "pat", ")" ]
Checks that the request JSON is well - formed .
def _check_request ( self , msg ) : if "jsonrpc" not in msg : raise InvalidRequestError ( "'\"jsonrpc\": \"2.0\"' must be included." ) if msg [ "jsonrpc" ] != "2.0" : raise InvalidRequestError ( "'jsonrpc' must be exactly the string '2.0', but it was '{}'." . format ( msg [ "jsonrpc" ] ) ) if "method" not in msg : raise InvalidRequestError ( "No method specified." ) if "id" in msg : if msg [ "id" ] is None : raise InvalidRequestError ( "typedjsonrpc does not allow id to be None." ) if isinstance ( msg [ "id" ] , float ) : raise InvalidRequestError ( "typedjsonrpc does not support float ids." ) if not isinstance ( msg [ "id" ] , ( six . string_types , six . integer_types ) ) : raise InvalidRequestError ( "id must be a string or integer; '{}' is of type {}." . format ( msg [ "id" ] , type ( msg [ "id" ] ) ) ) if msg [ "method" ] not in self . _name_to_method_info : raise MethodNotFoundError ( "Could not find method '{}'." . format ( msg [ "method" ] ) )
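For reference, a message that passes every check above looks like this (a sketch; the method name must already be registered):

msg = {
    "jsonrpc": "2.0",         # must be exactly the string "2.0"
    "method": "example.add",  # must name a registered method
    "params": {"x": 1, "y": 2},
    "id": 1,                  # string or int; None and floats are rejected
}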
560
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/registry.py#L350-L372
[ "def", "add_metadata", "(", "file_name", ",", "title", ",", "artist", ",", "album", ")", ":", "tags", "=", "EasyMP3", "(", "file_name", ")", "if", "title", ":", "tags", "[", "\"title\"", "]", "=", "title", "if", "artist", ":", "tags", "[", "\"artist\"", "]", "=", "artist", "if", "album", ":", "tags", "[", "\"album\"", "]", "=", "album", "tags", ".", "save", "(", ")", "return", "file_name" ]
Render a template from the template folder with the given context .
def render_template_to_string ( input , _from_string = False , * * context ) : if _from_string : template = current_app . jinja_env . from_string ( input ) else : template = current_app . jinja_env . get_or_select_template ( input ) return template . render ( context )
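A usage sketch inside a Flask application context, assuming the helper is importable from invenio_communities.utils (per the source path); _from_string=True renders the inline template directly:

from flask import Flask
from invenio_communities.utils import render_template_to_string

app = Flask(__name__)
with app.app_context():
    out = render_template_to_string("Hello {{ name }}!", _from_string=True, name="world")
    # out == "Hello world!"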
561
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L79-L95
[ "def", "isrchi", "(", "value", ",", "ndim", ",", "array", ")", ":", "value", "=", "ctypes", ".", "c_int", "(", "value", ")", "ndim", "=", "ctypes", ".", "c_int", "(", "ndim", ")", "array", "=", "stypes", ".", "toIntVector", "(", "array", ")", "return", "libspice", ".", "isrchi_c", "(", "value", ",", "ndim", ",", "array", ")" ]
Validate that the community logo is within the size limit and save it .
def save_and_validate_logo ( logo_stream , logo_filename , community_id ) : cfg = current_app . config logos_bucket_id = cfg [ 'COMMUNITIES_BUCKET_UUID' ] logo_max_size = cfg [ 'COMMUNITIES_LOGO_MAX_SIZE' ] logos_bucket = Bucket . query . get ( logos_bucket_id ) ext = os . path . splitext ( logo_filename ) [ 1 ] ext = ext [ 1 : ] if ext . startswith ( '.' ) else ext logo_stream . seek ( SEEK_SET , SEEK_END ) # Seek from beginning to end logo_size = logo_stream . tell ( ) if logo_size > logo_max_size : return None if ext in cfg [ 'COMMUNITIES_LOGO_EXTENSIONS' ] : key = "{0}/logo.{1}" . format ( community_id , ext ) logo_stream . seek ( 0 ) # Rewind the stream to the beginning ObjectVersion . create ( logos_bucket , key , stream = logo_stream , size = logo_size ) return ext else : return None
562
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L98-L120
[ "def", "update", "(", "self", ",", "other", ")", ":", "self", ".", "update_ttl", "(", "other", ".", "ttl", ")", "super", "(", "Rdataset", ",", "self", ")", ".", "update", "(", "other", ")" ]
Initialize the communities file bucket .
def initialize_communities_bucket ( ) : bucket_id = UUID ( current_app . config [ 'COMMUNITIES_BUCKET_UUID' ] ) if Bucket . query . get ( bucket_id ) : raise FilesException ( "Bucket with UUID {} already exists." . format ( bucket_id ) ) else : storage_class = current_app . config [ 'FILES_REST_DEFAULT_STORAGE_CLASS' ] location = Location . get_default ( ) bucket = Bucket ( id = bucket_id , location = location , default_storage_class = storage_class ) db . session . add ( bucket ) db . session . commit ( )
563
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L123-L140
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Format the email message element for inclusion request notification .
def format_request_email_templ ( increq , template , * * ctx ) : # Add minimal information to the contex (without overwriting). curate_link = '{site_url}/communities/{id}/curate/' . format ( site_url = current_app . config [ 'THEME_SITEURL' ] , id = increq . community . id ) min_ctx = dict ( record = Record . get_record ( increq . record . id ) , requester = increq . user , community = increq . community , curate_link = curate_link , ) for k , v in min_ctx . items ( ) : if k not in ctx : ctx [ k ] = v msg_element = render_template_to_string ( template , * * ctx ) return msg_element
564
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L143-L176
[ "def", "GetRootKey", "(", "self", ")", ":", "root_registry_key", "=", "virtual", ".", "VirtualWinRegistryKey", "(", "''", ")", "for", "mapped_key", "in", "self", ".", "_MAPPED_KEYS", ":", "key_path_segments", "=", "key_paths", ".", "SplitKeyPath", "(", "mapped_key", ")", "if", "not", "key_path_segments", ":", "continue", "registry_key", "=", "root_registry_key", "for", "name", "in", "key_path_segments", "[", ":", "-", "1", "]", ":", "sub_registry_key", "=", "registry_key", ".", "GetSubkeyByName", "(", "name", ")", "if", "not", "sub_registry_key", ":", "sub_registry_key", "=", "virtual", ".", "VirtualWinRegistryKey", "(", "name", ")", "registry_key", ".", "AddSubkey", "(", "sub_registry_key", ")", "registry_key", "=", "sub_registry_key", "sub_registry_key", "=", "registry_key", ".", "GetSubkeyByName", "(", "key_path_segments", "[", "-", "1", "]", ")", "if", "(", "not", "sub_registry_key", "and", "isinstance", "(", "registry_key", ",", "virtual", ".", "VirtualWinRegistryKey", ")", ")", ":", "sub_registry_key", "=", "virtual", ".", "VirtualWinRegistryKey", "(", "key_path_segments", "[", "-", "1", "]", ",", "registry", "=", "self", ")", "registry_key", ".", "AddSubkey", "(", "sub_registry_key", ")", "return", "root_registry_key" ]
Format the email message title for inclusion request notification .
def format_request_email_title ( increq , * * ctx ) : template = current_app . config [ "COMMUNITIES_REQUEST_EMAIL_TITLE_TEMPLATE" ] , return format_request_email_templ ( increq , template , * * ctx )
565
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L179-L190
[ "def", "deleteData", "(", "self", ",", "offset", ":", "int", ",", "count", ":", "int", ")", "->", "None", ":", "self", ".", "_delete_data", "(", "offset", ",", "count", ")" ]
Format the email message body for inclusion request notification .
def format_request_email_body ( increq , * * ctx ) : template = current_app . config [ "COMMUNITIES_REQUEST_EMAIL_BODY_TEMPLATE" ] , return format_request_email_templ ( increq , template , * * ctx )
566
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L193-L204
[ "def", "_getLPA", "(", "self", ")", ":", "return", "str", "(", "self", ".", "line", ")", "+", "\":\"", "+", "str", "(", "self", ".", "pos", ")", "+", "\":\"", "+", "str", "(", "self", ".", "absPosition", ")" ]
Signal for sending emails after community inclusion request .
def send_community_request_email ( increq ) : from flask_mail import Message from invenio_mail . tasks import send_email msg_body = format_request_email_body ( increq ) msg_title = format_request_email_title ( increq ) sender = current_app . config [ 'COMMUNITIES_REQUEST_EMAIL_SENDER' ] msg = Message ( msg_title , sender = sender , recipients = [ increq . community . owner . email , ] , body = msg_body ) send_email . delay ( msg . __dict__ )
567
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/utils.py#L207-L224
[ "def", "GetRootKey", "(", "self", ")", ":", "root_registry_key", "=", "virtual", ".", "VirtualWinRegistryKey", "(", "''", ")", "for", "mapped_key", "in", "self", ".", "_MAPPED_KEYS", ":", "key_path_segments", "=", "key_paths", ".", "SplitKeyPath", "(", "mapped_key", ")", "if", "not", "key_path_segments", ":", "continue", "registry_key", "=", "root_registry_key", "for", "name", "in", "key_path_segments", "[", ":", "-", "1", "]", ":", "sub_registry_key", "=", "registry_key", ".", "GetSubkeyByName", "(", "name", ")", "if", "not", "sub_registry_key", ":", "sub_registry_key", "=", "virtual", ".", "VirtualWinRegistryKey", "(", "name", ")", "registry_key", ".", "AddSubkey", "(", "sub_registry_key", ")", "registry_key", "=", "sub_registry_key", "sub_registry_key", "=", "registry_key", ".", "GetSubkeyByName", "(", "key_path_segments", "[", "-", "1", "]", ")", "if", "(", "not", "sub_registry_key", "and", "isinstance", "(", "registry_key", ",", "virtual", ".", "VirtualWinRegistryKey", ")", ")", ":", "sub_registry_key", "=", "virtual", ".", "VirtualWinRegistryKey", "(", "key_path_segments", "[", "-", "1", "]", ",", "registry", "=", "self", ")", "registry_key", ".", "AddSubkey", "(", "sub_registry_key", ")", "return", "root_registry_key" ]
Convenience function for writing documentation .
def modifydocs ( a , b , desc = '' ) : newdoc = a . func_doc . replace ( '\t\t' , '\t' ) newdoc += "Documentation from " + desc + ":\n" + b . func_doc return newdoc
568
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L34-L68
[ "def", "synchronize", "(", "self", ",", "graph_data", "=", "None", ")", ":", "profile", "=", "graph_data", "or", "self", ".", "graph", ".", "get", "(", "'me'", ")", "self", ".", "facebook_username", "=", "profile", ".", "get", "(", "'username'", ")", "self", ".", "first_name", "=", "profile", ".", "get", "(", "'first_name'", ")", "self", ".", "middle_name", "=", "profile", ".", "get", "(", "'middle_name'", ")", "self", ".", "last_name", "=", "profile", ".", "get", "(", "'last_name'", ")", "self", ".", "birthday", "=", "datetime", ".", "strptime", "(", "profile", "[", "'birthday'", "]", ",", "'%m/%d/%Y'", ")", "if", "profile", ".", "has_key", "(", "'birthday'", ")", "else", "None", "self", ".", "email", "=", "profile", ".", "get", "(", "'email'", ")", "self", ".", "locale", "=", "profile", ".", "get", "(", "'locale'", ")", "self", ".", "gender", "=", "profile", ".", "get", "(", "'gender'", ")", "self", ".", "extra_data", "=", "profile", "self", ".", "save", "(", ")" ]
Database - join for tabular arrays .
def tab_join ( ToMerge , keycols = None , nullvals = None , renamer = None , returnrenaming = False , Names = None ) : [ Result , Renaming ] = spreadsheet . join ( ToMerge , keycols = keycols , nullvals = nullvals , renamer = renamer , returnrenaming = True , Names = Names ) if isinstance ( ToMerge , dict ) : Names = ToMerge . keys ( ) else : Names = range ( len ( ToMerge ) ) Colorings = dict ( [ ( k , ToMerge [ k ] . coloring ) if 'coloring' in dir ( ToMerge [ k ] ) else { } for k in Names ] ) for k in Names : if k in Renaming . keys ( ) : l = ToMerge [ k ] Colorings [ k ] = dict ( [ ( g , [ n if not n in Renaming [ k ] . keys ( ) else Renaming [ k ] [ n ] for n in l . coloring [ g ] ] ) for g in Colorings [ k ] . keys ( ) ] ) Coloring = { } for k in Colorings . keys ( ) : for j in Colorings [ k ] . keys ( ) : if j in Coloring . keys ( ) : Coloring [ j ] = utils . uniqify ( Coloring [ j ] + Colorings [ k ] [ j ] ) else : Coloring [ j ] = utils . uniqify ( Colorings [ k ] [ j ] ) Result = Result . view ( tabarray ) Result . coloring = Coloring if returnrenaming : return [ Result , Renaming ] else : return Result
569
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L142-L186
[ "def", "MakeRequest", "(", "self", ",", "data", ")", ":", "stats_collector_instance", ".", "Get", "(", ")", ".", "IncrementCounter", "(", "\"grr_client_sent_bytes\"", ",", "len", "(", "data", ")", ")", "# Verify the response is as it should be from the control endpoint.", "response", "=", "self", ".", "http_manager", ".", "OpenServerEndpoint", "(", "path", "=", "\"control?api=%s\"", "%", "config", ".", "CONFIG", "[", "\"Network.api\"", "]", ",", "verify_cb", "=", "self", ".", "VerifyServerControlResponse", ",", "data", "=", "data", ",", "headers", "=", "{", "\"Content-Type\"", ":", "\"binary/octet-stream\"", "}", ")", "if", "response", ".", "code", "==", "406", ":", "self", ".", "InitiateEnrolment", "(", ")", "return", "response", "if", "response", ".", "code", "==", "200", ":", "stats_collector_instance", ".", "Get", "(", ")", ".", "IncrementCounter", "(", "\"grr_client_received_bytes\"", ",", "len", "(", "response", ".", "data", ")", ")", "return", "response", "# An unspecified error occured.", "return", "response" ]
Creates a copy of this tabarray in the form of a numpy ndarray .
def extract ( self ) : return np . vstack ( [ self [ r ] for r in self . dtype . names ] ) . T . squeeze ( )
570
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L550-L559
[ "def", "assign_mv_feeder_to_nodes", "(", "mv_grid", ")", ":", "mv_station_neighbors", "=", "mv_grid", ".", "graph", ".", "neighbors", "(", "mv_grid", ".", "station", ")", "# get all nodes in MV grid and remove MV station to get separate subgraphs", "mv_graph_nodes", "=", "mv_grid", ".", "graph", ".", "nodes", "(", ")", "mv_graph_nodes", ".", "remove", "(", "mv_grid", ".", "station", ")", "subgraph", "=", "mv_grid", ".", "graph", ".", "subgraph", "(", "mv_graph_nodes", ")", "for", "neighbor", "in", "mv_station_neighbors", ":", "# determine feeder", "mv_feeder", "=", "mv_grid", ".", "graph", ".", "line_from_nodes", "(", "mv_grid", ".", "station", ",", "neighbor", ")", "# get all nodes in that feeder by doing a DFS in the disconnected", "# subgraph starting from the node adjacent to the MVStation `neighbor`", "subgraph_neighbor", "=", "nx", ".", "dfs_tree", "(", "subgraph", ",", "source", "=", "neighbor", ")", "for", "node", "in", "subgraph_neighbor", ".", "nodes", "(", ")", ":", "# in case of an LV station assign feeder to all nodes in that LV", "# grid", "if", "isinstance", "(", "node", ",", "LVStation", ")", ":", "for", "lv_node", "in", "node", ".", "grid", ".", "graph", ".", "nodes", "(", ")", ":", "lv_node", ".", "mv_feeder", "=", "mv_feeder", "else", ":", "node", ".", "mv_feeder", "=", "mv_feeder" ]
Append one or more records to the end of the array .
def addrecords ( self , new ) : data = spreadsheet . addrecords ( self , new ) data = data . view ( tabarray ) data . coloring = self . coloring return data
571
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L609-L621
[ "def", "get_needful_files", "(", "self", ")", ":", "manifest", "=", "self", ".", "storage", ".", "load_manifest", "(", ")", "if", "self", ".", "keep_unhashed_files", ":", "if", "PY3", ":", "needful_files", "=", "set", "(", "manifest", ".", "keys", "(", ")", "|", "manifest", ".", "values", "(", ")", ")", "else", ":", "needful_files", "=", "set", "(", "manifest", ".", "keys", "(", ")", "+", "manifest", ".", "values", "(", ")", ")", "needful_files", "=", "{", "self", ".", "storage", ".", "clean_name", "(", "file", ")", "for", "file", "in", "needful_files", "}", "else", ":", "needful_files", "=", "set", "(", "manifest", ".", "values", "(", ")", ")", "return", "{", "self", ".", "process_file", "(", "file", ")", "for", "file", "in", "needful_files", "}" ]
Add one or more new columns .
def addcols ( self , cols , names = None ) : data = spreadsheet . addcols ( self , cols , names ) data = data . view ( tabarray ) data . coloring = self . coloring return data
572
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L625-L637
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Rename column or color in - place .
def renamecol ( self , old , new ) : spreadsheet . renamecol ( self , old , new ) for x in self . coloring . keys ( ) : if old in self . coloring [ x ] : ind = self . coloring [ x ] . index ( old ) self . coloring [ x ] [ ind ] = new
573
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L658-L671
[ "def", "get_device_access_interfaces", "(", "auth", ",", "url", ",", "devid", "=", "None", ",", "devip", "=", "None", ")", ":", "if", "devip", "is", "not", "None", ":", "devid", "=", "get_dev_details", "(", "devip", ",", "auth", ",", "url", ")", "[", "'id'", "]", "get_access_interface_vlan_url", "=", "\"/imcrs/vlan/access?devId=\"", "+", "str", "(", "devid", ")", "+", "\"&start=1&size=500&total=false\"", "f_url", "=", "url", "+", "get_access_interface_vlan_url", "response", "=", "requests", ".", "get", "(", "f_url", ",", "auth", "=", "auth", ",", "headers", "=", "HEADERS", ")", "try", ":", "if", "response", ".", "status_code", "==", "200", ":", "dev_access_interfaces", "=", "(", "json", ".", "loads", "(", "response", ".", "text", ")", ")", "if", "type", "(", "dev_access_interfaces", "[", "'accessIf'", "]", ")", "is", "dict", ":", "return", "[", "dev_access_interfaces", "[", "'accessIf'", "]", "]", "if", "len", "(", "dev_access_interfaces", ")", "==", "2", ":", "return", "dev_access_interfaces", "[", "'accessIf'", "]", "else", ":", "dev_access_interfaces", "[", "'accessIf'", "]", "=", "[", "\"No access inteface\"", "]", "return", "dev_access_interfaces", "[", "'accessIf'", "]", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "error", ":", "return", "\"Error:\\n\"", "+", "str", "(", "error", ")", "+", "\" get_device_access_interfaces: An Error has occured\"" ]
Horizontal stacking for tabarrays .
def colstack ( self , new , mode = 'abort' ) : if isinstance ( new , list ) : return tab_colstack ( [ self ] + new , mode ) else : return tab_colstack ( [ self , new ] , mode )
574
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L713-L728
[ "def", "_restore_base_estimators", "(", "self", ",", "kernel_cache", ",", "out", ",", "X", ",", "cv", ")", ":", "train_folds", "=", "{", "fold", ":", "train_index", "for", "fold", ",", "(", "train_index", ",", "_", ")", "in", "enumerate", "(", "cv", ")", "}", "for", "idx", ",", "fold", ",", "_", ",", "est", "in", "out", ":", "if", "idx", "in", "kernel_cache", ":", "if", "not", "hasattr", "(", "est", ",", "'fit_X_'", ")", ":", "raise", "ValueError", "(", "'estimator %s uses a custom kernel function, '", "'but does not have the attribute `fit_X_` after training'", "%", "self", ".", "base_estimators", "[", "idx", "]", "[", "0", "]", ")", "est", ".", "set_params", "(", "kernel", "=", "self", ".", "base_estimators", "[", "idx", "]", "[", "1", "]", ".", "kernel", ")", "est", ".", "fit_X_", "=", "X", "[", "train_folds", "[", "fold", "]", "]", "return", "out" ]
Vertical stacking for tabarrays .
def rowstack ( self , new , mode = 'nulls' ) : if isinstance ( new , list ) : return tab_rowstack ( [ self ] + new , mode ) else : return tab_rowstack ( [ self , new ] , mode )
575
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L733-L748
[ "def", "send_email_sns", "(", "sender", ",", "subject", ",", "message", ",", "topic_ARN", ",", "image_png", ")", ":", "from", "boto3", "import", "resource", "as", "boto3_resource", "sns", "=", "boto3_resource", "(", "'sns'", ")", "topic", "=", "sns", ".", "Topic", "(", "topic_ARN", "[", "0", "]", ")", "# Subject is max 100 chars", "if", "len", "(", "subject", ")", ">", "100", ":", "subject", "=", "subject", "[", "0", ":", "48", "]", "+", "'...'", "+", "subject", "[", "-", "49", ":", "]", "response", "=", "topic", ".", "publish", "(", "Subject", "=", "subject", ",", "Message", "=", "message", ")", "logger", ".", "debug", "(", "(", "\"Message sent to SNS.\\nMessageId: {},\\nRequestId: {},\\n\"", "\"HTTPSStatusCode: {}\"", ")", ".", "format", "(", "response", "[", "'MessageId'", "]", ",", "response", "[", "'ResponseMetadata'", "]", "[", "'RequestId'", "]", ",", "response", "[", "'ResponseMetadata'", "]", "[", "'HTTPStatusCode'", "]", ")", ")" ]
Aggregate a tabarray on columns for given functions .
def aggregate ( self , On = None , AggFuncDict = None , AggFunc = None , AggList = None , returnsort = False , KeepOthers = True , keyfuncdict = None ) : if returnsort : [ data , s ] = spreadsheet . aggregate ( X = self , On = On , AggFuncDict = AggFuncDict , AggFunc = AggFunc , AggList = AggList , returnsort = returnsort , keyfuncdict = keyfuncdict ) else : data = spreadsheet . aggregate ( X = self , On = On , AggFuncDict = AggFuncDict , AggFunc = AggFunc , AggList = AggList , returnsort = returnsort , KeepOthers = KeepOthers , keyfuncdict = keyfuncdict ) data = data . view ( tabarray ) data . coloring = self . coloring if returnsort : return [ data , s ] else : return data
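A usage sketch, assuming the (Python 2-era) tabular package is installed and tabarray is importable from tabular.tab; summing a value column per key is the typical call:

from tabular.tab import tabarray

X = tabarray(records=[("a", 1), ("a", 2), ("b", 5)], names=["key", "value"])
agg = X.aggregate(On=["key"], AggFunc=sum)
# one row per key: ("a", 3) and ("b", 5)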
576
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L753-L781
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Aggregate a tabarray and include original data in the result .
def aggregate_in ( self , On = None , AggFuncDict = None , AggFunc = None , AggList = None , interspersed = True ) : data = spreadsheet . aggregate_in ( Data = self , On = On , AggFuncDict = AggFuncDict , AggFunc = AggFunc , AggList = AggList , interspersed = interspersed ) data = data . view ( tabarray ) data . coloring = self . coloring return data
577
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L785-L802
[ "def", "acme_renew_certificates", "(", ")", ":", "for", "csr", "in", "glob", "(", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "'*.csr'", ")", ")", ":", "common_name", "=", "os", ".", "path", ".", "basename", "(", "csr", ")", "common_name", "=", "os", ".", "path", ".", "splitext", "(", "common_name", ")", "[", "0", "]", "certificate_path", "=", "\"{}.crt\"", ".", "format", "(", "common_name", ")", "certificate_path", "=", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "certificate_path", ")", "with", "open", "(", "certificate_path", ")", "as", "file", ":", "crt", "=", "OpenSSL", ".", "crypto", ".", "load_certificate", "(", "OpenSSL", ".", "crypto", ".", "FILETYPE_PEM", ",", "file", ".", "read", "(", ")", ")", "expiration", "=", "crt", ".", "get_notAfter", "(", ")", "expiration", "=", "_parse_asn1_generalized_date", "(", "expiration", ")", "remaining", "=", "expiration", "-", "datetime", ".", "utcnow", "(", ")", "if", "remaining", ">", "timedelta", "(", "days", "=", "30", ")", ":", "print", "\"No need to renew {} ({})\"", ".", "format", "(", "certificate_path", ",", "remaining", ")", "continue", "print", "\"Renewing {} ({})\"", ".", "format", "(", "certificate_path", ",", "remaining", ")", "certificate_request_path", "=", "\"{}.csr\"", ".", "format", "(", "common_name", ")", "certificate_request_path", "=", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "certificate_request_path", ")", "signed_cert", "=", "\"{}-signed.crt\"", ".", "format", "(", "common_name", ")", "signed_cert", "=", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "signed_cert", ")", "_internal_sign_certificate", "(", "certificate_path", ",", "certificate_request_path", ",", "signed_cert", ")" ]
Pivot with a as the row axis and b values as the column axis .
def pivot ( self , a , b , Keep = None , NullVals = None , order = None , prefix = '_' ) : [ data , coloring ] = spreadsheet . pivot ( X = self , a = a , b = b , Keep = Keep , NullVals = NullVals , order = order , prefix = prefix ) data = data . view ( tabarray ) data . coloring = coloring return data
578
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L807-L820
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Wrapper for spreadsheet . join that also handles coloring attributes .
def join ( self , ToMerge , keycols = None , nullvals = None , renamer = None , returnrenaming = False , selfname = None , Names = None ) : if isinstance ( ToMerge , np . ndarray ) : ToMerge = [ ToMerge ] if isinstance ( ToMerge , dict ) : assert selfname not in ToMerge . keys ( ) , ( 'Can\'t use "' , selfname + '" for name of one of the things to ' 'merge, since it is the same name as the self object.' ) if selfname == None : try : selfname = self . name except AttributeError : selfname = 'self' ToMerge . update ( { selfname : self } ) else : ToMerge = [ self ] + ToMerge return tab_join ( ToMerge , keycols = keycols , nullvals = nullvals , renamer = renamer , returnrenaming = returnrenaming , Names = Names )
579
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L839-L867
[ "def", "wrapped_env_maker", "(", "environment_id", ",", "seed", ",", "serial_id", ",", "disable_reward_clipping", "=", "False", ",", "disable_episodic_life", "=", "False", ",", "monitor", "=", "False", ",", "allow_early_resets", "=", "False", ",", "scale_float_frames", "=", "False", ",", "max_episode_frames", "=", "10000", ",", "frame_stack", "=", "None", ")", ":", "env", "=", "env_maker", "(", "environment_id", ")", "env", ".", "seed", "(", "seed", "+", "serial_id", ")", "if", "max_episode_frames", "is", "not", "None", ":", "env", "=", "ClipEpisodeLengthWrapper", "(", "env", ",", "max_episode_length", "=", "max_episode_frames", ")", "# Monitoring the env", "if", "monitor", ":", "logdir", "=", "logger", ".", "get_dir", "(", ")", "and", "os", ".", "path", ".", "join", "(", "logger", ".", "get_dir", "(", ")", ",", "str", "(", "serial_id", ")", ")", "else", ":", "logdir", "=", "None", "env", "=", "Monitor", "(", "env", ",", "logdir", ",", "allow_early_resets", "=", "allow_early_resets", ")", "if", "not", "disable_episodic_life", ":", "# Make end-of-life == end-of-episode, but only reset on true game over.", "# Done by DeepMind for the DQN and co. since it helps value estimation.", "env", "=", "EpisodicLifeEnv", "(", "env", ")", "if", "'FIRE'", "in", "env", ".", "unwrapped", ".", "get_action_meanings", "(", ")", ":", "# Take action on reset for environments that are fixed until firing.", "if", "disable_episodic_life", ":", "env", "=", "FireEpisodicLifeEnv", "(", "env", ")", "else", ":", "env", "=", "FireResetEnv", "(", "env", ")", "# Warp frames to 84x84 as done in the Nature paper and later work.", "env", "=", "WarpFrame", "(", "env", ")", "if", "scale_float_frames", ":", "env", "=", "ScaledFloatFrame", "(", "env", ")", "if", "not", "disable_reward_clipping", ":", "# Bin reward to {+1, 0, -1} by its sign.", "env", "=", "ClipRewardEnv", "(", "env", ")", "if", "frame_stack", "is", "not", "None", ":", "env", "=", "FrameStack", "(", "env", ",", "frame_stack", ")", "return", "env" ]
Returns the indices that would sort an array .
def argsort ( self , axis = - 1 , kind = 'quicksort' , order = None ) : index_array = np . core . fromnumeric . _wrapit ( self , 'argsort' , axis , kind , order ) index_array = index_array . view ( np . ndarray ) return index_array
580
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/tab.py#L869-L937
[ "def", "delete_container_instance_group", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "container_group_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourcegroups/'", ",", "resource_group", ",", "'/providers/Microsoft.ContainerInstance/ContainerGroups/'", ",", "container_group_name", ",", "'?api-version='", ",", "CONTAINER_API", "]", ")", "return", "do_delete", "(", "endpoint", ",", "access_token", ")" ]
Determine whether txt matches the pattern .
def matches ( self , txt : str ) -> bool : # rval = ref.getText()[1:-1].encode('utf-8').decode('unicode-escape') if r'\\u' in self . pattern_re . pattern : txt = txt . encode ( 'utf-8' ) . decode ( 'unicode-escape' ) match = self . pattern_re . match ( txt ) return match is not None and match . end ( ) == len ( txt )
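A self-contained sketch of the anchored-match logic above: re.match only anchors at the start of the string, so the match must also end at len(txt) to count as a full match:

import re

pattern_re = re.compile(r"[0-9]+")

def matches(txt):
    m = pattern_re.match(txt)
    return m is not None and m.end() == len(txt)

print(matches("123"))   # True
print(matches("123a"))  # False -- trailing text is rejected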
581
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/jsg_strings.py#L23-L33
[ "async", "def", "open_session", "(", "self", ",", "request", ":", "BaseRequestWebsocket", ")", "->", "Session", ":", "return", "await", "ensure_coroutine", "(", "self", ".", "session_interface", ".", "open_session", ")", "(", "self", ",", "request", ")" ]
Return web - safe hex triplets .
def Point2HexColor ( a , lfrac , tfrac ) : [ H , S , V ] = [ math . floor ( 360 * a ) , lfrac , tfrac ] RGB = hsvToRGB ( H , S , V ) H = [ hex ( int ( math . floor ( 255 * x ) ) ) for x in RGB ] HEX = [ a [ a . find ( 'x' ) + 1 : ] for a in H ] HEX = [ '0' + h if len ( h ) == 1 else h for h in HEX ] return '#' + '' . join ( HEX )
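A usage sketch, assuming tabular is installed; a in [0, 1) selects the hue while lfrac and tfrac set saturation and value:

from tabular.colors import Point2HexColor

print(Point2HexColor(0.0, 1.0, 1.0))  # '#ff0000' -- hue 0 at full saturation/value is red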
582
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/colors.py#L13-L28
[ "def", "refresh_keyboard_mapping", "(", "self", ",", "evt", ")", ":", "if", "isinstance", "(", "evt", ",", "event", ".", "MappingNotify", ")", ":", "if", "evt", ".", "request", "==", "X", ".", "MappingKeyboard", ":", "self", ".", "_update_keymap", "(", "evt", ".", "first_keycode", ",", "evt", ".", "count", ")", "else", ":", "raise", "TypeError", "(", "'expected a MappingNotify event'", ")" ]
Only to be used in this file and peyotl.utility.get_config
def warn_from_util_logger(msg):
    global _LOG
    # This check is necessary to avoid infinite recursion when called from get_config,
    # because the _read_logging_conf can require reading a conf file.
    if _LOG is None and _LOGGING_CONF is None:
        sys.stderr.write('WARNING: (from peyotl before logging is configured) {}\n'.format(msg))
        return
    if _LOG is None:
        _LOG = get_logger("peyotl.utility")
    _LOG.warn(msg)
583
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/get_logger.py#L85-L95
[ "def", "compare_values", "(", "values0", ",", "values1", ")", ":", "values0", "=", "{", "v", "[", "0", "]", ":", "v", "[", "1", ":", "]", "for", "v", "in", "values0", "}", "values1", "=", "{", "v", "[", "0", "]", ":", "v", "[", "1", ":", "]", "for", "v", "in", "values1", "}", "created", "=", "[", "(", "k", ",", "v", "[", "0", "]", ",", "v", "[", "1", "]", ")", "for", "k", ",", "v", "in", "values1", ".", "items", "(", ")", "if", "k", "not", "in", "values0", "]", "deleted", "=", "[", "(", "k", ",", "v", "[", "0", "]", ",", "v", "[", "1", "]", ")", "for", "k", ",", "v", "in", "values0", ".", "items", "(", ")", "if", "k", "not", "in", "values1", "]", "modified", "=", "[", "(", "k", ",", "v", "[", "0", "]", ",", "v", "[", "1", "]", ")", "for", "k", ",", "v", "in", "values0", ".", "items", "(", ")", "if", "v", "!=", "values1", ".", "get", "(", "k", ",", "None", ")", "]", "return", "created", ",", "deleted", ",", "modified" ]
Given a state delta, apply the modifications to the lights' state over a given period of time.
def state_delta(self, selector='all', power=None, duration=1.0, infrared=None,
                hue=None, saturation=None, brightness=None, kelvin=None):
    argument_tuples = [
        ("power", power),
        ("duration", duration),
        ("infrared", infrared),
        ("hue", hue),
        ("saturation", saturation),
        ("brightness", brightness),
        ("kelvin", kelvin),
    ]
    return self.client.perform_request(
        method='post',
        endpoint='lights/{}/state/delta',
        endpoint_args=[selector],
        argument_tuples=argument_tuples)
584
https://github.com/cydrobolt/pifx/blob/c9de9c2695c3e6e72de4aa0de47b78fc13c457c3/pifx/core.py#L75-L122
[ "def", "_compute_sync_map_file_path", "(", "self", ",", "root", ",", "hierarchy_type", ",", "custom_id", ",", "file_name", ")", ":", "prefix", "=", "root", "if", "hierarchy_type", "==", "HierarchyType", ".", "PAGED", ":", "prefix", "=", "gf", ".", "norm_join", "(", "prefix", ",", "custom_id", ")", "file_name_joined", "=", "gf", ".", "norm_join", "(", "prefix", ",", "file_name", ")", "return", "self", ".", "_replace_placeholder", "(", "file_name_joined", ",", "custom_id", ")" ]
Perform a breathe effect on lights.
def breathe_lights(self, color, selector='all', from_color=None, period=1.0,
                   cycles=1.0, persist=False, power_on=True, peak=0.5):
    argument_tuples = [
        ("color", color),
        ("from_color", from_color),
        ("period", period),
        ("cycles", cycles),
        ("persist", persist),
        ("power_on", power_on),
        ("peak", peak),
    ]
    return self.client.perform_request(
        method='post',
        endpoint='lights/{}/effects/breathe',
        endpoint_args=[selector],
        argument_tuples=argument_tuples)
585
https://github.com/cydrobolt/pifx/blob/c9de9c2695c3e6e72de4aa0de47b78fc13c457c3/pifx/core.py#L135-L186
[ "def", "add_binary_media_types", "(", "self", ",", "logical_id", ",", "binary_media_types", ")", ":", "properties", "=", "self", ".", "_get_properties", "(", "logical_id", ")", "binary_media_types", "=", "binary_media_types", "or", "[", "]", "for", "value", "in", "binary_media_types", ":", "normalized_value", "=", "self", ".", "_normalize_binary_media_type", "(", "value", ")", "# If the value is not supported, then just skip it.", "if", "normalized_value", ":", "properties", ".", "binary_media_types", ".", "add", "(", "normalized_value", ")", "else", ":", "LOG", ".", "debug", "(", "\"Unsupported data type of binary media type value of resource '%s'\"", ",", "logical_id", ")" ]
Cycle through a list of effects.
def cycle_lights(self, states, defaults, direction='forward', selector='all'):
    argument_tuples = [
        ("states", states),
        ("defaults", defaults),
        ("direction", direction),
    ]
    return self.client.perform_request(
        method='post',
        endpoint='lights/{}/cycle',
        endpoint_args=[selector],
        argument_tuples=argument_tuples,
        json_body=True)
586
https://github.com/cydrobolt/pifx/blob/c9de9c2695c3e6e72de4aa0de47b78fc13c457c3/pifx/core.py#L235-L266
[ "def", "decode_temperature", "(", "packet", ",", "channel", "=", "1", ")", ":", "val", "=", "str", "(", "packet", ".", "get", "(", "QSDATA", ",", "''", ")", ")", "if", "len", "(", "val", ")", "==", "12", "and", "val", ".", "startswith", "(", "'34'", ")", "and", "channel", "==", "1", ":", "temperature", "=", "int", "(", "val", "[", "-", "4", ":", "]", ",", "16", ")", "return", "round", "(", "float", "(", "(", "-", "46.85", "+", "(", "175.72", "*", "(", "temperature", "/", "pow", "(", "2", ",", "16", ")", ")", ")", ")", ")", ")", "return", "None" ]
Activate a scene.
def activate_scene(self, scene_uuid, duration=1.0):
    argument_tuples = [
        ("duration", duration),
    ]
    return self.client.perform_request(
        method='put',
        endpoint='scenes/scene_id:{}/activate',
        endpoint_args=[scene_uuid],
        argument_tuples=argument_tuples)
587
https://github.com/cydrobolt/pifx/blob/c9de9c2695c3e6e72de4aa0de47b78fc13c457c3/pifx/core.py#L276-L295
[ "def", "indication", "(", "self", ",", "apdu", ")", ":", "if", "_debug", ":", "ServerSSM", ".", "_debug", "(", "\"indication %r\"", ",", "apdu", ")", "if", "self", ".", "state", "==", "IDLE", ":", "self", ".", "idle", "(", "apdu", ")", "elif", "self", ".", "state", "==", "SEGMENTED_REQUEST", ":", "self", ".", "segmented_request", "(", "apdu", ")", "elif", "self", ".", "state", "==", "AWAIT_RESPONSE", ":", "self", ".", "await_response", "(", "apdu", ")", "elif", "self", ".", "state", "==", "SEGMENTED_RESPONSE", ":", "self", ".", "segmented_response", "(", "apdu", ")", "else", ":", "if", "_debug", ":", "ServerSSM", ".", "_debug", "(", "\" - invalid state\"", ")" ]
Returns the number of trees summed across all tree groups.
def count_num_trees(nexson, nexson_version=None):
    if nexson_version is None:
        nexson_version = detect_nexson_version(nexson)
    nex = get_nexml_el(nexson)
    num_trees_by_group = []
    if _is_by_id_hbf(nexson_version):
        for tree_group in nex.get('treesById', {}).values():
            nt = len(tree_group.get('treeById', {}))
            num_trees_by_group.append(nt)
    else:
        trees_group = nex.get('trees', [])
        if isinstance(trees_group, dict):
            trees_group = [trees_group]
        for tree_group in trees_group:
            t = tree_group.get('tree')
            if isinstance(t, list):
                nt = len(t)
            else:
                nt = 1
            num_trees_by_group.append(nt)
    return sum(num_trees_by_group)
588
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/inspect.py#L15-L38
[ "def", "load_draco", "(", "file_obj", ",", "*", "*", "kwargs", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.drc'", ")", "as", "temp_drc", ":", "temp_drc", ".", "write", "(", "file_obj", ".", "read", "(", ")", ")", "temp_drc", ".", "flush", "(", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.ply'", ")", "as", "temp_ply", ":", "subprocess", ".", "check_output", "(", "[", "draco_decoder", ",", "'-i'", ",", "temp_drc", ".", "name", ",", "'-o'", ",", "temp_ply", ".", "name", "]", ")", "temp_ply", ".", "seek", "(", "0", ")", "kwargs", "=", "load_ply", "(", "temp_ply", ")", "return", "kwargs" ]
Factory function for a _TreeCollectionStore object.
def TreeCollectionStore(repos_dict=None,
                        repos_par=None,
                        with_caching=True,
                        assumed_doc_version=None,
                        git_ssh=None,
                        pkey=None,
                        git_action_class=TreeCollectionsGitAction,
                        mirror_info=None,
                        infrastructure_commit_author='OpenTree API <[email protected]>'):
    global _THE_TREE_COLLECTION_STORE
    if _THE_TREE_COLLECTION_STORE is None:
        _THE_TREE_COLLECTION_STORE = _TreeCollectionStore(
            repos_dict=repos_dict,
            repos_par=repos_par,
            with_caching=with_caching,
            assumed_doc_version=assumed_doc_version,
            git_ssh=git_ssh,
            pkey=pkey,
            git_action_class=git_action_class,
            mirror_info=mirror_info,
            infrastructure_commit_author=infrastructure_commit_author)
    return _THE_TREE_COLLECTION_STORE
589
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/collections_store/collections_umbrella.py#L287-L314
[ "def", "rate_limit", "(", "f", ")", ":", "def", "new_f", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "errors", "=", "0", "while", "True", ":", "resp", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "resp", ".", "status_code", "==", "200", ":", "errors", "=", "0", "return", "resp", "elif", "resp", ".", "status_code", "==", "401", ":", "# Hack to retain the original exception, but augment it with", "# additional context for the user to interpret it. In a Python", "# 3 only future we can raise a new exception of the same type", "# with a new message from the old error.", "try", ":", "resp", ".", "raise_for_status", "(", ")", "except", "requests", ".", "HTTPError", "as", "e", ":", "message", "=", "\"\\nThis is a protected or locked account, or\"", "+", "\" the credentials provided are no longer valid.\"", "e", ".", "args", "=", "(", "e", ".", "args", "[", "0", "]", "+", "message", ",", ")", "+", "e", ".", "args", "[", "1", ":", "]", "log", ".", "warning", "(", "\"401 Authentication required for %s\"", ",", "resp", ".", "url", ")", "raise", "elif", "resp", ".", "status_code", "==", "429", ":", "reset", "=", "int", "(", "resp", ".", "headers", "[", "'x-rate-limit-reset'", "]", ")", "now", "=", "time", ".", "time", "(", ")", "seconds", "=", "reset", "-", "now", "+", "10", "if", "seconds", "<", "1", ":", "seconds", "=", "10", "log", ".", "warning", "(", "\"rate limit exceeded: sleeping %s secs\"", ",", "seconds", ")", "time", ".", "sleep", "(", "seconds", ")", "elif", "resp", ".", "status_code", ">=", "500", ":", "errors", "+=", "1", "if", "errors", ">", "30", ":", "log", ".", "warning", "(", "\"too many errors from Twitter, giving up\"", ")", "resp", ".", "raise_for_status", "(", ")", "seconds", "=", "60", "*", "errors", "log", ".", "warning", "(", "\"%s from Twitter API, sleeping %s\"", ",", "resp", ".", "status_code", ",", "seconds", ")", "time", ".", "sleep", "(", "seconds", ")", "else", ":", "resp", ".", "raise_for_status", "(", ")", "return", "new_f" ]
Parse the JSON, find its name, and return a slug of that name.
def _slugify_internal_collection_name(self, json_repr):
    collection = self._coerce_json_to_collection(json_repr)
    if collection is None:
        return None
    internal_name = collection['name']
    return slugify(internal_name)
590
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/collections_store/collections_umbrella.py#L238-L244
[ "def", "apply", "(", "self", ")", ":", "self", ".", "read_group_info", "(", ")", "if", "self", ".", "tabs", ".", "count", "(", ")", "==", "0", ":", "# disactivate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "False", ")", "self", ".", "button_del", ".", "setEnabled", "(", "False", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "False", ")", "else", ":", "# activate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "True", ")", "self", ".", "button_del", ".", "setEnabled", "(", "True", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "True", ")", "if", "self", ".", "groups", ":", "self", ".", "parent", ".", "overview", ".", "update_position", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "update", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")", "else", ":", "self", ".", "parent", ".", "traces", ".", "reset", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "reset", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")" ]
Search the LAN for available Roku devices. Returns a Roku object.
def discover_roku():
    print("Searching for Roku devices within LAN ...")
    rokus = Roku.discover()
    if not rokus:
        print("Unable to discover Roku devices. " +
              "Try again, or manually specify the IP address with " +
              "'roku <ipaddr>' (e.g. roku 192.168.1.130)")
        return None
    print("Found the following Roku devices:")
    for i, r in enumerate(rokus):
        # dinfo = ' '.join(re.split(', |: ', str(r.device_info))[1:3])
        dinfo = ''
        print("[" + str(i + 1) + "] " + str(r.host) + ":" + str(r.port) +
              ' (' + dinfo + ')')
    print("")
    if len(rokus) == 1:
        print("Selecting Roku 1 by default")
        return rokus[0]
    else:
        print("Multiple Rokus found. Select the index of the Roku to control:")
        while True:
            try:
                query = "Select (1 to " + str(len(rokus)) + ") > "
                sel = int(input(query)) - 1
                if sel >= len(rokus):
                    raise ValueError
                else:
                    break
            except ValueError:
                print("Invalid selection")
        return rokus[sel]
591
https://github.com/ncmiller/roku-cli/blob/9101952edf9802146c794e63353abf2bf116c052/rokucli/discover.py#L6-L42
[ "def", "set_compare_custom_predict_fn", "(", "self", ",", "predict_fn", ")", ":", "# If estimator is set, remove it before setting predict_fn", "self", ".", "delete", "(", "'compare_estimator_and_spec'", ")", "self", ".", "store", "(", "'compare_custom_predict_fn'", ",", "predict_fn", ")", "self", ".", "set_compare_inference_address", "(", "'custom_predict_fn'", ")", "# If no model name has been set, give a default", "if", "not", "self", ".", "has_compare_model_name", "(", ")", ":", "self", ".", "set_compare_model_name", "(", "'2'", ")", "return", "self" ]
Uses a peyotl wrapper around an Open Tree web service to get a list of OTT IDs matching the name_list. The tnrs_wrapper can be None (in which case the default wrapper from peyotl.sugar will be used). All other arguments correspond to the arguments of the web-service call. A ValueError will be raised if the context_name does not match one of the valid names for a taxonomic context. This uses the wrap_response option to create and return a TNRSResponse object around the response.
def ot_tnrs_match_names(name_list,
                        context_name=None,
                        do_approximate_matching=True,
                        include_dubious=False,
                        include_deprecated=True,
                        tnrs_wrapper=None):
    if tnrs_wrapper is None:
        from peyotl.sugar import tnrs
        tnrs_wrapper = tnrs
    match_obj = tnrs_wrapper.match_names(name_list,
                                         context_name=context_name,
                                         do_approximate_matching=do_approximate_matching,
                                         include_deprecated=include_deprecated,
                                         include_dubious=include_dubious,
                                         wrap_response=True)
    return match_obj
592
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/tutorials/ot-info-for-taxon-name.py#L10-L33
[ "def", "model_stats", "(", "self", ")", ":", "stats", "=", "self", ".", "model", ".", "default_stats", "return", "blob_data_to_dict", "(", "stats", ",", "self", ".", "_sampler", ".", "blobs", ")" ]
Make value suitable for a binding object.
def _objectify(field, value, ns_info):
    if (getattr(field.type_, "_treat_none_as_empty_list", False)
            and value is None):
        return []
    if value is None:
        return None
    elif field.type_:
        return value.to_obj(ns_info=ns_info)
    return field.binding_value(value)
593
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L19-L33
[ "def", "_is_chunk_markdown", "(", "source", ")", ":", "lines", "=", "source", ".", "splitlines", "(", ")", "if", "all", "(", "line", ".", "startswith", "(", "'# '", ")", "for", "line", "in", "lines", ")", ":", "# The chunk is a Markdown *unless* it is commented Python code.", "source", "=", "'\\n'", ".", "join", "(", "line", "[", "2", ":", "]", "for", "line", "in", "lines", "if", "not", "line", "[", "2", ":", "]", ".", "startswith", "(", "'#'", ")", ")", "# skip headers", "if", "not", "source", ":", "return", "True", "# Try to parse the chunk: if it fails, it is Markdown, otherwise,", "# it is Python.", "return", "not", "_is_python", "(", "source", ")", "return", "False" ]
Make value suitable for a dictionary.
def _dictify(field, value):
    if value is None:
        return None
    elif field.type_:
        return value.to_dict()
    return field.dict_value(value)
594
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L36-L47
[ "def", "color_lerp", "(", "c1", ":", "Tuple", "[", "int", ",", "int", ",", "int", "]", ",", "c2", ":", "Tuple", "[", "int", ",", "int", ",", "int", "]", ",", "a", ":", "float", ")", "->", "Color", ":", "return", "Color", ".", "_new_from_cdata", "(", "lib", ".", "TCOD_color_lerp", "(", "c1", ",", "c2", ",", "a", ")", ")" ]
Parse the dictionary and return an Entity instance.
def from_dict(cls, cls_dict, fallback_xsi_type=None):
    if not cls_dict:
        return None
    if isinstance(cls_dict, six.string_types):
        if not getattr(cls, "_convert_strings", False):
            return cls_dict
    try:
        typekey = cls.dictkey(cls_dict)
    except TypeError:
        typekey = fallback_xsi_type
    klass = cls.entity_class(typekey)
    return klass.from_dict(cls_dict)
595
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L137-L164
[ "def", "acquire_writer", "(", "self", ")", ":", "with", "self", ".", "mutex", ":", "while", "self", ".", "rwlock", "!=", "0", ":", "self", ".", "_writer_wait", "(", ")", "self", ".", "rwlock", "=", "-", "1" ]
Parse the generateDS object and return an Entity instance.
def from_obj(cls, cls_obj):
    if not cls_obj:
        return None
    typekey = cls.objkey(cls_obj)
    klass = cls.entity_class(typekey)
    return klass.from_obj(cls_obj)
596
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L167-L185
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Return a tuple of this entity's TypedFields.
def typed_fields(cls):
    # Checking cls._typed_fields could return a superclass _typed_fields
    # value. So we check our class __dict__ which does not include
    # inherited attributes.
    klassdict = cls.__dict__
    try:
        return klassdict["_typed_fields"]
    except KeyError:
        fields = cls.typed_fields_with_attrnames()
        cls._typed_fields = tuple(field for _, field in fields)
        return cls._typed_fields
597
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L201-L214
[ "def", "get_closest_sibling_state", "(", "state_m", ",", "from_logical_port", "=", "None", ")", ":", "if", "not", "state_m", ".", "parent", ":", "logger", ".", "warning", "(", "\"A state can not have a closest sibling state if it has not parent as {0}\"", ".", "format", "(", "state_m", ")", ")", "return", "margin", "=", "cal_margin", "(", "state_m", ".", "parent", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", ")", "pos", "=", "state_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", "size", "=", "state_m", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", "# otherwise measure from reference state itself", "if", "from_logical_port", "in", "[", "\"outcome\"", ",", "\"income\"", "]", ":", "size", "=", "(", "margin", ",", "margin", ")", "if", "from_logical_port", "==", "\"outcome\"", ":", "outcomes_m", "=", "[", "outcome_m", "for", "outcome_m", "in", "state_m", ".", "outcomes", "if", "outcome_m", ".", "outcome", ".", "outcome_id", ">=", "0", "]", "free_outcomes_m", "=", "[", "oc_m", "for", "oc_m", "in", "outcomes_m", "if", "not", "state_m", ".", "state", ".", "parent", ".", "get_transition_for_outcome", "(", "state_m", ".", "state", ",", "oc_m", ".", "outcome", ")", "]", "if", "free_outcomes_m", ":", "outcome_m", "=", "free_outcomes_m", "[", "0", "]", "else", ":", "outcome_m", "=", "outcomes_m", "[", "0", "]", "pos", "=", "add_pos", "(", "pos", ",", "outcome_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", ")", "elif", "from_logical_port", "==", "\"income\"", ":", "pos", "=", "add_pos", "(", "pos", ",", "state_m", ".", "income", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", ")", "min_distance", "=", "None", "for", "sibling_state_m", "in", "state_m", ".", "parent", ".", "states", ".", "values", "(", ")", ":", "if", "sibling_state_m", "is", "state_m", ":", "continue", "sibling_pos", "=", "sibling_state_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", "sibling_size", "=", "sibling_state_m", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", "distance", "=", "geometry", ".", "cal_dist_between_2_coord_frame_aligned_boxes", "(", "pos", ",", "size", ",", "sibling_pos", ",", "sibling_size", ")", "if", "not", "min_distance", "or", "min_distance", "[", "0", "]", ">", "distance", ":", "min_distance", "=", "(", "distance", ",", "sibling_state_m", ")", "return", "min_distance" ]
Convert to a GenerateDS binding object.
def to_obj(self, ns_info=None):
    if ns_info:
        ns_info.collect(self)
    # null behavior for classes that inherit from Entity but do not
    # have _binding_class
    if not hasattr(self, "_binding_class"):
        return None
    entity_obj = self._binding_class()
    for field, val in six.iteritems(self._fields):
        # EntityLists with no list items should be dropped
        if isinstance(val, EntityList) and len(val) == 0:
            val = None
        elif field.multiple:
            if val:
                val = [_objectify(field, x, ns_info) for x in val]
            else:
                val = []
        else:
            val = _objectify(field, val, ns_info)
        setattr(entity_obj, field.name, val)
    self._finalize_obj(entity_obj)
    return entity_obj
598
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L275-L309
[ "def", "_sync", "(", "self", ")", ":", "if", "(", "self", ".", "_opcount", ">", "self", ".", "checkpoint_operations", "or", "datetime", ".", "now", "(", ")", ">", "self", ".", "_last_sync", "+", "self", ".", "checkpoint_timeout", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Synchronizing queue metadata.\"", ")", "self", ".", "queue_metadata", ".", "sync", "(", ")", "self", ".", "_last_sync", "=", "datetime", ".", "now", "(", ")", "self", ".", "_opcount", "=", "0", "else", ":", "self", ".", "log", ".", "debug", "(", "\"NOT synchronizing queue metadata.\"", ")" ]
Convert to a dict
def to_dict(self):
    entity_dict = {}
    for field, val in six.iteritems(self._fields):
        if field.multiple:
            if val:
                val = [_dictify(field, x) for x in val]
            else:
                val = []
        else:
            val = _dictify(field, val)
        # Only add non-None objects or non-empty lists
        if val is not None and val != []:
            entity_dict[field.key_name] = val
    self._finalize_dict(entity_dict)
    return entity_dict
599
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/entities.py#L318-L343
[ "def", "ts_stats_significance", "(", "ts", ",", "ts_stat_func", ",", "null_ts_func", ",", "B", "=", "1000", ",", "permute_fast", "=", "False", ")", ":", "stats_ts", "=", "ts_stat_func", "(", "ts", ")", "if", "permute_fast", ":", "# Permute it in 1 shot", "null_ts", "=", "map", "(", "np", ".", "random", ".", "permutation", ",", "np", ".", "array", "(", "[", "ts", ",", "]", "*", "B", ")", ")", "else", ":", "null_ts", "=", "np", ".", "vstack", "(", "[", "null_ts_func", "(", "ts", ")", "for", "i", "in", "np", ".", "arange", "(", "0", ",", "B", ")", "]", ")", "stats_null_ts", "=", "np", ".", "vstack", "(", "[", "ts_stat_func", "(", "nts", ")", "for", "nts", "in", "null_ts", "]", ")", "pvals", "=", "[", "]", "nums", "=", "[", "]", "for", "i", "in", "np", ".", "arange", "(", "0", ",", "len", "(", "stats_ts", ")", ")", ":", "num_samples", "=", "np", ".", "sum", "(", "(", "stats_null_ts", "[", ":", ",", "i", "]", ">=", "stats_ts", "[", "i", "]", ")", ")", "nums", ".", "append", "(", "num_samples", ")", "pval", "=", "num_samples", "/", "float", "(", "B", ")", "pvals", ".", "append", "(", "pval", ")", "return", "stats_ts", ",", "pvals", ",", "nums" ]