Dataset columns:

    query       string, 5 to 1.23k chars
    positive    string, 53 to 15.2k chars
    id_         int64, 0 to 252k
    task_name   string, 87 to 242 chars
    negative    sequence, 20 to 553 items
We recursively run DFS on each vertex in free_vertex.
def __dfs(self, v, index, layers):
    if index == 0:
        path = [v]
        while self._dfs_parent[v] != v:
            path.append(self._dfs_parent[v])
            v = self._dfs_parent[v]
        self._dfs_paths.append(path)
        return True

    for neighbour in self._graph[v]:  # check the neighbours of vertex
        if neighbour in layers[index - 1]:
            # if neighbour is in left, we are traversing unmatched edges..
            if neighbour in self._dfs_parent:
                continue
            if (neighbour in self._left and (v not in self._matching or neighbour != self._matching[v])) \
                    or (neighbour in self._right and (v in self._matching and neighbour == self._matching[v])):
                self._dfs_parent[neighbour] = v
                if self.__dfs(neighbour, index - 1, layers):
                    return True
    return False
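For context, this helper is internal to the hopcroftkarp package; a minimal usage sketch of its documented public entry point (the graph literal below is illustrative):

from hopcroftkarp import HopcroftKarp

# Bipartite graph: keys are left vertices, values are sets of right vertices.
graph = {'a': {1}, 'b': {1, 2}, 'c': {2, 3}}

# maximum_matching() runs the BFS/DFS phases (including __dfs above) and
# returns the matching keyed from both sides.
matching = HopcroftKarp(graph).maximum_matching()
print(matching)  # e.g. {'a': 1, 1: 'a', 'b': 2, 2: 'b', 'c': 3, 3: 'c'}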
300
https://github.com/sofiatolaosebikan/hopcroftkarp/blob/5e6cf4f95702304847307a07d369f8041edff8c9/hopcroftkarp/__init__.py#L84-L109
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Symbol decorator.
def method(self, symbol):
    assert issubclass(symbol, SymbolBase)

    def wrapped(fn):
        setattr(symbol, fn.__name__, fn)

    return wrapped
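A hedged usage sketch: Parser, SymbolAdd, and expression() below are hypothetical names, assuming method() lives on a parser class and SymbolBase is its symbol base class.

parser = Parser()  # hypothetical parser exposing the method() decorator above

@parser.method(SymbolAdd)  # SymbolAdd: a hypothetical SymbolBase subclass
def led(self, left):
    # Attached as SymbolAdd.led; note that wrapped() returns None, so the
    # module-level name 'led' becomes None after decoration.
    return left + self.expression()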
301
https://github.com/tehmaze/parser/blob/ccc69236304b2f00671f14c62433e8830b838101/parser/base.py#L69-L76
[ "def", "on_response", "(", "self", ",", "ch", ",", "method_frame", ",", "props", ",", "body", ")", ":", "LOGGER", ".", "debug", "(", "\"rabbitmq.Requester.on_response\"", ")", "if", "self", ".", "corr_id", "==", "props", ".", "correlation_id", ":", "self", ".", "response", "=", "{", "'props'", ":", "props", ",", "'body'", ":", "body", "}", "else", ":", "LOGGER", ".", "warn", "(", "\"rabbitmq.Requester.on_response - discarded response : \"", "+", "str", "(", "props", ".", "correlation_id", ")", ")", "LOGGER", ".", "debug", "(", "\"natsd.Requester.on_response - discarded response : \"", "+", "str", "(", "{", "'properties'", ":", "props", ",", "'body'", ":", "body", "}", ")", ")" ]
Simple date parsing function
def _simpleparsefun(date):
    if hasattr(date, 'year'):
        return date
    try:
        date = datetime.datetime.strptime(date, '%Y-%m-%d')
    except ValueError:
        date = datetime.datetime.strptime(date, '%Y-%m-%d %H:%M:%S')
    return date
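A quick self-contained check of the behavior described above (a sketch, not from the source):

import datetime

assert _simpleparsefun('2024-01-15') == datetime.datetime(2024, 1, 15)
assert _simpleparsefun('2024-01-15 08:30:00') == datetime.datetime(2024, 1, 15, 8, 30)
# Anything with a 'year' attribute (date/datetime) passes through unchanged.
assert _simpleparsefun(datetime.date(2024, 1, 15)) == datetime.date(2024, 1, 15)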
302
https://github.com/antoniobotelho/py-business-calendar/blob/92365fbddd043e41e33b01f1ddd9dd6a5094c031/business_calendar/business_calendar.py#L63-L71
[ "def", "FindClonedClients", "(", "token", "=", "None", ")", ":", "index", "=", "client_index", ".", "CreateClientIndex", "(", "token", "=", "token", ")", "clients", "=", "index", ".", "LookupClients", "(", "[", "\".\"", "]", ")", "hw_infos", "=", "_GetHWInfos", "(", "clients", ",", "token", "=", "token", ")", "# We get all clients that have reported more than one hardware serial", "# number over time. This doesn't necessarily indicate a cloned client - the", "# machine might just have new hardware. We need to search for clients that", "# alternate between different IDs.", "clients_with_multiple_serials", "=", "[", "client_id", "for", "client_id", ",", "serials", "in", "iteritems", "(", "hw_infos", ")", "if", "len", "(", "serials", ")", ">", "1", "]", "client_list", "=", "aff4", ".", "FACTORY", ".", "MultiOpen", "(", "clients_with_multiple_serials", ",", "age", "=", "aff4", ".", "ALL_TIMES", ",", "token", "=", "token", ")", "cloned_clients", "=", "[", "]", "for", "c", "in", "client_list", ":", "hwis", "=", "c", ".", "GetValuesForAttribute", "(", "c", ".", "Schema", ".", "HARDWARE_INFO", ")", "# Here we search for the earliest and latest time each ID was reported.", "max_index", "=", "{", "}", "min_index", "=", "{", "}", "ids", "=", "set", "(", ")", "for", "i", ",", "hwi", "in", "enumerate", "(", "hwis", ")", ":", "s", "=", "hwi", ".", "serial_number", "max_index", "[", "s", "]", "=", "i", "if", "s", "not", "in", "min_index", ":", "min_index", "[", "s", "]", "=", "i", "ids", ".", "add", "(", "s", ")", "# Construct ranges [first occurrence, last occurrence] for every ID. If", "# a client just changed from one ID to the other, those ranges of IDs should", "# be disjunct. If they overlap at some point, it indicates that two IDs were", "# reported in the same time frame.", "ranges", "=", "[", "]", "for", "hwid", "in", "ids", ":", "ranges", ".", "append", "(", "(", "min_index", "[", "hwid", "]", ",", "max_index", "[", "hwid", "]", ")", ")", "# Sort ranges by first occurrence time.", "ranges", ".", "sort", "(", ")", "for", "i", "in", "range", "(", "len", "(", "ranges", ")", "-", "1", ")", ":", "if", "ranges", "[", "i", "]", "[", "1", "]", ">", "ranges", "[", "i", "+", "1", "]", "[", "0", "]", ":", "cloned_clients", ".", "append", "(", "c", ")", "msg", "=", "\"Found client with multiple, overlapping serial numbers: %s\"", "logging", ".", "info", "(", "msg", ",", "c", ".", "urn", ")", "for", "hwi", "in", "c", ".", "GetValuesForAttribute", "(", "c", ".", "Schema", ".", "HARDWARE_INFO", ")", ":", "logging", ".", "info", "(", "\"%s %s\"", ",", "hwi", ".", "age", ",", "hwi", ".", "serial_number", ")", "break", "return", "cloned_clients" ]
Connect signal to current model
def _connect(cls):
    post_save.connect(
        notify_items,
        sender=cls,
        dispatch_uid='knocker_{0}'.format(cls.__name__),
    )
303
https://github.com/nephila/django-knocker/blob/d25380d43a1f91285f1581dcf9db8510fe87f354/knocker/mixins.py#L31-L38
[ "def", "_do_http", "(", "opts", ",", "profile", "=", "'default'", ")", ":", "ret", "=", "{", "}", "url", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:url'", ".", "format", "(", "profile", ")", ",", "''", ")", "user", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:user'", ".", "format", "(", "profile", ")", ",", "''", ")", "passwd", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:pass'", ".", "format", "(", "profile", ")", ",", "''", ")", "realm", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:realm'", ".", "format", "(", "profile", ")", ",", "''", ")", "timeout", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:timeout'", ".", "format", "(", "profile", ")", ",", "''", ")", "if", "not", "url", ":", "raise", "Exception", "(", "'missing url in profile {0}'", ".", "format", "(", "profile", ")", ")", "if", "user", "and", "passwd", ":", "auth", "=", "_auth", "(", "url", "=", "url", ",", "realm", "=", "realm", ",", "user", "=", "user", ",", "passwd", "=", "passwd", ")", "_install_opener", "(", "auth", ")", "url", "+=", "'?{0}'", ".", "format", "(", "_urlencode", "(", "opts", ")", ")", "for", "line", "in", "_urlopen", "(", "url", ",", "timeout", "=", "timeout", ")", ".", "read", "(", ")", ".", "splitlines", "(", ")", ":", "splt", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "if", "splt", "[", "0", "]", "in", "ret", ":", "ret", "[", "splt", "[", "0", "]", "]", "+=", "',{0}'", ".", "format", "(", "splt", "[", "1", "]", ")", "else", ":", "ret", "[", "splt", "[", "0", "]", "]", "=", "splt", "[", "1", "]", "return", "ret" ]
Disconnect signal from current model
def _disconnect(cls):
    post_save.disconnect(
        notify_items,
        sender=cls,
        dispatch_uid='knocker_{0}'.format(cls.__name__),
    )
304
https://github.com/nephila/django-knocker/blob/d25380d43a1f91285f1581dcf9db8510fe87f354/knocker/mixins.py#L41-L48
[ "def", "run_tornado", "(", "self", ",", "args", ")", ":", "server", "=", "self", "import", "tornado", ".", "ioloop", "import", "tornado", ".", "web", "import", "tornado", ".", "websocket", "ioloop", "=", "tornado", ".", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "class", "DevWebSocketHandler", "(", "tornado", ".", "websocket", ".", "WebSocketHandler", ")", ":", "def", "open", "(", "self", ")", ":", "super", "(", "DevWebSocketHandler", ",", "self", ")", ".", "open", "(", ")", "server", ".", "on_open", "(", "self", ")", "def", "on_message", "(", "self", ",", "message", ")", ":", "server", ".", "on_message", "(", "self", ",", "message", ")", "def", "on_close", "(", "self", ")", ":", "super", "(", "DevWebSocketHandler", ",", "self", ")", ".", "on_close", "(", ")", "server", ".", "on_close", "(", "self", ")", "class", "MainHandler", "(", "tornado", ".", "web", ".", "RequestHandler", ")", ":", "def", "get", "(", "self", ")", ":", "self", ".", "write", "(", "server", ".", "index_page", ")", "#: Set the call later method", "server", ".", "call_later", "=", "ioloop", ".", "call_later", "server", ".", "add_callback", "=", "ioloop", ".", "add_callback", "app", "=", "tornado", ".", "web", ".", "Application", "(", "[", "(", "r\"/\"", ",", "MainHandler", ")", ",", "(", "r\"/dev\"", ",", "DevWebSocketHandler", ")", ",", "]", ")", "app", ".", "listen", "(", "self", ".", "port", ")", "print", "(", "\"Tornado Dev server started on {}\"", ".", "format", "(", "self", ".", "port", ")", ")", "ioloop", ".", "start", "(", ")" ]
Returns a dictionary with the knock data built from _knocker_data
def as_knock(self, created=False):
    knock = {}
    if self.should_knock(created):
        for field, data in self._retrieve_data(None, self._knocker_data):
            knock[field] = data
    return knock
305
https://github.com/nephila/django-knocker/blob/d25380d43a1f91285f1581dcf9db8510fe87f354/knocker/mixins.py#L97-L105
[ "def", "_allocate_address", "(", "self", ",", "instance", ")", ":", "connection", "=", "self", ".", "_connect", "(", ")", "free_addresses", "=", "[", "ip", "for", "ip", "in", "connection", ".", "get_all_addresses", "(", ")", "if", "not", "ip", ".", "instance_id", "]", "if", "not", "free_addresses", ":", "try", ":", "address", "=", "connection", ".", "allocate_address", "(", ")", "except", "Exception", "as", "ex", ":", "log", ".", "error", "(", "\"Unable to allocate a public IP address to instance `%s`\"", ",", "instance", ".", "id", ")", "return", "None", "try", ":", "address", "=", "free_addresses", ".", "pop", "(", ")", "instance", ".", "use_ip", "(", "address", ")", "return", "address", ".", "public_ip", "except", "Exception", "as", "ex", ":", "log", ".", "error", "(", "\"Unable to associate IP address %s to instance `%s`\"", ",", "address", ",", "instance", ".", "id", ")", "return", "None" ]
Send the knock to the associated channels Group.
def send_knock(self, created=False):
    knock = self.as_knock(created)
    if knock:
        gr = Group('knocker-{0}'.format(knock['language']))
        gr.send({'text': json.dumps(knock)})
306
https://github.com/nephila/django-knocker/blob/d25380d43a1f91285f1581dcf9db8510fe87f354/knocker/mixins.py#L107-L114
[ "def", "open", "(", "self", ",", "job", ")", ":", "# Create a working directory for the job", "startingDir", "=", "os", ".", "getcwd", "(", ")", "self", ".", "localTempDir", "=", "makePublicDir", "(", "os", ".", "path", ".", "join", "(", "self", ".", "localTempDir", ",", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ")", ")", "# Check the status of all jobs on this node. If there are jobs that started and died before", "# cleaning up their presence from the cache state file, restore the cache file to a state", "# where the jobs don't exist.", "with", "self", ".", "_CacheState", ".", "open", "(", "self", ")", "as", "cacheInfo", ":", "self", ".", "findAndHandleDeadJobs", "(", "cacheInfo", ")", "# While we have a lock on the cache file, run a naive check to see if jobs on this node", "# have greatly gone over their requested limits.", "if", "cacheInfo", ".", "sigmaJob", "<", "0", ":", "logger", ".", "warning", "(", "'Detecting that one or more jobs on this node have used more '", "'resources than requested. Turn on debug logs to see more'", "'information on cache usage.'", ")", "# Get the requirements for the job and clean the cache if necessary. cleanCache will", "# ensure that the requirements for this job are stored in the state file.", "jobReqs", "=", "job", ".", "disk", "# Cleanup the cache to free up enough space for this job (if needed)", "self", ".", "cleanCache", "(", "jobReqs", ")", "try", ":", "os", ".", "chdir", "(", "self", ".", "localTempDir", ")", "yield", "finally", ":", "diskUsed", "=", "getDirSizeRecursively", "(", "self", ".", "localTempDir", ")", "logString", "=", "(", "\"Job {jobName} used {percent:.2f}% ({humanDisk}B [{disk}B] used, \"", "\"{humanRequestedDisk}B [{requestedDisk}B] requested) at the end of \"", "\"its run.\"", ".", "format", "(", "jobName", "=", "self", ".", "jobName", ",", "percent", "=", "(", "float", "(", "diskUsed", ")", "/", "jobReqs", "*", "100", "if", "jobReqs", ">", "0", "else", "0.0", ")", ",", "humanDisk", "=", "bytes2human", "(", "diskUsed", ")", ",", "disk", "=", "diskUsed", ",", "humanRequestedDisk", "=", "bytes2human", "(", "jobReqs", ")", ",", "requestedDisk", "=", "jobReqs", ")", ")", "self", ".", "logToMaster", "(", "logString", ",", "level", "=", "logging", ".", "DEBUG", ")", "if", "diskUsed", ">", "jobReqs", ":", "self", ".", "logToMaster", "(", "\"Job used more disk than requested. Please reconsider modifying \"", "\"the user script to avoid the chance of failure due to \"", "\"incorrectly requested resources. \"", "+", "logString", ",", "level", "=", "logging", ".", "WARNING", ")", "os", ".", "chdir", "(", "startingDir", ")", "self", ".", "cleanupInProgress", "=", "True", "# Delete all the job specific files and return sizes to jobReqs", "self", ".", "returnJobReqs", "(", "jobReqs", ")", "with", "self", ".", "_CacheState", ".", "open", "(", "self", ")", "as", "cacheInfo", ":", "# Carry out any user-defined cleanup actions", "deferredFunctions", "=", "cacheInfo", ".", "jobState", "[", "self", ".", "jobID", "]", "[", "'deferredFunctions'", "]", "failures", "=", "self", ".", "_runDeferredFunctions", "(", "deferredFunctions", ")", "for", "failure", "in", "failures", ":", "self", ".", "logToMaster", "(", "'Deferred function \"%s\" failed.'", "%", "failure", ",", "logging", ".", "WARN", ")", "# Finally delete the job from the cache state file", "cacheInfo", ".", "jobState", ".", "pop", "(", "self", ".", "jobID", ")" ]
Colorize a message with the ANSI color specification.
def colorize(printable, color, style='normal', autoreset=True):
    if not COLORED:  # disable color
        return printable
    if color not in COLOR_MAP:
        raise RuntimeError('invalid color set, no {}'.format(color))
    return '{color}{printable}{reset}'.format(
        printable=printable,
        color=COLOR_MAP[color].format(style=STYLE_MAP[style]),
        reset=COLOR_MAP['reset'] if autoreset else '',
    )
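A usage sketch, assuming decorating's COLOR_MAP and STYLE_MAP define 'red', 'green', and 'bold' entries (hedged; check decorating/color.py for the exact keys):

print(colorize('something bad happened', 'red', style='bold'))
print(colorize('all good', 'green'))  # wrapped in ANSI codes, reset at the end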
307
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/color.py#L45-L63
[ "def", "dict_max", "(", "dic", ")", ":", "aux", "=", "dict", "(", "map", "(", "lambda", "item", ":", "(", "item", "[", "1", "]", ",", "item", "[", "0", "]", ")", ",", "dic", ".", "items", "(", ")", ")", ")", "if", "aux", ".", "keys", "(", ")", "==", "[", "]", ":", "return", "0", "max_value", "=", "max", "(", "aux", ".", "keys", "(", ")", ")", "return", "max_value", ",", "aux", "[", "max_value", "]" ]
Change text color for the Linux terminal; defaults to green. Set warning=True for red.
def color(string, status=True, warning=False, bold=True):
    attr = []
    if status:
        # green
        attr.append('32')
    if warning:
        # red
        attr.append('31')
    if bold:
        attr.append('1')
    return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), string)
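This one is fully self-contained, so a minimal runnable example follows. Note that status stays True by default, so warning=True emits both the 32 and 31 codes and the later red code takes effect in most terminals:

print(color('[*] hashes matched'))               # '\x1b[32;1m[*] hashes matched\x1b[0m'
print(color('[!] hash mismatch', warning=True))  # 32;31;1 -> the later 31 (red) wins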
308
https://github.com/FortyNorthSecurity/Hasher/blob/40173c56b36680ab1ddc57a9c13c36b3a1ec51c3/hashes/common/helpers.py#L10-L24
[ "def", "generate_citation_counter", "(", "self", ")", ":", "cite_counter", "=", "dict", "(", ")", "filename", "=", "'%s.aux'", "%", "self", ".", "project_name", "with", "open", "(", "filename", ")", "as", "fobj", ":", "main_aux", "=", "fobj", ".", "read", "(", ")", "cite_counter", "[", "filename", "]", "=", "_count_citations", "(", "filename", ")", "for", "match", "in", "re", ".", "finditer", "(", "r'\\\\@input\\{(.*.aux)\\}'", ",", "main_aux", ")", ":", "filename", "=", "match", ".", "groups", "(", ")", "[", "0", "]", "try", ":", "counter", "=", "_count_citations", "(", "filename", ")", "except", "IOError", ":", "pass", "else", ":", "cite_counter", "[", "filename", "]", "=", "counter", "return", "cite_counter" ]
Patch pymongo's Collection object to add a tail method. While not necessarily recommended, you can use this to inject tail as a method into Collection, making it generally accessible.
def _patch():
    if not __debug__:  # pragma: no cover
        import warnings
        warnings.warn("A catgirl has died.", ImportWarning)

    from pymongo.collection import Collection
    Collection.tail = tail
309
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/util/capped.py#L49-L61
[ "def", "remove_experiment", "(", "self", ",", "id", ")", ":", "if", "id", "in", "self", ".", "experiments", ":", "self", ".", "experiments", ".", "pop", "(", "id", ")", "self", ".", "write_file", "(", ")" ]
Execute a find and return the resulting queryset, using combined plain and parametric query generation. Additionally performs argument case normalization; refer to the _prepare_query method's docstring.
def _prepare_find(cls, *args, **kw):
    cls, collection, query, options = cls._prepare_query(cls.FIND_MAPPING, cls.FIND_OPTIONS, *args, **kw)

    if 'await' in options:
        raise TypeError("Await is hard-deprecated as reserved keyword in Python 3.7, use wait instead.")

    if 'cursor_type' in options and {'tail', 'wait'} & set(options):
        raise TypeError("Can not combine cursor_type and tail/wait arguments.")
    elif options.pop('tail', False):
        options['cursor_type'] = CursorType.TAILABLE_AWAIT if options.pop('wait', True) else CursorType.TAILABLE
    elif 'wait' in options:
        raise TypeError("Wait option only applies to tailing cursors.")

    modifiers = options.get('modifiers', dict())
    if 'max_time_ms' in options:
        modifiers['$maxTimeMS'] = options.pop('max_time_ms')
    if modifiers:
        options['modifiers'] = modifiers

    return cls, collection, query, options
310
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/queryable.py#L113-L146
[ "def", "bulkRead", "(", "self", ",", "endpoint", ",", "size", ",", "timeout", "=", "100", ")", ":", "return", "self", ".", "dev", ".", "read", "(", "endpoint", ",", "size", ",", "timeout", ")" ]
Reload the entire document from the database, or refresh specific named top-level fields.
def reload(self, *fields, **kw):
    Doc, collection, query, options = self._prepare_find(id=self.id, projection=fields, **kw)
    result = collection.find_one(query, **options)

    if fields:  # Refresh only the requested data.
        for k in result:  # TODO: Better merge algorithm.
            if k == ~Doc.id:
                continue
            self.__data__[k] = result[k]
    else:
        self.__data__ = result

    return self
311
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/queryable.py#L271-L284
[ "def", "render", "(", "self", ")", ":", "if", "not", "self", ".", "available", "(", ")", ":", "return", "\"\"", "mtool", "=", "api", ".", "get_tool", "(", "\"portal_membership\"", ")", "member", "=", "mtool", ".", "getAuthenticatedMember", "(", ")", "roles", "=", "member", ".", "getRoles", "(", ")", "allowed", "=", "\"LabManager\"", "in", "roles", "or", "\"Manager\"", "in", "roles", "self", ".", "get_failed_instruments", "(", ")", "if", "allowed", "and", "self", ".", "nr_failed", ":", "return", "self", ".", "index", "(", ")", "else", ":", "return", "\"\"" ]
Get values gathered from the previously set hierarchy.
def get(cls):
    results = {}
    hierarchy = cls.__hierarchy
    hierarchy.reverse()

    for storeMethod in hierarchy:
        cls.merger.merge(results, storeMethod.get())

    return results
312
https://github.com/andrasmaroy/pconf/blob/1f930bf4e88bf8b4732fcc95557c66f3608b8821/pconf/pconf.py#L27-L45
[ "def", "sync_readmes", "(", ")", ":", "print", "(", "\"syncing README\"", ")", "with", "open", "(", "\"README.md\"", ",", "'r'", ")", "as", "reader", ":", "file_text", "=", "reader", ".", "read", "(", ")", "with", "open", "(", "\"README\"", ",", "'w'", ")", "as", "writer", ":", "writer", ".", "write", "(", "file_text", ")" ]
Set command line arguments as a source
def argv(cls, name, short_name=None, type=None, help=None):
    cls.__hierarchy.append(argv.Argv(name, short_name, type, help))
313
https://github.com/andrasmaroy/pconf/blob/1f930bf4e88bf8b4732fcc95557c66f3608b8821/pconf/pconf.py#L72-L83
[ "def", "similarity", "(", "self", ",", "other", ")", ":", "sim", "=", "self", ".", "Similarity", "(", ")", "total", "=", "0.0", "# Calculate similarity ratio for each attribute", "cname", "=", "self", ".", "__class__", ".", "__name__", "for", "aname", ",", "weight", "in", "self", ".", "attributes", ".", "items", "(", ")", ":", "attr1", "=", "getattr", "(", "self", ",", "aname", ",", "None", ")", "attr2", "=", "getattr", "(", "other", ",", "aname", ",", "None", ")", "self", ".", "log", "(", "attr1", ",", "attr2", ",", "'%'", ",", "cname", "=", "cname", ",", "aname", "=", "aname", ")", "# Similarity is ignored if None on both objects", "if", "attr1", "is", "None", "and", "attr2", "is", "None", ":", "self", ".", "log", "(", "attr1", ",", "attr2", ",", "'%'", ",", "cname", "=", "cname", ",", "aname", "=", "aname", ",", "result", "=", "\"attributes are both None\"", ")", "continue", "# Similarity is 0 if either attribute is non-Comparable", "if", "not", "all", "(", "(", "isinstance", "(", "attr1", ",", "Comparable", ")", ",", "isinstance", "(", "attr2", ",", "Comparable", ")", ")", ")", ":", "self", ".", "log", "(", "attr1", ",", "attr2", ",", "'%'", ",", "cname", "=", "cname", ",", "aname", "=", "aname", ",", "result", "=", "\"attributes not Comparable\"", ")", "total", "+=", "weight", "continue", "# Calculate similarity between the attributes", "attr_sim", "=", "(", "attr1", "%", "attr2", ")", "self", ".", "log", "(", "attr1", ",", "attr2", ",", "'%'", ",", "cname", "=", "cname", ",", "aname", "=", "aname", ",", "result", "=", "attr_sim", ")", "# Add the similarity to the total", "sim", "+=", "attr_sim", "*", "weight", "total", "+=", "weight", "# Scale the similarity so the total is 1.0", "if", "total", ":", "sim", "*=", "(", "1.0", "/", "total", ")", "return", "sim" ]
Set environment variables as a source.
def env(cls, separator=None, match=None, whitelist=None, parse_values=None, to_lower=None, convert_underscores=None):
    cls.__hierarchy.append(env.Env(separator, match, whitelist, parse_values, to_lower, convert_underscores))
314
https://github.com/andrasmaroy/pconf/blob/1f930bf4e88bf8b4732fcc95557c66f3608b8821/pconf/pconf.py#L86-L104
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
Set a file as a source.
def file(cls, path, encoding=None, parser=None):
    cls.__hierarchy.append(file.File(path, encoding, parser))
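Taken together, argv/env/file build the hierarchy that get() merges; a sketch following the pconf README (config.json is a hypothetical path):

from pconf import Pconf

Pconf.env(separator='__')
Pconf.file('config.json', encoding='json')

config = Pconf.get()  # sources attached first take precedence in the merge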
315
https://github.com/andrasmaroy/pconf/blob/1f930bf4e88bf8b4732fcc95557c66f3608b8821/pconf/pconf.py#L107-L122
[ "def", "compare_nouns", "(", "self", ",", "word1", ",", "word2", ")", ":", "return", "self", ".", "_plequal", "(", "word1", ",", "word2", ",", "self", ".", "plural_noun", ")" ]
Generate a MongoDB projection dictionary using the Django ORM style.
def P(Document, *fields, **kw):
    __always__ = kw.pop('__always__', set())
    projected = set()
    omitted = set()

    for field in fields:
        if field[0] in ('-', '!'):
            omitted.add(field[1:])
        elif field[0] == '+':
            projected.add(field[1:])
        else:
            projected.add(field)

    if not projected:  # We only have exclusions from the default projection.
        names = set(getattr(Document, '__projection__', Document.__fields__) or Document.__fields__)
        projected = {name for name in (names - omitted)}

    projected |= __always__

    if not projected:
        projected = {'_id'}

    return {unicode(traverse(Document, name, name)): True for name in projected}
316
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/param/project.py#L11-L35
[ "def", "get_segment_definer_comments", "(", "xml_file", ",", "include_version", "=", "True", ")", ":", "from", "glue", ".", "ligolw", ".", "ligolw", "import", "LIGOLWContentHandler", "as", "h", "lsctables", ".", "use_in", "(", "h", ")", "# read segment definer table", "xmldoc", ",", "_", "=", "ligolw_utils", ".", "load_fileobj", "(", "xml_file", ",", "gz", "=", "xml_file", ".", "name", ".", "endswith", "(", "\".gz\"", ")", ",", "contenthandler", "=", "h", ")", "seg_def_table", "=", "table", ".", "get_table", "(", "xmldoc", ",", "lsctables", ".", "SegmentDefTable", ".", "tableName", ")", "# put comment column into a dict", "comment_dict", "=", "{", "}", "for", "seg_def", "in", "seg_def_table", ":", "if", "include_version", ":", "full_channel_name", "=", "':'", ".", "join", "(", "[", "str", "(", "seg_def", ".", "ifos", ")", ",", "str", "(", "seg_def", ".", "name", ")", ",", "str", "(", "seg_def", ".", "version", ")", "]", ")", "else", ":", "full_channel_name", "=", "':'", ".", "join", "(", "[", "str", "(", "seg_def", ".", "ifos", ")", ",", "str", "(", "seg_def", ".", "name", ")", "]", ")", "comment_dict", "[", "full_channel_name", "]", "=", "seg_def", ".", "comment", "return", "comment_dict" ]
Identify if the given session ID is currently valid. Return True if valid, False if explicitly invalid, None if unknown.
def is_valid(self, context, sid):
    record = self._Document.find_one(sid, project=('expires',))
    if not record:
        return

    return not record._expired
317
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/web/session/mongo.py#L55-L66
[ "def", "_start", "(", "self", ",", "tpart", ")", ":", "g", "=", "gevent", ".", "Greenlet", "(", "self", ".", "uploader", ",", "tpart", ")", "g", ".", "link", "(", "self", ".", "_finish", ")", "# Account for concurrency_burden before starting the greenlet", "# to avoid racing against .join.", "self", ".", "concurrency_burden", "+=", "1", "self", ".", "member_burden", "+=", "len", "(", "tpart", ")", "g", ".", "start", "(", ")" ]
Immediately expire a session from the backing store.
def invalidate(self, context, sid):
    result = self._Document.get_collection().delete_one({'_id': sid})
    return result.deleted_count == 1
318
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/web/session/mongo.py#L68-L73
[ "def", "get_version", "(", ")", ":", "if", "all", "(", "[", "VERSION", ",", "UPDATED", ",", "any", "(", "[", "isinstance", "(", "UPDATED", ",", "date", ")", ",", "isinstance", "(", "UPDATED", ",", "datetime", ")", ",", "]", ")", ",", "]", ")", ":", "return", "FORMAT_STRING", ".", "format", "(", "*", "*", "{", "\"version\"", ":", "VERSION", ",", "\"updated\"", ":", "UPDATED", ",", "}", ")", "elif", "VERSION", ":", "return", "VERSION", "elif", "UPDATED", ":", "return", "localize", "(", "UPDATED", ")", "if", "any", "(", "[", "isinstance", "(", "UPDATED", ",", "date", ")", ",", "isinstance", "(", "UPDATED", ",", "datetime", ")", ",", "]", ")", "else", "\"\"", "else", ":", "return", "\"\"" ]
Update or insert the session document into the configured collection
def persist(self, context):
    D = self._Document
    document = context.session[self.name]
    D.get_collection().replace_one(D.id == document.id, document, True)
319
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/web/session/mongo.py#L92-L98
[ "def", "product_name", "(", "self", ")", ":", "buf", "=", "(", "ctypes", ".", "c_char", "*", "self", ".", "MAX_BUF_SIZE", ")", "(", ")", "self", ".", "_dll", ".", "JLINKARM_EMU_GetProductName", "(", "buf", ",", "self", ".", "MAX_BUF_SIZE", ")", "return", "ctypes", ".", "string_at", "(", "buf", ")", ".", "decode", "(", ")" ]
Channels connection setup. Register the current client on the related Group according to the language.
def ws_connect(message):
    prefix, language = message['path'].strip('/').split('/')
    gr = Group('knocker-{0}'.format(language))
    gr.add(message.reply_channel)
    message.channel_session['knocker'] = language
    message.reply_channel.send({"accept": True})
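A hedged Channels 1.x routing sketch showing how these consumers would be wired up; the path patterns are illustrative (ws_connect expects a '<prefix>/<language>/' path):

from channels.routing import route
from knocker.consumers import ws_connect, ws_disconnect

channel_routing = [
    route('websocket.connect', ws_connect, path=r'^/knocker/'),
    route('websocket.disconnect', ws_disconnect, path=r'^/knocker/'),
]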
320
https://github.com/nephila/django-knocker/blob/d25380d43a1f91285f1581dcf9db8510fe87f354/knocker/consumers.py#L9-L18
[ "def", "extract_paths", "(", "self", ",", "paths", ",", "ignore_nopath", ")", ":", "try", ":", "super", "(", ")", ".", "extract_paths", "(", "paths", "=", "paths", ",", "ignore_nopath", "=", "ignore_nopath", ",", ")", "except", "ExtractPathError", "as", "err", ":", "LOGGER", ".", "debug", "(", "'%s: failed extracting files: %s'", ",", "self", ".", "vm", ".", "name", "(", ")", ",", "err", ".", "message", ")", "if", "self", ".", "_has_guestfs", ":", "self", ".", "extract_paths_dead", "(", "paths", ",", "ignore_nopath", ")", "else", ":", "raise" ]
Channels connection close. Deregister the client.
def ws_disconnect(message):
    language = message.channel_session['knocker']
    gr = Group('knocker-{0}'.format(language))
    gr.discard(message.reply_channel)
321
https://github.com/nephila/django-knocker/blob/d25380d43a1f91285f1581dcf9db8510fe87f354/knocker/consumers.py#L30-L37
[ "def", "put_admin_metadata", "(", "self", ",", "admin_metadata", ")", ":", "logger", ".", "debug", "(", "\"Putting admin metdata\"", ")", "text", "=", "json", ".", "dumps", "(", "admin_metadata", ")", "key", "=", "self", ".", "get_admin_metadata_key", "(", ")", "self", ".", "put_text", "(", "key", ",", "text", ")" ]
Start a new animation instance
def start(self, autopush=True):
    if self.enabled:
        if autopush:
            self.push_message(self.message)
        self.spinner.message = ' - '.join(self.animation.messages)
        if not self.spinner.running:
            self.animation.thread = threading.Thread(target=_spinner, args=(self.spinner,))
            self.spinner.running = True
            self.animation.thread.start()
            sys.stdout = stream.Clean(sys.stdout, self.spinner.stream)
322
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/animation.py#L235-L246
[ "def", "get_connection_ip_list", "(", "as_wmi_format", "=", "False", ",", "server", "=", "_DEFAULT_SERVER", ")", ":", "ret", "=", "dict", "(", ")", "setting", "=", "'IPGrant'", "reg_separator", "=", "r',\\s*'", "if", "as_wmi_format", ":", "ret", "=", "list", "(", ")", "addresses", "=", "_get_wmi_setting", "(", "'IIsIPSecuritySetting'", ",", "setting", ",", "server", ")", "# WMI returns the addresses as a tuple of unicode strings, each representing", "# an address/subnet pair. Remove extra spaces that may be present.", "for", "unnormalized_address", "in", "addresses", ":", "ip_address", ",", "subnet", "=", "re", ".", "split", "(", "reg_separator", ",", "unnormalized_address", ")", "if", "as_wmi_format", ":", "ret", ".", "append", "(", "'{0}, {1}'", ".", "format", "(", "ip_address", ",", "subnet", ")", ")", "else", ":", "ret", "[", "ip_address", "]", "=", "subnet", "if", "not", "ret", ":", "_LOG", ".", "debug", "(", "'%s is empty.'", ",", "setting", ")", "return", "ret" ]
Stop the thread animation gracefully and reset the message.
def stop(cls):
    if AnimatedDecorator._enabled:
        if cls.spinner.running:
            cls.spinner.running = False
            cls.animation.thread.join()
        if any(cls.animation.messages):
            cls.pop_message()
        sys.stdout = sys.__stdout__
323
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/animation.py#L249-L259
[ "def", "makeNodeID", "(", "Rec", ",", "ndType", ",", "extras", "=", "None", ")", ":", "if", "ndType", "==", "'raw'", ":", "recID", "=", "Rec", "else", ":", "recID", "=", "Rec", ".", "get", "(", "ndType", ")", "if", "recID", "is", "None", ":", "pass", "elif", "isinstance", "(", "recID", ",", "list", ")", ":", "recID", "=", "tuple", "(", "recID", ")", "else", ":", "recID", "=", "recID", "extraDict", "=", "{", "}", "if", "extras", ":", "for", "tag", "in", "extras", ":", "if", "tag", "==", "\"raw\"", ":", "extraDict", "[", "'Tag'", "]", "=", "Rec", "else", ":", "extraDict", "[", "'Tag'", "]", "=", "Rec", ".", "get", "(", "tag", ")", "return", "recID", ",", "extraDict" ]
Try to guess the message from the args passed.
def auto_message(self, args):
    if any(args) and callable(args[0]) and not self.message:
        return args[0].__name__
    elif not self.message:
        return self.default_message
    else:
        return self.message
324
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/animation.py#L296-L315
[ "def", "printParameters", "(", "self", ")", ":", "print", "\"------------PY TemporalPooler Parameters ------------------\"", "print", "\"numInputs = \"", ",", "self", ".", "getNumInputs", "(", ")", "print", "\"numColumns = \"", ",", "self", ".", "getNumColumns", "(", ")", "print", "\"columnDimensions = \"", ",", "self", ".", "_columnDimensions", "print", "\"numActiveColumnsPerInhArea = \"", ",", "self", ".", "getNumActiveColumnsPerInhArea", "(", ")", "print", "\"potentialPct = \"", ",", "self", ".", "getPotentialPct", "(", ")", "print", "\"globalInhibition = \"", ",", "self", ".", "getGlobalInhibition", "(", ")", "print", "\"localAreaDensity = \"", ",", "self", ".", "getLocalAreaDensity", "(", ")", "print", "\"stimulusThreshold = \"", ",", "self", ".", "getStimulusThreshold", "(", ")", "print", "\"synPermActiveInc = \"", ",", "self", ".", "getSynPermActiveInc", "(", ")", "print", "\"synPermInactiveDec = \"", ",", "self", ".", "getSynPermInactiveDec", "(", ")", "print", "\"synPermConnected = \"", ",", "self", ".", "getSynPermConnected", "(", ")", "print", "\"minPctOverlapDutyCycle = \"", ",", "self", ".", "getMinPctOverlapDutyCycles", "(", ")", "print", "\"dutyCyclePeriod = \"", ",", "self", ".", "getDutyCyclePeriod", "(", ")", "print", "\"boostStrength = \"", ",", "self", ".", "getBoostStrength", "(", ")", "print", "\"spVerbosity = \"", ",", "self", ".", "getSpVerbosity", "(", ")", "print", "\"version = \"", ",", "self", ".", "_version" ]
Activate the TypingStream on stdout
def start(self):
    self.streams.append(sys.stdout)
    sys.stdout = self.stream
325
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/animation.py#L353-L356
[ "def", "merge_objects", "(", "self", ",", "mujoco_objects", ")", ":", "self", ".", "mujoco_objects", "=", "mujoco_objects", "self", ".", "objects", "=", "{", "}", "# xml manifestation", "self", ".", "max_horizontal_radius", "=", "0", "for", "obj_name", ",", "obj_mjcf", "in", "mujoco_objects", ".", "items", "(", ")", ":", "self", ".", "merge_asset", "(", "obj_mjcf", ")", "# Load object", "obj", "=", "obj_mjcf", ".", "get_collision", "(", "name", "=", "obj_name", ",", "site", "=", "True", ")", "obj", ".", "append", "(", "new_joint", "(", "name", "=", "obj_name", ",", "type", "=", "\"free\"", ",", "damping", "=", "\"0.0005\"", ")", ")", "self", ".", "objects", "[", "obj_name", "]", "=", "obj", "self", ".", "worldbody", ".", "append", "(", "obj", ")", "self", ".", "max_horizontal_radius", "=", "max", "(", "self", ".", "max_horizontal_radius", ",", "obj_mjcf", ".", "get_horizontal_radius", "(", ")", ")" ]
Restore the normal stdout at the end.
def stop(cls):
    if any(cls.streams):
        sys.stdout = cls.streams.pop(-1)
    else:
        sys.stdout = sys.__stdout__
326
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/animation.py#L359-L364
[ "def", "checkIsConsistent", "(", "self", ")", ":", "if", "is_an_array", "(", "self", ".", "mask", ")", "and", "self", ".", "mask", ".", "shape", "!=", "self", ".", "data", ".", "shape", ":", "raise", "ConsistencyError", "(", "\"Shape mismatch mask={}, data={}\"", ".", "format", "(", "self", ".", "mask", ".", "shape", "!=", "self", ".", "data", ".", "shape", ")", ")" ]
Prolong the working duration of an already held lock. Attempting to prolong a lock not already owned will result in a Locked exception.
def prolong(self):
    D = self.__class__
    collection = self.get_collection()
    identity = self.Lock()

    query = D.id == self
    query &= D.lock.instance == identity.instance
    query &= D.lock.time >= (identity.time - identity.__period__)

    previous = collection.find_one_and_update(query, {'$set': {~D.lock.time: identity.time}}, {~D.lock: True})

    if previous is None:
        lock = getattr(self.find_one(self, projection={~D.lock: True}), 'lock', None)
        if lock and lock.expires <= identity.time:
            lock.expired(self)
        raise self.Locked("Unable to prolong lock.", lock)

    identity.prolonged(self)
    return identity
327
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/lockable.py#L245-L271
[ "def", "init", "(", "uri", ",", "echo", "=", "False", ")", ":", "global", "ENGINE", ",", "_METADATA", ",", "JOBS_TABLE", ",", "METADATA_TABLE", ",", "LOGS_TABLE", "ENGINE", "=", "sqlalchemy", ".", "create_engine", "(", "uri", ",", "echo", "=", "echo", ",", "convert_unicode", "=", "True", ")", "_METADATA", "=", "sqlalchemy", ".", "MetaData", "(", "ENGINE", ")", "JOBS_TABLE", "=", "_init_jobs_table", "(", ")", "METADATA_TABLE", "=", "_init_metadata_table", "(", ")", "LOGS_TABLE", "=", "_init_logs_table", "(", ")", "_METADATA", ".", "create_all", "(", "ENGINE", ")" ]
Release an exclusive lock on this integration task. Unless forcing, if we are not the current owner of the lock a Locked exception will be raised.
def release(self, force=False):
    D = self.__class__
    collection = self.get_collection()
    identity = self.Lock()

    query = D.id == self
    if not force:
        query &= D.lock.instance == identity.instance

    previous = collection.find_one_and_update(query, {'$unset': {~D.lock: True}}, {~D.lock: True})

    if previous is None:
        lock = getattr(self.find_one(self, projection={~D.lock: True}), 'lock', None)
        raise self.Locked("Unable to release lock.", lock)

    lock = self.Lock.from_mongo(previous[~D.lock])
    if lock and lock.expires <= identity.time:
        lock.expired(self)

    identity.released(self, force)
328
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/lockable.py#L273-L299
[ "def", "tablestructure", "(", "tablename", ",", "dataman", "=", "True", ",", "column", "=", "True", ",", "subtable", "=", "False", ",", "sort", "=", "False", ")", ":", "t", "=", "table", "(", "tablename", ",", "ack", "=", "False", ")", "six", ".", "print_", "(", "t", ".", "showstructure", "(", "dataman", ",", "column", ",", "subtable", ",", "sort", ")", ")" ]
Write something to stdout and erase it after a delay.
def write(self, message, autoerase=True):
    super(Animation, self).write(message)
    self.last_message = message
    if autoerase:
        time.sleep(self.interval)
        self.erase(message)
329
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/stream.py#L82-L88
[ "def", "validate_file_permissions", "(", "config", ")", ":", "files", "=", "config", ".", "get", "(", "'files'", ",", "{", "}", ")", "for", "file_name", ",", "options", "in", "files", ".", "items", "(", ")", ":", "for", "key", "in", "options", ".", "keys", "(", ")", ":", "if", "key", "not", "in", "[", "\"owner\"", ",", "\"group\"", ",", "\"mode\"", "]", ":", "raise", "RuntimeError", "(", "\"Invalid ownership configuration: {}\"", ".", "format", "(", "key", ")", ")", "mode", "=", "options", ".", "get", "(", "'mode'", ",", "config", ".", "get", "(", "'permissions'", ",", "'600'", ")", ")", "optional", "=", "options", ".", "get", "(", "'optional'", ",", "config", ".", "get", "(", "'optional'", ",", "'False'", ")", ")", "if", "'*'", "in", "file_name", ":", "for", "file", "in", "glob", ".", "glob", "(", "file_name", ")", ":", "if", "file", "not", "in", "files", ".", "keys", "(", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "file", ")", ":", "_validate_file_mode", "(", "mode", ",", "file", ",", "optional", ")", "else", ":", "if", "os", ".", "path", ".", "isfile", "(", "file_name", ")", ":", "_validate_file_mode", "(", "mode", ",", "file_name", ",", "optional", ")" ]
Write something on the default stream with a prefixed message
def write(self, message, flush=False):
    # this needs to be threadsafe because the concurrent spinner is
    # running on stderr
    with self.lock:
        self.paralell_stream.erase()
        super(Clean, self).write(message, flush)
330
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/stream.py#L120-L126
[ "def", "editDirectory", "(", "self", ",", "directoryName", ",", "physicalPath", ",", "description", ")", ":", "url", "=", "self", ".", "_url", "+", "\"/directories/%s/edit\"", "%", "directoryName", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"physicalPath\"", ":", "physicalPath", ",", "\"description\"", ":", "description", "}", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_port", "=", "self", ".", "_proxy_port", ",", "proxy_url", "=", "self", ".", "_proxy_url", ")" ]
A typing-like write method, delayed at each char.
def write(self, message, flush=True):
    if isinstance(message, bytes):  # pragma: no cover
        message = message.decode('utf-8')
    for char in message:
        time.sleep(self.delay * (4 if char == '\n' else 1))
        super(Writting, self).write(char, flush)
331
https://github.com/ryukinix/decorating/blob/df78c3f87800205701704c0bc0fb9b6bb908ba7e/decorating/stream.py#L148-L155
[ "def", "register_component", "(", "self", ",", "path", ")", ":", "component", "=", "foundations", ".", "strings", ".", "get_splitext_basename", "(", "path", ")", "LOGGER", ".", "debug", "(", "\"> Current Component: '{0}'.\"", ".", "format", "(", "component", ")", ")", "profile", "=", "Profile", "(", "file", "=", "path", ")", "if", "profile", ".", "initializeProfile", "(", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "profile", ".", "directory", ",", "profile", ".", "package", ")", "+", "\".py\"", ")", "or", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "profile", ".", "directory", ",", "profile", ".", "package", ")", ")", "or", "os", ".", "path", ".", "basename", "(", "profile", ".", "directory", ")", "==", "profile", ".", "package", ":", "self", ".", "__components", "[", "profile", ".", "name", "]", "=", "profile", "return", "True", "else", ":", "raise", "manager", ".", "exceptions", ".", "ComponentModuleError", "(", "\"{0} | '{1}' has no associated module and has been rejected!\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "component", ")", ")", "else", ":", "raise", "manager", ".", "exceptions", ".", "ComponentProfileError", "(", "\"{0} | '{1}' is not a valid Component and has been rejected!\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "component", ")", ")" ]
Construct the default projection document.
def _get_default_projection(cls):
    projected = []  # The fields explicitly requested for inclusion.
    neutral = []  # Fields returning neutral (None) status.
    omitted = False  # Have any fields been explicitly omitted?

    for name, field in cls.__fields__.items():
        if field.project is None:
            neutral.append(name)
        elif field.project:
            projected.append(name)
        else:
            omitted = True

    if not projected and not omitted:
        # No preferences specified.
        return None
    elif not projected and omitted:
        # No positive inclusions given, but negative ones were.
        projected = neutral

    return {field: True for field in projected}
332
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/collection.py#L203-L226
[ "def", "parseReaderConfig", "(", "self", ",", "confdict", ")", ":", "logger", ".", "debug", "(", "'parseReaderConfig input: %s'", ",", "confdict", ")", "conf", "=", "{", "}", "for", "k", ",", "v", "in", "confdict", ".", "items", "(", ")", ":", "if", "not", "k", ".", "startswith", "(", "'Parameter'", ")", ":", "continue", "ty", "=", "v", "[", "'Type'", "]", "data", "=", "v", "[", "'Data'", "]", "vendor", "=", "None", "subtype", "=", "None", "try", ":", "vendor", ",", "subtype", "=", "v", "[", "'Vendor'", "]", ",", "v", "[", "'Subtype'", "]", "except", "KeyError", ":", "pass", "if", "ty", "==", "1023", ":", "if", "vendor", "==", "25882", "and", "subtype", "==", "37", ":", "tempc", "=", "struct", ".", "unpack", "(", "'!H'", ",", "data", ")", "[", "0", "]", "conf", ".", "update", "(", "temperature", "=", "tempc", ")", "else", ":", "conf", "[", "ty", "]", "=", "data", "return", "conf" ]
Move marrow.schema fields around to control positional instantiation order.
def adjust_attribute_sequence(*fields):
    amount = None

    if fields and isinstance(fields[0], int):
        amount, fields = fields[0], fields[1:]

    def adjust_inner(cls):
        for field in fields:
            if field not in cls.__dict__:
                # TODO: Copy the field definition.
                raise TypeError("Can only override sequence on non-inherited attributes.")

            # Adjust the sequence to re-order the field.
            if amount is None:
                cls.__dict__[field].__sequence__ = ElementMeta.sequence
            else:
                cls.__dict__[field].__sequence__ += amount  # Add the given amount.

        # Update the attribute collection.
        cls.__attributes__ = OrderedDict(
            (k, v) for k, v in sorted(cls.__attributes__.items(), key=lambda i: i[1].__sequence__)
        )

        return cls

    return adjust_inner
333
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/util/__init__.py#L26-L55
[ "def", "delete_all_banks", "(", "self", ")", ":", "for", "file", "in", "glob", "(", "str", "(", "self", ".", "data_path", ")", "+", "\"/*.json\"", ")", ":", "Persistence", ".", "delete", "(", "file", ")" ]
Get a dictionary of file paths and timestamps.
def get_hashes(path, exclude=None):
    out = {}
    for f in Path(path).rglob('*'):
        if f.is_dir():
            # We want to watch files, not directories.
            continue
        if exclude and re.match(exclude, f.as_posix()):
            retox_log.debug("excluding '{}'".format(f.as_posix()))
            continue
        pytime = f.stat().st_mtime
        out[f.as_posix()] = pytime
    return out
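A small polling sketch built on get_hashes (not from the source): rescan on an interval and report files whose mtime changed.

import time

def watch(path, interval=1.0, exclude=None):
    before = get_hashes(path, exclude)
    while True:
        time.sleep(interval)
        after = get_hashes(path, exclude)
        # New or modified files show up as a differing (or missing) mtime.
        changed = [name for name, mtime in after.items() if before.get(name) != mtime]
        if changed:
            print('changed:', changed)
        before = after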
334
https://github.com/tonybaloney/retox/blob/4635e31001d2ac083423f46766249ac8daca7c9c/retox/__main__.py#L103-L119
[ "def", "make_query", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "query", "=", "Q", "(", ")", "# initial is an empty query", "query_dict", "=", "self", ".", "_build_query_dict", "(", "self", ".", "cleaned_data", ")", "if", "'negate'", "in", "self", ".", "cleaned_data", "and", "self", ".", "cleaned_data", "[", "'negate'", "]", ":", "query", "=", "query", "&", "~", "Q", "(", "*", "*", "query_dict", ")", "else", ":", "query", "=", "query", "&", "Q", "(", "*", "*", "query_dict", ")", "return", "query" ]
Sends an HTTP request to the API.
def request(self, method, params=None, query_continue=None, files=None, auth=None, continuation=False):
    normal_params = _normalize_params(params, query_continue)
    if continuation:
        return self._continuation(method, params=normal_params, auth=auth, files=files)
    else:
        return self._request(method, params=normal_params, auth=auth, files=files)
335
https://github.com/mediawiki-utilities/python-mwapi/blob/7a653c29207ecd318ae4b369d398aed13f26951d/mwapi/session.py#L136-L171
[ "def", "partition_by_vid", "(", "self", ",", "ref", ")", ":", "from", "ambry", ".", "orm", "import", "Partition", "p", "=", "self", ".", "session", ".", "query", "(", "Partition", ")", ".", "filter", "(", "Partition", ".", "vid", "==", "str", "(", "ref", ")", ")", ".", "first", "(", ")", "if", "p", ":", "return", "self", ".", "wrap_partition", "(", "p", ")", "else", ":", "return", "None" ]
Authenticate with the given credentials. If authentication is successful, all further requests sent will be signed by the authenticated user.
def login(self, username, password, login_token=None):
    if login_token is None:
        token_doc = self.post(action='query', meta='tokens', type='login')
        login_token = token_doc['query']['tokens']['logintoken']

    login_doc = self.post(
        action="clientlogin",
        username=username,
        password=password,
        logintoken=login_token,
        loginreturnurl="http://example.org/",
    )

    if login_doc['clientlogin']['status'] == "UI":
        raise ClientInteractionRequest.from_doc(login_token, login_doc['clientlogin'])
    elif login_doc['clientlogin']['status'] != 'PASS':
        raise LoginError.from_doc(login_doc['clientlogin'])

    return login_doc['clientlogin']
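A hedged sketch of the surrounding mwapi Session API; the host, user agent, and credentials below are placeholders:

import mwapi

session = mwapi.Session('https://en.wikipedia.org',
                        user_agent='demo-bot/0.1 (ops@example.org)')
session.login('username', 'password')
# A 'UI' status (captcha, two-factor token) raises ClientInteractionRequest,
# which is resumed via continue_login() shown further below.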
336
https://github.com/mediawiki-utilities/python-mwapi/blob/7a653c29207ecd318ae4b369d398aed13f26951d/mwapi/session.py#L213-L246
[ "def", "scatter", "(", "n_categories", "=", "5", ",", "n", "=", "10", ",", "prefix", "=", "'category'", ",", "mode", "=", "None", ")", ":", "categories", "=", "[", "]", "for", "i", "in", "range", "(", "n_categories", ")", ":", "categories", ".", "extend", "(", "[", "prefix", "+", "str", "(", "i", "+", "1", ")", "]", "*", "n", ")", "return", "pd", ".", "DataFrame", "(", "{", "'x'", ":", "np", ".", "random", ".", "randn", "(", "n", "*", "n_categories", ")", ",", "'y'", ":", "np", ".", "random", ".", "randn", "(", "n", "*", "n_categories", ")", ",", "'text'", ":", "getName", "(", "n", "*", "n_categories", ",", "mode", "=", "mode", ")", ",", "'categories'", ":", "categories", "}", ")" ]
Continues a login that requires an additional step. This is common when login requires completing a captcha or supplying a two-factor authentication token.
def continue_login(self, login_token, **params):
    login_params = {
        'action': "clientlogin",
        'logintoken': login_token,
        'logincontinue': 1,
    }
    login_params.update(params)

    login_doc = self.post(**login_params)

    if login_doc['clientlogin']['status'] != 'PASS':
        raise LoginError.from_doc(login_doc['clientlogin'])

    return login_doc['clientlogin']
337
https://github.com/mediawiki-utilities/python-mwapi/blob/7a653c29207ecd318ae4b369d398aed13f26951d/mwapi/session.py#L248-L273
[ "def", "allow_exception", "(", "self", ",", "exc_class", ")", ":", "name", "=", "exc_class", ".", "__name__", "self", ".", "_allowed_exceptions", "[", "name", "]", "=", "exc_class" ]
Makes an API request with the GET method
def get(self, query_continue=None, auth=None, continuation=False, **params):
    return self.request(
        'GET',
        params=params,
        auth=auth,
        query_continue=query_continue,
        continuation=continuation,
    )
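A hedged example of the keyword-argument style: each kwarg becomes an API parameter, and continuation=True yields one response document per batch.

doc = session.get(action='query', prop='revisions', titles='Main Page')

for portion in session.get(action='query', list='allpages',
                           aplimit=100, continuation=True):
    for page in portion['query']['allpages']:
        print(page['title'])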
338
https://github.com/mediawiki-utilities/python-mwapi/blob/7a653c29207ecd318ae4b369d398aed13f26951d/mwapi/session.py#L284-L309
[ "def", "load_index", "(", "self", ",", "filename", ",", "reindex", "=", "False", ")", ":", "self", ".", "_reset_index", "(", ")", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fobj", ":", "data", "=", "json", ".", "load", "(", "fobj", ")", "for", "path", ",", "file", "in", "data", ".", "items", "(", ")", ":", "ents", ",", "domains", "=", "file", "[", "'entities'", "]", ",", "file", "[", "'domains'", "]", "root", ",", "f", "=", "dirname", "(", "path", ")", ",", "basename", "(", "path", ")", "if", "reindex", ":", "self", ".", "_index_file", "(", "root", ",", "f", ",", "domains", ")", "else", ":", "f", "=", "self", ".", "_make_file_object", "(", "root", ",", "f", ")", "tags", "=", "{", "k", ":", "Tag", "(", "self", ".", "entities", "[", "k", "]", ",", "v", ")", "for", "k", ",", "v", "in", "ents", ".", "items", "(", ")", "}", "f", ".", "tags", "=", "tags", "self", ".", "files", "[", "f", ".", "path", "]", "=", "f", "for", "ent", ",", "val", "in", "f", ".", "entities", ".", "items", "(", ")", ":", "self", ".", "entities", "[", "ent", "]", ".", "add_file", "(", "f", ".", "path", ",", "val", ")" ]
Makes an API request with the POST method
def post(self, query_continue=None, upload_file=None, auth=None, continuation=False, **params):
    if upload_file is not None:
        files = {'file': upload_file}
    else:
        files = None

    return self.request(
        'POST',
        params=params,
        auth=auth,
        query_continue=query_continue,
        files=files,
        continuation=continuation,
    )
339
https://github.com/mediawiki-utilities/python-mwapi/blob/7a653c29207ecd318ae4b369d398aed13f26951d/mwapi/session.py#L311-L342
[ "def", "_generate_examples_validation", "(", "self", ",", "archive", ",", "labels", ")", ":", "# Get the current random seeds.", "numpy_st0", "=", "np", ".", "random", ".", "get_state", "(", ")", "# Set new random seeds.", "np", ".", "random", ".", "seed", "(", "135", ")", "logging", ".", "warning", "(", "'Overwriting cv2 RNG seed.'", ")", "tfds", ".", "core", ".", "lazy_imports", ".", "cv2", ".", "setRNGSeed", "(", "357", ")", "for", "example", "in", "super", "(", "Imagenet2012Corrupted", ",", "self", ")", ".", "_generate_examples_validation", "(", "archive", ",", "labels", ")", ":", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "tf_img", "=", "tf", ".", "image", ".", "decode_jpeg", "(", "example", "[", "'image'", "]", ".", "read", "(", ")", ",", "channels", "=", "3", ")", "image_np", "=", "tfds", ".", "as_numpy", "(", "tf_img", ")", "example", "[", "'image'", "]", "=", "self", ".", "_get_corrupted_example", "(", "image_np", ")", "yield", "example", "# Reset the seeds back to their original values.", "np", ".", "random", ".", "set_state", "(", "numpy_st0", ")" ]
Transform this record into an instance of a more specialized subclass.
def promote(self, cls, update=False, preserve=True):
    if not issubclass(cls, self.__class__):
        raise TypeError("Must promote to a subclass of " + self.__class__.__name__)

    return self._as(cls, update, preserve)
340
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/derived.py#L36-L42
[ "def", "get_changed_vars", "(", "section", ":", "SoS_Step", ")", ":", "if", "'shared'", "not", "in", "section", ".", "options", ":", "return", "set", "(", ")", "changed_vars", "=", "set", "(", ")", "svars", "=", "section", ".", "options", "[", "'shared'", "]", "if", "isinstance", "(", "svars", ",", "str", ")", ":", "changed_vars", ".", "add", "(", "svars", ")", "svars", "=", "{", "svars", ":", "svars", "}", "elif", "isinstance", "(", "svars", ",", "Sequence", ")", ":", "for", "item", "in", "svars", ":", "if", "isinstance", "(", "item", ",", "str", ")", ":", "changed_vars", ".", "add", "(", "item", ")", "elif", "isinstance", "(", "item", ",", "Mapping", ")", ":", "changed_vars", "|=", "set", "(", "item", ".", "keys", "(", ")", ")", "else", ":", "raise", "ValueError", "(", "f'Option shared should be a string, a mapping of expression, or list of string or mappings. {svars} provided'", ")", "elif", "isinstance", "(", "svars", ",", "Mapping", ")", ":", "changed_vars", "|=", "set", "(", "svars", ".", "keys", "(", ")", ")", "else", ":", "raise", "ValueError", "(", "f'Option shared should be a string, a mapping of expression, or list of string or mappings. {svars} provided'", ")", "return", "changed_vars" ]
cutting nodes away from menus
def cut_levels ( nodes , start_level ) : final = [ ] removed = [ ] for node in nodes : if not hasattr ( node , 'level' ) : # remove and ignore nodes that don't have level information remove ( node , removed ) continue if node . attr . get ( 'soft_root' , False ) : # remove and ignore nodes that are behind a node marked as 'soft_root' remove ( node , removed ) continue if node . level == start_level : # turn nodes that are on from_level into root nodes final . append ( node ) node . parent = None if not node . visible and not node . children : remove ( node , removed ) elif node . level == start_level + 1 : # remove nodes that are deeper than one level node . children = [ ] else : remove ( node , removed ) if not node . visible : keep_node = False for child in node . children : keep_node = keep_node or child . visible if not keep_node : remove ( node , removed ) for node in removed : if node in final : final . remove ( node ) return final
341
https://github.com/jrief/djangocms-bootstrap/blob/293a7050602d6e9a728acea2fb13893e5ec7992e/cms_bootstrap/templatetags/bootstrap_tags.py#L18-L53
[ "def", "_write_options", "(", "name", ",", "configuration", ")", ":", "_check_portname", "(", "name", ")", "pkg", "=", "next", "(", "iter", "(", "configuration", ")", ")", "conf_ptr", "=", "configuration", "[", "pkg", "]", "dirname", "=", "_options_dir", "(", "name", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "dirname", ")", ":", "try", ":", "os", ".", "makedirs", "(", "dirname", ")", "except", "OSError", "as", "exc", ":", "raise", "CommandExecutionError", "(", "'Unable to make {0}: {1}'", ".", "format", "(", "dirname", ",", "exc", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "'options'", ")", ",", "'w'", ")", "as", "fp_", ":", "sorted_options", "=", "list", "(", "conf_ptr", ")", "sorted_options", ".", "sort", "(", ")", "fp_", ".", "write", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "'# This file was auto-generated by Salt (http://saltstack.com)\\n'", "'# Options for {0}\\n'", "'_OPTIONS_READ={0}\\n'", "'_FILE_COMPLETE_OPTIONS_LIST={1}\\n'", ".", "format", "(", "pkg", ",", "' '", ".", "join", "(", "sorted_options", ")", ")", ")", ")", "opt_tmpl", "=", "'OPTIONS_FILE_{0}SET+={1}\\n'", "for", "opt", "in", "sorted_options", ":", "fp_", ".", "write", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "opt_tmpl", ".", "format", "(", "''", "if", "conf_ptr", "[", "opt", "]", "==", "'on'", "else", "'UN'", ",", "opt", ")", ")", ")" ]
In the event a value that has technically already expired is loaded , swap it for None .

def from_mongo ( cls , data , expired = False , * * kw ) : value = super ( Expires , cls ) . from_mongo ( data , * * kw ) if not expired and value . is_expired : return None return value
342
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/core/trait/expires.py#L36-L44
[ "def", "subscribe_commit", "(", "self", ",", "repo_name", ",", "branch", ",", "from_commit_id", "=", "None", ")", ":", "repo", "=", "proto", ".", "Repo", "(", "name", "=", "repo_name", ")", "req", "=", "proto", ".", "SubscribeCommitRequest", "(", "repo", "=", "repo", ",", "branch", "=", "branch", ")", "if", "from_commit_id", "is", "not", "None", ":", "getattr", "(", "req", ",", "'from'", ")", ".", "CopyFrom", "(", "proto", ".", "Commit", "(", "repo", "=", "repo", ",", "id", "=", "from_commit_id", ")", ")", "res", "=", "self", ".", "stub", ".", "SubscribeCommit", "(", "req", ",", "metadata", "=", "self", ".", "metadata", ")", "return", "res" ]
Generate a MongoDB sort order list using the Django ORM style .
def S ( Document , * fields ) : result = [ ] for field in fields : if isinstance ( field , tuple ) : # Unpack existing tuple. field , direction = field result . append ( ( field , direction ) ) continue direction = ASCENDING if not field . startswith ( '__' ) : field = field . replace ( '__' , '.' ) if field [ 0 ] == '-' : direction = DESCENDING if field [ 0 ] in ( '+' , '-' ) : field = field [ 1 : ] _field = traverse ( Document , field , default = None ) result . append ( ( ( ~ _field ) if _field else field , direction ) ) return result
343
https://github.com/marrow/mongo/blob/2066dc73e281b8a46cb5fc965267d6b8e1b18467/marrow/mongo/param/sort.py#L12-L38
[ "def", "GetAdGroups", "(", "self", ",", "client_customer_id", ",", "campaign_id", ")", ":", "self", ".", "client", ".", "SetClientCustomerId", "(", "client_customer_id", ")", "selector", "=", "{", "'fields'", ":", "[", "'Id'", ",", "'Name'", ",", "'Status'", "]", ",", "'predicates'", ":", "[", "{", "'field'", ":", "'CampaignId'", ",", "'operator'", ":", "'EQUALS'", ",", "'values'", ":", "[", "campaign_id", "]", "}", ",", "{", "'field'", ":", "'Status'", ",", "'operator'", ":", "'NOT_EQUALS'", ",", "'values'", ":", "[", "'REMOVED'", "]", "}", "]", "}", "adgroups", "=", "self", ".", "client", ".", "GetService", "(", "'AdGroupService'", ")", ".", "get", "(", "selector", ")", "if", "int", "(", "adgroups", "[", "'totalNumEntries'", "]", ")", ">", "0", ":", "return", "adgroups", "[", "'entries'", "]", "else", ":", "return", "None" ]
Reads data from the CNPJ list and writes results to the output directory .
def run ( self ) : self . _assure_output_dir ( self . output ) companies = self . read ( ) print '%s CNPJs found' % len ( companies ) pbar = ProgressBar ( widgets = [ Counter ( ) , ' ' , Percentage ( ) , ' ' , Bar ( ) , ' ' , Timer ( ) ] , maxval = len ( companies ) ) . start ( ) resolved = 0 runner = Runner ( companies , self . days , self . token ) try : for data in runner : self . write ( data ) resolved = resolved + 1 pbar . update ( resolved ) except KeyboardInterrupt : print '\naborted: waiting current requests to finish.' runner . stop ( ) return pbar . finish ( )
344
https://github.com/vkruoso/receita-tools/blob/fd62a252c76541c9feac6470b9048b31348ffe86/receita/tools/get.py#L25-L48
[ "def", "OneHot0", "(", "*", "xs", ",", "simplify", "=", "True", ",", "conj", "=", "True", ")", ":", "xs", "=", "[", "Expression", ".", "box", "(", "x", ")", ".", "node", "for", "x", "in", "xs", "]", "terms", "=", "list", "(", ")", "if", "conj", ":", "for", "x0", ",", "x1", "in", "itertools", ".", "combinations", "(", "xs", ",", "2", ")", ":", "terms", ".", "append", "(", "exprnode", ".", "or_", "(", "exprnode", ".", "not_", "(", "x0", ")", ",", "exprnode", ".", "not_", "(", "x1", ")", ")", ")", "y", "=", "exprnode", ".", "and_", "(", "*", "terms", ")", "else", ":", "for", "_xs", "in", "itertools", ".", "combinations", "(", "xs", ",", "len", "(", "xs", ")", "-", "1", ")", ":", "terms", ".", "append", "(", "exprnode", ".", "and_", "(", "*", "[", "exprnode", ".", "not_", "(", "x", ")", "for", "x", "in", "_xs", "]", ")", ")", "y", "=", "exprnode", ".", "or_", "(", "*", "terms", ")", "if", "simplify", ":", "y", "=", "y", ".", "simplify", "(", ")", "return", "_expr", "(", "y", ")" ]
Reads data from the CSV file .
def read ( self ) : companies = [ ] with open ( self . file ) as f : reader = unicodecsv . reader ( f ) for line in reader : if len ( line ) >= 1 : cnpj = self . format ( line [ 0 ] ) if self . valid ( cnpj ) : companies . append ( cnpj ) return companies
345
https://github.com/vkruoso/receita-tools/blob/fd62a252c76541c9feac6470b9048b31348ffe86/receita/tools/get.py#L50-L60
[ "def", "_visit_te_shape", "(", "self", ",", "shape", ":", "ShExJ", ".", "shapeExpr", ",", "visit_center", ":", "_VisitorCenter", ")", "->", "None", ":", "if", "isinstance", "(", "shape", ",", "ShExJ", ".", "Shape", ")", "and", "shape", ".", "expression", "is", "not", "None", ":", "visit_center", ".", "f", "(", "visit_center", ".", "arg_cntxt", ",", "shape", ".", "expression", ",", "self", ")" ]
Writes json data to the output directory .
def write ( self , data ) : cnpj , data = data path = os . path . join ( self . output , '%s.json' % cnpj ) with open ( path , 'w' ) as f : json . dump ( data , f , encoding = 'utf-8' )
346
https://github.com/vkruoso/receita-tools/blob/fd62a252c76541c9feac6470b9048b31348ffe86/receita/tools/get.py#L62-L68
[ "def", "parse_table_column_properties", "(", "doc", ",", "cell", ",", "prop", ")", ":", "if", "not", "cell", ":", "return", "grid", "=", "prop", ".", "find", "(", "_name", "(", "'{{{w}}}gridSpan'", ")", ")", "if", "grid", "is", "not", "None", ":", "cell", ".", "grid_span", "=", "int", "(", "grid", ".", "attrib", "[", "_name", "(", "'{{{w}}}val'", ")", "]", ")", "vmerge", "=", "prop", ".", "find", "(", "_name", "(", "'{{{w}}}vMerge'", ")", ")", "if", "vmerge", "is", "not", "None", ":", "if", "_name", "(", "'{{{w}}}val'", ")", "in", "vmerge", ".", "attrib", ":", "cell", ".", "vmerge", "=", "vmerge", ".", "attrib", "[", "_name", "(", "'{{{w}}}val'", ")", "]", "else", ":", "cell", ".", "vmerge", "=", "\"\"" ]
Check if a CNPJ is valid .
def valid ( self , cnpj ) : if len ( cnpj ) != 14 : return False tam = 12 nums = cnpj [ : tam ] digs = cnpj [ tam : ] tot = 0 pos = tam - 7 for i in range ( tam , 0 , - 1 ) : tot = tot + int ( nums [ tam - i ] ) * pos pos = pos - 1 if pos < 2 : pos = 9 res = 0 if tot % 11 < 2 else 11 - ( tot % 11 ) if res != int ( digs [ 0 ] ) : return False tam = tam + 1 nums = cnpj [ : tam ] tot = 0 pos = tam - 7 for i in range ( tam , 0 , - 1 ) : tot = tot + int ( nums [ tam - i ] ) * pos pos = pos - 1 if pos < 2 : pos = 9 res = 0 if tot % 11 < 2 else 11 - ( tot % 11 ) if res != int ( digs [ 1 ] ) : return False return True
347
https://github.com/vkruoso/receita-tools/blob/fd62a252c76541c9feac6470b9048b31348ffe86/receita/tools/get.py#L88-L125
[ "def", "_update_list_store_entry", "(", "self", ",", "list_store", ",", "config_key", ",", "config_value", ")", ":", "for", "row_num", ",", "row", "in", "enumerate", "(", "list_store", ")", ":", "if", "row", "[", "self", ".", "KEY_STORAGE_ID", "]", "==", "config_key", ":", "row", "[", "self", ".", "VALUE_STORAGE_ID", "]", "=", "str", "(", "config_value", ")", "row", "[", "self", ".", "TOGGLE_VALUE_STORAGE_ID", "]", "=", "config_value", "return", "row_num" ]
Returns the configuration filepath .
def get_default_config_filename ( ) : global _CONFIG_FN if _CONFIG_FN is not None : return _CONFIG_FN with _CONFIG_FN_LOCK : if _CONFIG_FN is not None : return _CONFIG_FN if 'PEYOTL_CONFIG_FILE' in os . environ : cfn = os . path . abspath ( os . environ [ 'PEYOTL_CONFIG_FILE' ] ) else : cfn = os . path . expanduser ( "~/.peyotl/config" ) if not os . path . isfile ( cfn ) : # noinspection PyProtectedMember if 'PEYOTL_CONFIG_FILE' in os . environ : from peyotl . utility . get_logger import warn_from_util_logger msg = 'Filepath "{}" specified via PEYOTL_CONFIG_FILE={} was not found' . format ( cfn , os . environ [ 'PEYOTL_CONFIG_FILE' ] ) warn_from_util_logger ( msg ) from pkg_resources import Requirement , resource_filename pr = Requirement . parse ( 'peyotl' ) cfn = resource_filename ( pr , 'peyotl/default.conf' ) if not os . path . isfile ( cfn ) : raise RuntimeError ( 'The peyotl configuration file cascade failed looking for "{}"' . format ( cfn ) ) _CONFIG_FN = os . path . abspath ( cfn ) return _CONFIG_FN
348
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/get_config.py#L73-L104
[ "def", "delete", "(", "table", ",", "session", ",", "conds", ")", ":", "with", "session", ".", "begin_nested", "(", ")", ":", "archive_conds_list", "=", "_get_conditions_list", "(", "table", ",", "conds", ")", "session", ".", "execute", "(", "sa", ".", "delete", "(", "table", ".", "ArchiveTable", ",", "whereclause", "=", "_get_conditions", "(", "archive_conds_list", ")", ")", ")", "conds_list", "=", "_get_conditions_list", "(", "table", ",", "conds", ",", "archive", "=", "False", ")", "session", ".", "execute", "(", "sa", ".", "delete", "(", "table", ",", "whereclause", "=", "_get_conditions", "(", "conds_list", ")", ")", ")" ]
Returns a ConfigParser object and a list of filenames that were parsed to initialize it
def get_raw_default_config_and_read_file_list ( ) : global _CONFIG , _READ_DEFAULT_FILES if _CONFIG is not None : return _CONFIG , _READ_DEFAULT_FILES with _CONFIG_LOCK : if _CONFIG is not None : return _CONFIG , _READ_DEFAULT_FILES try : # noinspection PyCompatibility from ConfigParser import SafeConfigParser except ImportError : # noinspection PyCompatibility,PyUnresolvedReferences from configparser import ConfigParser as SafeConfigParser # pylint: disable=F0401 cfg = SafeConfigParser ( ) read_files = cfg . read ( get_default_config_filename ( ) ) _CONFIG , _READ_DEFAULT_FILES = cfg , read_files return _CONFIG , _READ_DEFAULT_FILES
349
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/get_config.py#L107-L124
[ "def", "_compare_variables_function_generator", "(", "method_string", ",", "aggregation_func", ")", ":", "def", "comparison_function", "(", "self", ",", "other", ")", ":", "\"\"\"Wrapper for comparison functions for class |Variable|.\"\"\"", "if", "self", "is", "other", ":", "return", "method_string", "in", "(", "'__eq__'", ",", "'__le__'", ",", "'__ge__'", ")", "method", "=", "getattr", "(", "self", ".", "value", ",", "method_string", ")", "try", ":", "if", "hasattr", "(", "type", "(", "other", ")", ",", "'__hydpy__get_value__'", ")", ":", "other", "=", "other", ".", "__hydpy__get_value__", "(", ")", "result", "=", "method", "(", "other", ")", "if", "result", "is", "NotImplemented", ":", "return", "result", "return", "aggregation_func", "(", "result", ")", "except", "BaseException", ":", "objecttools", ".", "augment_excmessage", "(", "f'While trying to compare variable '", "f'{objecttools.elementphrase(self)} with object '", "f'`{other}` of type `{objecttools.classname(other)}`'", ")", "return", "comparison_function" ]
Thread - safe accessor for the immutable default ConfigWrapper object
def get_config_object ( ) : global _DEFAULT_CONFIG_WRAPPER if _DEFAULT_CONFIG_WRAPPER is not None : return _DEFAULT_CONFIG_WRAPPER with _DEFAULT_CONFIG_WRAPPER_LOCK : if _DEFAULT_CONFIG_WRAPPER is not None : return _DEFAULT_CONFIG_WRAPPER _DEFAULT_CONFIG_WRAPPER = ConfigWrapper ( ) return _DEFAULT_CONFIG_WRAPPER
350
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/get_config.py#L315-L324
[ "def", "delete_types_s", "(", "s", ",", "types", ")", ":", "patt", "=", "'(?s)'", "+", "'|'", ".", "join", "(", "'(?<=\\n)'", "+", "s", "+", "'\\n.+?\\n(?=\\S+|$)'", "for", "s", "in", "types", ")", "return", "re", ".", "sub", "(", "patt", ",", "''", ",", "'\\n'", "+", "s", ".", "strip", "(", ")", "+", "'\\n'", ",", ")", ".", "strip", "(", ")" ]
Return the first non - None setting from a series where each element in sec_param_list is a ( section , param ) pair suitable for a get_config_setting call .
def get_from_config_setting_cascade ( self , sec_param_list , default = None , warn_on_none_level = logging . WARN ) : for section , param in sec_param_list : r = self . get_config_setting ( section , param , default = None , warn_on_none_level = None ) if r is not None : return r section , param = sec_param_list [ - 1 ] if default is None : _warn_missing_setting ( section , param , self . _config_filename , warn_on_none_level ) return default
351
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/get_config.py#L213-L228
[ "def", "cudaMemcpy_dtoh", "(", "dst", ",", "src", ",", "count", ")", ":", "status", "=", "_libcudart", ".", "cudaMemcpy", "(", "dst", ",", "src", ",", "ctypes", ".", "c_size_t", "(", "count", ")", ",", "cudaMemcpyDeviceToHost", ")", "cudaCheckStatus", "(", "status", ")" ]
Parse the input text and return the generated Python source , or None if parsing fails .
def parse ( input_ : Union [ str , FileStream ] , source : str ) -> Optional [ str ] : # Step 1: Tokenize the input stream error_listener = ParseErrorListener ( ) if not isinstance ( input_ , FileStream ) : input_ = InputStream ( input_ ) lexer = jsgLexer ( input_ ) lexer . addErrorListener ( error_listener ) tokens = CommonTokenStream ( lexer ) tokens . fill ( ) if error_listener . n_errors : return None # Step 2: Generate the parse tree parser = jsgParser ( tokens ) parser . addErrorListener ( error_listener ) parse_tree = parser . doc ( ) if error_listener . n_errors : return None # Step 3: Transform the results parser = JSGDocParser ( ) parser . visit ( parse_tree ) if parser . undefined_tokens ( ) : for tkn in parser . undefined_tokens ( ) : print ( "Undefined token: " + tkn ) return None return parser . as_python ( source )
352
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/generate_python.py#L56-L91
[ "def", "is_cleanly_mergable", "(", "*", "dicts", ":", "Dict", "[", "Any", ",", "Any", "]", ")", "->", "bool", ":", "if", "len", "(", "dicts", ")", "<=", "1", ":", "return", "True", "elif", "len", "(", "dicts", ")", "==", "2", ":", "if", "not", "all", "(", "isinstance", "(", "d", ",", "Mapping", ")", "for", "d", "in", "dicts", ")", ":", "return", "False", "else", ":", "shared_keys", "=", "set", "(", "dicts", "[", "0", "]", ".", "keys", "(", ")", ")", "&", "set", "(", "dicts", "[", "1", "]", ".", "keys", "(", ")", ")", "return", "all", "(", "is_cleanly_mergable", "(", "dicts", "[", "0", "]", "[", "key", "]", ",", "dicts", "[", "1", "]", "[", "key", "]", ")", "for", "key", "in", "shared_keys", ")", "else", ":", "dict_combinations", "=", "itertools", ".", "combinations", "(", "dicts", ",", "2", ")", "return", "all", "(", "is_cleanly_mergable", "(", "*", "combination", ")", "for", "combination", "in", "dict_combinations", ")" ]
Executes a request via AsyncHTTPClient asynchronously , returning a tornado . HTTPResponse .
def fetch ( self , request , callback = None , raise_error = True , * * kwargs ) : # accepts request as string then convert it to HTTPRequest if isinstance ( request , str ) : request = HTTPRequest ( request , * * kwargs ) try : # The first request calls tornado-client ignoring the # possible exception, in case of 401 response, # renews the access token and replay it response = yield self . _authorized_fetch ( request , callback , raise_error = False , * * kwargs ) if response . code == BAD_TOKEN : yield self . _token_manager . reset_token ( ) elif response . error and raise_error : raise response . error else : raise gen . Return ( response ) # The request with renewed token response = yield self . _authorized_fetch ( request , callback , raise_error = raise_error , * * kwargs ) raise gen . Return ( response ) except TokenError as err : yield self . _token_manager . reset_token ( ) raise err
353
https://github.com/globocom/tornado-alf/blob/3c3ec58c33f2d4ddfbed4ac18ca89d6beedf9c87/tornadoalf/client.py#L29-L66
[ "def", "communityvisibilitystate", "(", "self", ")", ":", "if", "self", ".", "_communityvisibilitystate", "==", "None", ":", "return", "None", "elif", "self", ".", "_communityvisibilitystate", "in", "self", ".", "VisibilityState", ":", "return", "self", ".", "VisibilityState", "[", "self", ".", "_communityvisibilitystate", "]", "else", ":", "#Invalid State", "return", "None" ]
Call jsonschema validation to raise JSONValidation on non - compliance or silently pass .
def validate_config ( key : str , config : dict ) -> None : try : jsonschema . validate ( config , CONFIG_JSON_SCHEMA [ key ] ) except jsonschema . ValidationError as x_validation : raise JSONValidation ( 'JSON validation error on {} configuration: {}' . format ( key , x_validation . message ) ) except jsonschema . SchemaError as x_schema : raise JSONValidation ( 'JSON schema error on {} specification: {}' . format ( key , x_schema . message ) )
354
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/validate_config.py#L86-L99
[ "def", "_update_cache", "(", "self", ")", ":", "expected_version", "=", "(", "tuple", "(", "r", ".", "_version", "for", "r", "in", "self", ".", "registries", ")", "+", "(", "self", ".", "_extra_registry", ".", "_version", ",", ")", ")", "if", "self", ".", "_last_version", "!=", "expected_version", ":", "registry2", "=", "Registry", "(", ")", "for", "reg", "in", "self", ".", "registries", ":", "registry2", ".", "key_bindings", ".", "extend", "(", "reg", ".", "key_bindings", ")", "# Copy all bindings from `self._extra_registry`.", "registry2", ".", "key_bindings", ".", "extend", "(", "self", ".", "_extra_registry", ".", "key_bindings", ")", "self", ".", "_registry2", "=", "registry2", "self", ".", "_last_version", "=", "expected_version" ]
Generate an identifier for a callable signal receiver .
def __make_id ( receiver ) : if __is_bound_method ( receiver ) : return ( id ( receiver . __func__ ) , id ( receiver . __self__ ) ) return id ( receiver )
355
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/signals.py#L41-L56
[ "def", "_create_auth", "(", "team", ",", "timeout", "=", "None", ")", ":", "url", "=", "get_registry_url", "(", "team", ")", "contents", "=", "_load_auth", "(", ")", "auth", "=", "contents", ".", "get", "(", "url", ")", "if", "auth", "is", "not", "None", ":", "# If the access token expires within a minute, update it.", "if", "auth", "[", "'expires_at'", "]", "<", "time", ".", "time", "(", ")", "+", "60", ":", "try", ":", "auth", "=", "_update_auth", "(", "team", ",", "auth", "[", "'refresh_token'", "]", ",", "timeout", ")", "except", "CommandException", "as", "ex", ":", "raise", "CommandException", "(", "\"Failed to update the access token (%s). Run `quilt login%s` again.\"", "%", "(", "ex", ",", "' '", "+", "team", "if", "team", "else", "''", ")", ")", "contents", "[", "url", "]", "=", "auth", "_save_auth", "(", "contents", ")", "return", "auth" ]
Remove all dead signal receivers from the global receivers collection .
def __purge ( ) : global __receivers newreceivers = collections . defaultdict ( list ) for signal , receivers in six . iteritems ( __receivers ) : alive = [ x for x in receivers if not __is_dead ( x ) ] newreceivers [ signal ] = alive __receivers = newreceivers
356
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/signals.py#L59-L72
[ "def", "replace_version", "(", "self", ",", "other", ",", "logger", ")", ":", "if", "other", ".", "library_name", "!=", "self", ".", "library_name", ":", "logger", ".", "debug", "(", "'not replacable: {} != {} ()'", ".", "format", "(", "other", ".", "library_name", ",", "self", ".", "library_name", ",", "other", ".", "filename", ")", ")", "return", "False", "elif", "int", "(", "other", ".", "major_version", ")", "!=", "int", "(", "self", ".", "major_version", ")", ":", "logger", ".", "debug", "(", "'not replacable: {} != {} ({})'", ".", "format", "(", "int", "(", "self", ".", "major_version", ")", ",", "int", "(", "other", ".", "major_version", ")", ",", "other", ".", "filename", ",", ")", ")", "return", "False", "elif", "float", "(", "other", ".", "minor_version", ")", ">=", "float", "(", "self", ".", "minor_version", ")", ":", "logger", ".", "debug", "(", "'not replacable: {} >= {} ({})'", ".", "format", "(", "other", ".", "minor_version", ",", "self", ".", "minor_version", ",", "other", ".", "filename", ",", ")", ")", "return", "False", "else", ":", "return", "True" ]
Return all signal handlers that are currently still alive for the input signal .
def __live_receivers ( signal ) : with __lock : __purge ( ) receivers = [ funcref ( ) for funcref in __receivers [ signal ] ] return receivers
357
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/signals.py#L75-L89
[ "async", "def", "parse_result", "(", "response", ",", "response_type", "=", "None", ",", "*", ",", "encoding", "=", "\"utf-8\"", ")", ":", "if", "response_type", "is", "None", ":", "ct", "=", "response", ".", "headers", ".", "get", "(", "\"content-type\"", ")", "if", "ct", "is", "None", ":", "cl", "=", "response", ".", "headers", ".", "get", "(", "\"content-length\"", ")", "if", "cl", "is", "None", "or", "cl", "==", "\"0\"", ":", "return", "\"\"", "raise", "TypeError", "(", "\"Cannot auto-detect response type \"", "\"due to missing Content-Type header.\"", ")", "main_type", ",", "sub_type", ",", "extras", "=", "parse_content_type", "(", "ct", ")", "if", "sub_type", "==", "\"json\"", ":", "response_type", "=", "\"json\"", "elif", "sub_type", "==", "\"x-tar\"", ":", "response_type", "=", "\"tar\"", "elif", "(", "main_type", ",", "sub_type", ")", "==", "(", "\"text\"", ",", "\"plain\"", ")", ":", "response_type", "=", "\"text\"", "encoding", "=", "extras", ".", "get", "(", "\"charset\"", ",", "encoding", ")", "else", ":", "raise", "TypeError", "(", "\"Unrecognized response type: {ct}\"", ".", "format", "(", "ct", "=", "ct", ")", ")", "if", "\"tar\"", "==", "response_type", ":", "what", "=", "await", "response", ".", "read", "(", ")", "return", "tarfile", ".", "open", "(", "mode", "=", "\"r\"", ",", "fileobj", "=", "BytesIO", "(", "what", ")", ")", "if", "\"json\"", "==", "response_type", ":", "data", "=", "await", "response", ".", "json", "(", "encoding", "=", "encoding", ")", "elif", "\"text\"", "==", "response_type", ":", "data", "=", "await", "response", ".", "text", "(", "encoding", "=", "encoding", ")", "else", ":", "data", "=", "await", "response", ".", "read", "(", ")", "return", "data" ]
Return True if the method is a bound method ( attached to a class instance ) .
def __is_bound_method ( method ) : if not ( hasattr ( method , "__func__" ) and hasattr ( method , "__self__" ) ) : return False # Bound methods have a __self__ attribute pointing to the owner instance return six . get_method_self ( method ) is not None
358
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/signals.py#L92-L103
[ "def", "delete_datasource", "(", "datasourceid", ",", "orgname", "=", "None", ",", "profile", "=", "'grafana'", ")", ":", "if", "isinstance", "(", "profile", ",", "string_types", ")", ":", "profile", "=", "__salt__", "[", "'config.option'", "]", "(", "profile", ")", "response", "=", "requests", ".", "delete", "(", "'{0}/api/datasources/{1}'", ".", "format", "(", "profile", "[", "'grafana_url'", "]", ",", "datasourceid", ")", ",", "auth", "=", "_get_auth", "(", "profile", ")", ",", "headers", "=", "_get_headers", "(", "profile", ")", ",", "timeout", "=", "profile", ".", "get", "(", "'grafana_timeout'", ",", "3", ")", ",", ")", "if", "response", ".", "status_code", ">=", "400", ":", "response", ".", "raise_for_status", "(", ")", "return", "response", ".", "json", "(", ")" ]
Disconnect the receiver func from the signal identified by signal_id .
def disconnect ( signal , receiver ) : inputkey = __make_id ( receiver ) with __lock : __purge ( ) receivers = __receivers . get ( signal ) for idx in six . moves . range ( len ( receivers ) ) : connected = receivers [ idx ] ( ) if inputkey != __make_id ( connected ) : continue del receivers [ idx ] return True # receiver successfully disconnected! return False
359
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/signals.py#L149-L175
[ "def", "insert", "(", "self", ",", "crc", ",", "toc", ")", ":", "if", "self", ".", "_rw_cache", ":", "try", ":", "filename", "=", "'%s/%08X.json'", "%", "(", "self", ".", "_rw_cache", ",", "crc", ")", "cache", "=", "open", "(", "filename", ",", "'w'", ")", "cache", ".", "write", "(", "json", ".", "dumps", "(", "toc", ",", "indent", "=", "2", ",", "default", "=", "self", ".", "_encoder", ")", ")", "cache", ".", "close", "(", ")", "logger", ".", "info", "(", "'Saved cache to [%s]'", ",", "filename", ")", "self", ".", "_cache_files", "+=", "[", "filename", "]", "except", "Exception", "as", "exp", ":", "logger", ".", "warning", "(", "'Could not save cache to file [%s]: %s'", ",", "filename", ",", "str", "(", "exp", ")", ")", "else", ":", "logger", ".", "warning", "(", "'Could not save cache, no writable directory'", ")" ]
Emit a signal by serially calling each registered signal receiver for the signal .
def emit ( signal , * args , * * kwargs ) : if signal not in __receivers : return receivers = __live_receivers ( signal ) for func in receivers : func ( * args , * * kwargs )
360
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/signals.py#L194-L214
[ "def", "create_index", "(", "index_name", ",", "index_config", ",", "client", ")", ":", "client", ".", "create", "(", "index", "=", "index_name", ",", "body", "=", "index_config", ")" ]
Very fast uniqify routine for numpy arrays .
def arrayuniqify ( X , retainorder = False ) : s = X . argsort ( ) X = X [ s ] D = np . append ( [ True ] , X [ 1 : ] != X [ : - 1 ] ) if retainorder : DD = np . append ( D . nonzero ( ) [ 0 ] , len ( X ) ) ind = [ min ( s [ x : DD [ i + 1 ] ] ) for ( i , x ) in enumerate ( DD [ : - 1 ] ) ] ind . sort ( ) return ind else : return [ D , s ]
361
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/fast.py#L19-L75
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Indices of elements in a sorted numpy array equal to those in another .
def equalspairs ( X , Y ) : T = Y . copy ( ) R = ( T [ 1 : ] != T [ : - 1 ] ) . nonzero ( ) [ 0 ] R = np . append ( R , np . array ( [ len ( T ) - 1 ] ) ) M = R [ R . searchsorted ( range ( len ( T ) ) ) ] D = T . searchsorted ( X ) T = np . append ( T , np . array ( [ 0 ] ) ) M = np . append ( M , np . array ( [ 0 ] ) ) A = ( T [ D ] == X ) * D B = ( T [ D ] == X ) * ( M [ D ] + 1 ) return [ A , B ]
362
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/fast.py#L142-L191
[ "def", "get_license_assignment_manager", "(", "service_instance", ")", ":", "log", ".", "debug", "(", "'Retrieving license assignment manager'", ")", "try", ":", "lic_assignment_manager", "=", "service_instance", ".", "content", ".", "licenseManager", ".", "licenseAssignmentManager", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "if", "not", "lic_assignment_manager", ":", "raise", "salt", ".", "exceptions", ".", "VMwareObjectRetrievalError", "(", "'License assignment manager was not retrieved'", ")", "return", "lic_assignment_manager" ]
Boolean mask of elements in a numpy array that appear in another .
def isin ( X , Y ) : if len ( Y ) > 0 : T = Y . copy ( ) T . sort ( ) D = T . searchsorted ( X ) T = np . append ( T , np . array ( [ 0 ] ) ) W = ( T [ D ] == X ) if isinstance ( W , bool ) : return np . zeros ( ( len ( X ) , ) , bool ) else : return ( T [ D ] == X ) else : return np . zeros ( ( len ( X ) , ) , bool )
363
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/fast.py#L260-L304
[ "def", "update_version_descriptor", "(", "self", ",", "task", ",", "releasetype", ",", "descriptor", ",", "verbrowser", ",", "commentbrowser", ")", ":", "if", "task", "is", "None", ":", "null", "=", "treemodel", ".", "TreeItem", "(", "None", ")", "verbrowser", ".", "set_model", "(", "treemodel", ".", "TreeModel", "(", "null", ")", ")", "return", "m", "=", "self", ".", "create_version_model", "(", "task", ",", "releasetype", ",", "descriptor", ")", "verbrowser", ".", "set_model", "(", "m", ")", "commentbrowser", ".", "set_model", "(", "m", ")" ]
Elements of a numpy array that do not appear in another .
def arraydifference ( X , Y ) : if len ( Y ) > 0 : Z = isin ( X , Y ) return X [ np . invert ( Z ) ] else : return X
364
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/fast.py#L357-L391
[ "def", "run_tfba", "(", "self", ",", "reaction", ")", ":", "solver", "=", "self", ".", "_get_solver", "(", "integer", "=", "True", ")", "p", "=", "fluxanalysis", ".", "FluxBalanceProblem", "(", "self", ".", "_mm", ",", "solver", ")", "start_time", "=", "time", ".", "time", "(", ")", "p", ".", "add_thermodynamic", "(", ")", "try", ":", "p", ".", "maximize", "(", "reaction", ")", "except", "fluxanalysis", ".", "FluxBalanceError", "as", "e", ":", "self", ".", "report_flux_balance_error", "(", "e", ")", "logger", ".", "info", "(", "'Solving took {:.2f} seconds'", ".", "format", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", ")", "for", "reaction_id", "in", "self", ".", "_mm", ".", "reactions", ":", "yield", "reaction_id", ",", "p", ".", "get_flux", "(", "reaction_id", ")" ]
Fast vectorized max function for element - wise comparison of two numpy arrays .
def arraymax ( X , Y ) : Z = np . zeros ( ( len ( X ) , ) , int ) A = X <= Y B = Y < X Z [ A ] = Y [ A ] Z [ B ] = X [ B ] return Z
365
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/fast.py#L434-L469
[ "def", "clear_to_reset", "(", "self", ",", "config_vars", ")", ":", "super", "(", "RemoteBridgeState", ",", "self", ")", ".", "clear_to_reset", "(", "config_vars", ")", "self", ".", "status", "=", "BRIDGE_STATUS", ".", "IDLE", "self", ".", "error", "=", "0" ]
Derive DID from seed as per indy - sdk .
async def _seed2did ( self ) -> str : rv = None dids_with_meta = json . loads ( await did . list_my_dids_with_meta ( self . handle ) ) # list if dids_with_meta : for did_with_meta in dids_with_meta : # dict if 'metadata' in did_with_meta : try : meta = json . loads ( did_with_meta [ 'metadata' ] ) if isinstance ( meta , dict ) and meta . get ( 'seed' , None ) == self . _seed : rv = did_with_meta . get ( 'did' ) except json . decoder . JSONDecodeError : continue # it's not one of ours, carry on if not rv : # seed not in metadata, generate did again on temp wallet temp_wallet = await Wallet ( self . _seed , '{}.seed2did' . format ( self . name ) , None , { 'auto-remove' : True } ) . create ( ) rv = temp_wallet . did await temp_wallet . remove ( ) return rv
366
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/wallet.py#L181-L211
[ "def", "_get_parent_timestamp", "(", "dirname", ",", "mtime", ")", ":", "parent_pathname", "=", "os", ".", "path", ".", "dirname", "(", "dirname", ")", "# max between the parent timestamp the one passed in", "mtime", "=", "_max_timestamps", "(", "parent_pathname", ",", "False", ",", "mtime", ")", "if", "dirname", "!=", "os", ".", "path", ".", "dirname", "(", "parent_pathname", ")", ":", "# this is only called if we're not at the root", "mtime", "=", "_get_parent_timestamp", "(", "parent_pathname", ",", "mtime", ")", "return", "mtime" ]
Remove serialized wallet if it exists .
async def remove ( self ) -> None : LOGGER . debug ( 'Wallet.remove >>>' ) try : LOGGER . info ( 'Removing wallet: %s' , self . name ) await wallet . delete_wallet ( json . dumps ( self . cfg ) , json . dumps ( self . access_creds ) ) except IndyError as x_indy : LOGGER . info ( 'Abstaining from wallet removal; indy-sdk error code %s' , x_indy . error_code ) LOGGER . debug ( 'Wallet.remove <<<' )
367
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/wallet.py#L353-L366
[ "def", "is_instance_throughput_too_low", "(", "self", ",", "inst_id", ")", ":", "r", "=", "self", ".", "instance_throughput_ratio", "(", "inst_id", ")", "if", "r", "is", "None", ":", "logger", ".", "debug", "(", "\"{} instance {} throughput is not \"", "\"measurable.\"", ".", "format", "(", "self", ",", "inst_id", ")", ")", "return", "None", "too_low", "=", "r", "<", "self", ".", "Delta", "if", "too_low", ":", "logger", ".", "display", "(", "\"{}{} instance {} throughput ratio {} is lower than Delta {}.\"", ".", "format", "(", "MONITORING_PREFIX", ",", "self", ",", "inst_id", ",", "r", ",", "self", ".", "Delta", ")", ")", "else", ":", "logger", ".", "trace", "(", "\"{} instance {} throughput ratio {} is acceptable.\"", ".", "format", "(", "self", ",", "inst_id", ",", "r", ")", ")", "return", "too_low" ]
Load a delimited text file to a numpy record array .
def loadSV ( fname , shape = None , titles = None , aligned = False , byteorder = None , renamer = None , * * kwargs ) : [ columns , metadata ] = loadSVcols ( fname , * * kwargs ) if 'names' in metadata . keys ( ) : names = metadata [ 'names' ] else : names = None if 'formats' in metadata . keys ( ) : formats = metadata [ 'formats' ] else : formats = None if 'dtype' in metadata . keys ( ) : dtype = metadata [ 'dtype' ] else : dtype = None if renamer is not None : print 'Trying user-given renamer ...' renamed = renamer ( names ) if len ( renamed ) == len ( uniqify ( renamed ) ) : names = renamed print '''... using renamed names (original names will be in return metadata)''' else : print '... renamer failed to produce unique names, not using.' if names and len ( names ) != len ( uniqify ( names ) ) : print 'Names are not unique, reverting to default naming scheme.' names = None return [ utils . fromarrays ( columns , type = np . ndarray , dtype = dtype , shape = shape , formats = formats , names = names , titles = titles , aligned = aligned , byteorder = byteorder ) , metadata ]
368
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/io.py#L39-L152
[ "def", "_op_generic_StoU_saturation", "(", "self", ",", "value", ",", "min_value", ",", "max_value", ")", ":", "#pylint:disable=no-self-use", "return", "claripy", ".", "If", "(", "claripy", ".", "SGT", "(", "value", ",", "max_value", ")", ",", "max_value", ",", "claripy", ".", "If", "(", "claripy", ".", "SLT", "(", "value", ",", "min_value", ")", ",", "min_value", ",", "value", ")", ")" ]
Load a separated value text file to a list of records , each a list of strings .
def loadSVrecs ( fname , uselines = None , skiprows = 0 , linefixer = None , delimiter_regex = None , verbosity = DEFAULT_VERBOSITY , * * metadata ) : if delimiter_regex and isinstance ( delimiter_regex , types . StringType ) : import re delimiter_regex = re . compile ( delimiter_regex ) [ metadata , inferedlines , WHOLETHING ] = getmetadata ( fname , skiprows = skiprows , linefixer = linefixer , delimiter_regex = delimiter_regex , verbosity = verbosity , * * metadata ) if uselines is None : uselines = ( 0 , False ) if is_string_like ( fname ) : fh = file ( fname , 'rU' ) elif hasattr ( fname , 'readline' ) : fh = fname else : raise ValueError ( 'fname must be a string or file handle' ) for _ind in range ( skiprows + uselines [ 0 ] + metadata [ 'headerlines' ] ) : fh . readline ( ) if linefixer or delimiter_regex : fh2 = tempfile . TemporaryFile ( 'w+b' ) F = fh . read ( ) . strip ( '\n' ) . split ( '\n' ) if linefixer : F = map ( linefixer , F ) if delimiter_regex : F = map ( lambda line : delimiter_regex . sub ( metadata [ 'dialect' ] . delimiter , line ) , F ) fh2 . write ( '\n' . join ( F ) ) fh2 . seek ( 0 ) fh = fh2 reader = csv . reader ( fh , dialect = metadata [ 'dialect' ] ) if uselines [ 1 ] : linelist = [ ] for ln in reader : if reader . line_num <= uselines [ 1 ] - uselines [ 0 ] : linelist . append ( ln ) else : break else : linelist = list ( reader ) fh . close ( ) if linelist [ - 1 ] == [ ] : linelist . pop ( - 1 ) return [ linelist , metadata ]
369
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/io.py#L401-L572
[ "def", "cublasGetVersion", "(", "handle", ")", ":", "version", "=", "ctypes", ".", "c_int", "(", ")", "status", "=", "_libcublas", ".", "cublasGetVersion_v2", "(", "handle", ",", "ctypes", ".", "byref", "(", "version", ")", ")", "cublasCheckStatus", "(", "status", ")", "return", "version", ".", "value" ]
Parse the types from a structured numpy dtype object .
def parsetypes ( dtype ) : return [ dtype [ i ] . name . strip ( '1234567890' ) . rstrip ( 'ing' ) for i in range ( len ( dtype ) ) ]
370
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/io.py#L1808-L1835
[ "def", "encrypt", "(", "self", ",", "txt", ",", "key", ")", ":", "# log.debug(\"encrypt(txt='%s', key='%s')\", txt, key)", "assert", "isinstance", "(", "txt", ",", "six", ".", "text_type", ")", ",", "\"txt: %s is not text type!\"", "%", "repr", "(", "txt", ")", "assert", "isinstance", "(", "key", ",", "six", ".", "text_type", ")", ",", "\"key: %s is not text type!\"", "%", "repr", "(", "key", ")", "if", "len", "(", "txt", ")", "!=", "len", "(", "key", ")", ":", "raise", "SecureJSLoginError", "(", "\"encrypt error: %s and '%s' must have the same length!\"", "%", "(", "txt", ",", "key", ")", ")", "pbkdf2_hash", "=", "PBKDF2SHA1Hasher1", "(", ")", ".", "get_salt_hash", "(", "txt", ")", "txt", "=", "force_bytes", "(", "txt", ")", "key", "=", "force_bytes", "(", "key", ")", "crypted", "=", "self", ".", "xor", "(", "txt", ",", "key", ")", "crypted", "=", "binascii", ".", "hexlify", "(", "crypted", ")", "crypted", "=", "six", ".", "text_type", "(", "crypted", ",", "\"ascii\"", ")", "return", "\"%s$%s\"", "%", "(", "pbkdf2_hash", ",", "crypted", ")" ]
Threshold a coloring dictionary for a given list of column names .
def thresholdcoloring ( coloring , names ) : for key in coloring . keys ( ) : if len ( [ k for k in coloring [ key ] if k in names ] ) == 0 : coloring . pop ( key ) else : coloring [ key ] = utils . uniqify ( [ k for k in coloring [ key ] if k in names ] ) return coloring
371
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/io.py#L1838-L1873
[ "def", "is_file", "(", "jottapath", ",", "JFS", ")", ":", "log", ".", "debug", "(", "\"is_file %r\"", ",", "jottapath", ")", "try", ":", "jf", "=", "JFS", ".", "getObject", "(", "jottapath", ")", "except", "JFSNotFoundError", ":", "return", "False", "return", "isinstance", "(", "jf", ",", "JFSFile", ")" ]
Strong directory maker : deletes any existing directory of the same name before creating it .
def makedir ( dir_name ) : if os . path . exists ( dir_name ) : delete ( dir_name ) os . mkdir ( dir_name )
372
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/io.py#L1924-L1945
[ "def", "get_active_token", "(", "self", ")", ":", "expire_time", "=", "self", ".", "store_handler", ".", "has_value", "(", "\"expires\"", ")", "access_token", "=", "self", ".", "store_handler", ".", "has_value", "(", "\"access_token\"", ")", "if", "expire_time", "and", "access_token", ":", "expire_time", "=", "self", ".", "store_handler", ".", "get_value", "(", "\"expires\"", ")", "if", "not", "datetime", ".", "now", "(", ")", "<", "datetime", ".", "fromtimestamp", "(", "float", "(", "expire_time", ")", ")", ":", "self", ".", "store_handler", ".", "delete_value", "(", "\"access_token\"", ")", "self", ".", "store_handler", ".", "delete_value", "(", "\"expires\"", ")", "logger", ".", "info", "(", "'Access token expired, going to get new token'", ")", "self", ".", "auth", "(", ")", "else", ":", "logger", ".", "info", "(", "'Access token noy expired yet'", ")", "else", ":", "self", ".", "auth", "(", ")", "return", "self", ".", "store_handler", ".", "get_value", "(", "\"access_token\"", ")" ]
Decorator to pass community .
def pass_community ( f ) : @ wraps ( f ) def inner ( community_id , * args , * * kwargs ) : c = Community . get ( community_id ) if c is None : abort ( 404 ) return f ( c , * args , * * kwargs ) return inner
373
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/views/ui.py#L56-L64
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Decorator to require permission .
def permission_required ( action ) : def decorator ( f ) : @ wraps ( f ) def inner ( community , * args , * * kwargs ) : permission = current_permission_factory ( community , action = action ) if not permission . can ( ) : abort ( 403 ) return f ( community , * args , * * kwargs ) return inner return decorator
374
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/views/ui.py#L67-L77
[ "def", "describe_api_stages", "(", "restApiId", ",", "deploymentId", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "stages", "=", "conn", ".", "get_stages", "(", "restApiId", "=", "restApiId", ",", "deploymentId", "=", "deploymentId", ")", "return", "{", "'stages'", ":", "[", "_convert_datetime_str", "(", "stage", ")", "for", "stage", "in", "stages", "[", "'item'", "]", "]", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
Render a template to a string with the provided item in context .
def format_item ( item , template , name = 'item' ) : ctx = { name : item } return render_template_to_string ( template , * * ctx )
375
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/views/ui.py#L81-L84
[ "def", "isrchi", "(", "value", ",", "ndim", ",", "array", ")", ":", "value", "=", "ctypes", ".", "c_int", "(", "value", ")", "ndim", "=", "ctypes", ".", "c_int", "(", "ndim", ")", "array", "=", "stypes", ".", "toIntVector", "(", "array", ")", "return", "libspice", ".", "isrchi_c", "(", "value", ",", "ndim", ",", "array", ")" ]
Create a new community .
def new ( ) : form = CommunityForm ( formdata = request . values ) ctx = mycommunities_ctx ( ) ctx . update ( { 'form' : form , 'is_new' : True , 'community' : None , } ) if form . validate_on_submit ( ) : data = copy . deepcopy ( form . data ) community_id = data . pop ( 'identifier' ) del data [ 'logo' ] community = Community . create ( community_id , current_user . get_id ( ) , * * data ) file = request . files . get ( 'logo' , None ) if file : if not community . save_logo ( file . stream , file . filename ) : form . logo . errors . append ( _ ( 'Cannot add this file as a logo. Supported formats: ' 'PNG, JPG and SVG. Max file size: 1.5 MB.' ) ) db . session . rollback ( ) community = None if community : db . session . commit ( ) flash ( "Community was successfully created." , category = 'success' ) return redirect ( url_for ( '.edit' , community_id = community . id ) ) return render_template ( current_app . config [ 'COMMUNITIES_NEW_TEMPLATE' ] , community_form = form , * * ctx )
376
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/views/ui.py#L171-L209
[ "def", "delete_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "''", "path", "=", "Client", ".", "urls", "[", "'rt_bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", "return", "self", ".", "_call", "(", "path", ",", "'DELETE'", ",", "headers", "=", "Client", ".", "json_headers", ")" ]
Create or edit a community .
def edit ( community ) : form = EditCommunityForm ( formdata = request . values , obj = community ) deleteform = DeleteCommunityForm ( ) ctx = mycommunities_ctx ( ) ctx . update ( { 'form' : form , 'is_new' : False , 'community' : community , 'deleteform' : deleteform , } ) if form . validate_on_submit ( ) : for field , val in form . data . items ( ) : setattr ( community , field , val ) file = request . files . get ( 'logo' , None ) if file : if not community . save_logo ( file . stream , file . filename ) : form . logo . errors . append ( _ ( 'Cannot add this file as a logo. Supported formats: ' 'PNG, JPG and SVG. Max file size: 1.5 MB.' ) ) if not form . logo . errors : db . session . commit ( ) flash ( "Community successfully edited." , category = 'success' ) return redirect ( url_for ( '.edit' , community_id = community . id ) ) return render_template ( current_app . config [ 'COMMUNITIES_EDIT_TEMPLATE' ] , * * ctx )
377
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/views/ui.py#L216-L247
[ "def", "get_cached_placeholder_output", "(", "parent_object", ",", "placeholder_name", ")", ":", "if", "not", "PlaceholderRenderingPipe", ".", "may_cache_placeholders", "(", ")", ":", "return", "None", "language_code", "=", "get_parent_language_code", "(", "parent_object", ")", "cache_key", "=", "get_placeholder_cache_key_for_parent", "(", "parent_object", ",", "placeholder_name", ",", "language_code", ")", "return", "cache", ".", "get", "(", "cache_key", ")" ]
Delete a community .
def delete ( community ) : deleteform = DeleteCommunityForm ( formdata = request . values ) ctx = mycommunities_ctx ( ) ctx . update ( { 'deleteform' : deleteform , 'is_new' : False , 'community' : community , } ) if deleteform . validate_on_submit ( ) : community . delete ( ) db . session . commit ( ) flash ( "Community was deleted." , category = 'success' ) return redirect ( url_for ( '.index' ) ) else : flash ( "Community could not be deleted." , category = 'warning' ) return redirect ( url_for ( '.edit' , community_id = community . id ) )
378
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/views/ui.py#L254-L271
[ "def", "assignParameters", "(", "self", ",", "solution_next", ",", "IncomeDstn", ",", "LivPrb", ",", "DiscFac", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ",", "BoroCnstArt", ",", "aXtraGrid", ",", "vFuncBool", ",", "CubicBool", ")", ":", "ConsPerfForesightSolver", ".", "assignParameters", "(", "self", ",", "solution_next", ",", "DiscFac", ",", "LivPrb", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ")", "self", ".", "BoroCnstArt", "=", "BoroCnstArt", "self", ".", "IncomeDstn", "=", "IncomeDstn", "self", ".", "aXtraGrid", "=", "aXtraGrid", "self", ".", "vFuncBool", "=", "vFuncBool", "self", ".", "CubicBool", "=", "CubicBool" ]
Uses a peyotl wrapper around an Open Tree web service to get a list of trees with a given value for a given property to be searched on .
def ot_find_tree ( arg_dict , exact = True , verbose = False , oti_wrapper = None ) : if oti_wrapper is None : from peyotl . sugar import oti oti_wrapper = oti return oti_wrapper . find_trees ( arg_dict , exact = exact , verbose = verbose , wrap_response = True )
379
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/tutorials/ot-oti-find-tree.py#L12-L24
[ "def", "blocksize", "(", "self", ",", "input_totalframes", ")", ":", "blocksize", "=", "input_totalframes", "if", "self", ".", "pad", ":", "mod", "=", "input_totalframes", "%", "self", ".", "buffer_size", "if", "mod", ":", "blocksize", "+=", "self", ".", "buffer_size", "-", "mod", "return", "blocksize" ]
Determine whether etype is a List or other iterable
def is_iterable ( etype ) -> bool : return type ( etype ) is GenericMeta and issubclass ( etype . __extra__ , Iterable )
380
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/typing_patch_36.py#L29-L31
[ "def", "build_request", "(", "headers", ":", "Headers", ")", "->", "str", ":", "raw_key", "=", "bytes", "(", "random", ".", "getrandbits", "(", "8", ")", "for", "_", "in", "range", "(", "16", ")", ")", "key", "=", "base64", ".", "b64encode", "(", "raw_key", ")", ".", "decode", "(", ")", "headers", "[", "\"Upgrade\"", "]", "=", "\"websocket\"", "headers", "[", "\"Connection\"", "]", "=", "\"Upgrade\"", "headers", "[", "\"Sec-WebSocket-Key\"", "]", "=", "key", "headers", "[", "\"Sec-WebSocket-Version\"", "]", "=", "\"13\"", "return", "key" ]
This function sets up a command - line option parser and then calls fetch_and_write_mrca to do all of the real work .
def main ( argv ) : import argparse description = 'Uses Open Tree of Life web services to the MRCA for a set of OTT IDs.' parser = argparse . ArgumentParser ( prog = 'ot-tree-of-life-mrca' , description = description ) parser . add_argument ( 'ottid' , nargs = '*' , type = int , help = 'OTT IDs' ) parser . add_argument ( '--subtree' , action = 'store_true' , default = False , required = False , help = 'write a newick representation of the subtree rooted at this mrca' ) parser . add_argument ( '--induced-subtree' , action = 'store_true' , default = False , required = False , help = 'write a newick representation of the topology of the requested taxa in the synthetic tree (the subtree pruned to just the queried taxa)' ) parser . add_argument ( '--details' , action = 'store_true' , default = False , required = False , help = 'report more details about the mrca node' ) args = parser . parse_args ( argv ) id_list = args . ottid if not id_list : sys . stderr . write ( 'No OTT IDs provided. Running a dummy query with 770302 770315\n' ) id_list = [ 770302 , 770315 ] fetch_and_write_mrca ( id_list , args . details , args . subtree , args . induced_subtree , sys . stdout , sys . stderr )
381
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/tutorials/ot-tree-of-life-mrca.py#L63-L82
[ "def", "get_stresses", "(", "self", ")", ":", "if", "\"stress\"", "not", "in", "self", ".", "settings", ":", "return", "None", "wrapped", "=", "[", "[", "Scalar", "(", "value", "=", "x", ")", "for", "x", "in", "y", "]", "for", "y", "in", "self", ".", "settings", "[", "\"stress\"", "]", "]", "return", "Property", "(", "matrices", "=", "[", "wrapped", "]", ",", "units", "=", "self", ".", "settings", "[", "\"stress units\"", "]", ")" ]
Send schema to ledger then retrieve it as written to the ledger and return it . If schema already exists on ledger log error and return schema .
async def send_schema ( self , schema_data_json : str ) -> str : LOGGER . debug ( 'Origin.send_schema >>> schema_data_json: %s' , schema_data_json ) schema_data = json . loads ( schema_data_json ) s_key = schema_key ( schema_id ( self . did , schema_data [ 'name' ] , schema_data [ 'version' ] ) ) with SCHEMA_CACHE . lock : try : rv_json = await self . get_schema ( s_key ) LOGGER . error ( 'Schema %s version %s already exists on ledger for origin-did %s: not sending' , schema_data [ 'name' ] , schema_data [ 'version' ] , self . did ) except AbsentSchema : # OK - about to create and send it ( _ , schema_json ) = await anoncreds . issuer_create_schema ( self . did , schema_data [ 'name' ] , schema_data [ 'version' ] , json . dumps ( schema_data [ 'attr_names' ] ) ) req_json = await ledger . build_schema_request ( self . did , schema_json ) resp_json = await self . _sign_submit ( req_json ) resp = json . loads ( resp_json ) resp_result_txn = resp [ 'result' ] [ 'txn' ] rv_json = await self . get_schema ( schema_key ( schema_id ( resp_result_txn [ 'metadata' ] [ 'from' ] , resp_result_txn [ 'data' ] [ 'data' ] [ 'name' ] , resp_result_txn [ 'data' ] [ 'data' ] [ 'version' ] ) ) ) # add to cache en passant LOGGER . debug ( 'Origin.send_schema <<< %s' , rv_json ) return rv_json
382
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/origin.py#L36-L82
[ "def", "load_any_file", "(", "filename", ")", ":", "import", "f311", "# Splits attempts using ((binary X text) file) criterion", "if", "a99", ".", "is_text_file", "(", "filename", ")", ":", "return", "load_with_classes", "(", "filename", ",", "f311", ".", "classes_txt", "(", ")", ")", "else", ":", "return", "load_with_classes", "(", "filename", ",", "f311", ".", "classes_bin", "(", ")", ")" ]
Assumes that the caller holds the _index_lock!
def _locked_refresh_doc_ids ( self ) : d = { } for s in self . _shards : for k in s . doc_index . keys ( ) : if k in d : raise KeyError ( 'doc "{i}" found in multiple repos' . format ( i = k ) ) d [ k ] = s self . _doc2shard_map = d
383
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/type_aware_doc_store.py#L157-L166
[ "def", "_CollectHistory_", "(", "lookupType", ",", "fromVal", ",", "toVal", ",", "using", "=", "{", "}", ",", "pattern", "=", "''", ")", ":", "histObj", "=", "{", "}", "if", "fromVal", "!=", "toVal", ":", "histObj", "[", "lookupType", "]", "=", "{", "\"from\"", ":", "fromVal", ",", "\"to\"", ":", "toVal", "}", "if", "lookupType", "in", "[", "'deriveValue'", ",", "'deriveRegex'", ",", "'copyValue'", ",", "'normIncludes'", ",", "'deriveIncludes'", "]", "and", "using", "!=", "''", ":", "histObj", "[", "lookupType", "]", "[", "\"using\"", "]", "=", "using", "if", "lookupType", "in", "[", "'genericRegex'", ",", "'fieldSpecificRegex'", ",", "'normRegex'", ",", "'deriveRegex'", "]", "and", "pattern", "!=", "''", ":", "histObj", "[", "lookupType", "]", "[", "\"pattern\"", "]", "=", "pattern", "return", "histObj" ]
This will push the master branch to the remote named remote_name using the mirroring strategy to cut down on locking of the working repo .
def push_doc_to_remote ( self , remote_name , doc_id = None ) : if doc_id is None : ret = True # @TODO should spawn a thread of each shard... for shard in self . _shards : if not shard . push_to_remote ( remote_name ) : ret = False return ret shard = self . get_shard ( doc_id ) return shard . push_to_remote ( remote_name )
384
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/type_aware_doc_store.py#L214-L229
[ "def", "purge", "(", "datasets", ",", "reuses", ",", "organizations", ")", ":", "purge_all", "=", "not", "any", "(", "(", "datasets", ",", "reuses", ",", "organizations", ")", ")", "if", "purge_all", "or", "datasets", ":", "log", ".", "info", "(", "'Purging datasets'", ")", "purge_datasets", "(", ")", "if", "purge_all", "or", "reuses", ":", "log", ".", "info", "(", "'Purging reuses'", ")", "purge_reuses", "(", ")", "if", "purge_all", "or", "organizations", ":", "log", ".", "info", "(", "'Purging organizations'", ")", "purge_organizations", "(", ")", "success", "(", "'Done'", ")" ]
Generator that iterates over all detected documents, yielding each document ID with its filesystem path. Order is by shard, but arbitrary within shards.
def iter_doc_filepaths ( self , * * kwargs ) : for shard in self . _shards : for doc_id , blob in shard . iter_doc_filepaths ( * * kwargs ) : yield doc_id , blob
385
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/type_aware_doc_store.py#L323-L331
[ "def", "delete", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "session", ".", "delete", "(", "*", "args", ",", "*", "*", "self", ".", "get_kwargs", "(", "*", "*", "kwargs", ")", ")" ]
Form data .
def data ( self ) : d = super ( CommunityForm , self ) . data d . pop ( 'csrf_token' , None ) return d
386
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/forms.py#L54-L58
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Validate field identifier .
def validate_identifier ( self , field ) : if field . data : field . data = field . data . lower ( ) if Community . get ( field . data , with_deleted = True ) : raise validators . ValidationError ( _ ( 'The identifier already exists. ' 'Please choose a different one.' ) )
387
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/forms.py#L154-L161
[ "def", "detach_storage", "(", "self", ",", "server", ",", "address", ")", ":", "body", "=", "{", "'storage_device'", ":", "{", "'address'", ":", "address", "}", "}", "url", "=", "'/server/{0}/storage/detach'", ".", "format", "(", "server", ")", "res", "=", "self", ".", "post_request", "(", "url", ",", "body", ")", "return", "Storage", ".", "_create_storage_objs", "(", "res", "[", "'server'", "]", "[", "'storage_devices'", "]", ",", "cloud_manager", "=", "self", ")" ]
Returns the text content of filepath
def read_filepath ( filepath , encoding = 'utf-8' ) : with codecs . open ( filepath , 'r' , encoding = encoding ) as fo : return fo . read ( )
388
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/input_output.py#L25-L28
[ "def", "get_records", "(", "self", ")", ":", "form", "=", "self", ".", "request", ".", "form", "ar_count", "=", "self", ".", "get_ar_count", "(", ")", "records", "=", "[", "]", "# Group belonging AR fields together", "for", "arnum", "in", "range", "(", "ar_count", ")", ":", "record", "=", "{", "}", "s1", "=", "\"-{}\"", ".", "format", "(", "arnum", ")", "keys", "=", "filter", "(", "lambda", "key", ":", "s1", "in", "key", ",", "form", ".", "keys", "(", ")", ")", "for", "key", "in", "keys", ":", "new_key", "=", "key", ".", "replace", "(", "s1", ",", "\"\"", ")", "value", "=", "form", ".", "get", "(", "key", ")", "record", "[", "new_key", "]", "=", "value", "records", ".", "append", "(", "record", ")", "return", "records" ]
Returns the text fetched via HTTP GET from url, decoded using encoding.
def download ( url , encoding = 'utf-8' ) : import requests response = requests . get ( url ) response . encoding = encoding return response . text
389
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/input_output.py#L53-L58
[ "def", "merge_ownership_periods", "(", "mappings", ")", ":", "return", "valmap", "(", "lambda", "v", ":", "tuple", "(", "OwnershipPeriod", "(", "a", ".", "start", ",", "b", ".", "start", ",", "a", ".", "sid", ",", "a", ".", "value", ",", ")", "for", "a", ",", "b", "in", "sliding_window", "(", "2", ",", "concatv", "(", "sorted", "(", "v", ")", ",", "# concat with a fake ownership object to make the last", "# end date be max timestamp", "[", "OwnershipPeriod", "(", "pd", ".", "Timestamp", ".", "max", ".", "tz_localize", "(", "'utc'", ")", ",", "None", ",", "None", ",", "None", ",", ")", "]", ",", ")", ",", ")", ")", ",", "mappings", ",", ")" ]
Returns an indented JSON representation of d as a string.
def pretty_dict_str ( d , indent = 2 ) : b = StringIO ( ) write_pretty_dict_str ( b , d , indent = indent ) return b . getvalue ( )
390
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/input_output.py#L80-L84
[ "def", "logout", "(", "self", ")", ":", "# Check if all transfers are complete before logout", "self", ".", "transfers_complete", "payload", "=", "{", "'apikey'", ":", "self", ".", "config", ".", "get", "(", "'apikey'", ")", ",", "'logintoken'", ":", "self", ".", "session", ".", "cookies", ".", "get", "(", "'logintoken'", ")", "}", "method", ",", "url", "=", "get_URL", "(", "'logout'", ")", "res", "=", "getattr", "(", "self", ".", "session", ",", "method", ")", "(", "url", ",", "params", "=", "payload", ")", "if", "res", ".", "status_code", "==", "200", ":", "self", ".", "session", ".", "cookies", "[", "'logintoken'", "]", "=", "None", "return", "True", "hellraiser", "(", "res", ")" ]
Writes an indented JSON representation of obj to out.
def write_pretty_dict_str ( out , obj , indent = 2 ) : json . dump ( obj , out , indent = indent , sort_keys = True , separators = ( ',' , ': ' ) , ensure_ascii = False , encoding = "utf-8" )
391
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/utility/input_output.py#L87-L95
[ "def", "handle_simulation_end", "(", "self", ",", "data_portal", ")", ":", "log", ".", "info", "(", "'Simulated {} trading days\\n'", "'first open: {}\\n'", "'last close: {}'", ",", "self", ".", "_session_count", ",", "self", ".", "_trading_calendar", ".", "session_open", "(", "self", ".", "_first_session", ")", ",", "self", ".", "_trading_calendar", ".", "session_close", "(", "self", ".", "_last_session", ")", ",", ")", "packet", "=", "{", "}", "self", ".", "end_of_simulation", "(", "packet", ",", "self", ".", "_ledger", ",", "self", ".", "_trading_calendar", ",", "self", ".", "_sessions", ",", "data_portal", ",", "self", ".", "_benchmark_source", ",", ")", "return", "packet" ]
Create a community response serializer .
def community_responsify ( schema_class , mimetype ) : def view ( data , code = 200 , headers = None , links_item_factory = None , page = None , urlkwargs = None , links_pagination_factory = None ) : """Generate the response object.""" if isinstance ( data , Community ) : last_modified = data . updated response_data = schema_class ( context = dict ( item_links_factory = links_item_factory ) ) . dump ( data ) . data else : last_modified = None response_data = schema_class ( context = dict ( total = data . query . count ( ) , item_links_factory = links_item_factory , page = page , urlkwargs = urlkwargs , pagination_links_factory = links_pagination_factory ) ) . dump ( data . items , many = True ) . data response = current_app . response_class ( json . dumps ( response_data , * * _format_args ( ) ) , mimetype = mimetype ) response . status_code = code if last_modified : response . last_modified = last_modified if headers is not None : response . headers . extend ( headers ) return response return view
392
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/serializers/response.py#L58-L94
[ "def", "_log10_Inorm_extern_planckint", "(", "self", ",", "Teff", ")", ":", "log10_Inorm", "=", "libphoebe", ".", "wd_planckint", "(", "Teff", ",", "self", ".", "extern_wd_idx", ",", "self", ".", "wd_data", "[", "\"planck_table\"", "]", ")", "return", "log10_Inorm" ]
Wraps another Exception in an InternalError .
def from_error ( exc_info , json_encoder , debug_url = None ) : exc = exc_info [ 1 ] data = exc . __dict__ . copy ( ) for key , value in data . items ( ) : try : json_encoder . encode ( value ) except TypeError : data [ key ] = repr ( value ) data [ "traceback" ] = "" . join ( traceback . format_exception ( * exc_info ) ) if debug_url is not None : data [ "debug_url" ] = debug_url return InternalError ( data )
393
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/errors.py#L99-L122
[ "def", "apache_md5crypt", "(", "password", ",", "salt", ",", "magic", "=", "'$apr1$'", ")", ":", "password", "=", "password", ".", "encode", "(", "'utf-8'", ")", "salt", "=", "salt", ".", "encode", "(", "'utf-8'", ")", "magic", "=", "magic", ".", "encode", "(", "'utf-8'", ")", "m", "=", "md5", "(", ")", "m", ".", "update", "(", "password", "+", "magic", "+", "salt", ")", "mixin", "=", "md5", "(", "password", "+", "salt", "+", "password", ")", ".", "digest", "(", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "password", ")", ")", ":", "m", ".", "update", "(", "mixin", "[", "i", "%", "16", "]", ")", "i", "=", "len", "(", "password", ")", "while", "i", ":", "if", "i", "&", "1", ":", "m", ".", "update", "(", "'\\x00'", ")", "else", ":", "m", ".", "update", "(", "password", "[", "0", "]", ")", "i", ">>=", "1", "final", "=", "m", ".", "digest", "(", ")", "for", "i", "in", "range", "(", "1000", ")", ":", "m2", "=", "md5", "(", ")", "if", "i", "&", "1", ":", "m2", ".", "update", "(", "password", ")", "else", ":", "m2", ".", "update", "(", "final", ")", "if", "i", "%", "3", ":", "m2", ".", "update", "(", "salt", ")", "if", "i", "%", "7", ":", "m2", ".", "update", "(", "password", ")", "if", "i", "&", "1", ":", "m2", ".", "update", "(", "final", ")", "else", ":", "m2", ".", "update", "(", "password", ")", "final", "=", "m2", ".", "digest", "(", ")", "itoa64", "=", "'./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'", "rearranged", "=", "''", "seq", "=", "(", "(", "0", ",", "6", ",", "12", ")", ",", "(", "1", ",", "7", ",", "13", ")", ",", "(", "2", ",", "8", ",", "14", ")", ",", "(", "3", ",", "9", ",", "15", ")", ",", "(", "4", ",", "10", ",", "5", ")", ")", "for", "a", ",", "b", ",", "c", "in", "seq", ":", "v", "=", "ord", "(", "final", "[", "a", "]", ")", "<<", "16", "|", "ord", "(", "final", "[", "b", "]", ")", "<<", "8", "|", "ord", "(", "final", "[", "c", "]", ")", "for", "i", "in", "range", "(", "4", ")", ":", "rearranged", "+=", "itoa64", "[", "v", "&", "0x3f", "]", "v", ">>=", "6", "v", "=", "ord", "(", "final", "[", "11", "]", ")", "for", "i", "in", "range", "(", "2", ")", ":", "rearranged", "+=", "itoa64", "[", "v", "&", "0x3f", "]", "v", ">>=", "6", "return", "magic", "+", "salt", "+", "'$'", "+", "rearranged" ]
Return whether the cache contains a schema for the input index: a schema key, sequence number, or schema identifier.
def contains ( self , index : Union [ SchemaKey , int , str ] ) -> bool : LOGGER . debug ( 'SchemaCache.contains >>> index: %s' , index ) rv = None if isinstance ( index , SchemaKey ) : rv = ( index in self . _schema_key2schema ) elif isinstance ( index , int ) or ( isinstance ( index , str ) and ':2:' not in index ) : rv = ( int ( index ) in self . _seq_no2schema_key ) elif isinstance ( index , str ) : rv = ( schema_key ( index ) in self . _schema_key2schema ) else : rv = False LOGGER . debug ( 'SchemaCache.contains <<< %s' , rv ) return rv
394
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/cache.py#L115-L136
[ "def", "setGradingFinishedStateAction", "(", "self", ",", "request", ",", "queryset", ")", ":", "for", "subm", "in", "queryset", ":", "subm", ".", "state", "=", "Submission", ".", "GRADED", "subm", ".", "save", "(", ")" ]
Cull the cache entry frame list to size, favouring the most recently queried frames.
def cull ( self , delta : bool ) -> None : LOGGER . debug ( 'RevoCacheEntry.cull >>> delta: %s' , delta ) rr_frames = self . rr_delta_frames if delta else self . rr_state_frames mark = 4096 ** 0.5 # max rev reg size = 4096; heuristic: hover max around sqrt(4096) = 64 if len ( rr_frames ) > int ( mark * 1.25 ) : rr_frames . sort ( key = lambda x : - x . qtime ) # order by descending query time del rr_frames [ int ( mark * 0.75 ) : ] # retain most recent, grow again from here LOGGER . info ( 'Pruned revocation cache entry %s to %s %s frames' , self . rev_reg_def [ 'id' ] , len ( rr_frames ) , 'delta' if delta else 'state' ) LOGGER . debug ( 'RevoCacheEntry.cull <<<' )
395
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/cache.py#L413-L433
[ "def", "area", "(", "self", ")", ":", "mprop", "=", "vtk", ".", "vtkMassProperties", "(", ")", "mprop", ".", "SetInputData", "(", "self", ")", "return", "mprop", ".", "GetSurfaceArea", "(", ")" ]
Return the default non-revocation interval, taken from the latest 'to' times on the delta frames of revocation cache entries whose rev reg ids stem from the input cred def id.
def dflt_interval ( self , cd_id : str ) -> ( int , int ) : LOGGER . debug ( 'RevocationCache.dflt_interval >>>' ) fro = None to = None for rr_id in self : if cd_id != rev_reg_id2cred_def_id ( rr_id ) : continue entry = self [ rr_id ] if entry . rr_delta_frames : to = max ( entry . rr_delta_frames , key = lambda f : f . to ) . to fro = min ( fro or to , to ) if not ( fro and to ) : LOGGER . debug ( 'RevocationCache.dflt_interval <!< No data for default non-revoc interval on cred def id %s' , cd_id ) raise CacheIndex ( 'No data for default non-revoc interval on cred def id {}' . format ( cd_id ) ) rv = ( fro , to ) LOGGER . debug ( 'RevocationCache.dflt_interval <<< %s' , rv ) return rv
396
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/cache.py#L662-L706
[ "def", "num_workers", "(", "self", ")", ":", "size", "=", "ctypes", ".", "c_int", "(", ")", "check_call", "(", "_LIB", ".", "MXKVStoreGetGroupSize", "(", "self", ".", "handle", ",", "ctypes", ".", "byref", "(", "size", ")", ")", ")", "return", "size", ".", "value" ]
Parse and update from archived cache files . Only accept new content ; do not overwrite any existing cache content .
def parse ( base_dir : str , timestamp : int = None ) -> int : LOGGER . debug ( 'parse >>> base_dir: %s, timestamp: %s' , base_dir , timestamp ) if not isdir ( base_dir ) : LOGGER . info ( 'No cache archives available: not feeding cache' ) LOGGER . debug ( 'parse <<< None' ) return None if not timestamp : timestamps = [ int ( t ) for t in listdir ( base_dir ) if t . isdigit ( ) ] if timestamps : timestamp = max ( timestamps ) else : LOGGER . info ( 'No cache archives available: not feeding cache' ) LOGGER . debug ( 'parse <<< None' ) return None timestamp_dir = join ( base_dir , str ( timestamp ) ) if not isdir ( timestamp_dir ) : LOGGER . error ( 'No such archived cache directory: %s' , timestamp_dir ) LOGGER . debug ( 'parse <<< None' ) return None with SCHEMA_CACHE . lock : with open ( join ( timestamp_dir , 'schema' ) , 'r' ) as archive : schemata = json . loads ( archive . read ( ) ) SCHEMA_CACHE . feed ( schemata ) with CRED_DEF_CACHE . lock : with open ( join ( timestamp_dir , 'cred_def' ) , 'r' ) as archive : cred_defs = json . loads ( archive . read ( ) ) for cd_id in cred_defs : if cd_id in CRED_DEF_CACHE : LOGGER . warning ( 'Cred def cache already has cred def on %s: skipping' , cd_id ) else : CRED_DEF_CACHE [ cd_id ] = cred_defs [ cd_id ] LOGGER . info ( 'Cred def cache imported cred def for cred def id %s' , cd_id ) with REVO_CACHE . lock : with open ( join ( timestamp_dir , 'revocation' ) , 'r' ) as archive : rr_cache_entries = json . loads ( archive . read ( ) ) for ( rr_id , entry ) in rr_cache_entries . items ( ) : if rr_id in REVO_CACHE : LOGGER . warning ( 'Revocation cache already has entry on %s: skipping' , rr_id ) else : rr_cache_entry = RevoCacheEntry ( entry [ 'rev_reg_def' ] ) rr_cache_entry . rr_delta_frames = [ RevRegUpdateFrame ( f [ '_to' ] , f [ '_timestamp' ] , f [ '_rr_update' ] ) for f in entry [ 'rr_delta_frames' ] ] rr_cache_entry . cull ( True ) rr_cache_entry . rr_state_frames = [ RevRegUpdateFrame ( f [ '_to' ] , f [ '_timestamp' ] , f [ '_rr_update' ] ) for f in entry [ 'rr_state_frames' ] ] rr_cache_entry . cull ( False ) REVO_CACHE [ rr_id ] = rr_cache_entry LOGGER . info ( 'Revocation cache imported entry for rev reg id %s' , rr_id ) LOGGER . debug ( 'parse <<< %s' , timestamp ) return timestamp
397
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/cache.py#L774-L850
[ "def", "has_role", "(", "user", ",", "roles", ")", ":", "if", "user", "and", "user", ".", "is_superuser", ":", "return", "True", "if", "not", "isinstance", "(", "roles", ",", "list", ")", ":", "roles", "=", "[", "roles", "]", "normalized_roles", "=", "[", "]", "for", "role", "in", "roles", ":", "if", "not", "inspect", ".", "isclass", "(", "role", ")", ":", "role", "=", "RolesManager", ".", "retrieve_role", "(", "role", ")", "normalized_roles", ".", "append", "(", "role", ")", "user_roles", "=", "get_user_roles", "(", "user", ")", "return", "any", "(", "[", "role", "in", "user_roles", "for", "role", "in", "normalized_roles", "]", ")" ]
Returns the nexml2json attribute or the default code for badgerfish
def detect_nexson_version ( blob ) : n = get_nexml_el ( blob ) assert isinstance ( n , dict ) return n . get ( '@nexml2json' , BADGER_FISH_NEXSON_VERSION )
398
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/helper.py#L40-L44
[ "def", "index_split", "(", "index", ",", "chunks", ")", ":", "Ntotal", "=", "index", ".", "shape", "[", "0", "]", "Nsections", "=", "int", "(", "chunks", ")", "if", "Nsections", "<=", "0", ":", "raise", "ValueError", "(", "'number sections must be larger than 0.'", ")", "Neach_section", ",", "extras", "=", "divmod", "(", "Ntotal", ",", "Nsections", ")", "section_sizes", "=", "(", "[", "0", "]", "+", "extras", "*", "[", "Neach_section", "+", "1", "]", "+", "(", "Nsections", "-", "extras", ")", "*", "[", "Neach_section", "]", ")", "div_points", "=", "numpy", ".", "array", "(", "section_sizes", ")", ".", "cumsum", "(", ")", "sub_ind", "=", "[", "]", "for", "i", "in", "range", "(", "Nsections", ")", ":", "st", "=", "div_points", "[", "i", "]", "end", "=", "div_points", "[", "i", "+", "1", "]", "sub_ind", ".", "append", "(", "index", "[", "st", ":", "end", "]", ")", "return", "sub_ind" ]
Adds the k - > v mapping to d but if a previous element exists it changes the value of for the key to list .
def _add_value_to_dict_bf ( d , k , v ) : prev = d . get ( k ) if prev is None : d [ k ] = v elif isinstance ( prev , list ) : if isinstance ( v , list ) : prev . extend ( v ) else : prev . append ( v ) else : if isinstance ( v , list ) : x = [ prev ] x . extend ( v ) d [ k ] = x else : d [ k ] = [ prev , v ]
399
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/helper.py#L110-L133
[ "def", "RMSError", "(", "self", ")", ":", "tss", "=", "self", ".", "TSSError", "(", ")", "return", "math", ".", "sqrt", "(", "tss", "/", "self", ".", "size", ")" ]