query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
sequencelengths
20
553
This function calls the a trous algorithm code to recompose the input into a single array . This is the implementation of the isotropic undecimated wavelet transform recomposition for a GPU .
def gpu_iuwt_recomposition(in1, scale_adjust, store_on_gpu, smoothed_array):
    """
    Recompose wavelet coefficients into a single array on the GPU.

    This is the isotropic undecimated wavelet transform (a trous)
    recomposition: the filters are applied from the coarsest scale down,
    accumulating the per-scale coefficients in ``in1``.

    INPUTS:
    in1             (no default):   3D array of wavelet coefficients, one 2D
                                    slice per scale (assumed float-compatible;
                                    may already live on the GPU).
    scale_adjust    (no default):   number of omitted scales below the
                                    decomposition.
    store_on_gpu    (no default):   if True, return the GPU array; otherwise
                                    copy the result back to the host.
    smoothed_array  (no default):   optional smoothed approximation used to
                                    seed the recomposition; None starts from
                                    zeros.

    OUTPUTS:
    recomposition                   2D recomposed array (GPU array or host
                                    ndarray depending on store_on_gpu).
    """
    # Filter-bank for use in the a trous algorithm, pushed onto the GPU.
    wavelet_filter = (1. / 16) * np.array([1, 4, 6, 4, 1], dtype=np.float32)
    wavelet_filter = gpuarray.to_gpu_async(wavelet_filter)

    # Determines scale with adjustment and creates a zero array on the GPU to
    # store the output, unless smoothed_array is given.
    max_scale = in1.shape[0] + scale_adjust

    if smoothed_array is None:
        recomposition = gpuarray.zeros([in1.shape[1], in1.shape[2]], np.float32)
    else:
        recomposition = gpuarray.to_gpu(smoothed_array.astype(np.float32))

    # Determines whether the array is already on the GPU or not. If not,
    # moves it to the GPU.
    # BUG FIX: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; only conversion failures should fall through.
    try:
        gpu_in1 = gpuarray.to_gpu_async(in1.astype(np.float32))
    except Exception:
        gpu_in1 = in1

    # Working array on the GPU.
    gpu_tmp = gpuarray.empty_like(recomposition)

    # Array holding the current scale value, consumed by the kernels.
    gpu_scale = gpuarray.zeros([1], np.int32)
    gpu_scale += max_scale - 1

    # Fetches the a trous kernels.
    gpu_a_trous_row_kernel, gpu_a_trous_col_kernel = gpu_a_trous()

    # Kernel launch geometry: 32x32 thread blocks tiling the image.
    grid_rows = int(in1.shape[1] // 32)
    grid_cols = int(in1.shape[2] // 32)

    # The first loop recomposes the scales that carry wavelet coefficients;
    # the second completes the recomposition on the scales below
    # scale_adjust, which only smooth the accumulated approximation.
    for i in range(max_scale - 1, scale_adjust - 1, -1):
        gpu_a_trous_row_kernel(recomposition, gpu_tmp, wavelet_filter, gpu_scale,
                               block=(32, 32, 1), grid=(grid_cols, grid_rows))
        gpu_a_trous_col_kernel(gpu_tmp, recomposition, wavelet_filter, gpu_scale,
                               block=(32, 32, 1), grid=(grid_cols, grid_rows))
        recomposition = recomposition[:, :] + gpu_in1[i - scale_adjust, :, :]
        gpu_scale -= 1

    if scale_adjust > 0:
        for i in range(scale_adjust - 1, -1, -1):
            gpu_a_trous_row_kernel(recomposition, gpu_tmp, wavelet_filter, gpu_scale,
                                   block=(32, 32, 1), grid=(grid_cols, grid_rows))
            gpu_a_trous_col_kernel(gpu_tmp, recomposition, wavelet_filter, gpu_scale,
                                   block=(32, 32, 1), grid=(grid_cols, grid_rows))
            gpu_scale -= 1

    # Return values depend on mode.
    if store_on_gpu:
        return recomposition
    else:
        return recomposition.get()
900
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L501-L581
[ "def", "set_end_date", "(", "self", ",", "lifetime", ")", ":", "self", ".", "end_date", "=", "(", "datetime", ".", "datetime", ".", "now", "(", ")", "+", "datetime", ".", "timedelta", "(", "0", ",", "lifetime", ")", ")" ]
logout and remove all session data
def unauth(request):
    """
    Log out and remove all session data.

    Always redirects back to the 'main' view, whether or not the user was
    authenticated.
    """
    if check_key(request):
        # NOTE(review): `api` is created but unused here — presumably kept
        # for the side effects of get_api; confirm before removing.
        api = get_api(request)
        request.session.clear()
        logout(request)
    return HttpResponseRedirect(reverse('main'))
901
https://github.com/marcelcaraciolo/foursquare/blob/a8bda33cc2d61e25aa8df72011246269fd98aa13/examples/django/example/djfoursquare/views.py#L22-L30
[ "def", "create_or_update_group_alias", "(", "self", ",", "name", ",", "alias_id", "=", "None", ",", "mount_accessor", "=", "None", ",", "canonical_id", "=", "None", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'name'", ":", "name", ",", "'mount_accessor'", ":", "mount_accessor", ",", "'canonical_id'", ":", "canonical_id", ",", "}", "if", "alias_id", "is", "not", "None", ":", "params", "[", "'id'", "]", "=", "alias_id", "api_path", "=", "'/v1/{mount_point}/group-alias'", ".", "format", "(", "mount_point", "=", "mount_point", ")", "response", "=", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
display some user info to show we have authenticated successfully
def info(request):
    """
    Display some user info to show we have authenticated successfully.

    Redirects to the 'main' view when no OAuth token is stored.
    """
    if check_key(request):
        api = get_api(request)
        user = api.users(id='self')
        # BUG FIX: `print dir(user)` is Python-2-only syntax; the
        # parenthesised form behaves identically on Python 2 and is valid
        # on Python 3. (Debug output — consider removing entirely.)
        print(dir(user))
        return render_to_response('djfoursquare/info.html', {'user': user})
    else:
        return HttpResponseRedirect(reverse('main'))
902
https://github.com/marcelcaraciolo/foursquare/blob/a8bda33cc2d61e25aa8df72011246269fd98aa13/examples/django/example/djfoursquare/views.py#L33-L43
[ "def", "comment", "(", "self", ",", "text", ",", "comment_prefix", "=", "'#'", ")", ":", "comment", "=", "Comment", "(", "self", ".", "_container", ")", "if", "not", "text", ".", "startswith", "(", "comment_prefix", ")", ":", "text", "=", "\"{} {}\"", ".", "format", "(", "comment_prefix", ",", "text", ")", "if", "not", "text", ".", "endswith", "(", "'\\n'", ")", ":", "text", "=", "\"{}{}\"", ".", "format", "(", "text", ",", "'\\n'", ")", "comment", ".", "add_line", "(", "text", ")", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "comment", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Check to see if we already have an access_key stored if we do then we have already gone through OAuth . If not then we haven't and we probably need to .
def check_key(request):
    """
    Check to see if we already have an access_key stored.

    If we do, then we have already gone through OAuth; if not, we probably
    need to.

    :return: True when an 'oauth_token' is present in the session,
        False otherwise.
    """
    # `session.get` never raises KeyError, so the original try/except was
    # dead code; a truthiness test preserves the exact True/False result.
    return bool(request.session.get('oauth_token'))
903
https://github.com/marcelcaraciolo/foursquare/blob/a8bda33cc2d61e25aa8df72011246269fd98aa13/examples/django/example/djfoursquare/views.py#L73-L85
[ "def", "statistic_recommend", "(", "classes", ",", "P", ")", ":", "if", "imbalance_check", "(", "P", ")", ":", "return", "IMBALANCED_RECOMMEND", "if", "binary_check", "(", "classes", ")", ":", "return", "BINARY_RECOMMEND", "return", "MULTICLASS_RECOMMEND" ]
Iterate over items in a streaming response from the Docker client within a timeout .
def stream_timeout(stream, timeout, timeout_msg=None):
    """
    Iterate over items in a streaming response from the Docker client
    within a timeout.

    :param stream: an iterable stream object with a ``close()`` method
    :param timeout: timeout in seconds
    :param timeout_msg: message for the TimeoutError raised on expiry
    :raises TimeoutError: when the stream is still open after ``timeout``
    """
    expired = threading.Event()

    def _expire():
        # Flag the timeout and force the blocking iteration to terminate.
        expired.set()
        stream.close()

    watchdog = threading.Timer(timeout, _expire)
    try:
        watchdog.start()
        for item in stream:
            yield item
        # A timeout looks the same as the loop ending normally, so consult
        # the flag to decide whether a timeout actually occurred.
        if expired.is_set():
            raise TimeoutError(timeout_msg)
    finally:
        watchdog.cancel()
        # Close the stream's underlying response object (if it has one) to
        # avoid potential socket leaks; this seems more reliable at
        # preventing ResourceWarnings than stream.close() alone.
        # FIXME: potential race if the Timer thread closes the stream at
        # the same time, but hopefully without serious side effects.
        if hasattr(stream, '_response'):
            stream._response.close()
904
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/stream/_timeout.py#L4-L41
[ "def", "guess_extension", "(", "amimetype", ",", "normalize", "=", "False", ")", ":", "ext", "=", "_mimes", ".", "guess_extension", "(", "amimetype", ")", "if", "ext", "and", "normalize", ":", "# Normalize some common magic mis-interpreation", "ext", "=", "{", "'.asc'", ":", "'.txt'", ",", "'.obj'", ":", "'.bin'", "}", ".", "get", "(", "ext", ",", "ext", ")", "from", "invenio", ".", "legacy", ".", "bibdocfile", ".", "api_normalizer", "import", "normalize_format", "return", "normalize_format", "(", "ext", ")", "return", "ext" ]
Get per - program state .
def get_state(self, caller):
    """
    Get per-program state.

    Returns the state stored for ``caller``, lazily creating and caching a
    fresh DictObject on first access.
    """
    try:
        return self.state[caller]
    except KeyError:
        self.state[caller] = rv = DictObject()
        return rv
905
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/callable.py#L74-L83
[ "def", "removeAllChildrenAtIndex", "(", "self", ",", "parentIndex", ")", ":", "if", "not", "parentIndex", ".", "isValid", "(", ")", ":", "logger", ".", "debug", "(", "\"No valid item selected for deletion (ignored).\"", ")", "return", "parentItem", "=", "self", ".", "getItem", "(", "parentIndex", ",", "None", ")", "logger", ".", "debug", "(", "\"Removing children of {!r}\"", ".", "format", "(", "parentItem", ")", ")", "assert", "parentItem", ",", "\"parentItem not found\"", "#firstChildRow = self.index(0, 0, parentIndex).row()", "#lastChildRow = self.index(parentItem.nChildren()-1, 0, parentIndex).row()", "#logger.debug(\"Removing rows: {} to {}\".format(firstChildRow, lastChildRow))", "#self.beginRemoveRows(parentIndex, firstChildRow, lastChildRow)", "self", ".", "beginRemoveRows", "(", "parentIndex", ",", "0", ",", "parentItem", ".", "nChildren", "(", ")", "-", "1", ")", "try", ":", "parentItem", ".", "removeAllChildren", "(", ")", "finally", ":", "self", ".", "endRemoveRows", "(", ")", "logger", ".", "debug", "(", "\"removeAllChildrenAtIndex completed\"", ")" ]
Return object for given name registered in System namespace .
def name_to_system_object(self, value):
    """
    Return object for given name registered in the System namespace.

    Non-name values pass through unchanged; unresolvable names fall back
    to the value itself.

    :raises SystemNotReady: when no system is attached yet.
    """
    if not self.system:
        raise SystemNotReady
    if not isinstance(value, (str, Object)):
        return value
    resolved = self.system.name_to_system_object(value)
    return resolved if resolved else value
906
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/callable.py#L250-L261
[ "def", "write_index", "(", "self", ",", "overwrite", ":", "bool", "=", "False", ",", "mock", ":", "bool", "=", "False", ")", "->", "None", ":", "write_if_allowed", "(", "self", ".", "index_filename", ",", "self", ".", "index_content", "(", ")", ",", "overwrite", "=", "overwrite", ",", "mock", "=", "mock", ")" ]
Recursively cancel all threaded background processes of this Callable . This is called automatically for actions if program deactivates .
def cancel(self, caller):
    """
    Recursively cancel all threaded background processes of this Callable.

    Called automatically for actions when a program deactivates.
    """
    # Deduplicate via a set so each callable child is cancelled once,
    # even if it appears multiple times among the children.
    callables = {child for child in self.children
                 if isinstance(child, AbstractCallable)}
    for child in callables:
        child.cancel(caller)
907
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/callable.py#L311-L317
[ "def", "getPositionNearType", "(", "self", ",", "tagSet", ",", "idx", ")", ":", "try", ":", "return", "idx", "+", "self", ".", "__ambiguousTypes", "[", "idx", "]", ".", "getPositionByType", "(", "tagSet", ")", "except", "KeyError", ":", "raise", "error", ".", "PyAsn1Error", "(", "'Type position out of range'", ")" ]
Give string representation of the callable .
def give_str(self):
    """
    Give string representation of the callable.

    Delegates to ``_give_str`` with a shallow copy of the stored
    positional arguments so the original list cannot be mutated.
    """
    return self._give_str(self._args[:], self._kwargs)
908
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/callable.py#L333-L339
[ "def", "_intersection", "(", "self", ",", "keys", ",", "rows", ")", ":", "# If there are no other keys with start and end date (i.e. nothing to merge) return immediately.", "if", "not", "keys", ":", "return", "rows", "ret", "=", "list", "(", ")", "for", "row", "in", "rows", ":", "start_date", "=", "row", "[", "self", ".", "_key_start_date", "]", "end_date", "=", "row", "[", "self", ".", "_key_end_date", "]", "for", "key_start_date", ",", "key_end_date", "in", "keys", ":", "start_date", ",", "end_date", "=", "Type2JoinHelper", ".", "_intersect", "(", "start_date", ",", "end_date", ",", "row", "[", "key_start_date", "]", ",", "row", "[", "key_end_date", "]", ")", "if", "not", "start_date", ":", "break", "if", "key_start_date", "not", "in", "[", "self", ".", "_key_start_date", ",", "self", ".", "_key_end_date", "]", ":", "del", "row", "[", "key_start_date", "]", "if", "key_end_date", "not", "in", "[", "self", ".", "_key_start_date", ",", "self", ".", "_key_end_date", "]", ":", "del", "row", "[", "key_end_date", "]", "if", "start_date", ":", "row", "[", "self", ".", "_key_start_date", "]", "=", "start_date", "row", "[", "self", ".", "_key_end_date", "]", "=", "end_date", "ret", ".", "append", "(", "row", ")", "return", "ret" ]
Prepares the request and catches common errors and returns tuple of data and the request response .
def _make_request(self, endpoint, params):
    """
    Prepare the request, catch common errors, and return a tuple of the
    parsed JSON data and the request response.

    :raises exceptions.UnauthorizedKeyError: on 401 (except lookup endpoints)
    :raises exceptions.BadApiRequest: on 400 (except shorten endpoints)
    :raises exceptions.ServerOrConnectionError: on 500 or connection failure
    :raises exceptions.BadApiResponse: when the body is not valid JSON
    """
    # Copy-then-update so explicit params override the base params.
    merged = self._base_params.copy()
    merged.update(params)
    try:
        r = requests.get(endpoint, merged)
        data = r.json()
        status = r.status_code
        # 401 on lookup and 400 on shorten are handled by the callers,
        # which raise more specific errors.
        if status == 401 and not endpoint.endswith('lookup'):
            raise exceptions.UnauthorizedKeyError
        elif status == 400 and not endpoint.endswith('shorten'):
            raise exceptions.BadApiRequest
        elif status == 500:
            raise exceptions.ServerOrConnectionError
        return data, r
    except ValueError as e:
        raise exceptions.BadApiResponse(e)
    except requests.RequestException:
        raise exceptions.ServerOrConnectionError
909
https://github.com/fauskanger/mypolr/blob/46eb4fc5ba0f65412634a37e30e05de79fc9db4c/mypolr/polr_api.py#L42-L74
[ "def", "batch_update", "(", "self", ",", "values", ",", "w", "=", "1", ")", ":", "for", "x", "in", "values", ":", "self", ".", "update", "(", "x", ",", "w", ")", "self", ".", "compress", "(", ")", "return" ]
Creates a short url if valid
def shorten(self, long_url, custom_ending=None, is_secret=False):
    """
    Create a short url if valid.

    :param long_url: the url to shorten
    :param custom_ending: optional custom ending for the short url
    :param is_secret: whether the short url should be secret
    :return: the short url
    :raises exceptions.CustomEndingUnavailable: 400 with a custom ending
    :raises exceptions.BadApiRequest: other 400 responses
    :raises exceptions.QuotaExceededError: on 403
    """
    payload = {
        'url': long_url,
        'is_secret': 'true' if is_secret else 'false',
        'custom_ending': custom_ending,
    }
    data, r = self._make_request(self.api_shorten_endpoint, payload)
    if r.status_code == 400:
        if custom_ending is not None:
            raise exceptions.CustomEndingUnavailable(custom_ending)
        raise exceptions.BadApiRequest
    elif r.status_code == 403:
        raise exceptions.QuotaExceededError
    if data.get('action') == 'shorten':
        short_url = data.get('result')
        if short_url is not None:
            return short_url
    # Unexpected but non-error response shape.
    raise exceptions.DebugTempWarning
910
https://github.com/fauskanger/mypolr/blob/46eb4fc5ba0f65412634a37e30e05de79fc9db4c/mypolr/polr_api.py#L76-L103
[ "def", "get_rng", "(", "obj", "=", "None", ")", ":", "seed", "=", "(", "id", "(", "obj", ")", "+", "os", ".", "getpid", "(", ")", "+", "int", "(", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%Y%m%d%H%M%S%f\"", ")", ")", ")", "%", "4294967295", "if", "_RNG_SEED", "is", "not", "None", ":", "seed", "=", "_RNG_SEED", "return", "np", ".", "random", ".", "RandomState", "(", "seed", ")" ]
Returns the short url ending from a short url or a short url ending .
def _get_ending ( self , lookup_url ) : if lookup_url . startswith ( self . api_server ) : return lookup_url [ len ( self . api_server ) + 1 : ] return lookup_url
911
https://github.com/fauskanger/mypolr/blob/46eb4fc5ba0f65412634a37e30e05de79fc9db4c/mypolr/polr_api.py#L105-L120
[ "def", "run", "(", "self", ")", ":", "for", "cellindex", "in", "self", ".", "RANK_CELLINDICES", ":", "self", ".", "cellsim", "(", "cellindex", ")", "COMM", ".", "Barrier", "(", ")" ]
Looks up the url_ending to obtain information about the short url .
def lookup(self, lookup_url, url_key=None):
    """
    Look up the url_ending to obtain information about the short url.

    :param lookup_url: short url or short url ending to look up
    :param url_key: optional key for secret urls
    :return: the full url, or False when no url was found (404)
    :raises exceptions.UnauthorizedKeyError: on 401
    """
    url_ending = self._get_ending(lookup_url)
    payload = {'url_ending': url_ending, 'url_key': url_key}
    data, r = self._make_request(self.api_lookup_endpoint, payload)
    if r.status_code == 401:
        if url_key is not None:
            raise exceptions.UnauthorizedKeyError(
                'given url_key is not valid for secret lookup.')
        raise exceptions.UnauthorizedKeyError
    elif r.status_code == 404:
        # No url found in lookup.
        return False
    if data.get('action') == 'lookup':
        full_url = data.get('result')
        if full_url is not None:
            return full_url
    # Unexpected but non-error response shape.
    raise exceptions.DebugTempWarning
912
https://github.com/fauskanger/mypolr/blob/46eb4fc5ba0f65412634a37e30e05de79fc9db4c/mypolr/polr_api.py#L122-L173
[ "def", "ignore_directories", "(", "self", ",", "project", ")", ":", "project_list", "=", "False", "try", ":", "ignore_directories", "=", "il", "[", "'ignore_directories'", "]", "except", "KeyError", ":", "logger", ".", "error", "(", "'Key Error processing ignore_directories list values'", ")", "try", ":", "project_exceptions", "=", "il", ".", "get", "(", "'project_exceptions'", ")", "for", "item", "in", "project_exceptions", ":", "if", "project", "in", "item", ":", "exception_file", "=", "item", ".", "get", "(", "project", ")", "with", "open", "(", "exception_file", ",", "'r'", ")", "as", "f", ":", "test_list", "=", "yaml", ".", "safe_load", "(", "f", ")", "project_list", "=", "test_list", "[", "'ignore_directories'", "]", "except", "KeyError", ":", "logger", ".", "info", "(", "'No ignore_directories for %s'", ",", "project", ")", "if", "project_list", ":", "ignore_directories", "=", "ignore_directories", "+", "project_list", "return", "ignore_directories", "else", ":", "return", "ignore_directories" ]
Setup argparse arguments .
def make_argparser():
    """
    Set up argparse arguments.

    Builds the mypolr CLI parser: a positional url, API-server settings,
    action options and local credential management flags.

    :return: the configured :class:`argparse.ArgumentParser`
    """
    parser = argparse.ArgumentParser(
        prog='mypolr',
        description="Interacts with the Polr Project's API.\n\n"
                    "User Guide and documentation: https://mypolr.readthedocs.io",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        epilog="NOTE: if configurations are saved, they are stored as plain text on disk, "
               "and can be read by anyone with access to the file.")
    parser.add_argument("-v", "--version", action="store_true",
                        help="Print version and exit.")
    parser.add_argument("url", nargs='?', default=None,
                        help="The url to process.")

    # Server/credential configuration (persistable with --save).
    api_group = parser.add_argument_group(
        'API server arguments',
        'Use these for configure the API. Can be stored locally with --save.')
    api_group.add_argument("-s", "--server", default=None,
                           help="Server hosting the API.")
    api_group.add_argument("-k", "--key", default=None,
                           help="API_KEY to authenticate against server.")
    api_group.add_argument("--api-root", default=DEFAULT_API_ROOT,
                           help="API endpoint root.")

    # Which API action to perform.
    option_group = parser.add_argument_group(
        'Action options', 'Configure the API action to use.')
    option_group.add_argument("-c", "--custom", default=None,
                              help="Custom short url ending.")
    option_group.add_argument("--secret", action="store_true",
                              help="Set option if using secret url.")
    option_group.add_argument("-l", "--lookup", action="store_true",
                              help="Perform lookup action instead of shorten action.")

    # Local config file management.
    manage_group = parser.add_argument_group(
        'Manage credentials',
        'Use these to save, delete or update SERVER, KEY and/or '
        'API_ROOT locally in ~/.mypolr/config.ini.')
    manage_group.add_argument("--save", action="store_true",
                              help="Save configuration (including credentials) in plaintext(!).")
    manage_group.add_argument("--clear", action="store_true",
                              help="Clear configuration.")
    return parser
913
https://github.com/fauskanger/mypolr/blob/46eb4fc5ba0f65412634a37e30e05de79fc9db4c/mypolr/cli.py#L18-L61
[ "def", "restore", "(", "self", ")", ":", "clean_beam", ",", "beam_params", "=", "beam_fit", "(", "self", ".", "psf_data", ",", "self", ".", "cdelt1", ",", "self", ".", "cdelt2", ")", "if", "np", ".", "all", "(", "np", ".", "array", "(", "self", ".", "psf_data_shape", ")", "==", "2", "*", "np", ".", "array", "(", "self", ".", "dirty_data_shape", ")", ")", ":", "self", ".", "restored", "=", "np", ".", "fft", ".", "fftshift", "(", "np", ".", "fft", ".", "irfft2", "(", "np", ".", "fft", ".", "rfft2", "(", "conv", ".", "pad_array", "(", "self", ".", "model", ")", ")", "*", "np", ".", "fft", ".", "rfft2", "(", "clean_beam", ")", ")", ")", "self", ".", "restored", "=", "self", ".", "restored", "[", "self", ".", "dirty_data_shape", "[", "0", "]", "/", "2", ":", "-", "self", ".", "dirty_data_shape", "[", "0", "]", "/", "2", ",", "self", ".", "dirty_data_shape", "[", "1", "]", "/", "2", ":", "-", "self", ".", "dirty_data_shape", "[", "1", "]", "/", "2", "]", "else", ":", "self", ".", "restored", "=", "np", ".", "fft", ".", "fftshift", "(", "np", ".", "fft", ".", "irfft2", "(", "np", ".", "fft", ".", "rfft2", "(", "self", ".", "model", ")", "*", "np", ".", "fft", ".", "rfft2", "(", "clean_beam", ")", ")", ")", "self", ".", "restored", "+=", "self", ".", "residual", "self", ".", "restored", "=", "self", ".", "restored", ".", "astype", "(", "np", ".", "float32", ")", "return", "beam_params" ]
This function estimates the noise using the MAD estimator .
def estimate_threshold(in1, edge_excl=0, int_excl=0):
    """
    Estimate the per-scale noise level using the MAD estimator.

    INPUTS:
    in1         (no default):   3D array of wavelet coefficients, one 2D
                                slice per scale.
    edge_excl   (default=0):    width of the border band excluded from the
                                estimate.
    int_excl    (default=0):    half-width of the central square excluded
                                from the estimate (assumes the image centre
                                is at shape//2 — TODO confirm for odd sizes).

    OUTPUTS:
    out1                        1D array with one noise estimate per scale.
    """
    out1 = np.empty([in1.shape[0]])

    # BUG FIX: integer division is required here — on Python 3, `/ 2`
    # yields a float and the mask slices below raise a TypeError.
    mid = in1.shape[1] // 2

    if (edge_excl != 0) | (int_excl != 0):
        if edge_excl != 0:
            mask = np.zeros([in1.shape[1], in1.shape[2]])
            mask[edge_excl:-edge_excl, edge_excl:-edge_excl] = 1
        else:
            mask = np.ones([in1.shape[1], in1.shape[2]])

        if int_excl != 0:
            mask[mid - int_excl:mid + int_excl, mid - int_excl:mid + int_excl] = 0
    else:
        mask = np.ones([in1.shape[1], in1.shape[2]])

    for i in range(in1.shape[0]):
        # 0.6745 converts the median absolute deviation into an estimate of
        # the standard deviation of Gaussian noise.
        out1[i] = np.median(np.abs(in1[i, mask == 1])) / 0.6745

    return out1
914
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_toolbox.py#L17-L48
[ "def", "_replace_path", "(", "self", ",", "path", ")", ":", "if", "path", "in", "self", ".", "_field_to_key", ":", "return", "self", ".", "_field_to_key", "[", "path", "]", "next_key", "=", "\"#f%d\"", "%", "self", ".", "_next_field", "self", ".", "_next_field", "+=", "1", "self", ".", "_field_to_key", "[", "path", "]", "=", "next_key", "self", ".", "_fields", "[", "next_key", "]", "=", "path", "return", "next_key" ]
Convenience function for allocating work to cpu or gpu depending on the selected mode .
def source_extraction(in1, tolerance, mode="cpu", store_on_gpu=False, neg_comp=False):
    """
    Convenience function for allocating work to cpu or gpu depending on the
    selected mode.

    :param in1: wavelet decomposition to extract sources from
    :param tolerance: tolerance relative to the per-scale maximum
    :param mode: "cpu" or "gpu"
    :param store_on_gpu: gpu mode only — keep the result on the device
    :param neg_comp: consider negative components as well
    :raises ValueError: for an unrecognised mode
    """
    if mode == "cpu":
        return cpu_source_extraction(in1, tolerance, neg_comp)
    elif mode == "gpu":
        return gpu_source_extraction(in1, tolerance, store_on_gpu, neg_comp)
    # Previously an unrecognised mode silently returned None; fail loudly
    # instead so misconfiguration is caught at the call site.
    raise ValueError("mode must be 'cpu' or 'gpu', got {!r}".format(mode))
915
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_toolbox.py#L77-L94
[ "def", "generate_version_file", "(", "self", ",", "schema_filename", ",", "binding_filename", ")", ":", "version_filename", "=", "binding_filename", "+", "'_version.txt'", "version_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "binding_dir", ",", "version_filename", ")", "schema_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "schema_dir", ",", "schema_filename", ")", "try", ":", "tstamp", ",", "svnpath", ",", "svnrev", ",", "version", "=", "self", ".", "get_version_info_from_svn", "(", "schema_path", ")", "except", "TypeError", ":", "pass", "else", ":", "self", ".", "write_version_file", "(", "version_path", ",", "tstamp", ",", "svnpath", ",", "svnrev", ",", "version", ")" ]
The following function determines connectivity within a given wavelet decomposition . These connected and labelled structures are thresholded to within some tolerance of the maximum coefficient at the scale . This determines whether on not an object is to be considered as significant . Significant objects are extracted and factored into a mask which is finally multiplied by the wavelet coefficients to return only wavelet coefficients belonging to significant objects across all scales .
def cpu_source_extraction(in1, tolerance, neg_comp):
    """
    Determine connectivity within a wavelet decomposition and keep only
    significant objects.

    Connected structures are labelled per scale and thresholded to within
    ``tolerance`` of that scale's maximum coefficient. Objects at finer
    scales must additionally overlap a significant object at the next
    coarser scale. The resulting mask is multiplied into the coefficients.

    :return: tuple of (masked coefficients, per-scale object masks)
    """
    scale_count = in1.shape[0]
    scale_maxima = np.empty([scale_count, 1])
    objects = np.empty_like(in1, dtype=np.int32)
    object_count = np.empty([scale_count, 1], dtype=np.int32)

    # Label connected structures (8-connectivity) and record each scale's
    # maximum coefficient.
    connectivity = [[1, 1, 1], [1, 1, 1], [1, 1, 1]]
    for i in range(scale_count):
        if neg_comp:
            scale_maxima[i] = np.max(abs(in1[i, :, :]))
        else:
            scale_maxima[i] = np.max(in1[i, :, :])
        objects[i, :, :], object_count[i] = ndimage.label(
            in1[i, :, :], structure=connectivity)

    # Walk scales from coarsest (-1) to finest; `objects[i + 1]` has already
    # been reduced to a 0/1 mask when scale i is processed, which enforces
    # the cross-scale overlap requirement.
    for i in range(-1, -scale_count - 1, -1):
        coeffs = abs(in1[i, :, :]) if neg_comp else in1[i, :, :]
        significant = coeffs >= (tolerance * scale_maxima[i])
        if i == -1:
            tmp = significant * objects[i, :, :]
        else:
            tmp = significant * objects[i, :, :] * objects[i + 1, :, :]
        # Mark surviving labels, drop the rest, then flip to a 0/1 mask.
        labels = np.unique(tmp[tmp > 0])
        for j in labels:
            objects[i, (objects[i, :, :] == j)] = -1
        objects[i, (objects[i, :, :] > 0)] = 0
        objects[i, :, :] = -(objects[i, :, :])

    return objects * in1, objects
916
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_toolbox.py#L97-L154
[ "def", "_get_variant_silent", "(", "parser", ",", "variant", ")", ":", "prev_log", "=", "config", ".", "LOG_NOT_FOUND", "config", ".", "LOG_NOT_FOUND", "=", "False", "results", "=", "parser", ".", "get_variant_genotypes", "(", "variant", ")", "config", ".", "LOG_NOT_FOUND", "=", "prev_log", "return", "results" ]
The following function simply calculates the signal to noise ratio between two signals .
def snr_ratio(in1, in2):
    """
    Calculate the signal-to-noise ratio, in dB, between two signals,
    treating ``in1 - in2`` as the noise term.
    """
    signal_norm = np.linalg.norm(in1)
    noise_norm = np.linalg.norm(in1 - in2)
    return 20 * (np.log10(signal_norm / noise_norm))
917
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_toolbox.py#L292-L306
[ "def", "cache_url_config", "(", "cls", ",", "url", ",", "backend", "=", "None", ")", ":", "url", "=", "urlparse", "(", "url", ")", "if", "not", "isinstance", "(", "url", ",", "cls", ".", "URL_CLASS", ")", "else", "url", "location", "=", "url", ".", "netloc", ".", "split", "(", "','", ")", "if", "len", "(", "location", ")", "==", "1", ":", "location", "=", "location", "[", "0", "]", "config", "=", "{", "'BACKEND'", ":", "cls", ".", "CACHE_SCHEMES", "[", "url", ".", "scheme", "]", ",", "'LOCATION'", ":", "location", ",", "}", "# Add the drive to LOCATION", "if", "url", ".", "scheme", "==", "'filecache'", ":", "config", ".", "update", "(", "{", "'LOCATION'", ":", "url", ".", "netloc", "+", "url", ".", "path", ",", "}", ")", "if", "url", ".", "path", "and", "url", ".", "scheme", "in", "[", "'memcache'", ",", "'pymemcache'", "]", ":", "config", ".", "update", "(", "{", "'LOCATION'", ":", "'unix:'", "+", "url", ".", "path", ",", "}", ")", "elif", "url", ".", "scheme", ".", "startswith", "(", "'redis'", ")", ":", "if", "url", ".", "hostname", ":", "scheme", "=", "url", ".", "scheme", ".", "replace", "(", "'cache'", ",", "''", ")", "else", ":", "scheme", "=", "'unix'", "locations", "=", "[", "scheme", "+", "'://'", "+", "loc", "+", "url", ".", "path", "for", "loc", "in", "url", ".", "netloc", ".", "split", "(", "','", ")", "]", "config", "[", "'LOCATION'", "]", "=", "locations", "[", "0", "]", "if", "len", "(", "locations", ")", "==", "1", "else", "locations", "if", "url", ".", "query", ":", "config_options", "=", "{", "}", "for", "k", ",", "v", "in", "parse_qs", "(", "url", ".", "query", ")", ".", "items", "(", ")", ":", "opt", "=", "{", "k", ".", "upper", "(", ")", ":", "_cast", "(", "v", "[", "0", "]", ")", "}", "if", "k", ".", "upper", "(", ")", "in", "cls", ".", "_CACHE_BASE_OPTIONS", ":", "config", ".", "update", "(", "opt", ")", "else", ":", "config_options", ".", "update", "(", "opt", ")", "config", "[", "'OPTIONS'", "]", "=", "config_options", "if", "backend", 
":", "config", "[", "'BACKEND'", "]", "=", "backend", "return", "config" ]
Wait for the RabbitMQ process to come up .
def wait_for_start(self):
    """
    Wait for the RabbitMQ process to come up.

    Uses ``rabbitmqctl wait`` bounded by our own wait_timeout, surfacing
    failure as a TimeoutError.
    """
    timeout_str = str(int(self.wait_timeout))
    er = self.exec_rabbitmqctl(
        'wait', ['--pid', '1', '--timeout', timeout_str])
    output_lines(er, error_exc=TimeoutError)
918
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/rabbitmq.py#L56-L62
[ "def", "_clear", "(", "self", ",", "fully", "=", "True", ")", ":", "pages", "=", "self", ".", "pages", "if", "not", "pages", ":", "return", "self", ".", "_keyframe", "=", "pages", "[", "0", "]", "if", "fully", ":", "# delete all but first TiffPage/TiffFrame", "for", "i", ",", "page", "in", "enumerate", "(", "pages", "[", "1", ":", "]", ")", ":", "if", "not", "isinstance", "(", "page", ",", "inttypes", ")", "and", "page", ".", "offset", "is", "not", "None", ":", "pages", "[", "i", "+", "1", "]", "=", "page", ".", "offset", "elif", "TiffFrame", "is", "not", "TiffPage", ":", "# delete only TiffFrames", "for", "i", ",", "page", "in", "enumerate", "(", "pages", ")", ":", "if", "isinstance", "(", "page", ",", "TiffFrame", ")", "and", "page", ".", "offset", "is", "not", "None", ":", "pages", "[", "i", "]", "=", "page", ".", "offset", "self", ".", "_cached", "=", "False" ]
Execute a rabbitmqctl command inside a running container .
def exec_rabbitmqctl(self, command, args=None, rabbitmqctl_opts=None):
    """
    Execute a rabbitmqctl command inside a running container.

    :param command: the rabbitmqctl command to run
    :param args: optional list of arguments for the command
    :param rabbitmqctl_opts: options passed to rabbitmqctl before the
        command; defaults to ``['-q']`` (quiet)
    :return: the result of the exec
    """
    # Avoid mutable default arguments (shared list objects across calls);
    # the None sentinel is behaviour-compatible with the old defaults.
    if args is None:
        args = []
    if rabbitmqctl_opts is None:
        rabbitmqctl_opts = ['-q']
    cmd = ['rabbitmqctl'] + rabbitmqctl_opts + [command] + args
    return self.inner().exec_run(cmd)
919
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/rabbitmq.py#L87-L98
[ "def", "delete", "(", "self", ",", "object_type", ",", "object_id", ")", ":", "tag_names", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "if", "not", "tag_names", ":", "return", "Response", "(", "status", "=", "403", ")", "db", ".", "session", ".", "query", "(", "TaggedObject", ")", ".", "filter", "(", "and_", "(", "TaggedObject", ".", "object_type", "==", "object_type", ",", "TaggedObject", ".", "object_id", "==", "object_id", ")", ",", "TaggedObject", ".", "tag", ".", "has", "(", "Tag", ".", "name", ".", "in_", "(", "tag_names", ")", ")", ",", ")", ".", "delete", "(", "synchronize_session", "=", "False", ")", "db", ".", "session", ".", "commit", "(", ")", "return", "Response", "(", "status", "=", "204", ")" ]
Execute a rabbitmqctl command to list the given resources .
def exec_rabbitmqctl_list(self, resources, args=None, rabbitmq_opts=None):
    """
    Execute a rabbitmqctl command to list the given resources.

    :param resources: the resources to list, e.g. ``'users'``
    :param args: optional list of arguments for the list command
    :param rabbitmq_opts: options for rabbitmqctl; defaults to
        ``['-q', '--no-table-headers']``
    :return: the result of the exec
    """
    # Avoid mutable default arguments (shared list objects across calls);
    # the None sentinel is behaviour-compatible with the old defaults.
    if args is None:
        args = []
    if rabbitmq_opts is None:
        rabbitmq_opts = ['-q', '--no-table-headers']
    command = 'list_{}'.format(resources)
    return self.exec_rabbitmqctl(command, args, rabbitmq_opts)
920
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/rabbitmq.py#L100-L112
[ "def", "dump_webdriver_cookies_into_requestdriver", "(", "requestdriver", ",", "webdriverwrapper", ")", ":", "for", "cookie", "in", "webdriverwrapper", ".", "get_cookies", "(", ")", ":", "# Wedbriver uses \"expiry\"; requests uses \"expires\", adjust for this", "expires", "=", "cookie", ".", "pop", "(", "'expiry'", ",", "{", "'expiry'", ":", "None", "}", ")", "cookie", ".", "update", "(", "{", "'expires'", ":", "expires", "}", ")", "requestdriver", ".", "session", ".", "cookies", ".", "set", "(", "*", "*", "cookie", ")" ]
Run the list_users command and return a list of tuples describing the users .
def list_users(self):
    """
    Run the ``list_users`` command and return a list of tuples describing
    the users.

    :return: a list of parsed user entries, one per output line
    """
    result = self.exec_rabbitmqctl_list('users')
    return [_parse_rabbitmq_user(line) for line in output_lines(result)]
921
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/rabbitmq.py#L133-L143
[ "def", "_set_cdn_access", "(", "self", ",", "container", ",", "public", ",", "ttl", "=", "None", ")", ":", "headers", "=", "{", "\"X-Cdn-Enabled\"", ":", "\"%s\"", "%", "public", "}", "if", "public", "and", "ttl", ":", "headers", "[", "\"X-Ttl\"", "]", "=", "ttl", "self", ".", "api", ".", "cdn_request", "(", "\"/%s\"", "%", "utils", ".", "get_name", "(", "container", ")", ",", "method", "=", "\"PUT\"", ",", "headers", "=", "headers", ")" ]
Returns a broker URL for use with Celery .
def broker_url(self):
    """Return a broker URL for use with Celery."""
    parts = (self.user, self.password, self.name, self.vhost)
    return 'amqp://{}:{}@{}/{}'.format(*parts)
922
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/rabbitmq.py#L145-L148
[ "def", "weld_replace", "(", "array", ",", "weld_type", ",", "this", ",", "to", ")", ":", "if", "not", "isinstance", "(", "this", ",", "str", ")", ":", "this", "=", "to_weld_literal", "(", "this", ",", "weld_type", ")", "to", "=", "to_weld_literal", "(", "to", ",", "weld_type", ")", "obj_id", ",", "weld_obj", "=", "create_weld_object", "(", "array", ")", "weld_template", "=", "\"\"\"map({array},\n |e: {type}|\n if(e == {this},\n {to},\n e\n ) \n)\"\"\"", "weld_obj", ".", "weld_code", "=", "weld_template", ".", "format", "(", "array", "=", "obj_id", ",", "type", "=", "weld_type", ",", "this", "=", "this", ",", "to", "=", "to", ")", "return", "weld_obj" ]
Execute a command inside a running container as the postgres user asserting success .
def exec_pg_success(self, cmd):
    """Execute a command inside the running container as the ``postgres``
    user, asserting that it exits successfully."""
    outcome = self.inner().exec_run(cmd, user='postgres')
    assert outcome.exit_code == 0, outcome.output.decode('utf-8')
    return outcome
923
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L63-L70
[ "def", "ticker", "(", "ctx", ",", "market", ")", ":", "market", "=", "Market", "(", "market", ",", "bitshares_instance", "=", "ctx", ".", "bitshares", ")", "ticker", "=", "market", ".", "ticker", "(", ")", "t", "=", "[", "[", "\"key\"", ",", "\"value\"", "]", "]", "for", "key", "in", "ticker", ":", "t", ".", "append", "(", "[", "key", ",", "str", "(", "ticker", "[", "key", "]", ")", "]", ")", "print_table", "(", "t", ")" ]
Remove all data by dropping and recreating the configured database .
def clean(self):
    """Remove all data by dropping and recreating the configured database."""
    drop = ['dropdb', '-U', self.user, self.database]
    create = ['createdb', '-U', self.user, self.database]
    self.exec_pg_success(drop)
    self.exec_pg_success(create)
924
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L72-L82
[ "def", "confirmation", "(", "self", ",", "apdu", ")", ":", "if", "_debug", ":", "ClientSSM", ".", "_debug", "(", "\"confirmation %r\"", ",", "apdu", ")", "if", "self", ".", "state", "==", "SEGMENTED_REQUEST", ":", "self", ".", "segmented_request", "(", "apdu", ")", "elif", "self", ".", "state", "==", "AWAIT_CONFIRMATION", ":", "self", ".", "await_confirmation", "(", "apdu", ")", "elif", "self", ".", "state", "==", "SEGMENTED_CONFIRMATION", ":", "self", ".", "segmented_confirmation", "(", "apdu", ")", "else", ":", "raise", "RuntimeError", "(", "\"invalid state\"", ")" ]
Execute a psql command inside a running container. By default the container's database is connected to.
def exec_psql(self, command, psql_opts=None):
    """Execute a ``psql`` command inside the running container.

    By default the container's configured database is connected to.

    :param command: the SQL (or psql meta-) command to run via ``-c``.
    :param psql_opts: options for psql; defaults to quiet, tuples-only,
        unaligned output (``-qtA``).
    :returns: the result of ``exec_run`` on the container.
    """
    # BUGFIX: avoid a mutable default argument - the original ['-qtA']
    # list object would be shared between all calls.
    if psql_opts is None:
        psql_opts = ['-qtA']
    cmd = ['psql'] + psql_opts + [
        '--dbname', self.database,
        '-U', self.user,
        '-c', command,
    ]
    return self.inner().exec_run(cmd, user='postgres')
925
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L84-L98
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Runs the \\ list command and returns a list of column values with information about all databases .
def list_databases(self):
    """Run the ``\\list`` command and return a list of column values with
    information about all databases."""
    rows = output_lines(self.exec_psql('\\list'))
    return [row.split('|') for row in rows]
926
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L100-L106
[ "def", "market_if_touched_replace", "(", "self", ",", "accountID", ",", "orderID", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "replace", "(", "accountID", ",", "orderID", ",", "order", "=", "MarketIfTouchedOrderRequest", "(", "*", "*", "kwargs", ")", ")" ]
Runs the \\ dt command and returns a list of column values with information about all tables in the database .
def list_tables(self):
    """Run the ``\\dt`` command and return a list of column values with
    information about all tables in the database."""
    rows = output_lines(self.exec_psql('\\dt'))
    return [row.split('|') for row in rows]
927
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L108-L114
[ "def", "create", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "super", "(", "ImageMemberManager", ",", "self", ")", ".", "create", "(", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "e", ".", "http_status", "==", "403", ":", "raise", "exc", ".", "UnsharableImage", "(", "\"You cannot share a public image.\"", ")", "else", ":", "raise" ]
Runs the \\ du command and returns a list of column values with information about all user roles .
def list_users(self):
    """Run the ``\\du`` command and return a list of column values with
    information about all user roles."""
    rows = output_lines(self.exec_psql('\\du'))
    return [row.split('|') for row in rows]
928
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L116-L122
[ "def", "disassociate_public_ip", "(", "self", ",", "public_ip_id", ")", ":", "floating_ip", "=", "self", ".", "client", ".", "floating_ips", ".", "get", "(", "public_ip_id", ")", "floating_ip", "=", "floating_ip", ".", "to_dict", "(", ")", "instance_id", "=", "floating_ip", ".", "get", "(", "'instance_id'", ")", "address", "=", "floating_ip", ".", "get", "(", "'ip'", ")", "self", ".", "client", ".", "servers", ".", "remove_floating_ip", "(", "instance_id", ",", "address", ")", "return", "True" ]
Returns a database URL for use with DJ - Database - URL and similar libraries .
def database_url(self):
    """Return a database URL for use with DJ-Database-URL and similar
    libraries."""
    parts = (self.user, self.password, self.name, self.database)
    return 'postgres://{}:{}@{}/{}'.format(*parts)
929
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/postgresql.py#L124-L130
[ "def", "wait_for_compactions", "(", "self", ",", "timeout", "=", "120", ")", ":", "pattern", "=", "re", ".", "compile", "(", "\"pending tasks: 0\"", ")", "start", "=", "time", ".", "time", "(", ")", "while", "time", ".", "time", "(", ")", "-", "start", "<", "timeout", ":", "output", ",", "err", ",", "rc", "=", "self", ".", "nodetool", "(", "\"compactionstats\"", ")", "if", "pattern", ".", "search", "(", "output", ")", ":", "return", "time", ".", "sleep", "(", "1", ")", "raise", "TimeoutError", "(", "\"{} [{}] Compactions did not finish in {} seconds\"", ".", "format", "(", "time", ".", "strftime", "(", "\"%d %b %Y %H:%M:%S\"", ",", "time", ".", "gmtime", "(", ")", ")", ",", "self", ".", "name", ",", "timeout", ")", ")" ]
Generate a matrix from a configuration dictionary .
def from_config(config):
    """Generate a matrix of named environments from a configuration dict.

    ``config`` maps variable names to iterables of entry objects; each entry
    is expected to expose ``value``, ``alias`` and ``reducers`` attributes
    (project types - not visible here).
    """
    matrix = {}
    variables = config.keys()
    # Cartesian product over every variable's entries: each combination is
    # one candidate environment.
    for entries in product(*config.values()):
        combination = dict(zip(variables, entries))
        include = True
        # Apply every entry's reducers (pattern rules referring to other
        # variables) to decide whether this combination survives.
        for value in combination.values():
            for reducer in value.reducers:
                if reducer.pattern == '-':
                    # '-' appears to mean "matches when the referenced
                    # variable's value is empty" - TODO confirm upstream.
                    match = not combination[reducer.variable].value
                else:
                    match = fnmatch(combination[reducer.variable].value,
                                    reducer.pattern)
                # Exclude reducers drop matching combinations; include
                # reducers drop non-matching ones.
                if match if reducer.is_exclude else not match:
                    include = False
        if include:
            # Environment key joins the aliased entries only.
            key = '-'.join(entry.alias for entry in entries if entry.alias)
            data = dict(zip(variables, (entry.value for entry in entries)))
            # The same key may legitimately be produced twice; it is only an
            # error if the payloads differ.
            if key in matrix and data != matrix[key]:
                raise DuplicateEnvironment(key, data, matrix[key])
            matrix[key] = data
    return matrix
930
https://github.com/ionelmc/python-matrix/blob/e1a63879a6c94c37c3883386f1d86eb7c2179a5b/src/matrix/__init__.py#L131-L156
[ "def", "_try_free_lease", "(", "self", ",", "shard_state", ",", "slice_retry", "=", "False", ")", ":", "@", "db", ".", "transactional", "def", "_tx", "(", ")", ":", "fresh_state", "=", "model", ".", "ShardState", ".", "get_by_shard_id", "(", "shard_state", ".", "shard_id", ")", "if", "fresh_state", "and", "fresh_state", ".", "active", ":", "# Free lease.", "fresh_state", ".", "slice_start_time", "=", "None", "fresh_state", ".", "slice_request_id", "=", "None", "if", "slice_retry", ":", "fresh_state", ".", "slice_retries", "+=", "1", "fresh_state", ".", "put", "(", ")", "try", ":", "_tx", "(", ")", "# pylint: disable=broad-except", "except", "Exception", ",", "e", ":", "logging", ".", "warning", "(", "e", ")", "logging", ".", "warning", "(", "\"Release lock for shard %s failed. Wait for lease to expire.\"", ",", "shard_state", ".", "shard_id", ")" ]
This only needs to be called manually from unit tests
def flush(self):
    """Drain the work queue by joining it.

    This only needs to be called manually from unit tests.
    """
    log = self.logger.debug
    log('Flush joining')
    self.queue.join()
    log('Flush joining ready')
931
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/worker.py#L112-L119
[ "def", "_get_api_id", "(", "self", ",", "event_properties", ")", ":", "api_id", "=", "event_properties", ".", "get", "(", "\"RestApiId\"", ")", "if", "isinstance", "(", "api_id", ",", "dict", ")", "and", "\"Ref\"", "in", "api_id", ":", "api_id", "=", "api_id", "[", "\"Ref\"", "]", "return", "api_id" ]
Convert bytestring container output or the result of a container exec command into a sequence of unicode lines .
def output_lines(output, encoding='utf-8', error_exc=None):
    """Convert bytestring container output, or the result of a container
    exec command, into a list of unicode lines.

    If ``output`` is an ``ExecResult`` with a non-zero exit code and
    ``error_exc`` is given, that exception is raised with the decoded output.
    """
    if isinstance(output, ExecResult):
        exit_code, output = output
        if exit_code != 0 and error_exc is not None:
            raise error_exc(output.decode(encoding))
    text = output.decode(encoding)
    return text.splitlines()
932
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/utils.py#L4-L27
[ "def", "exclude_types", "(", "self", ",", "*", "objs", ")", ":", "for", "o", "in", "objs", ":", "for", "t", "in", "_keytuple", "(", "o", ")", ":", "if", "t", "and", "t", "not", "in", "self", ".", "_excl_d", ":", "self", ".", "_excl_d", "[", "t", "]", "=", "0" ]
Get file from WeedFS .
def get_file(self, fid):
    """Fetch a file's raw content from WeedFS."""
    return self.conn.get_raw_data(self.get_file_url(fid))
933
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L50-L66
[ "def", "extract", "(", "self", ",", "content", ",", "output", ")", ":", "for", "table", "in", "self", "[", "'tables'", "]", ":", "# First apply default options.", "plugin_settings", "=", "DEFAULT_OPTIONS", ".", "copy", "(", ")", "plugin_settings", ".", "update", "(", "table", ")", "table", "=", "plugin_settings", "# Validate settings", "assert", "'start'", "in", "table", ",", "'Table start regex missing'", "assert", "'end'", "in", "table", ",", "'Table end regex missing'", "assert", "'body'", "in", "table", ",", "'Table body regex missing'", "start", "=", "re", ".", "search", "(", "table", "[", "'start'", "]", ",", "content", ")", "end", "=", "re", ".", "search", "(", "table", "[", "'end'", "]", ",", "content", ")", "if", "not", "start", "or", "not", "end", ":", "logger", ".", "warning", "(", "'no table body found - start %s, end %s'", ",", "start", ",", "end", ")", "continue", "table_body", "=", "content", "[", "start", ".", "end", "(", ")", ":", "end", ".", "start", "(", ")", "]", "for", "line", "in", "re", ".", "split", "(", "table", "[", "'line_separator'", "]", ",", "table_body", ")", ":", "# if the line has empty lines in it , skip them", "if", "not", "line", ".", "strip", "(", "''", ")", ".", "strip", "(", "'\\n'", ")", "or", "not", "line", ":", "continue", "match", "=", "re", ".", "search", "(", "table", "[", "'body'", "]", ",", "line", ")", "if", "match", ":", "for", "field", ",", "value", "in", "match", ".", "groupdict", "(", ")", ".", "items", "(", ")", ":", "# If a field name already exists, do not overwrite it", "if", "field", "in", "output", ":", "continue", "if", "field", ".", "startswith", "(", "'date'", ")", "or", "field", ".", "endswith", "(", "'date'", ")", ":", "output", "[", "field", "]", "=", "self", ".", "parse_date", "(", "value", ")", "if", "not", "output", "[", "field", "]", ":", "logger", ".", "error", "(", "\"Date parsing failed on date '%s'\"", ",", "value", ")", "return", "None", "elif", "field", ".", "startswith", "(", "'amount'", 
")", ":", "output", "[", "field", "]", "=", "self", ".", "parse_number", "(", "value", ")", "else", ":", "output", "[", "field", "]", "=", "value", "logger", ".", "debug", "(", "'ignoring *%s* because it doesn\\'t match anything'", ",", "line", ")" ]
Get url for the file
def get_file_url(self, fid, public=None):
    """Build the HTTP URL for the file identified by ``fid``.

    :param fid: WeedFS file id in ``<volume_id>,<file_name_hash>`` form.
    :param public: force the public volume URL on/off; defaults to the
        instance-wide ``use_public_url`` setting.
    :raises BadFidFormat: if ``fid`` is not of the expected form.
    """
    try:
        volume_id, rest = fid.strip().split(",")
    except ValueError:
        raise BadFidFormat(
            "fid must be in format: <volume_id>,<file_name_hash>")
    location = self.get_file_location(volume_id)
    if public is None:
        public = self.use_public_url
    host = location.public_url if public else location.url
    return "http://{volume_url}/{fid}".format(volume_url=host, fid=fid)
934
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L68-L87
[ "def", "handlePortfolio", "(", "self", ",", "msg", ")", ":", "# log handler msg", "self", ".", "log_msg", "(", "\"portfolio\"", ",", "msg", ")", "# contract identifier", "contract_tuple", "=", "self", ".", "contract_to_tuple", "(", "msg", ".", "contract", ")", "contractString", "=", "self", ".", "contractString", "(", "contract_tuple", ")", "# try creating the contract", "self", ".", "registerContract", "(", "msg", ".", "contract", ")", "# new account?", "if", "msg", ".", "accountName", "not", "in", "self", ".", "_portfolios", ".", "keys", "(", ")", ":", "self", ".", "_portfolios", "[", "msg", ".", "accountName", "]", "=", "{", "}", "self", ".", "_portfolios", "[", "msg", ".", "accountName", "]", "[", "contractString", "]", "=", "{", "\"symbol\"", ":", "contractString", ",", "\"position\"", ":", "int", "(", "msg", ".", "position", ")", ",", "\"marketPrice\"", ":", "float", "(", "msg", ".", "marketPrice", ")", ",", "\"marketValue\"", ":", "float", "(", "msg", ".", "marketValue", ")", ",", "\"averageCost\"", ":", "float", "(", "msg", ".", "averageCost", ")", ",", "\"unrealizedPNL\"", ":", "float", "(", "msg", ".", "unrealizedPNL", ")", ",", "\"realizedPNL\"", ":", "float", "(", "msg", ".", "realizedPNL", ")", ",", "\"totalPNL\"", ":", "float", "(", "msg", ".", "realizedPNL", ")", "+", "float", "(", "msg", ".", "unrealizedPNL", ")", ",", "\"account\"", ":", "msg", ".", "accountName", "}", "# fire callback", "self", ".", "ibCallback", "(", "caller", "=", "\"handlePortfolio\"", ",", "msg", "=", "msg", ")" ]
Get the location for the file. The WeedFS volume is chosen randomly.
def get_file_location(self, volume_id):
    """Look up a location for the given volume.

    A volume may be served from several places; one location is chosen
    at random.
    """
    url = ("http://{master_addr}:{master_port}/"
           "dir/lookup?volumeId={volume_id}").format(
        master_addr=self.master_addr,
        master_port=self.master_port,
        volume_id=volume_id)
    locations = json.loads(self.conn.get_data(url))['locations']
    chosen = random.choice(locations)
    FileLocation = namedtuple('FileLocation', "public_url url")
    return FileLocation(chosen['publicUrl'], chosen['url'])
935
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L89-L105
[ "def", "_on_stream_update", "(", "self", ",", "data", ")", ":", "self", ".", "_streams", "[", "data", ".", "get", "(", "'id'", ")", "]", ".", "update", "(", "data", ".", "get", "(", "'stream'", ")", ")", "_LOGGER", ".", "info", "(", "'stream %s updated'", ",", "self", ".", "_streams", "[", "data", ".", "get", "(", "'id'", ")", "]", ".", "friendly_name", ")", "for", "group", "in", "self", ".", "_groups", ".", "values", "(", ")", ":", "if", "group", ".", "stream", "==", "data", ".", "get", "(", "'id'", ")", ":", "group", ".", "callback", "(", ")" ]
Gets the size of an uploaded file, or None if the file doesn't exist.
def get_file_size(self, fid):
    """Return the size in bytes of an uploaded file, or None if it cannot
    be determined (e.g. the file does not exist)."""
    response = self.conn.head(self.get_file_url(fid))
    if response is None:
        return None
    length = response.headers.get("content-length", None)
    return int(length) if length is not None else None
936
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L107-L124
[ "def", "cmd_oreoled", "(", "self", ",", "args", ")", ":", "if", "len", "(", "args", ")", "<", "4", ":", "print", "(", "\"Usage: oreoled LEDNUM RED GREEN BLUE <RATE>\"", ")", "return", "lednum", "=", "int", "(", "args", "[", "0", "]", ")", "pattern", "=", "[", "0", "]", "*", "24", "pattern", "[", "0", "]", "=", "ord", "(", "'R'", ")", "pattern", "[", "1", "]", "=", "ord", "(", "'G'", ")", "pattern", "[", "2", "]", "=", "ord", "(", "'B'", ")", "pattern", "[", "3", "]", "=", "ord", "(", "'0'", ")", "pattern", "[", "4", "]", "=", "0", "pattern", "[", "5", "]", "=", "int", "(", "args", "[", "1", "]", ")", "pattern", "[", "6", "]", "=", "int", "(", "args", "[", "2", "]", ")", "pattern", "[", "7", "]", "=", "int", "(", "args", "[", "3", "]", ")", "self", ".", "master", ".", "mav", ".", "led_control_send", "(", "self", ".", "settings", ".", "target_system", ",", "self", ".", "settings", ".", "target_component", ",", "lednum", ",", "255", ",", "8", ",", "pattern", ")" ]
Checks if file with provided fid exists
def file_exists(self, fid):
    """Return True if a file with the provided fid exists."""
    return self.get_file_size(fid) is not None
937
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L126-L138
[ "def", "extract_name_from_path", "(", "path", ")", ":", "base_path", ",", "query_string", "=", "path", ".", "split", "(", "'?'", ")", "infos", "=", "base_path", ".", "strip", "(", "'/'", ")", ".", "split", "(", "'/'", ")", "[", "2", ":", "]", "# Removes api/version.", "if", "len", "(", "infos", ")", ">", "1", ":", "# This is an object.", "name", "=", "'{category} / {name}'", ".", "format", "(", "category", "=", "infos", "[", "0", "]", ".", "title", "(", ")", ",", "name", "=", "infos", "[", "1", "]", ".", "replace", "(", "'-'", ",", "' '", ")", ".", "title", "(", ")", ")", "else", ":", "# This is a collection.", "name", "=", "'{category}'", ".", "format", "(", "category", "=", "infos", "[", "0", "]", ".", "title", "(", ")", ")", "return", "safe_unicode", "(", "name", ")" ]
Delete file from WeedFS
def delete_file(self, fid):
    """Delete the file identified by ``fid`` from WeedFS."""
    target = self.get_file_url(fid)
    return self.conn.delete_data(target)
938
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L140-L147
[ "def", "extract", "(", "self", ",", "content", ",", "output", ")", ":", "for", "table", "in", "self", "[", "'tables'", "]", ":", "# First apply default options.", "plugin_settings", "=", "DEFAULT_OPTIONS", ".", "copy", "(", ")", "plugin_settings", ".", "update", "(", "table", ")", "table", "=", "plugin_settings", "# Validate settings", "assert", "'start'", "in", "table", ",", "'Table start regex missing'", "assert", "'end'", "in", "table", ",", "'Table end regex missing'", "assert", "'body'", "in", "table", ",", "'Table body regex missing'", "start", "=", "re", ".", "search", "(", "table", "[", "'start'", "]", ",", "content", ")", "end", "=", "re", ".", "search", "(", "table", "[", "'end'", "]", ",", "content", ")", "if", "not", "start", "or", "not", "end", ":", "logger", ".", "warning", "(", "'no table body found - start %s, end %s'", ",", "start", ",", "end", ")", "continue", "table_body", "=", "content", "[", "start", ".", "end", "(", ")", ":", "end", ".", "start", "(", ")", "]", "for", "line", "in", "re", ".", "split", "(", "table", "[", "'line_separator'", "]", ",", "table_body", ")", ":", "# if the line has empty lines in it , skip them", "if", "not", "line", ".", "strip", "(", "''", ")", ".", "strip", "(", "'\\n'", ")", "or", "not", "line", ":", "continue", "match", "=", "re", ".", "search", "(", "table", "[", "'body'", "]", ",", "line", ")", "if", "match", ":", "for", "field", ",", "value", "in", "match", ".", "groupdict", "(", ")", ".", "items", "(", ")", ":", "# If a field name already exists, do not overwrite it", "if", "field", "in", "output", ":", "continue", "if", "field", ".", "startswith", "(", "'date'", ")", "or", "field", ".", "endswith", "(", "'date'", ")", ":", "output", "[", "field", "]", "=", "self", ".", "parse_date", "(", "value", ")", "if", "not", "output", "[", "field", "]", ":", "logger", ".", "error", "(", "\"Date parsing failed on date '%s'\"", ",", "value", ")", "return", "None", "elif", "field", ".", "startswith", "(", "'amount'", 
")", ":", "output", "[", "field", "]", "=", "self", ".", "parse_number", "(", "value", ")", "else", ":", "output", "[", "field", "]", "=", "value", "logger", ".", "debug", "(", "'ignoring *%s* because it doesn\\'t match anything'", ",", "line", ")" ]
Uploads file to WeedFS
def upload_file(self, path=None, stream=None, name=None, **kwargs):
    """Upload a file to WeedFS.

    Either ``path``, or both ``stream`` and ``name``, must be supplied.
    Extra keyword arguments become query parameters of the assign request.

    :returns: the assigned fid on success, otherwise None.
    """
    query = "&".join("%s=%s" % (k, v) for k, v in kwargs.items())
    assign_url = "http://{master_addr}:{master_port}/dir/assign{params}".format(
        master_addr=self.master_addr,
        master_port=self.master_port,
        params="?" + query if query else '')
    assignment = json.loads(self.conn.get_data(assign_url))
    if assignment.get("error") is not None:
        return None
    volume_host = assignment['publicUrl' if self.use_public_url else 'url']
    post_url = "http://{url}/{fid}".format(url=volume_host,
                                           fid=assignment['fid'])
    if path is not None:
        # Upload from the filesystem, using the file's basename as its name.
        filename = os.path.basename(path)
        with open(path, "rb") as file_stream:
            res = self.conn.post_file(post_url, filename, file_stream)
    elif stream is not None and name is not None:
        # Upload from an already-open file-like object.
        res = self.conn.post_file(post_url, name, stream)
    else:
        raise ValueError(
            "If `path` is None then *both* `stream` and `name` must not"
            " be None ")
    reply = json.loads(res)
    return assignment.get('fid') if "size" in reply else None
939
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L149-L192
[ "def", "alter", "(", "self", ",", "operation", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ",", "credentials", "=", "None", ")", ":", "new_metadata", "=", "self", ".", "add_login_metadata", "(", "metadata", ")", "try", ":", "return", "self", ".", "any_client", "(", ")", ".", "alter", "(", "operation", ",", "timeout", "=", "timeout", ",", "metadata", "=", "new_metadata", ",", "credentials", "=", "credentials", ")", "except", "Exception", "as", "error", ":", "if", "util", ".", "is_jwt_expired", "(", "error", ")", ":", "self", ".", "retry_login", "(", ")", "new_metadata", "=", "self", ".", "add_login_metadata", "(", "metadata", ")", "return", "self", ".", "any_client", "(", ")", ".", "alter", "(", "operation", ",", "timeout", "=", "timeout", ",", "metadata", "=", "new_metadata", ",", "credentials", "=", "credentials", ")", "else", ":", "raise", "error" ]
Force garbage collection
def vacuum(self, threshold=0.3):
    """Ask the master to force garbage collection on volumes whose garbage
    ratio exceeds ``threshold``; returns True on a successful request."""
    url = ("http://{master_addr}:{master_port}/"
           "vol/vacuum?garbageThreshold={threshold}").format(
        master_addr=self.master_addr,
        master_port=self.master_port,
        threshold=threshold)
    return self.conn.get_data(url) is not None
940
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L194-L211
[ "def", "_redis_watcher", "(", "state", ")", ":", "conf", "=", "state", ".", "app", ".", "config", "r", "=", "redis", ".", "client", ".", "StrictRedis", "(", "host", "=", "conf", ".", "get", "(", "'WAFFLE_REDIS_HOST'", ",", "'localhost'", ")", ",", "port", "=", "conf", ".", "get", "(", "'WAFFLE_REDIS_PORT'", ",", "6379", ")", ")", "sub", "=", "r", ".", "pubsub", "(", "ignore_subscribe_messages", "=", "True", ")", "sub", ".", "subscribe", "(", "conf", ".", "get", "(", "'WAFFLE_REDIS_CHANNEL'", ",", "'waffleconf'", ")", ")", "while", "True", ":", "for", "msg", "in", "sub", ".", "listen", "(", ")", ":", "# Skip non-messages", "if", "not", "msg", "[", "'type'", "]", "==", "'message'", ":", "continue", "tstamp", "=", "float", "(", "msg", "[", "'data'", "]", ")", "# Compare timestamps and update config if needed", "if", "tstamp", ">", "state", ".", "_tstamp", ":", "state", ".", "update_conf", "(", ")", "state", ".", "_tstamp", "=", "tstamp" ]
Returns the Weed-FS master version.
def version(self):
    """Return the version reported by the WeedFS master."""
    status_url = "http://{master_addr}:{master_port}/dir/status".format(
        master_addr=self.master_addr, master_port=self.master_port)
    status = json.loads(self.conn.get_data(status_url))
    return status.get("Version")
941
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/weed.py#L214-L225
[ "def", "list_conversions", "(", "api_key", ",", "api_secret", ",", "video_key", ",", "*", "*", "kwargs", ")", ":", "jwplatform_client", "=", "jwplatform", ".", "Client", "(", "api_key", ",", "api_secret", ")", "logging", ".", "info", "(", "\"Querying for video conversions.\"", ")", "try", ":", "response", "=", "jwplatform_client", ".", "videos", ".", "conversions", ".", "list", "(", "video_key", "=", "video_key", ",", "*", "*", "kwargs", ")", "except", "jwplatform", ".", "errors", ".", "JWPlatformError", "as", "e", ":", "logging", ".", "error", "(", "\"Encountered an error querying for video conversions.\\n{}\"", ".", "format", "(", "e", ")", ")", "sys", ".", "exit", "(", "e", ".", "message", ")", "return", "response" ]
This function determines the convolution of two inputs using the FFT . Contains an implementation for both CPU and GPU .
def fft_convolve(in1, in2, conv_device="cpu", conv_mode="linear", store_on_gpu=False):
    """Determine the convolution of two inputs using the FFT.

    Contains implementations for both CPU and GPU.

    :param in1: first input array.
    :param in2: second input; multiplied directly with the FFT of ``in1``,
        so it is presumably already in the frequency domain - confirm
        against callers.
    :param conv_device: 'cpu' or 'gpu'.
    :param conv_mode: 'linear' (zero-padded) or 'circular'.
    :param store_on_gpu: GPU mode only - keep the result on the device.
    """
    # NOTE: Circular convolution assumes a periodic repetition of the input.
    # This can cause edge effects. Linear convolution pads the input with
    # zeros to avoid this problem but is consequently heavier on computation
    # and memory.
    if conv_device == 'gpu':
        if conv_mode == "linear":
            fft_in1 = pad_array(in1)
            fft_in1 = gpu_r2c_fft(fft_in1, store_on_gpu=True)
            fft_in2 = in2
            conv_in1_in2 = fft_in1 * fft_in2
            conv_in1_in2 = contiguous_slice(fft_shift(gpu_c2r_ifft(conv_in1_in2, is_gpuarray=True, store_on_gpu=True)))
            if store_on_gpu:
                return conv_in1_in2
            else:
                return conv_in1_in2.get()
        elif conv_mode == "circular":
            fft_in1 = gpu_r2c_fft(in1, store_on_gpu=True)
            fft_in2 = in2
            conv_in1_in2 = fft_in1 * fft_in2
            conv_in1_in2 = fft_shift(gpu_c2r_ifft(conv_in1_in2, is_gpuarray=True, store_on_gpu=True))
            if store_on_gpu:
                return conv_in1_in2
            else:
                return conv_in1_in2.get()
    else:
        if conv_mode == "linear":
            fft_in1 = pad_array(in1)
            fft_in2 = in2
            # BUGFIX: slice indices must be integers - the original used
            # slice(0.5*sz, 1.5*sz), which raises TypeError on Python 3.
            # The padded array is twice the input size, so this selects the
            # central region.
            out1_slice = tuple(slice(sz // 2, 3 * sz // 2) for sz in in1.shape)
            return np.require(np.fft.fftshift(np.fft.irfft2(fft_in2 * np.fft.rfft2(fft_in1)))[out1_slice],
                              np.float32, 'C')
        elif conv_mode == "circular":
            return np.fft.fftshift(np.fft.irfft2(in2 * np.fft.rfft2(in1)))
942
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_convolution.py#L18-L73
[ "def", "generation", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "if", "self", ".", "state", "is", "not", "MemberState", ".", "STABLE", ":", "return", "None", "return", "self", ".", "_generation" ]
This function makes use of the scikits implementation of the FFT for GPUs to take the real to complex FFT .
def gpu_r2c_fft(in1, is_gpuarray=False, store_on_gpu=False):
    """Real-to-complex FFT on the GPU via the scikits.cuda FFT plan.

    :param in1: real-valued 2D input (numpy array or gpuarray).
    :param is_gpuarray: when True, ``in1`` is assumed to already be on
        the device.
    :param store_on_gpu: when True, return the gpuarray instead of copying
        the result back to the host.
    """
    if is_gpuarray:
        gpu_in1 = in1
    else:
        # Asynchronously move the host array onto the device as float32.
        gpu_in1 = gpuarray.to_gpu_async(in1.astype(np.float32))
    # An R2C FFT of width N yields N//2 + 1 complex columns; 0.5*N + 1 is
    # truncated to that value when assigned into the integer array.
    output_size = np.array(in1.shape)
    output_size[1] = 0.5 * output_size[1] + 1
    gpu_out1 = gpuarray.empty([output_size[0], output_size[1]], np.complex64)
    gpu_plan = Plan(gpu_in1.shape, np.float32, np.complex64)
    fft(gpu_in1, gpu_out1, gpu_plan)
    if store_on_gpu:
        return gpu_out1
    else:
        return gpu_out1.get()
943
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_convolution.py#L76-L106
[ "def", "release", "(", "self", ")", ":", "if", "self", ".", "_subscription", "and", "self", ".", "_subscription", ".", "subscribed", ":", "self", ".", "_subscription", ".", "unsubscribe", "(", ")", "self", ".", "_subscription", ".", "reset", "(", ")" ]
This function makes use of the scikits implementation of the FFT for GPUs to take the complex to real IFFT .
def gpu_c2r_ifft(in1, is_gpuarray=False, store_on_gpu=False):
    """Complex-to-real inverse FFT on the GPU via the scikits.cuda FFT plan.

    :param in1: complex-valued half-spectrum input (numpy array or gpuarray).
    :param is_gpuarray: when True, ``in1`` is assumed to already be on
        the device.
    :param store_on_gpu: when True, return the gpuarray instead of copying
        the result back to the host.
    """
    if is_gpuarray:
        gpu_in1 = in1
    else:
        gpu_in1 = gpuarray.to_gpu_async(in1.astype(np.complex64))
    # Invert the R2C size relation: a half-spectrum of width M corresponds
    # to a real array of width 2*(M - 1).
    output_size = np.array(in1.shape)
    output_size[1] = 2 * (output_size[1] - 1)
    gpu_out1 = gpuarray.empty([output_size[0], output_size[1]], np.float32)
    gpu_plan = Plan(output_size, np.complex64, np.float32)
    ifft(gpu_in1, gpu_out1, gpu_plan)
    # Presumably normalises the unscaled inverse transform - confirm
    # against the scale_fft implementation.
    scale_fft(gpu_out1)
    if store_on_gpu:
        return gpu_out1
    else:
        return gpu_out1.get()
944
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_convolution.py#L108-L139
[ "def", "_remove_player", "(", "self", ",", "player_id", ")", ":", "player", "=", "self", ".", "_mpris_players", ".", "get", "(", "player_id", ")", "if", "player", ":", "if", "player", ".", "get", "(", "\"subscription\"", ")", ":", "player", "[", "\"subscription\"", "]", ".", "disconnect", "(", ")", "del", "self", ".", "_mpris_players", "[", "player_id", "]" ]
Simple convenience function to pad arrays for linear convolution .
def pad_array(in1):
    """Zero-pad ``in1`` to twice its size, for linear convolution.

    The input occupies the central quarter of the returned array.

    :param in1: 2D array to pad.
    :returns: zero-padded array of shape ``2 * in1.shape``.
    """
    padded_size = 2 * np.array(in1.shape)
    out1 = np.zeros([padded_size[0], padded_size[1]])
    # BUGFIX: use floor division - the original used `/`, which produces
    # float slice indices and raises TypeError on Python 3.
    out1[padded_size[0] // 4: 3 * padded_size[0] // 4,
         padded_size[1] // 4: 3 * padded_size[1] // 4] = in1
    return out1
945
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt_convolution.py#L142-L158
[ "def", "numRegisteredForRole", "(", "self", ",", "role", ",", "includeTemporaryRegs", "=", "False", ")", ":", "count", "=", "self", ".", "eventregistration_set", ".", "filter", "(", "cancelled", "=", "False", ",", "dropIn", "=", "False", ",", "role", "=", "role", ")", ".", "count", "(", ")", "if", "includeTemporaryRegs", ":", "count", "+=", "self", ".", "temporaryeventregistration_set", ".", "filter", "(", "dropIn", "=", "False", ",", "role", "=", "role", ")", ".", "exclude", "(", "registration__expirationDate__lte", "=", "timezone", ".", "now", "(", ")", ")", ".", "count", "(", ")", "return", "count" ]
Check if host is a dragon .
def is_dragon(host, timeout=1):
    """Check whether ``host`` serves the DragonMint/AsicMiner web UI."""
    try:
        resp = requests.get('http://{}/'.format(host), timeout=timeout)
    except requests.exceptions.RequestException:
        return False
    if resp.status_code != 200:
        return False
    return ('<title>DragonMint</title>' in resp.text
            or '<title>AsicMiner</title>' in resp.text)
946
https://github.com/brndnmtthws/dragon-rest/blob/10ea09a6203c0cbfeeeb854702764bd778769887/dragon_rest/dragons.py#L49-L65
[ "def", "encrypt", "(", "self", ",", "plaintext_data_key", ",", "encryption_context", ")", ":", "if", "self", ".", "wrapping_algorithm", ".", "encryption_type", "is", "EncryptionType", ".", "ASYMMETRIC", ":", "if", "self", ".", "wrapping_key_type", "is", "EncryptionKeyType", ".", "PRIVATE", ":", "encrypted_key", "=", "self", ".", "_wrapping_key", ".", "public_key", "(", ")", ".", "encrypt", "(", "plaintext", "=", "plaintext_data_key", ",", "padding", "=", "self", ".", "wrapping_algorithm", ".", "padding", ")", "else", ":", "encrypted_key", "=", "self", ".", "_wrapping_key", ".", "encrypt", "(", "plaintext", "=", "plaintext_data_key", ",", "padding", "=", "self", ".", "wrapping_algorithm", ".", "padding", ")", "return", "EncryptedData", "(", "iv", "=", "None", ",", "ciphertext", "=", "encrypted_key", ",", "tag", "=", "None", ")", "serialized_encryption_context", "=", "serialize_encryption_context", "(", "encryption_context", "=", "encryption_context", ")", "iv", "=", "os", ".", "urandom", "(", "self", ".", "wrapping_algorithm", ".", "algorithm", ".", "iv_len", ")", "return", "encrypt", "(", "algorithm", "=", "self", ".", "wrapping_algorithm", ".", "algorithm", ",", "key", "=", "self", ".", "_derived_wrapping_key", ",", "plaintext", "=", "plaintext_data_key", ",", "associated_data", "=", "serialized_encryption_context", ",", "iv", "=", "iv", ",", ")" ]
Change the pools of the miner . This call will restart cgminer .
def updatePools(self, pool1, username1, password1, pool2=None, username2=None, password2=None, pool3=None, username3=None, password3=None):
    """Replace the miner's pool configuration.

    The device restarts cgminer after this call. Unused pool slots may be
    left as None.
    """
    payload = {}
    slots = [(pool1, username1, password1),
             (pool2, username2, password2),
             (pool3, username3, password3)]
    for index, (pool, user, pwd) in enumerate(slots, start=1):
        payload['Pool{}'.format(index)] = pool
        payload['UserName{}'.format(index)] = user
        payload['Password{}'.format(index)] = pwd
    return self.__post('/api/updatePools', data=payload)
947
https://github.com/brndnmtthws/dragon-rest/blob/10ea09a6203c0cbfeeeb854702764bd778769887/dragon_rest/dragons.py#L148-L170
[ "def", "WriteArtifactsFile", "(", "self", ",", "artifacts", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "file_object", ":", "file_object", ".", "write", "(", "self", ".", "FormatArtifacts", "(", "artifacts", ")", ")" ]
Change the password of a user .
def updatePassword(self, user, currentPassword, newPassword):
    """Change *user*'s password on the miner via the web API."""
    payload = {
        'user': user,
        'currentPassword': currentPassword,
        'newPassword': newPassword,
    }
    return self.__post('/api/updatePassword', data=payload)
948
https://github.com/brndnmtthws/dragon-rest/blob/10ea09a6203c0cbfeeeb854702764bd778769887/dragon_rest/dragons.py#L172-L182
[ "def", "write", "(", "self", ",", "path", ",", "wrap_ttl", "=", "None", ",", "*", "*", "kwargs", ")", ":", "path", "=", "sanitize_mount", "(", "path", ")", "val", "=", "None", "if", "path", ".", "startswith", "(", "'cubbyhole'", ")", ":", "self", ".", "token", "=", "self", ".", "initial_token", "val", "=", "super", "(", "Client", ",", "self", ")", ".", "write", "(", "path", ",", "wrap_ttl", "=", "wrap_ttl", ",", "*", "*", "kwargs", ")", "self", ".", "token", "=", "self", ".", "operational_token", "else", ":", "super", "(", "Client", ",", "self", ")", ".", "write", "(", "path", ",", "wrap_ttl", "=", "wrap_ttl", ",", "*", "*", "kwargs", ")", "return", "val" ]
Change the current network settings .
def updateNetwork(self, dhcp='dhcp', ipaddress=None, netmask=None, gateway=None, dns=None):
    """Change the miner's network settings.

    Static-IP fields may be None when *dhcp* is enabled.
    """
    payload = {
        'dhcp': dhcp,
        'ipaddress': ipaddress,
        'netmask': netmask,
        'gateway': gateway,
        'dns': json.dumps(dns),  # the API expects the DNS list JSON-encoded
    }
    return self.__post('/api/updateNetwork', data=payload)
949
https://github.com/brndnmtthws/dragon-rest/blob/10ea09a6203c0cbfeeeb854702764bd778769887/dragon_rest/dragons.py#L188-L202
[ "def", "revoke_session", "(", "self", ",", "sid", "=", "''", ",", "token", "=", "''", ")", ":", "if", "not", "sid", ":", "if", "token", ":", "sid", "=", "self", ".", "handler", ".", "sid", "(", "token", ")", "else", ":", "raise", "ValueError", "(", "'Need one of \"sid\" or \"token\"'", ")", "for", "typ", "in", "[", "'access_token'", ",", "'refresh_token'", ",", "'code'", "]", ":", "try", ":", "self", ".", "revoke_token", "(", "self", "[", "sid", "]", "[", "typ", "]", ",", "typ", ")", "except", "KeyError", ":", "# If no such token has been issued", "pass", "self", ".", "update", "(", "sid", ",", "revoked", "=", "True", ")" ]
Upgrade the firmware of the miner .
def upgradeUpload(self, file):
    """Upload a firmware image at path *file* to the miner.

    BUG fix: the original opened the file without ever closing it,
    leaking the handle; a context manager now closes it after the upload.
    """
    with open(file, 'rb') as fh:
        return self.__post_files('/upgrade/upload', files={'upfile': fh})
950
https://github.com/brndnmtthws/dragon-rest/blob/10ea09a6203c0cbfeeeb854702764bd778769887/dragon_rest/dragons.py#L237-L241
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
A property which can be used to check if StatusObject uses program features or not .
def is_program(self):
    """Return True when this StatusObject uses any program feature.

    True if at least one of on_activate / on_deactivate / on_update is a
    non-Empty callable.
    """
    from automate.callables import Empty
    callbacks = (self.on_activate, self.on_deactivate, self.on_update)
    return any(not isinstance(cb, Empty) for cb in callbacks)
951
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/statusobject.py#L208-L215
[ "def", "insert", "(", "self", ",", "storagemodel", ")", "->", "StorageTableModel", ":", "modeldefinition", "=", "self", ".", "getmodeldefinition", "(", "storagemodel", ",", "True", ")", "try", ":", "modeldefinition", "[", "'tableservice'", "]", ".", "insert_or_replace_entity", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "entity", "(", ")", ")", "storagemodel", ".", "_exists", "=", "True", "except", "AzureMissingResourceHttpError", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "log", ".", "debug", "(", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "getPartitionKey", "(", ")", ",", "storagemodel", ".", "getRowKey", "(", ")", ",", "e", ")", ")", "except", "Exception", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "msg", "=", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "PartitionKey", ",", "storagemodel", ".", "RowKey", ",", "e", ")", "raise", "AzureStorageWrapException", "(", "msg", "=", "msg", ")", "finally", ":", "return", "storagemodel" ]
Get data of this object as a data dictionary . Used by websocket service .
def get_as_datadict(self):
    """Return this object's state as a dict; used by the websocket service."""
    data = super().get_as_datadict()
    data.update(status=self.status, data_type=self.data_type, editable=self.editable)
    return data
952
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/statusobject.py#L238-L244
[ "def", "setOverlayTextureColorSpace", "(", "self", ",", "ulOverlayHandle", ",", "eTextureColorSpace", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureColorSpace", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTextureColorSpace", ")", "return", "result" ]
This function is called by - set_status - _update_program_stack if active program is being changed - thia may be launched by sensor status change . status lock is necessary because these happen from different threads .
def _do_change_status(self, status, force=False):
    """Queue a status-change request on the system worker thread.

    Called by set_status and by _update_program_stack (which may fire
    from a sensor status change), i.e. from different threads — hence
    the indirection through the worker queue rather than a direct call.
    """
    task = DummyStatusWorkerTask(self._request_status_change_in_queue,
                                 status, force=force)
    self.system.worker_thread.put(task)
953
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/statusobject.py#L354-L366
[ "def", "create_pattern", "(", "cls", ",", "userdata", ")", ":", "empty", "=", "cls", ".", "create_empty", "(", "None", ")", "userdata_dict", "=", "cls", ".", "normalize", "(", "empty", ",", "userdata", ")", "return", "Userdata", "(", "userdata_dict", ")" ]
Called by program which desires to manipulate this actuator when it is activated .
def activate_program(self, program):
    """Register *program* as a controller of this actuator.

    Called by a program that wants to manipulate this actuator when the
    program is activated. No-op if the program is already registered.
    """
    self.logger.debug("activate_program %s", program)
    # Fast-path check outside the lock; duplicates are rejected here.
    if program in self.program_stack:
        return
    # The stack is shared across threads, so mutation happens under the lock.
    with self._program_lock:
        self.logger.debug("activate_program got through %s", program)
        self.program_stack.append(program)
        self._update_program_stack()
954
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/statusobject.py#L578-L589
[ "def", "get_resource_form", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Implemented from kitosid template for -", "# osid.resource.ResourceAdminSession.get_resource_form_for_update", "# This method might be a bit sketchy. Time will tell.", "if", "isinstance", "(", "args", "[", "-", "1", "]", ",", "list", ")", "or", "'resource_record_types'", "in", "kwargs", ":", "return", "self", ".", "get_resource_form_for_create", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "self", ".", "get_resource_form_for_update", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Called by program when it is deactivated .
def deactivate_program(self, program):
    """Unregister *program* from this actuator.

    Called by a program when it is deactivated.

    BUG fix: the original dropped into an interactive ipdb breakpoint
    when *program* was missing from the stack — leftover debug code.
    That case is now logged; list.remove still raises ValueError, as it
    would have after the debugger stop.
    """
    self.logger.debug("deactivate_program %s", program)
    with self._program_lock:
        self.logger.debug("deactivate_program got through %s", program)
        if program not in self.program_stack:
            self.logger.error("deactivate_program: %s not in program stack", program)
        self.program_stack.remove(program)
        if program in self.program_status:
            del self.program_status[program]
        self._update_program_stack()
955
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/statusobject.py#L591-L605
[ "def", "create_resource_quota", "(", "self", ",", "name", ",", "quota_json", ")", ":", "url", "=", "self", ".", "_build_k8s_url", "(", "\"resourcequotas/\"", ")", "response", "=", "self", ".", "_post", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "quota_json", ")", ",", "headers", "=", "{", "\"Content-Type\"", ":", "\"application/json\"", "}", ")", "if", "response", ".", "status_code", "==", "http_client", ".", "CONFLICT", ":", "url", "=", "self", ".", "_build_k8s_url", "(", "\"resourcequotas/%s\"", "%", "name", ")", "response", "=", "self", ".", "_put", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "quota_json", ")", ",", "headers", "=", "{", "\"Content-Type\"", ":", "\"application/json\"", "}", ")", "check_response", "(", "response", ")", "return", "response" ]
Stream logs from a Docker container within a timeout .
def stream_logs(container, timeout=10.0, **logs_kwargs):
    """Stream logs from a Docker container, failing after *timeout* seconds.

    Extra keyword arguments are forwarded to ``container.logs``.
    """
    log_stream = container.logs(stream=True, **logs_kwargs)
    message = 'Timeout waiting for container logs.'
    return stream_timeout(log_stream, timeout, message)
956
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/stream/logs.py#L8-L26
[ "def", "save_html", "(", "self", ",", "name", ",", "address", "=", "True", ",", "overall_param", "=", "None", ",", "class_param", "=", "None", ",", "class_name", "=", "None", ",", "color", "=", "(", "0", ",", "0", ",", "0", ")", ",", "normalize", "=", "False", ")", ":", "try", ":", "message", "=", "None", "table", "=", "self", ".", "table", "if", "normalize", ":", "table", "=", "self", ".", "normalized_table", "html_file", "=", "open", "(", "name", "+", "\".html\"", ",", "\"w\"", ")", "html_file", ".", "write", "(", "html_init", "(", "name", ")", ")", "html_file", ".", "write", "(", "html_dataset_type", "(", "self", ".", "binary", ",", "self", ".", "imbalance", ")", ")", "html_file", ".", "write", "(", "html_table", "(", "self", ".", "classes", ",", "table", ",", "color", ",", "normalize", ")", ")", "html_file", ".", "write", "(", "html_overall_stat", "(", "self", ".", "overall_stat", ",", "self", ".", "digit", ",", "overall_param", ",", "self", ".", "recommended_list", ")", ")", "class_stat_classes", "=", "class_filter", "(", "self", ".", "classes", ",", "class_name", ")", "html_file", ".", "write", "(", "html_class_stat", "(", "class_stat_classes", ",", "self", ".", "class_stat", ",", "self", ".", "digit", ",", "class_param", ",", "self", ".", "recommended_list", ")", ")", "html_file", ".", "write", "(", "html_end", "(", "VERSION", ")", ")", "html_file", ".", "close", "(", ")", "if", "address", ":", "message", "=", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "name", "+", "\".html\"", ")", "return", "{", "\"Status\"", ":", "True", ",", "\"Message\"", ":", "message", "}", "except", "Exception", "as", "e", ":", "return", "{", "\"Status\"", ":", "False", ",", "\"Message\"", ":", "str", "(", "e", ")", "}" ]
Fetch an image if it isn t already present .
def fetch_image(client, name):
    """Return the Docker image *name*, pulling it if not already present."""
    try:
        img = client.images.get(name)
    except docker.errors.ImageNotFound:
        name, tag = _parse_image_tag(name)
        if tag is None:
            tag = 'latest'
        log.info("Pulling tag '{}' for image '{}'...".format(tag, name))
        img = client.images.pull(name, tag=tag)
    # Logged for both the cache-hit and the freshly-pulled case.
    log.debug("Found image '{}' for tag '{}'".format(img.id, name))
    return img
957
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L27-L44
[ "def", "session", "(", "self", ",", "request", ":", "Request", ")", "->", "WebSession", ":", "return", "WebSession", "(", "request", ",", "http_client", "=", "self", ".", "_http_client", ",", "redirect_tracker", "=", "self", ".", "_redirect_tracker_factory", "(", ")", ",", "request_factory", "=", "self", ".", "_request_factory", ",", "cookie_jar", "=", "self", ".", "_cookie_jar", ",", ")" ]
Get both the model and ID of an object that could be an ID or a model .
def _get_id_and_model ( self , id_or_model ) : if isinstance ( id_or_model , self . collection . model ) : model = id_or_model elif isinstance ( id_or_model , str ) : # Assume we have an ID string model = self . collection . get ( id_or_model ) else : raise TypeError ( 'Unexpected type {}, expected {} or {}' . format ( type ( id_or_model ) , str , self . collection . model ) ) return model . id , model
958
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L82-L100
[ "def", "GetCovariance", "(", "kernel", ",", "kernel_params", ",", "time", ",", "errors", ")", ":", "# NOTE: We purposefully compute the covariance matrix", "# *without* the GP white noise term", "K", "=", "np", ".", "diag", "(", "errors", "**", "2", ")", "K", "+=", "GP", "(", "kernel", ",", "kernel_params", ",", "white", "=", "False", ")", ".", "get_matrix", "(", "time", ")", "return", "K" ]
Create an instance of this resource type .
def create(self, name, *args, **kwargs):
    """Create an instance of this resource type and track its ID for teardown."""
    full_name = self._resource_name(name)
    log.info("Creating {} '{}'...".format(self._model_name, full_name))
    resource = self.collection.create(*args, name=full_name, **kwargs)
    self._ids.add(resource.id)
    return resource
959
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L102-L111
[ "def", "_create_auth", "(", "team", ",", "timeout", "=", "None", ")", ":", "url", "=", "get_registry_url", "(", "team", ")", "contents", "=", "_load_auth", "(", ")", "auth", "=", "contents", ".", "get", "(", "url", ")", "if", "auth", "is", "not", "None", ":", "# If the access token expires within a minute, update it.", "if", "auth", "[", "'expires_at'", "]", "<", "time", ".", "time", "(", ")", "+", "60", ":", "try", ":", "auth", "=", "_update_auth", "(", "team", ",", "auth", "[", "'refresh_token'", "]", ",", "timeout", ")", "except", "CommandException", "as", "ex", ":", "raise", "CommandException", "(", "\"Failed to update the access token (%s). Run `quilt login%s` again.\"", "%", "(", "ex", ",", "' '", "+", "team", "if", "team", "else", "''", ")", ")", "contents", "[", "url", "]", "=", "auth", "_save_auth", "(", "contents", ")", "return", "auth" ]
Remove an instance of this resource type .
def remove(self, resource, **kwargs):
    """Remove *resource* and stop tracking its ID.

    Extra keyword arguments are forwarded to the model's remove call.
    """
    message = "Removing {} '{}'...".format(self._model_name, resource.name)
    log.info(message)
    resource.remove(**kwargs)
    self._ids.remove(resource.id)
960
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L113-L120
[ "def", "_create_auth", "(", "team", ",", "timeout", "=", "None", ")", ":", "url", "=", "get_registry_url", "(", "team", ")", "contents", "=", "_load_auth", "(", ")", "auth", "=", "contents", ".", "get", "(", "url", ")", "if", "auth", "is", "not", "None", ":", "# If the access token expires within a minute, update it.", "if", "auth", "[", "'expires_at'", "]", "<", "time", ".", "time", "(", ")", "+", "60", ":", "try", ":", "auth", "=", "_update_auth", "(", "team", ",", "auth", "[", "'refresh_token'", "]", ",", "timeout", ")", "except", "CommandException", "as", "ex", ":", "raise", "CommandException", "(", "\"Failed to update the access token (%s). Run `quilt login%s` again.\"", "%", "(", "ex", ",", "' '", "+", "team", "if", "team", "else", "''", ")", ")", "contents", "[", "url", "]", "=", "auth", "_save_auth", "(", "contents", ")", "return", "auth" ]
Remove a container .
def remove(self, container, force=True, volumes=True):
    """Remove a container; by default forcibly and with its anonymous volumes."""
    super().remove(container, force=force, v=volumes)
961
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L266-L283
[ "def", "disable_avatar", "(", "self", ")", ":", "with", "(", "yield", "from", "self", ".", "_publish_lock", ")", ":", "todo", "=", "[", "]", "if", "self", ".", "_synchronize_vcard", ":", "todo", ".", "append", "(", "self", ".", "_disable_vcard_avatar", "(", ")", ")", "if", "(", "yield", "from", "self", ".", "_pep", ".", "available", "(", ")", ")", ":", "todo", ".", "append", "(", "self", ".", "_pep", ".", "publish", "(", "namespaces", ".", "xep0084_metadata", ",", "avatar_xso", ".", "Metadata", "(", ")", ")", ")", "yield", "from", "gather_reraise_multi", "(", "*", "todo", ",", "message", "=", "\"disable_avatar\"", ")" ]
Get the default bridge network that containers are connected to if no other network options are specified .
def get_default(self, create=True):
    """Return the default bridge network containers attach to.

    Lazily created on first access unless *create* is False, in which
    case None may be returned.
    """
    if self._default_network is None and create:
        log.debug("Creating default network...")
        self._default_network = self.create('default', driver='bridge')
    return self._default_network
962
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L326-L338
[ "def", "createRPYText", "(", "self", ")", ":", "self", ".", "rollText", "=", "self", ".", "axes", ".", "text", "(", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", ",", "-", "0.97", "+", "(", "2", "*", "self", ".", "vertSize", ")", "-", "(", "self", ".", "vertSize", "/", "10.0", ")", ",", "'Roll: %.2f'", "%", "self", ".", "roll", ",", "color", "=", "'w'", ",", "size", "=", "self", ".", "fontSize", ")", "self", ".", "pitchText", "=", "self", ".", "axes", ".", "text", "(", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", ",", "-", "0.97", "+", "self", ".", "vertSize", "-", "(", "0.5", "*", "self", ".", "vertSize", "/", "10.0", ")", ",", "'Pitch: %.2f'", "%", "self", ".", "pitch", ",", "color", "=", "'w'", ",", "size", "=", "self", ".", "fontSize", ")", "self", ".", "yawText", "=", "self", ".", "axes", ".", "text", "(", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", ",", "-", "0.97", ",", "'Yaw: %.2f'", "%", "self", ".", "yaw", ",", "color", "=", "'w'", ",", "size", "=", "self", ".", "fontSize", ")", "self", ".", "rollText", ".", "set_path_effects", "(", "[", "PathEffects", ".", "withStroke", "(", "linewidth", "=", "1", ",", "foreground", "=", "'k'", ")", "]", ")", "self", ".", "pitchText", ".", "set_path_effects", "(", "[", "PathEffects", ".", "withStroke", "(", "linewidth", "=", "1", ",", "foreground", "=", "'k'", ")", "]", ")", "self", ".", "yawText", ".", "set_path_effects", "(", "[", "PathEffects", ".", "withStroke", "(", "linewidth", "=", "1", ",", "foreground", "=", "'k'", ")", "]", ")" ]
Get the helper for a given type of Docker model . For use by resource definitions .
def _helper_for_model(self, model_type):
    """Map a Docker model class to the matching resource helper.

    Used by resource definitions; raises ValueError for unknown types.
    """
    helpers = {
        models.containers.Container: self.containers,
        models.images.Image: self.images,
        models.networks.Network: self.networks,
        models.volumes.Volume: self.volumes,
    }
    if model_type in helpers:
        return helpers[model_type]
    raise ValueError('Unknown model type {}'.format(model_type))
963
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L398-L412
[ "def", "_quantiles", "(", "self", ")", ":", "trials", "=", "[", "]", "for", "trial", ",", "state", "in", "self", ".", "_trial_state", ".", "items", "(", ")", ":", "if", "state", ".", "last_score", "is", "not", "None", "and", "not", "trial", ".", "is_finished", "(", ")", ":", "trials", ".", "append", "(", "trial", ")", "trials", ".", "sort", "(", "key", "=", "lambda", "t", ":", "self", ".", "_trial_state", "[", "t", "]", ".", "last_score", ")", "if", "len", "(", "trials", ")", "<=", "1", ":", "return", "[", "]", ",", "[", "]", "else", ":", "return", "(", "trials", "[", ":", "int", "(", "math", ".", "ceil", "(", "len", "(", "trials", ")", "*", "PBT_QUANTILE", ")", ")", "]", ",", "trials", "[", "int", "(", "math", ".", "floor", "(", "-", "len", "(", "trials", ")", "*", "PBT_QUANTILE", ")", ")", ":", "]", ")" ]
Clean up all resources when we're done with them .
def teardown ( self ) : self . containers . _teardown ( ) self . networks . _teardown ( ) self . volumes . _teardown ( ) # We need to close the underlying APIClient explicitly to avoid # ResourceWarnings from unclosed HTTP connections. self . _client . api . close ( )
964
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/helpers.py#L414-L424
[ "def", "_bsecurate_cli_compare_basis_sets", "(", "args", ")", ":", "ret", "=", "curate", ".", "compare_basis_sets", "(", "args", ".", "basis1", ",", "args", ".", "basis2", ",", "args", ".", "version1", ",", "args", ".", "version2", ",", "args", ".", "uncontract_general", ",", "args", ".", "data_dir", ",", "args", ".", "data_dir", ")", "if", "ret", ":", "return", "\"No difference found\"", "else", ":", "return", "\"DIFFERENCES FOUND. SEE ABOVE\"" ]
Execute a redis - cli command inside a running container .
def exec_redis_cli(self, command, args=None, db=0, redis_cli_opts=None):
    """Execute a redis-cli command inside the running container.

    Idiom fix: the mutable-list default arguments are replaced with None
    sentinels; caller-visible behaviour is unchanged.

    :param command: redis-cli command name, e.g. 'KEYS'
    :param args: positional arguments for the command (stringified)
    :param db: database number passed via ``-n``
    :param redis_cli_opts: extra redis-cli options inserted before the command
    """
    args = [] if args is None else args
    opts = [] if redis_cli_opts is None else redis_cli_opts
    cmd = ['redis-cli', '-n', str(db)] + list(opts) + [command] + [str(a) for a in args]
    return self.inner().exec_run(cmd)
965
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/redis.py#L40-L52
[ "def", "orderbook", "(", "ctx", ",", "market", ")", ":", "market", "=", "Market", "(", "market", ",", "bitshares_instance", "=", "ctx", ".", "bitshares", ")", "orderbook", "=", "market", ".", "orderbook", "(", ")", "ta", "=", "{", "}", "ta", "[", "\"bids\"", "]", "=", "[", "[", "\"quote\"", ",", "\"sum quote\"", ",", "\"base\"", ",", "\"sum base\"", ",", "\"price\"", "]", "]", "cumsumquote", "=", "Amount", "(", "0", ",", "market", "[", "\"quote\"", "]", ")", "cumsumbase", "=", "Amount", "(", "0", ",", "market", "[", "\"base\"", "]", ")", "for", "order", "in", "orderbook", "[", "\"bids\"", "]", ":", "cumsumbase", "+=", "order", "[", "\"base\"", "]", "cumsumquote", "+=", "order", "[", "\"quote\"", "]", "ta", "[", "\"bids\"", "]", ".", "append", "(", "[", "str", "(", "order", "[", "\"quote\"", "]", ")", ",", "str", "(", "cumsumquote", ")", ",", "str", "(", "order", "[", "\"base\"", "]", ")", ",", "str", "(", "cumsumbase", ")", ",", "\"{:f} {}/{}\"", ".", "format", "(", "order", "[", "\"price\"", "]", ",", "order", "[", "\"base\"", "]", "[", "\"asset\"", "]", "[", "\"symbol\"", "]", ",", "order", "[", "\"quote\"", "]", "[", "\"asset\"", "]", "[", "\"symbol\"", "]", ",", ")", ",", "]", ")", "ta", "[", "\"asks\"", "]", "=", "[", "[", "\"price\"", ",", "\"base\"", ",", "\"sum base\"", ",", "\"quote\"", ",", "\"sum quote\"", "]", "]", "cumsumquote", "=", "Amount", "(", "0", ",", "market", "[", "\"quote\"", "]", ")", "cumsumbase", "=", "Amount", "(", "0", ",", "market", "[", "\"base\"", "]", ")", "for", "order", "in", "orderbook", "[", "\"asks\"", "]", ":", "cumsumbase", "+=", "order", "[", "\"base\"", "]", "cumsumquote", "+=", "order", "[", "\"quote\"", "]", "ta", "[", "\"asks\"", "]", ".", "append", "(", "[", "\"{:f} {}/{}\"", ".", "format", "(", "order", "[", "\"price\"", "]", ",", "order", "[", "\"base\"", "]", "[", "\"asset\"", "]", "[", "\"symbol\"", "]", ",", "order", "[", "\"quote\"", "]", "[", "\"asset\"", "]", "[", "\"symbol\"", "]", ",", ")", ",", "str", "(", 
"order", "[", "\"base\"", "]", ")", ",", "str", "(", "cumsumbase", ")", ",", "str", "(", "order", "[", "\"quote\"", "]", ")", ",", "str", "(", "cumsumquote", ")", ",", "]", ")", "t", "=", "[", "[", "\"bids\"", ",", "\"asks\"", "]", "]", "t", ".", "append", "(", "[", "format_table", "(", "ta", "[", "\"bids\"", "]", ")", ",", "format_table", "(", "ta", "[", "\"asks\"", "]", ")", "]", ")", "print_table", "(", "t", ")" ]
Run the KEYS command and return the list of matching keys .
def list_keys(self, pattern='*', db=0):
    """Run the KEYS command and return the list of matching key names."""
    lines = output_lines(self.exec_redis_cli('KEYS', [pattern], db=db))
    # A single empty line means "no keys matched".
    if lines == ['']:
        return []
    return lines
966
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/containers/redis.py#L54-L62
[ "def", "commit", "(", "self", ")", ":", "if", "self", ".", "session", "is", "not", "None", ":", "logger", ".", "info", "(", "\"committing transaction in %s\"", "%", "self", ")", "tmp", "=", "self", ".", "stable", "self", ".", "stable", ",", "self", ".", "session", "=", "self", ".", "session", ",", "None", "self", ".", "istable", "=", "1", "-", "self", ".", "istable", "self", ".", "write_istable", "(", ")", "tmp", ".", "close", "(", ")", "# don't wait for gc, release resources manually", "self", ".", "lock_update", ".", "release", "(", ")", "else", ":", "logger", ".", "warning", "(", "\"commit called but there's no open session in %s\"", "%", "self", ")" ]
Uses thread_init in a decorator style .
def threaded(system, func, *args, **kwargs):
    """Return a zero-argument callable running *func(args, kwargs)* safely.

    Exceptions are reported (to Sentry when a raven client is configured,
    and to the module logger) and the callable returns False instead of
    raising — suitable for use as a thread target.
    """
    @wraps(func)
    def guarded(*call_args, **call_kwargs):
        try:
            return func(*call_args, **call_kwargs)
        except Exception as exc:
            if system.raven_client:
                system.raven_client.captureException()
            logger.exception('Exception occurred in thread: %s', exc)
            return False
    return lambda: guarded(*args, **kwargs)
967
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/common.py#L96-L108
[ "def", "decode", "(", "self", ",", "data", ")", ":", "file_name", "=", "self", ".", "_filename_decoder", ".", "decode", "(", "data", "[", "'filename'", "]", ")", "file_data", "=", "data", "[", "'contents'", "]", "i", "=", "0", "max_size", "=", "len", "(", "file_data", ")", "while", "file_data", "[", "i", ":", "i", "+", "1", "]", "!=", "'H'", "and", "i", "<", "max_size", ":", "i", "+=", "1", "if", "i", ">", "0", ":", "data", "[", "'contents'", "]", "=", "file_data", "[", "i", ":", "]", "transmission", "=", "self", ".", "_file_decoder", ".", "decode", "(", "data", "[", "'contents'", "]", ")", "[", "0", "]", "return", "CWRFile", "(", "file_name", ",", "transmission", ")" ]
Return True if the expected matchers are matched in the expected order otherwise False .
def match(self, item):
    """Advance through the matchers in order; True once all have matched.

    Raises RuntimeError if called again after all matchers succeeded.
    """
    if self._position == len(self._matchers):
        raise RuntimeError('Matcher exhausted, no more matchers to use')
    current = self._matchers[self._position]
    if not current(item):
        return False
    self._position += 1
    return self._position == len(self._matchers)
968
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/stream/matchers.py#L81-L97
[ "def", "win32_refresh_window", "(", "cls", ")", ":", "# Get console handle", "handle", "=", "windll", ".", "kernel32", ".", "GetConsoleWindow", "(", ")", "RDW_INVALIDATE", "=", "0x0001", "windll", ".", "user32", ".", "RedrawWindow", "(", "handle", ",", "None", ",", "None", ",", "c_uint", "(", "RDW_INVALIDATE", ")", ")" ]
Return True if the expected matchers are matched in any order otherwise False .
def match(self, item):
    """Match *item* against the remaining matchers in any order.

    The first unused matcher that accepts *item* is recorded in
    _used_matchers. NOTE(review): only _used_matchers is appended to here;
    _unused_matchers is presumably a derived property elsewhere in the
    class — confirm before changing.
    """
    if not self._unused_matchers:
        raise RuntimeError('Matcher exhausted, no more matchers to use')
    for candidate in self._unused_matchers:
        if candidate(item):
            self._used_matchers.append(candidate)
            break
    # All patterns have been matched once nothing remains unused.
    return not self._unused_matchers
969
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/stream/matchers.py#L129-L146
[ "def", "win32_refresh_window", "(", "cls", ")", ":", "# Get console handle", "handle", "=", "windll", ".", "kernel32", ".", "GetConsoleWindow", "(", ")", "RDW_INVALIDATE", "=", "0x0001", "windll", ".", "user32", ".", "RedrawWindow", "(", "handle", ",", "None", ",", "None", ",", "c_uint", "(", "RDW_INVALIDATE", ")", ")" ]
Extension of the MORESANE algorithm . This takes a scale - by - scale approach attempting to remove all sources at the lower scales before moving onto the higher ones . At each step the algorithm may return to previous scales to remove the sources uncovered by the deconvolution .
def moresane_by_scale(self, start_scale=1, stop_scale=20, subregion=None, sigma_level=4,
                      loop_gain=0.1, tolerance=0.75, accuracy=1e-6, major_loop_miter=100,
                      minor_loop_miter=30, all_on_gpu=False, decom_mode="ser", core_count=1,
                      conv_device='cpu', conv_mode='linear', extraction_mode='cpu',
                      enforce_positivity=False, edge_suppression=False, edge_offset=0,
                      flux_threshold=0, neg_comp=False, edge_excl=0, int_excl=0):
    """Run MORESANE scale-by-scale, from start_scale up to stop_scale.

    Each pass deconvolves at one scale and its residual becomes the
    dirty image for the next pass; all other parameters are forwarded
    unchanged to self.moresane. Stops when self.complete is set, when
    the image-size-derived maximum scale is reached, or at stop_scale.
    """
    # The dirty image attribute is overwritten on every iteration; keep
    # the original so it can be restored afterwards.
    dirty_data = self.dirty_data
    scale_count = start_scale
    while not (self.complete):
        logger.info("MORESANE at scale {}".format(scale_count))
        self.moresane(subregion=subregion, scale_count=scale_count, sigma_level=sigma_level,
                      loop_gain=loop_gain, tolerance=tolerance, accuracy=accuracy,
                      major_loop_miter=major_loop_miter, minor_loop_miter=minor_loop_miter,
                      all_on_gpu=all_on_gpu, decom_mode=decom_mode, core_count=core_count,
                      conv_device=conv_device, conv_mode=conv_mode,
                      extraction_mode=extraction_mode, enforce_positivity=enforce_positivity,
                      edge_suppression=edge_suppression, edge_offset=edge_offset,
                      flux_threshold=flux_threshold, neg_comp=neg_comp,
                      edge_excl=edge_excl, int_excl=int_excl)
        # The residual of this pass is deconvolved at the next scale.
        self.dirty_data = self.residual
        scale_count += 1
        # Scale cannot exceed what the image size supports (log2 of its extent).
        if (scale_count > (np.log2(self.dirty_data.shape[0])) - 1):
            logger.info("Maximum scale reached - finished.")
            break
        if (scale_count > stop_scale):
            logger.info("Maximum scale reached - finished.")
            break
    # Restore the original dirty image and reset the completion flag so the
    # object can be reused.
    self.dirty_data = dirty_data
    self.complete = False
970
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/main.py#L523-L599
[ "def", "set_global_feedback", "(", "feedback", ",", "append", "=", "False", ")", ":", "rdict", "=", "load_feedback", "(", ")", "rdict", "[", "'text'", "]", "=", "rdict", ".", "get", "(", "'text'", ",", "''", ")", "+", "feedback", "if", "append", "else", "feedback", "save_feedback", "(", "rdict", ")" ]
This method constructs the restoring beam and then adds the convolution to the residual .
def restore(self):
    """Convolve the model with a fitted clean beam and add the residual.

    Builds the restoring beam via beam_fit, convolves it with the model
    (through FFTs), crops when the PSF is double the dirty-image size,
    and stores the result in self.restored.

    BUG fix: the crop slice indices used true division (``/ 2``), which
    yields floats under Python 3 and raises TypeError when slicing;
    replaced with floor division (``// 2``).

    :return: the fitted beam parameters from beam_fit.
    """
    clean_beam, beam_params = beam_fit(self.psf_data, self.cdelt1, self.cdelt2)
    if np.all(np.array(self.psf_data_shape) == 2 * np.array(self.dirty_data_shape)):
        # Double-size PSF: convolve a padded model, then crop the centre.
        self.restored = np.fft.fftshift(np.fft.irfft2(
            np.fft.rfft2(conv.pad_array(self.model)) * np.fft.rfft2(clean_beam)))
        half_rows = self.dirty_data_shape[0] // 2
        half_cols = self.dirty_data_shape[1] // 2
        self.restored = self.restored[half_rows:-half_rows, half_cols:-half_cols]
    else:
        self.restored = np.fft.fftshift(np.fft.irfft2(
            np.fft.rfft2(self.model) * np.fft.rfft2(clean_beam)))
    self.restored += self.residual
    self.restored = self.restored.astype(np.float32)
    return beam_params
971
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/main.py#L601-L616
[ "def", "component_doi", "(", "soup", ")", ":", "component_doi", "=", "[", "]", "object_id_tags", "=", "raw_parser", ".", "object_id", "(", "soup", ",", "pub_id_type", "=", "\"doi\"", ")", "# Get components too for later", "component_list", "=", "components", "(", "soup", ")", "position", "=", "1", "for", "tag", "in", "object_id_tags", ":", "component_object", "=", "{", "}", "component_object", "[", "\"doi\"", "]", "=", "doi_uri_to_doi", "(", "tag", ".", "text", ")", "component_object", "[", "\"position\"", "]", "=", "position", "# Try to find the type of component", "for", "component", "in", "component_list", ":", "if", "\"doi\"", "in", "component", "and", "component", "[", "\"doi\"", "]", "==", "component_object", "[", "\"doi\"", "]", ":", "component_object", "[", "\"type\"", "]", "=", "component", "[", "\"type\"", "]", "component_doi", ".", "append", "(", "component_object", ")", "position", "=", "position", "+", "1", "return", "component_doi" ]
This method tries to ensure that the input data has the correct dimensions .
def handle_input(self, input_hdr):
    """Build a slice selecting the RA/DEC plane of the input data.

    All non-spatial axes (e.g. frequency, Stokes) are pinned at index 0,
    while axes whose CTYPE starts with "RA" or "DEC" are kept in full.

    :param input_hdr: FITS-style header mapping with NAXIS and CTYPEn keys.
    :return: list of indices/slices, one per axis.
    """
    naxis = input_hdr['NAXIS']
    selection = [0] * naxis
    for axis in range(1, naxis + 1):
        ctype = input_hdr['CTYPE%d' % axis]
        if ctype.startswith("RA"):
            selection[-1] = slice(None)
        elif ctype.startswith("DEC"):
            selection[-2] = slice(None)
    return selection
972
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/main.py#L618-L634
[ "def", "authorize_url", "(", "self", ")", ":", "url", ",", "self", ".", "state", "=", "self", ".", "oauth", ".", "authorization_url", "(", "'%sauthorize'", "%", "OAUTH_URL", ")", "return", "url" ]
This method simply saves the model components and the residual .
def save_fits(self, data, name):
    """Save a 2D image array to a FITS file.

    The data is reshaped to the conventional (1, 1, ny, nx) FITS cube
    layout and written with the header of the first input HDU.

    :param data: 2D numpy array (e.g. model components or residual).
    :param name: output filename.
    """
    # Use shape[1] for the second spatial axis -- the original used
    # shape[0] twice, which only worked for square images.
    data = data.reshape(1, 1, data.shape[0], data.shape[1])
    new_file = pyfits.PrimaryHDU(data, self.img_hdu_list[0].header)
    new_file.writeto("{}".format(name), overwrite=True)
973
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/main.py#L636-L646
[ "def", "quote", "(", "text", ")", ":", "text", "=", "unicode", "(", "text", ")", "return", "text", ".", "translate", "(", "{", "ord", "(", "'&'", ")", ":", "u'&amp;'", ",", "ord", "(", "'<'", ")", ":", "u'&lt;'", ",", "ord", "(", "'\"'", ")", ":", "u'&quot;'", ",", "ord", "(", "'>'", ")", ":", "u'&gt;'", ",", "ord", "(", "'@'", ")", ":", "u'&#64;'", ",", "0xa0", ":", "u'&nbsp;'", "}", ")" ]
Convenience function which creates a logger for the module .
def make_logger(self, level="INFO"):
    """Create a logger for the module, logging to file and console.

    The logger itself is set to DEBUG; both handlers filter at *level*.

    :param level: handler level name, e.g. "INFO" or "DEBUG".
    :return: the configured ``logging.Logger``.
    """
    handler_level = getattr(logging, level.upper())

    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)

    formatter = logging.Formatter(
        '%(asctime)s [%(levelname)s]: %(message)s',
        datefmt='[%m/%d/%Y] [%I:%M:%S]')

    file_handler = logging.FileHandler('PyMORESANE.log', mode='w')
    file_handler.setLevel(handler_level)
    file_handler.setFormatter(formatter)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(handler_level)
    console_handler.setFormatter(formatter)

    logger.addHandler(file_handler)
    logger.addHandler(console_handler)

    return logger
974
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/main.py#L648-L676
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
Start Text UI main loop
def text_ui(self):
    """Start the text UI main loop.

    Prefers an IPython-based shell; falls back to the plain interface
    when IPython is not importable. Cleans up the system after the loop
    exits.
    """
    self.logger.info("Starting command line interface")
    self.help()
    try:
        self.ipython_ui()
    except ImportError:
        # IPython is not installed -- use the basic fallback interface.
        self.fallback_ui()
    self.system.cleanup()
975
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/services/textui.py#L96-L106
[ "def", "scan", "(", "self", ",", "text", ")", ":", "self", ".", "pos", "=", "0", "self", ".", "text", "=", "text", "while", "self", ".", "pos", "<", "len", "(", "text", ")", ":", "m", "=", "self", ".", "wsregexp", ".", "match", "(", "text", ",", "self", ".", "pos", ")", "if", "m", "is", "not", "None", ":", "self", ".", "pos", "=", "m", ".", "end", "(", ")", "continue", "m", "=", "self", ".", "regexp", ".", "match", "(", "text", ",", "self", ".", "pos", ")", "if", "m", "is", "None", ":", "raise", "ParseError", "(", "\"unknown token %s\"", "%", "text", "[", "self", ".", "pos", ":", "]", ")", "self", ".", "pos", "=", "m", ".", "end", "(", ")", "yield", "(", "m", ".", "lastgroup", ",", "m", ".", "group", "(", "m", ".", "lastgroup", ")", ")" ]
Prepare headers for http communication .
def _prepare_headers(self, additional_headers=None, **kwargs):
    """Build the default HTTP headers for communication.

    :param additional_headers: optional mapping merged over the defaults.
    :return: dict containing the pyseaweed User-Agent plus any extras.
    """
    headers = {
        "User-Agent": "pyseaweed/{version}".format(version=__version__),
    }
    if additional_headers is not None:
        headers.update(additional_headers)
    return headers
976
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/utils.py#L21-L39
[ "def", "get_tracks_from_album", "(", "album_name", ")", ":", "spotify", "=", "spotipy", ".", "Spotify", "(", ")", "album", "=", "spotify", ".", "search", "(", "q", "=", "'album:'", "+", "album_name", ",", "limit", "=", "1", ")", "album_id", "=", "album", "[", "'tracks'", "]", "[", "'items'", "]", "[", "0", "]", "[", "'album'", "]", "[", "'id'", "]", "results", "=", "spotify", ".", "album_tracks", "(", "album_id", "=", "str", "(", "album_id", ")", ")", "songs", "=", "[", "]", "for", "items", "in", "results", "[", "'items'", "]", ":", "songs", ".", "append", "(", "items", "[", "'name'", "]", ")", "return", "songs" ]
Returns response to http HEAD on provided url
def head(self, url, *args, **kwargs):
    """Issue an HTTP HEAD request to *url*.

    :return: the response object on HTTP 200, otherwise None.
    """
    response = self._conn.head(url, headers=self._prepare_headers(**kwargs))
    return response if response.status_code == 200 else None
977
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/utils.py#L41-L48
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Gets data from url as text
def get_data(self, url, *args, **kwargs):
    """GET *url* and return the response body as text.

    :return: response text on HTTP 200, otherwise None.
    """
    response = self._conn.get(url, headers=self._prepare_headers(**kwargs))
    return response.text if response.status_code == 200 else None
978
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/utils.py#L50-L70
[ "def", "find_signature", "(", "self", ",", "data_stream", ",", "msg_signature", ")", ":", "signature_match_index", "=", "None", "# The message that will be returned if it matches the signature", "msg_signature", "=", "msg_signature", ".", "split", "(", ")", "# Split into list", "# convert to bytearray in order to be able to compare with the messages list which contains bytearrays", "msg_signature", "=", "bytearray", "(", "int", "(", "x", ",", "16", ")", "for", "x", "in", "msg_signature", ")", "# loop through each message returned from Russound", "index_of_last_f7", "=", "None", "for", "i", "in", "range", "(", "len", "(", "data_stream", ")", ")", ":", "if", "data_stream", "[", "i", "]", "==", "247", ":", "index_of_last_f7", "=", "i", "# the below line checks for the matching signature, ensuring ALL bytes of the response have been received", "if", "(", "data_stream", "[", "i", ":", "i", "+", "len", "(", "msg_signature", ")", "]", "==", "msg_signature", ")", "and", "(", "len", "(", "data_stream", ")", "-", "i", ">=", "24", ")", ":", "signature_match_index", "=", "i", "break", "if", "signature_match_index", "is", "None", ":", "# Scrap bytes up to end of msg (to avoid searching these again)", "data_stream", "=", "data_stream", "[", "index_of_last_f7", ":", "len", "(", "data_stream", ")", "]", "matching_message", "=", "None", "else", ":", "matching_message", "=", "data_stream", "[", "signature_match_index", ":", "len", "(", "data_stream", ")", "]", "_LOGGER", ".", "debug", "(", "\"Message signature found at location: %s\"", ",", "signature_match_index", ")", "return", "matching_message", ",", "data_stream" ]
Gets data from url as bytes
def get_raw_data(self, url, *args, **kwargs):
    """GET *url* and return the response body as bytes.

    :return: response content on HTTP 200, otherwise None.
    """
    response = self._conn.get(url, headers=self._prepare_headers(**kwargs))
    return response.content if response.status_code == 200 else None
979
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/utils.py#L72-L93
[ "def", "group_experiments_greedy", "(", "tomo_expt", ":", "TomographyExperiment", ")", ":", "diag_sets", "=", "_max_tpb_overlap", "(", "tomo_expt", ")", "grouped_expt_settings_list", "=", "list", "(", "diag_sets", ".", "values", "(", ")", ")", "grouped_tomo_expt", "=", "TomographyExperiment", "(", "grouped_expt_settings_list", ",", "program", "=", "tomo_expt", ".", "program", ")", "return", "grouped_tomo_expt" ]
Uploads file to provided url .
def post_file(self, url, filename, file_stream, *args, **kwargs):
    """Upload *file_stream* under *filename* to the provided *url*.

    :return: response text on HTTP 200 or 201, otherwise None.
    """
    response = self._conn.post(url, files={filename: file_stream},
                               headers=self._prepare_headers(**kwargs))
    return response.text if response.status_code in (200, 201) else None
980
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/utils.py#L95-L119
[ "def", "get_records", "(", "self", ")", ":", "form", "=", "self", ".", "request", ".", "form", "ar_count", "=", "self", ".", "get_ar_count", "(", ")", "records", "=", "[", "]", "# Group belonging AR fields together", "for", "arnum", "in", "range", "(", "ar_count", ")", ":", "record", "=", "{", "}", "s1", "=", "\"-{}\"", ".", "format", "(", "arnum", ")", "keys", "=", "filter", "(", "lambda", "key", ":", "s1", "in", "key", ",", "form", ".", "keys", "(", ")", ")", "for", "key", "in", "keys", ":", "new_key", "=", "key", ".", "replace", "(", "s1", ",", "\"\"", ")", "value", "=", "form", ".", "get", "(", "key", ")", "record", "[", "new_key", "]", "=", "value", "records", ".", "append", "(", "record", ")", "return", "records" ]
Deletes data under provided url
def delete_data(self, url, *args, **kwargs):
    """DELETE the resource at *url*.

    :return: True when the server answers HTTP 200 or 202, else False.
    """
    response = self._conn.delete(url, headers=self._prepare_headers(**kwargs))
    return response.status_code in (200, 202)
981
https://github.com/utek/pyseaweed/blob/218049329885425a2b8370157fa44952e64516be/pyseaweed/utils.py#L121-L140
[ "def", "record", "(", "self", ")", ":", "while", "True", ":", "frames", "=", "[", "]", "self", ".", "stream", ".", "start_stream", "(", ")", "for", "i", "in", "range", "(", "self", ".", "num_frames", ")", ":", "data", "=", "self", ".", "stream", ".", "read", "(", "self", ".", "config", ".", "FRAMES_PER_BUFFER", ")", "frames", ".", "append", "(", "data", ")", "self", ".", "output", ".", "seek", "(", "0", ")", "w", "=", "wave", ".", "open", "(", "self", ".", "output", ",", "'wb'", ")", "w", ".", "setnchannels", "(", "self", ".", "config", ".", "CHANNELS", ")", "w", ".", "setsampwidth", "(", "self", ".", "audio", ".", "get_sample_size", "(", "self", ".", "config", ".", "FORMAT", ")", ")", "w", ".", "setframerate", "(", "self", ".", "config", ".", "RATE", ")", "w", ".", "writeframes", "(", "b''", ".", "join", "(", "frames", ")", ")", "w", ".", "close", "(", ")", "yield" ]
Given a collection of Unicode diacritics, return a function that takes a string and returns the string without those diacritics.
def remove_diacritic(*diacritics):
    """Return a function that strips the given combining characters.

    The returned callable decomposes its argument (NFD), drops every code
    point listed in *diacritics*, and recomposes the result (NFC).
    """
    def _strip(text):
        decomposed = unicodedata.normalize("NFD", text)
        kept = "".join(ch for ch in decomposed if ch not in diacritics)
        return unicodedata.normalize("NFC", kept)
    return _strip
982
https://github.com/jtauber/greek-accentuation/blob/330796cd97f7c7adcbecbd05bd91be984f9b9f67/greek_accentuation/characters.py#L42-L53
[ "def", "MapFile", "(", "self", ",", "key_path_prefix", ",", "registry_file", ")", ":", "self", ".", "_registry_files", "[", "key_path_prefix", ".", "upper", "(", ")", "]", "=", "registry_file", "registry_file", ".", "SetKeyPathPrefix", "(", "key_path_prefix", ")" ]
Recursively merge all input dicts into a single dict .
def deep_merge(*dicts):
    """Recursively merge all input dicts into a single new dict.

    Later dicts take precedence over earlier ones. Nested dicts are merged
    recursively, and the dict structure is always copied, so the result
    shares no dict objects with the inputs.

    :return: the merged dict ({} when called with no arguments).
    :raises TypeError: if any argument is not a dict.
    """
    result = {}
    for d in dicts:
        if not isinstance(d, dict):
            # TypeError is the conventional exception for a wrong argument
            # type; it subclasses Exception, so existing callers that
            # caught the old generic Exception still work.
            raise TypeError('Can only deep_merge dicts, got {}'.format(d))
        for k, v in d.items():
            # Whenever the value is a dict, we deep_merge it. This ensures
            # that (a) we only ever merge dicts with dicts and (b) we
            # always get a deep(ish) copy of the dicts and are thus safe
            # from accidental mutations to shared state.
            if isinstance(v, dict):
                v = deep_merge(result.get(k, {}), v)
            result[k] = v
    return result
983
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L21-L37
[ "def", "checkIfAvailable", "(", "self", ",", "dateTime", "=", "timezone", ".", "now", "(", ")", ")", ":", "return", "(", "self", ".", "startTime", ">=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__closeBookingDays'", ")", ")", "and", "self", ".", "startTime", "<=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__openBookingDays'", ")", ")", "and", "not", "self", ".", "eventRegistration", "and", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "available", "or", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "tentative", "and", "getattr", "(", "getattr", "(", "self", ".", "temporaryEventRegistration", ",", "'registration'", ",", "None", ")", ",", "'expirationDate'", ",", "timezone", ".", "now", "(", ")", ")", "<=", "timezone", ".", "now", "(", ")", ")", ")", ")" ]
Create an instance of this resource definition .
def create(self, **kwargs):
    """Create an instance of this resource definition.

    Merges the definition's stored create kwargs with *kwargs* and asks
    the helper to create the underlying resource.

    :raises RuntimeError: if the resource has already been created.
    """
    if self.created:
        raise RuntimeError(
            '{} already created.'.format(self.__model_type__.__name__))
    merged_kwargs = self.merge_kwargs(self._create_kwargs, kwargs)
    self._inner = self.helper.create(
        self.name, *self._create_args, **merged_kwargs)
984
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L54-L67
[ "def", "readAnnotations", "(", "self", ")", ":", "annot", "=", "self", ".", "read_annotation", "(", ")", "annot", "=", "np", ".", "array", "(", "annot", ")", "if", "(", "annot", ".", "shape", "[", "0", "]", "==", "0", ")", ":", "return", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", "ann_time", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "0", "]", ")", "ann_text", "=", "annot", "[", ":", ",", "2", "]", "ann_text_out", "=", "[", "\"\"", "for", "x", "in", "range", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", "]", "for", "i", "in", "np", ".", "arange", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", ":", "ann_text_out", "[", "i", "]", "=", "self", ".", "_convert_string", "(", "ann_text", "[", "i", "]", ")", "if", "annot", "[", "i", ",", "1", "]", "==", "''", ":", "annot", "[", "i", ",", "1", "]", "=", "'-1'", "ann_duration", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "1", "]", ")", "return", "ann_time", "/", "10000000", ",", "ann_duration", ",", "np", ".", "array", "(", "ann_text_out", ")" ]
Remove an instance of this resource definition .
def remove(self, **kwargs):
    """Remove the created instance and drop the reference to it."""
    instance = self.inner()
    self.helper.remove(instance, **kwargs)
    self._inner = None
985
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L69-L74
[ "def", "readAnnotations", "(", "self", ")", ":", "annot", "=", "self", ".", "read_annotation", "(", ")", "annot", "=", "np", ".", "array", "(", "annot", ")", "if", "(", "annot", ".", "shape", "[", "0", "]", "==", "0", ")", ":", "return", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", "ann_time", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "0", "]", ")", "ann_text", "=", "annot", "[", ":", ",", "2", "]", "ann_text_out", "=", "[", "\"\"", "for", "x", "in", "range", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", "]", "for", "i", "in", "np", ".", "arange", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", ":", "ann_text_out", "[", "i", "]", "=", "self", ".", "_convert_string", "(", "ann_text", "[", "i", "]", ")", "if", "annot", "[", "i", ",", "1", "]", "==", "''", ":", "annot", "[", "i", ",", "1", "]", "=", "'-1'", "ann_duration", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "1", "]", ")", "return", "ann_time", "/", "10000000", ",", "ann_duration", ",", "np", ".", "array", "(", "ann_text_out", ")" ]
Set up this resource so that it is ready to be used in a test. If the resource has already been created, this call does nothing.
def setup(self, helper=None, **create_kwargs):
    """Make this resource ready for use in a test.

    Does nothing (and returns None) when the resource already exists;
    otherwise attaches *helper*, creates the resource, and returns self.
    """
    if not self.created:
        self.set_helper(helper)
        self.create(**create_kwargs)
        return self
986
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L76-L99
[ "def", "files_comments_delete", "(", "self", ",", "*", ",", "file", ":", "str", ",", "id", ":", "str", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "kwargs", ".", "update", "(", "{", "\"file\"", ":", "file", ",", "\"id\"", ":", "id", "}", ")", "return", "self", ".", "api_call", "(", "\"files.comments.delete\"", ",", "json", "=", "kwargs", ")" ]
A decorator to inject this container into a function as a test fixture .
def as_fixture(self, name=None):
    """Decorator injecting this container into a test as a fixture.

    The container is entered as a context manager for the duration of the
    decorated call and passed in as keyword *name* (defaults to the
    container's own name).
    """
    fixture_name = self.name if name is None else name

    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            with self:
                kwargs[fixture_name] = self
                return func(*args, **kwargs)
        return wrapped
    return decorator
987
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L147-L161
[ "def", "_GetKeyFromRegistry", "(", "self", ")", ":", "if", "not", "self", ".", "_registry", ":", "return", "try", ":", "self", ".", "_registry_key", "=", "self", ".", "_registry", ".", "GetKeyByPath", "(", "self", ".", "_key_path", ")", "except", "RuntimeError", ":", "pass", "if", "not", "self", ".", "_registry_key", ":", "return", "for", "sub_registry_key", "in", "self", ".", "_registry_key", ".", "GetSubkeys", "(", ")", ":", "self", ".", "AddSubkey", "(", "sub_registry_key", ")", "if", "self", ".", "_key_path", "==", "'HKEY_LOCAL_MACHINE\\\\System'", ":", "sub_registry_key", "=", "VirtualWinRegistryKey", "(", "'CurrentControlSet'", ",", "registry", "=", "self", ".", "_registry", ")", "self", ".", "AddSubkey", "(", "sub_registry_key", ")", "self", ".", "_registry", "=", "None" ]
Creates the container, starts it, and waits for it to start completely.
def setup(self, helper=None, **run_kwargs):
    """Create the container, start it, and wait until it is fully up.

    Does nothing (and returns None) when the container already exists;
    otherwise attaches *helper*, runs the container, and returns self.
    """
    if not self.created:
        self.set_helper(helper)
        self.run(**run_kwargs)
        self.wait_for_start()
        return self
988
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L246-L267
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Stop and remove the container if it exists .
def teardown(self):
    """Close all tracked HTTP clients, then halt the container if it exists."""
    while self._http_clients:
        client = self._http_clients.pop()
        client.close()
    if self.created:
        self.halt()
989
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L269-L276
[ "def", "_handle_fetch_response", "(", "self", ",", "request", ",", "send_time", ",", "response", ")", ":", "fetch_offsets", "=", "{", "}", "for", "topic", ",", "partitions", "in", "request", ".", "topics", ":", "for", "partition_data", "in", "partitions", ":", "partition", ",", "offset", "=", "partition_data", "[", ":", "2", "]", "fetch_offsets", "[", "TopicPartition", "(", "topic", ",", "partition", ")", "]", "=", "offset", "partitions", "=", "set", "(", "[", "TopicPartition", "(", "topic", ",", "partition_data", "[", "0", "]", ")", "for", "topic", ",", "partitions", "in", "response", ".", "topics", "for", "partition_data", "in", "partitions", "]", ")", "metric_aggregator", "=", "FetchResponseMetricAggregator", "(", "self", ".", "_sensors", ",", "partitions", ")", "# randomized ordering should improve balance for short-lived consumers", "random", ".", "shuffle", "(", "response", ".", "topics", ")", "for", "topic", ",", "partitions", "in", "response", ".", "topics", ":", "random", ".", "shuffle", "(", "partitions", ")", "for", "partition_data", "in", "partitions", ":", "tp", "=", "TopicPartition", "(", "topic", ",", "partition_data", "[", "0", "]", ")", "completed_fetch", "=", "CompletedFetch", "(", "tp", ",", "fetch_offsets", "[", "tp", "]", ",", "response", ".", "API_VERSION", ",", "partition_data", "[", "1", ":", "]", ",", "metric_aggregator", ")", "self", ".", "_completed_fetches", ".", "append", "(", "completed_fetch", ")", "if", "response", ".", "API_VERSION", ">=", "1", ":", "self", ".", "_sensors", ".", "fetch_throttle_time_sensor", ".", "record", "(", "response", ".", "throttle_time_ms", ")", "self", ".", "_sensors", ".", "fetch_latency", ".", "record", "(", "(", "time", ".", "time", "(", ")", "-", "send_time", ")", "*", "1000", ")" ]
Get the container s current status from Docker .
def status(self):
    """Return the container's current status from Docker.

    :return: the refreshed status string, or None when not created.
    """
    if not self.created:
        return None
    container = self.inner()
    container.reload()
    return container.status
990
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L278-L288
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Stop the container . The container must have been created .
def stop(self, timeout=5):
    """Stop the container (which must exist) and refresh its state.

    :param timeout: seconds to wait for a graceful stop before killing.
    """
    container = self.inner()
    container.stop(timeout=timeout)
    container.reload()
991
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L297-L306
[ "def", "_retrieve_offsets", "(", "self", ",", "timestamps", ",", "timeout_ms", "=", "float", "(", "\"inf\"", ")", ")", ":", "if", "not", "timestamps", ":", "return", "{", "}", "start_time", "=", "time", ".", "time", "(", ")", "remaining_ms", "=", "timeout_ms", "while", "remaining_ms", ">", "0", ":", "future", "=", "self", ".", "_send_offset_requests", "(", "timestamps", ")", "self", ".", "_client", ".", "poll", "(", "future", "=", "future", ",", "timeout_ms", "=", "remaining_ms", ")", "if", "future", ".", "succeeded", "(", ")", ":", "return", "future", ".", "value", "if", "not", "future", ".", "retriable", "(", ")", ":", "raise", "future", ".", "exception", "# pylint: disable-msg=raising-bad-type", "elapsed_ms", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "*", "1000", "remaining_ms", "=", "timeout_ms", "-", "elapsed_ms", "if", "remaining_ms", "<", "0", ":", "break", "if", "future", ".", "exception", ".", "invalid_metadata", ":", "refresh_future", "=", "self", ".", "_client", ".", "cluster", ".", "request_update", "(", ")", "self", ".", "_client", ".", "poll", "(", "future", "=", "refresh_future", ",", "timeout_ms", "=", "remaining_ms", ")", "else", ":", "time", ".", "sleep", "(", "self", ".", "config", "[", "'retry_backoff_ms'", "]", "/", "1000.0", ")", "elapsed_ms", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "*", "1000", "remaining_ms", "=", "timeout_ms", "-", "elapsed_ms", "raise", "Errors", ".", "KafkaTimeoutError", "(", "\"Failed to get offsets by timestamps in %s ms\"", "%", "(", "timeout_ms", ",", ")", ")" ]
Create the container and start it . Similar to docker run .
def run(self, fetch_image=True, **kwargs):
    """Create the container and start it. Similar to ``docker run``.

    :param fetch_image: forwarded to :meth:`create`.
    :param kwargs: additional creation keyword arguments.
    """
    self.create(fetch_image=fetch_image, **kwargs)
    self.start()
992
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L308-L319
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Wait for the container to start .
def wait_for_start(self):
    """Wait for the container to start.

    Combines all configured wait matchers into a single unordered matcher
    and blocks until matching log output appears. Returns immediately when
    no matchers are configured.
    """
    if self.wait_matchers:
        combined = UnorderedMatcher(*self.wait_matchers)
        self.wait_for_logs_matching(combined, timeout=self.wait_timeout)
993
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L321-L332
[ "def", "hicexplorer_basic_statistics", "(", "self", ")", ":", "data", "=", "{", "}", "for", "file", "in", "self", ".", "mod_data", ":", "max_distance_key", "=", "'Max rest. site distance'", "total_pairs", "=", "self", ".", "mod_data", "[", "file", "]", "[", "'Pairs considered'", "]", "[", "0", "]", "try", ":", "self", ".", "mod_data", "[", "file", "]", "[", "max_distance_key", "]", "[", "0", "]", "except", "KeyError", ":", "max_distance_key", "=", "'Max library insert size'", "data_", "=", "{", "'Pairs considered'", ":", "self", ".", "mod_data", "[", "file", "]", "[", "'Pairs considered'", "]", "[", "0", "]", ",", "'Pairs used'", ":", "self", ".", "mod_data", "[", "file", "]", "[", "'Pairs used'", "]", "[", "0", "]", "/", "total_pairs", ",", "'Mapped'", ":", "self", ".", "mod_data", "[", "file", "]", "[", "'One mate unmapped'", "]", "[", "0", "]", "/", "total_pairs", ",", "'Min rest. site distance'", ":", "self", ".", "mod_data", "[", "file", "]", "[", "'Min rest. site distance'", "]", "[", "0", "]", ",", "max_distance_key", ":", "self", ".", "mod_data", "[", "file", "]", "[", "max_distance_key", "]", "[", "0", "]", ",", "}", "data", "[", "self", ".", "mod_data", "[", "file", "]", "[", "'File'", "]", "[", "0", "]", "]", "=", "data_", "headers", "=", "OrderedDict", "(", ")", "headers", "[", "'Pairs considered'", "]", "=", "{", "'title'", ":", "'{} Pairs'", ".", "format", "(", "config", ".", "read_count_prefix", ")", ",", "'description'", ":", "'Total number of read pairs ({})'", ".", "format", "(", "config", ".", "read_count_desc", ")", ",", "'shared_key'", ":", "'read_count'", "}", "headers", "[", "'Pairs used'", "]", "=", "{", "'title'", ":", "'% Used pairs'", ",", "'max'", ":", "100", ",", "'min'", ":", "0", ",", "'modify'", ":", "lambda", "x", ":", "x", "*", "100", ",", "'suffix'", ":", "'%'", "}", "headers", "[", "'Mapped'", "]", "=", "{", "'title'", ":", "'% Mapped'", ",", "'max'", ":", "100", ",", "'min'", ":", "0", ",", "'modify'", ":", "lambda", "x", ":", 
"(", "1", "-", "x", ")", "*", "100", ",", "'scale'", ":", "'RdYlGn'", ",", "'suffix'", ":", "'%'", "}", "headers", "[", "'Min rest. site distance'", "]", "=", "{", "'title'", ":", "'Min RE dist'", ",", "'description'", ":", "'Minimum restriction site distance (bp)'", ",", "'format'", ":", "'{:.0f}'", ",", "'suffix'", ":", "' bp'", "}", "headers", "[", "max_distance_key", "]", "=", "{", "'title'", ":", "'Max RE dist'", ",", "'description'", ":", "max_distance_key", "+", "' (bp)'", ",", "'format'", ":", "'{:.0f}'", ",", "'suffix'", ":", "' bp'", "}", "self", ".", "general_stats_addcols", "(", "data", ",", "headers", ")" ]
Get container logs .
def get_logs(self, stdout=True, stderr=True, timestamps=False, tail='all',
             since=None):
    """Fetch container logs from Docker.

    All parameters are forwarded to the underlying container's ``logs``
    call.
    """
    container = self.inner()
    return container.logs(stdout=stdout, stderr=stderr,
                          timestamps=timestamps, tail=tail, since=since)
994
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L400-L410
[ "def", "has_uncacheable_headers", "(", "self", ",", "response", ")", ":", "cc_dict", "=", "get_header_dict", "(", "response", ",", "'Cache-Control'", ")", "if", "cc_dict", ":", "if", "'max-age'", "in", "cc_dict", "and", "cc_dict", "[", "'max-age'", "]", "==", "'0'", ":", "return", "True", "if", "'no-cache'", "in", "cc_dict", ":", "return", "True", "if", "'private'", "in", "cc_dict", ":", "return", "True", "if", "response", ".", "has_header", "(", "'Expires'", ")", ":", "if", "parse_http_date", "(", "response", "[", "'Expires'", "]", ")", "<", "time", ".", "time", "(", ")", ":", "return", "True", "return", "False" ]
Stream container output .
def stream_logs(self, stdout=True, stderr=True, tail='all', timeout=10.0):
    """Stream container output.

    Delegates to the module-level ``stream_logs`` helper with this
    container's low-level Docker object.
    """
    return stream_logs(self.inner(), stdout=stdout, stderr=stderr,
                       tail=tail, timeout=timeout)
995
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L412-L418
[ "def", "delete_entity", "(", "self", ",", "entity_id", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "api_path", "=", "'/v1/{mount_point}/entity/id/{id}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "id", "=", "entity_id", ",", ")", "return", "self", ".", "_adapter", ".", "delete", "(", "url", "=", "api_path", ",", ")" ]
Wait for logs matching the given matcher .
def wait_for_logs_matching(self, matcher, timeout=10, encoding='utf-8',
                           **logs_kwargs):
    """Wait for logs matching the given matcher.

    Delegates to the module-level ``wait_for_logs_matching`` helper with
    this container's low-level Docker object. Presumably raises in the
    helper on timeout -- confirm against seaworthy.logs.
    """
    wait_for_logs_matching(self.inner(), matcher, timeout=timeout,
                           encoding=encoding, **logs_kwargs)
996
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L420-L427
[ "def", "aux", "(", "self", ",", "aux", ")", ":", "if", "aux", "==", "self", ".", "_aux", ":", "return", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "if", "aux", "is", "not", "None", ":", "self", ".", "_aux", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "aux", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: aux port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "aux", ")", ")" ]
Construct an HTTP client for this container .
def http_client(self, port=None):
    """Construct an HTTP client for this container.

    The client is recorded in ``self._http_clients`` so it can be closed
    at teardown.

    :param port: container port to connect to (default handling is in
        ``ContainerHttpClient.for_container``).
    """
    # Local import to avoid potential circularity.
    from seaworthy.client import ContainerHttpClient
    client = ContainerHttpClient.for_container(self, container_port=port)
    self._http_clients.append(client)
    return client
997
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/definitions.py#L429-L437
[ "def", "noise", "(", "mesh", ",", "magnitude", "=", "None", ")", ":", "if", "magnitude", "is", "None", ":", "magnitude", "=", "mesh", ".", "scale", "/", "100.0", "random", "=", "(", "np", ".", "random", ".", "random", "(", "mesh", ".", "vertices", ".", "shape", ")", "-", ".5", ")", "*", "magnitude", "vertices_noise", "=", "mesh", ".", "vertices", ".", "copy", "(", ")", "+", "random", "# make sure we've re- ordered faces randomly", "triangles", "=", "np", ".", "random", ".", "permutation", "(", "vertices_noise", "[", "mesh", ".", "faces", "]", ")", "mesh_type", "=", "util", ".", "type_named", "(", "mesh", ",", "'Trimesh'", ")", "permutated", "=", "mesh_type", "(", "*", "*", "triangles_module", ".", "to_kwargs", "(", "triangles", ")", ")", "return", "permutated" ]
Prepare and dispatch a trait change event to a listener .
def _dispatch_change_event(self, object, trait_name, old, new, handler):
    """Prepare and dispatch a trait change event to a listener.

    The event is described to the pre-change tracer (when installed),
    dispatched to ``handler``, and then described to the post-change
    tracer with the exception that occurred, if any.
    """
    # Extract the arguments needed from the handler.
    handler_args = self.argument_transform(object, trait_name, old, new)

    # Send a description of the event to the change event tracer.
    pre_tracer = tnotifier._pre_change_event_tracer
    if pre_tracer is not None:
        pre_tracer(object, trait_name, old, new, handler)

    # Dispatch the event to the listener.
    from automate.common import SystemNotReady
    try:
        self.dispatch(handler, *handler_args)
    except SystemNotReady:
        # Deliberately ignored: the system is not ready to handle events yet.
        pass
    except Exception as exc:
        post_tracer = tnotifier._post_change_event_tracer
        if post_tracer is not None:
            post_tracer(object, trait_name, old, new, handler, exception=exc)
        # This call needs to be made inside the `except` block in case
        # the handler wants to re-raise the exception.
        tnotifier.handle_exception(object, trait_name, old, new)
    else:
        post_tracer = tnotifier._post_change_event_tracer
        if post_tracer is not None:
            post_tracer(object, trait_name, old, new, handler, exception=None)
998
https://github.com/tuomas2/automate/blob/d8a8cd03cd0da047e033a2d305f3f260f8c4e017/src/automate/traits_fixes.py#L59-L85
[ "async", "def", "load_cache", "(", "self", ",", "archive", ":", "bool", "=", "False", ")", "->", "int", ":", "LOGGER", ".", "debug", "(", "'Verifier.load_cache >>> archive: %s'", ",", "archive", ")", "rv", "=", "int", "(", "time", "(", ")", ")", "for", "s_id", "in", "self", ".", "cfg", ".", "get", "(", "'archive-on-close'", ",", "{", "}", ")", ".", "get", "(", "'schema_id'", ",", "{", "}", ")", ":", "with", "SCHEMA_CACHE", ".", "lock", ":", "await", "self", ".", "get_schema", "(", "s_id", ")", "for", "cd_id", "in", "self", ".", "cfg", ".", "get", "(", "'archive-on-close'", ",", "{", "}", ")", ".", "get", "(", "'cred_def_id'", ",", "{", "}", ")", ":", "with", "CRED_DEF_CACHE", ".", "lock", ":", "await", "self", ".", "get_cred_def", "(", "cd_id", ")", "for", "rr_id", "in", "self", ".", "cfg", ".", "get", "(", "'archive-on-close'", ",", "{", "}", ")", ".", "get", "(", "'rev_reg_id'", ",", "{", "}", ")", ":", "await", "self", ".", "_get_rev_reg_def", "(", "rr_id", ")", "with", "REVO_CACHE", ".", "lock", ":", "revo_cache_entry", "=", "REVO_CACHE", ".", "get", "(", "rr_id", ",", "None", ")", "if", "revo_cache_entry", ":", "try", ":", "await", "revo_cache_entry", ".", "get_state_json", "(", "self", ".", "_build_rr_state_json", ",", "rv", ",", "rv", ")", "except", "ClosedPool", ":", "LOGGER", ".", "warning", "(", "'Verifier %s is offline from pool %s, cannot update revo cache reg state for %s to %s'", ",", "self", ".", "wallet", ".", "name", ",", "self", ".", "pool", ".", "name", ",", "rr_id", ",", "rv", ")", "if", "archive", ":", "Caches", ".", "archive", "(", "self", ".", "dir_cache", ")", "LOGGER", ".", "debug", "(", "'Verifier.load_cache <<< %s'", ",", "rv", ")", "return", "rv" ]
Split value into value and exponent - of - 10 where exponent - of - 10 is a multiple of 3 . This corresponds to SI prefixes .
def split(value, precision=1):
    """Split ``value`` into a coefficient and an exponent-of-10, where the
    exponent is a multiple of 3 (corresponding to SI prefixes).

    Parameters
    ----------
    value : float
        Number to split.
    precision : int, optional
        Kept for backward compatibility with existing callers; the
        computation does not use it (the former ``digits`` bookkeeping
        derived from it was dead code and has been removed).

    Returns
    -------
    (float, int)
        Coefficient and exponent such that ``coefficient * 10**exponent``
        approximates ``value``, with ``exponent % 3 == 0``.
    """
    negative = False
    if value < 0.:
        value = -value
        negative = True
    elif value == 0.:
        # log10(0) is undefined; zero maps directly to (0.0, 0).
        return 0., 0

    expof10 = int(math.log10(value))
    if expof10 > 0:
        # Round the exponent down to the nearest multiple of 3.
        expof10 = (expof10 // 3) * 3
    else:
        # For values < 1, round the (negative) exponent to a multiple of -3.
        expof10 = (-expof10 + 3) // 3 * (-3)

    value *= 10 ** (-expof10)

    # Renormalize if scaling pushed the coefficient to 1000 or above.
    if value >= 1000.:
        value /= 1000.0
        expof10 += 3

    if negative:
        value *= -1

    return value, int(expof10)
999
https://github.com/cfobel/si-prefix/blob/274fdf47f65d87d0b7a2e3c80f267db63d042c59/si_prefix/__init__.py#L47-L106
[ "def", "load_cert_chain", "(", "self", ",", "certfile", ",", "keyfile", "=", "None", ")", ":", "self", ".", "_certfile", "=", "certfile", "self", ".", "_keyfile", "=", "keyfile" ]