Columns:
query: string, lengths 5 to 1.23k
positive: string, lengths 53 to 15.2k
id_: int64, values 0 to 252k
task_name: string, lengths 87 to 242
negative: sequence, lengths 20 to 553
Create a Solr search index for Yokozuna.

def create_search_index(self, index, schema=None, n_val=None, timeout=None):
    if not self.yz_wm_index:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_index_path(index)
    headers = {'Content-Type': 'application/json'}
    content_dict = dict()
    if schema:
        content_dict['schema'] = schema
    if n_val:
        content_dict['n_val'] = n_val
    if timeout:
        content_dict['timeout'] = timeout
    content = json.dumps(content_dict)
    # Run the request...
    status, _, _ = self._request('PUT', url, headers, content)
    if status != 204:
        raise RiakError('Error setting Search 2.0 index.')
    return True
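
A minimal usage sketch (hypothetical: assumes transport is an already-connected HTTP transport instance exposing this method; the index name, schema name, and n_val are illustrative):

    # Hypothetical call against a connected HTTP transport with
    # Yokozuna support; 'famous' and '_yz_default' are example names.
    transport.create_search_index('famous', schema='_yz_default', n_val=3)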
251,700
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L447-L483
[ "def", "_setup_from_data", "(", "self", ",", "data", ")", ":", "fitted_state", "=", "{", "}", "_raise_error_if_not_of_type", "(", "data", ",", "[", "_SFrame", "]", ")", "feature_columns", "=", "_internal_utils", ".", "get_column_names", "(", "data", ",", "self", ".", "_exclude", ",", "self", ".", "_features", ")", "if", "not", "feature_columns", ":", "raise", "RuntimeError", "(", "\"No valid feature columns specified in transformation.\"", ")", "fitted_state", "[", "\"features\"", "]", "=", "feature_columns", "################################################################################", "# Helper functions", "def", "get_valid_interpretations", "(", ")", ":", "return", "list", "(", "n", ".", "split", "(", "\"__\"", ")", "[", "0", "]", "for", "n", "in", "dir", "(", "_interpretations", ")", "if", "not", "n", ".", "startswith", "(", "\"_\"", ")", ")", "################################################################################", "# Check input data.", "if", "not", "isinstance", "(", "data", ",", "_SFrame", ")", ":", "raise", "TypeError", "(", "\"`data` parameter must be an SFrame.\"", ")", "all_col_names", "=", "set", "(", "feature_columns", ")", "column_interpretations", "=", "self", ".", "_get", "(", "\"column_interpretations\"", ")", ".", "copy", "(", ")", "# Make sure all the interpretations are valid.", "for", "k", ",", "v", "in", "column_interpretations", ".", "items", "(", ")", ":", "if", "k", "not", "in", "all_col_names", ":", "raise", "ValueError", "(", "\"Column '%s' in column_interpretations, but not found in `data`.\"", "%", "k", ")", "# Get the automatic column interpretations.", "for", "col_name", "in", "feature_columns", ":", "if", "col_name", "not", "in", "column_interpretations", ":", "n", "=", "column_interpretations", "[", "col_name", "]", "=", "infer_column_interpretation", "(", "data", "[", "col_name", "]", ")", "if", "n", ".", "startswith", "(", "\"unknown\"", ")", ":", "raise", "ValueError", "(", "\"Interpretation inference failed on column '%s'; %s\"", "%", "(", "col_name", ",", "n", "[", "len", "(", "\"unknown\"", ")", ":", "]", ".", "strip", "(", ")", ")", ")", "# Now, build up the feature transforms.", "transforms", "=", "{", "}", "input_types", "=", "{", "}", "output_column_prefix", "=", "self", ".", "_get", "(", "\"output_column_prefix\"", ")", "assert", "output_column_prefix", "is", "None", "or", "type", "(", "output_column_prefix", ")", "is", "str", "tr_chain", "=", "[", "]", "for", "col_name", "in", "feature_columns", ":", "in_type", "=", "input_types", "[", "col_name", "]", "=", "data", "[", "col_name", "]", ".", "dtype", "intr_func", "=", "_get_interpretation_function", "(", "column_interpretations", "[", "col_name", "]", ",", "in_type", ")", "tr_list", "=", "intr_func", "(", "col_name", ",", "output_column_prefix", ")", "transforms", "[", "col_name", "]", "=", "tr_list", "tr_chain", "+=", "tr_list", "fitted_state", "[", "\"transform_chain\"", "]", "=", "_TransformerChain", "(", "tr_chain", ")", "fitted_state", "[", "\"transforms\"", "]", "=", "transforms", "fitted_state", "[", "\"input_types\"", "]", "=", "input_types", "fitted_state", "[", "\"column_interpretations\"", "]", "=", "column_interpretations", "self", ".", "__proxy__", ".", "update", "(", "fitted_state", ")" ]
Return a list of Solr search indexes from Yokozuna.

def list_search_indexes(self):
    if not self.yz_wm_index:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_index_path()
    # Run the request...
    status, headers, body = self._request('GET', url)
    if status == 200:
        json_data = json.loads(bytes_to_str(body))
        # Return a list of dictionaries
        return json_data
    else:
        raise RiakError('Error getting Search 2.0 index.')
251,701
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L508-L528
[ "def", "validateExtractOptions", "(", "options", ")", ":", "if", "not", "options", ".", "pattern", "and", "not", "options", ".", "pattern2", ":", "if", "not", "options", ".", "read2_in", ":", "U", ".", "error", "(", "\"Must supply --bc-pattern for single-end\"", ")", "else", ":", "U", ".", "error", "(", "\"Must supply --bc-pattern and/or --bc-pattern2 \"", "\"if paired-end \"", ")", "if", "options", ".", "pattern2", ":", "if", "not", "options", ".", "read2_in", ":", "U", ".", "error", "(", "\"must specify a paired fastq ``--read2-in``\"", ")", "if", "not", "options", ".", "pattern2", ":", "options", ".", "pattern2", "=", "options", ".", "pattern", "extract_cell", "=", "False", "extract_umi", "=", "False", "# If the pattern is a regex we can compile the regex(es) prior to", "# ExtractFilterAndUpdate instantiation", "if", "options", ".", "extract_method", "==", "\"regex\"", ":", "if", "options", ".", "pattern", ":", "try", ":", "options", ".", "pattern", "=", "regex", ".", "compile", "(", "options", ".", "pattern", ")", "except", "regex", ".", "error", ":", "U", ".", "error", "(", "\"--bc-pattern '%s' is not a \"", "\"valid regex\"", "%", "options", ".", "pattern", ")", "if", "options", ".", "pattern2", ":", "try", ":", "options", ".", "pattern2", "=", "regex", ".", "compile", "(", "options", ".", "pattern2", ")", "except", "regex", ".", "Error", ":", "U", ".", "error", "(", "\"--bc-pattern2 '%s' is not a \"", "\"valid regex\"", "%", "options", ".", "pattern2", ")", "# check whether the regex contains a umi group(s) and cell groups(s)", "if", "options", ".", "extract_method", "==", "\"regex\"", ":", "if", "options", ".", "pattern", ":", "for", "group", "in", "options", ".", "pattern", ".", "groupindex", ":", "if", "group", ".", "startswith", "(", "\"cell_\"", ")", ":", "extract_cell", "=", "True", "elif", "group", ".", "startswith", "(", "\"umi_\"", ")", ":", "extract_umi", "=", "True", "if", "options", ".", "pattern2", ":", "for", "group", "in", "options", ".", "pattern2", ".", "groupindex", ":", "if", "group", ".", "startswith", "(", "\"cell_\"", ")", ":", "extract_cell", "=", "True", "elif", "group", ".", "startswith", "(", "\"umi_\"", ")", ":", "extract_umi", "=", "True", "# check whether the pattern string contains umi/cell bases", "elif", "options", ".", "extract_method", "==", "\"string\"", ":", "if", "options", ".", "pattern", ":", "if", "\"C\"", "in", "options", ".", "pattern", ":", "extract_cell", "=", "True", "if", "\"N\"", "in", "options", ".", "pattern", ":", "extract_umi", "=", "True", "if", "options", ".", "pattern2", ":", "if", "\"C\"", "in", "options", ".", "pattern2", ":", "extract_cell", "=", "True", "if", "\"N\"", "in", "options", ".", "pattern2", ":", "extract_umi", "=", "True", "if", "not", "extract_umi", ":", "if", "options", ".", "extract_method", "==", "\"string\"", ":", "U", ".", "error", "(", "\"barcode pattern(s) do not include any umi bases \"", "\"(marked with 'Ns') %s, %s\"", "%", "(", "options", ".", "pattern", ",", "options", ".", "pattern2", ")", ")", "elif", "options", ".", "extract_method", "==", "\"regex\"", ":", "U", ".", "error", "(", "\"barcode regex(es) do not include any umi groups \"", "\"(starting with 'umi_') %s, %s\"", "(", "options", ".", "pattern", ",", "options", ".", "pattern2", ")", ")", "return", "(", "extract_cell", ",", "extract_umi", ")" ]
Create a new Solr schema for Yokozuna.

def create_search_schema(self, schema, content):
    if not self.yz_wm_schema:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_schema_path(schema)
    headers = {'Content-Type': 'application/xml'}
    # Run the request...
    status, header, body = self._request('PUT', url, headers, content)
    if status != 204:
        raise RiakError('Error creating Search 2.0 schema.')
    return True
251,702
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L552-L575
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "[", "[", "(", "\"mutation_burden\"", ",", ")", ",", "\"_mutation_burden.csv\"", "]", ",", "[", "(", "\"plot\"", ",", "\"signatures\"", ")", ",", "\"_signatures.pdf\"", "]", ",", "[", "(", "\"signatures\"", ",", ")", ",", "\"_signatures.csv\"", "]", "]", ":", "cur_file", "=", "\"%s%s\"", "%", "(", "out_base", ",", "ext", ")", "out", "=", "tz", ".", "update_in", "(", "out", ",", "key", ",", "lambda", "x", ":", "cur_file", ")", "all_files", ".", "append", "(", "os", ".", "path", ".", "basename", "(", "cur_file", ")", ")", "return", "out_base", ",", "out", ",", "all_files" ]
Fetch a Solr schema from Yokozuna.

def get_search_schema(self, schema):
    if not self.yz_wm_schema:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_schema_path(schema)
    # Run the request...
    status, _, body = self._request('GET', url)
    if status == 200:
        result = {}
        result['name'] = schema
        result['content'] = bytes_to_str(body)
        return result
    else:
        raise RiakError('Error getting Search 2.0 schema.')
251,703
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L577-L600
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "[", "[", "(", "\"mutation_burden\"", ",", ")", ",", "\"_mutation_burden.csv\"", "]", ",", "[", "(", "\"plot\"", ",", "\"signatures\"", ")", ",", "\"_signatures.pdf\"", "]", ",", "[", "(", "\"signatures\"", ",", ")", ",", "\"_signatures.csv\"", "]", "]", ":", "cur_file", "=", "\"%s%s\"", "%", "(", "out_base", ",", "ext", ")", "out", "=", "tz", ".", "update_in", "(", "out", ",", "key", ",", "lambda", "x", ":", "cur_file", ")", "all_files", ".", "append", "(", "os", ".", "path", ".", "basename", "(", "cur_file", ")", ")", "return", "out_base", ",", "out", ",", "all_files" ]
Performs a search query.

def search(self, index, query, **params):
    if index is None:
        index = 'search'
    options = {}
    if 'op' in params:
        op = params.pop('op')
        options['q.op'] = op
    options.update(params)
    url = self.solr_select_path(index, query, **options)
    status, headers, data = self._request('GET', url)
    self.check_http_code(status, [200])
    if 'json' in headers['content-type']:
        results = json.loads(bytes_to_str(data))
        return self._normalize_json_search_response(results)
    elif 'xml' in headers['content-type']:
        return self._normalize_xml_search_response(data)
    else:
        raise ValueError("Could not decode search response")
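
A hedged usage sketch (assumes the same connected transport; the index name, query string, and extra Solr parameters are illustrative):

    # Hypothetical call; extra keyword arguments pass through to Solr,
    # and 'op' becomes the 'q.op' option as shown above.
    results = transport.search('famous', 'name_s:Lion*', op='and', rows=10)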
251,704
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L602-L624
[ "def", "reassign_comment_to_book", "(", "self", ",", "comment_id", ",", "from_book_id", ",", "to_book_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin", "self", ".", "assign_comment_to_book", "(", "comment_id", ",", "to_book_id", ")", "try", ":", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "from_book_id", ")", "except", ":", "# something went wrong, roll back assignment to to_book_id", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "to_book_id", ")", "raise" ]
Adds documents to the search index.

def fulltext_add(self, index, docs):
    xml = Document()
    root = xml.createElement('add')
    for doc in docs:
        doc_element = xml.createElement('doc')
        for key in doc:
            value = doc[key]
            field = xml.createElement('field')
            field.setAttribute("name", key)
            text = xml.createTextNode(value)
            field.appendChild(text)
            doc_element.appendChild(field)
        root.appendChild(doc_element)
    xml.appendChild(root)
    self._request('POST', self.solr_update_path(index),
                  {'Content-Type': 'text/xml'},
                  xml.toxml().encode('utf-8'))
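
A sketch of the expected input (assumption, based on the loop above: each document is a flat dict of field names to string values, serialized into Solr <add><doc> XML):

    # Hypothetical documents; each dict key becomes a <field name=...>.
    docs = [
        {'id': 'doc1', 'title': 'The Lion King'},
        {'id': 'doc2', 'title': 'Hamlet'},
    ]
    transport.fulltext_add('famous', docs)  # transport assumed connected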
251,705
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L626-L646
[ "def", "read_pgroups", "(", "in_file", ")", ":", "out", "=", "{", "}", "with", "open", "(", "in_file", ")", "as", "in_handle", ":", "for", "line", "in", "(", "l", "for", "l", "in", "in_handle", "if", "not", "l", ".", "startswith", "(", "\"#\"", ")", ")", ":", "locus", ",", "alleles", ",", "group", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "\";\"", ")", "for", "allele", "in", "alleles", ".", "split", "(", "\"/\"", ")", ":", "out", "[", "\"HLA-%s%s\"", "%", "(", "locus", ",", "allele", ")", "]", "=", "group", "return", "out" ]
Removes documents from the full-text index.

def fulltext_delete(self, index, docs=None, queries=None):
    xml = Document()
    root = xml.createElement('delete')
    if docs:
        for doc in docs:
            doc_element = xml.createElement('id')
            text = xml.createTextNode(doc)
            doc_element.appendChild(text)
            root.appendChild(doc_element)
    if queries:
        for query in queries:
            query_element = xml.createElement('query')
            text = xml.createTextNode(query)
            query_element.appendChild(text)
            root.appendChild(query_element)
    xml.appendChild(root)
    self._request('POST', self.solr_update_path(index),
                  {'Content-Type': 'text/xml'},
                  xml.toxml().encode('utf-8'))
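
A hedged sketch (same assumed transport; the ids and query string are illustrative):

    # Delete by document id, by Solr query, or both in one request.
    transport.fulltext_delete('famous', docs=['doc1', 'doc2'],
                              queries=['title:Hamlet'])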
251,706
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L648-L671
[ "def", "notifications_dismiss", "(", "self", ",", "id", ")", ":", "id", "=", "self", ".", "__unpack_id", "(", "id", ")", "params", "=", "self", ".", "__generate_params", "(", "locals", "(", ")", ")", "self", ".", "__api_request", "(", "'POST'", ",", "'/api/v1/notifications/dismiss'", ",", "params", ")" ]
Releases this resource back to the pool it came from.

def release(self):
    if self.errored:
        self.pool.delete_resource(self)
    else:
        self.pool.release(self)
251,707
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L76-L83
[ "def", "_ion_equals", "(", "a", ",", "b", ",", "timestamp_comparison_func", ",", "recursive_comparison_func", ")", ":", "for", "a", ",", "b", "in", "(", "(", "a", ",", "b", ")", ",", "(", "b", ",", "a", ")", ")", ":", "# Ensures that operand order does not matter.", "if", "isinstance", "(", "a", ",", "_IonNature", ")", ":", "if", "isinstance", "(", "b", ",", "_IonNature", ")", ":", "# Both operands have _IonNature. Their IonTypes and annotations must be equivalent.", "eq", "=", "a", ".", "ion_type", "is", "b", ".", "ion_type", "and", "_annotations_eq", "(", "a", ",", "b", ")", "else", ":", "# Only one operand has _IonNature. It cannot be equivalent to the other operand if it has annotations.", "eq", "=", "not", "a", ".", "ion_annotations", "if", "eq", ":", "if", "isinstance", "(", "a", ",", "IonPyList", ")", ":", "return", "_sequences_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "IonPyDict", ")", ":", "return", "_structs_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "IonPyTimestamp", ")", ":", "return", "timestamp_comparison_func", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "IonPyNull", ")", ":", "return", "isinstance", "(", "b", ",", "IonPyNull", ")", "or", "(", "b", "is", "None", "and", "a", ".", "ion_type", "is", "IonType", ".", "NULL", ")", "elif", "isinstance", "(", "a", ",", "IonPySymbol", ")", "or", "(", "isinstance", "(", "a", ",", "IonPyText", ")", "and", "a", ".", "ion_type", "is", "IonType", ".", "SYMBOL", ")", ":", "return", "_symbols_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "IonPyDecimal", ")", ":", "return", "_decimals_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "IonPyFloat", ")", ":", "return", "_floats_eq", "(", "a", ",", "b", ")", "else", ":", "return", "a", "==", "b", "return", "False", "# Reaching this point means that neither operand has _IonNature.", "for", "a", ",", "b", "in", "(", "(", "a", ",", "b", ")", ",", "(", "b", ",", "a", ")", ")", ":", "# Ensures that operand order does not matter.", "if", "isinstance", "(", "a", ",", "list", ")", ":", "return", "_sequences_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "dict", ")", ":", "return", "_structs_eq", "(", "a", ",", "b", ",", "recursive_comparison_func", ")", "elif", "isinstance", "(", "a", ",", "datetime", ")", ":", "return", "timestamp_comparison_func", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "SymbolToken", ")", ":", "return", "_symbols_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "Decimal", ")", ":", "return", "_decimals_eq", "(", "a", ",", "b", ")", "elif", "isinstance", "(", "a", ",", "float", ")", ":", "return", "_floats_eq", "(", "a", ",", "b", ")", "return", "a", "==", "b" ]
Deletes the resource from the pool and destroys the associated resource. Not usually needed by users of the pool, but called internally when BadResource is raised.

def delete_resource(self, resource):
    with self.lock:
        self.resources.remove(resource)
    self.destroy_resource(resource.object)
    del resource
251,708
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L209-L221
[ "def", "benchmark_setup", "(", "self", ")", ":", "def", "f", "(", ")", ":", "self", ".", "_setup", "(", ")", "self", ".", "mod_ext", ".", "synchronize", "(", "*", "*", "self", ".", "ext_kwargs", ")", "f", "(", ")", "# Ignore first", "self", ".", "setup_stat", "=", "self", ".", "_calc_benchmark_stat", "(", "f", ")" ]
Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject.

def encode_timeseries_put(self, tsobj):
    if tsobj.columns:
        raise NotImplementedError('columns are not used')
    if tsobj.rows and isinstance(tsobj.rows, list):
        req_rows = []
        for row in tsobj.rows:
            req_r = []
            for cell in row:
                req_r.append(self.encode_to_ts_cell(cell))
            req_rows.append(tuple(req_r))
        req = tsputreq_a, tsobj.table.name, [], req_rows
        mc = MSG_CODE_TS_TTB_MSG
        rc = MSG_CODE_TS_TTB_MSG
        return Msg(mc, encode(req), rc)
    else:
        raise RiakError("TsObject requires a list of rows")
251,709
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L116-L140
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "private_file", "=", "self", ".", "get_private_file", "(", ")", "if", "not", "self", ".", "can_access_file", "(", "private_file", ")", ":", "return", "HttpResponseForbidden", "(", "'Private storage access denied'", ")", "if", "not", "private_file", ".", "exists", "(", ")", ":", "return", "self", ".", "serve_file_not_found", "(", "private_file", ")", "else", ":", "return", "self", ".", "serve_file", "(", "private_file", ")" ]
Decodes a TTB-encoded TsRow into a list.

def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False):
    row = []
    for i, cell in enumerate(tsrow):
        if cell is None:
            row.append(None)
        elif isinstance(cell, list) and len(cell) == 0:
            row.append(None)
        else:
            if convert_timestamp and tsct[i] == timestamp_a:
                row.append(datetime_from_unix_time_millis(cell))
            else:
                row.append(cell)
    return row
251,710
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L205-L228
[ "def", "add_configuration_file", "(", "self", ",", "file_name", ")", ":", "logger", ".", "info", "(", "'adding %s to configuration files'", ",", "file_name", ")", "if", "file_name", "not", "in", "self", ".", "configuration_files", "and", "self", ".", "_inotify", ":", "self", ".", "_watch_manager", ".", "add_watch", "(", "file_name", ",", "pyinotify", ".", "IN_MODIFY", ")", "if", "os", ".", "access", "(", "file_name", ",", "os", ".", "R_OK", ")", ":", "self", ".", "configuration_files", "[", "file_name", "]", "=", "SafeConfigParser", "(", ")", "self", ".", "configuration_files", "[", "file_name", "]", ".", "read", "(", "file_name", ")", "else", ":", "logger", ".", "warn", "(", "'could not read %s'", ",", "file_name", ")", "warnings", ".", "warn", "(", "'could not read {}'", ".", "format", "(", "file_name", ")", ",", "ResourceWarning", ")" ]
Extracts the modification operation from the set.

def to_op(self):
    if not self._adds and not self._removes:
        return None
    changes = {}
    if self._adds:
        changes['adds'] = list(self._adds)
    if self._removes:
        changes['removes'] = list(self._removes)
    return changes
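
A sketch of the resulting payload (assumption: a riak.datatypes Set with staged add()/discard() calls backing the _adds/_removes attributes read above):

    # Hypothetical staged state:
    #   myset.add('a'); myset.add('b'); myset.discard('c')
    # to_op() would then return:
    #   {'adds': ['a', 'b'], 'removes': ['c']}
    # and it returns None when nothing is staged.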
251,711
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L60-L73
[ "def", "density_hub", "(", "self", ",", "weather_df", ")", ":", "if", "self", ".", "density_model", "!=", "'interpolation_extrapolation'", ":", "temperature_hub", "=", "self", ".", "temperature_hub", "(", "weather_df", ")", "# Calculation of density in kg/m³ at hub height", "if", "self", ".", "density_model", "==", "'barometric'", ":", "logging", ".", "debug", "(", "'Calculating density using barometric height '", "'equation.'", ")", "closest_height", "=", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "min", "(", "range", "(", "len", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", ")", ")", ",", "key", "=", "lambda", "i", ":", "abs", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "i", "]", "-", "self", ".", "power_plant", ".", "hub_height", ")", ")", "]", "density_hub", "=", "density", ".", "barometric", "(", "weather_df", "[", "'pressure'", "]", "[", "closest_height", "]", ",", "closest_height", ",", "self", ".", "power_plant", ".", "hub_height", ",", "temperature_hub", ")", "elif", "self", ".", "density_model", "==", "'ideal_gas'", ":", "logging", ".", "debug", "(", "'Calculating density using ideal gas equation.'", ")", "closest_height", "=", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "min", "(", "range", "(", "len", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", ")", ")", ",", "key", "=", "lambda", "i", ":", "abs", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "i", "]", "-", "self", ".", "power_plant", ".", "hub_height", ")", ")", "]", "density_hub", "=", "density", ".", "ideal_gas", "(", "weather_df", "[", "'pressure'", "]", "[", "closest_height", "]", ",", "closest_height", ",", "self", ".", "power_plant", ".", "hub_height", ",", "temperature_hub", ")", "elif", "self", ".", "density_model", "==", "'interpolation_extrapolation'", ":", "logging", ".", "debug", "(", "'Calculating density using linear inter- or '", "'extrapolation.'", ")", "density_hub", "=", "tools", ".", "linear_interpolation_extrapolation", "(", "weather_df", "[", "'density'", "]", ",", "self", ".", "power_plant", ".", "hub_height", ")", "else", ":", "raise", "ValueError", "(", "\"'{0}' is an invalid value. \"", ".", "format", "(", "self", ".", "density_model", ")", "+", "\"`density_model` \"", "+", "\"must be 'barometric', 'ideal_gas' or \"", "+", "\"'interpolation_extrapolation'.\"", ")", "return", "density_hub" ]
Removes an element from the set.

def discard(self, element):
    _check_element(element)
    self._require_context()
    self._removes.add(element)
251,712
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L101-L113
[ "def", "regenerate_recovery_code", "(", "self", ",", "user_id", ")", ":", "url", "=", "self", ".", "_url", "(", "'{}/recovery-code-regeneration'", ".", "format", "(", "user_id", ")", ")", "return", "self", ".", "client", ".", "post", "(", "url", ")" ]
Get one value matching the key, raising a KeyError if multiple values were found.

def getone(self, key):
    v = self.getall(key)
    if not v:
        raise KeyError('Key not found: %r' % key)
    if len(v) > 1:
        raise KeyError('Multiple values match %r: %r' % (key, v))
    return v[0]
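
A small illustration (assumption: this is riak.multidict.MultiDict and it supports add(), as elsewhere in that module):

    md = MultiDict()          # hypothetical instance
    md.add('color', 'red')
    md.getone('color')        # -> 'red'
    md.add('color', 'blue')
    md.getone('color')        # now raises KeyError: multiple values match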
251,713
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L73-L83
[ "def", "api_download", "(", "service", ",", "fileId", ",", "authorisation", ")", ":", "data", "=", "tempfile", ".", "SpooledTemporaryFile", "(", "max_size", "=", "SPOOL_SIZE", ",", "mode", "=", "'w+b'", ")", "headers", "=", "{", "'Authorization'", ":", "'send-v1 '", "+", "unpadded_urlsafe_b64encode", "(", "authorisation", ")", "}", "url", "=", "service", "+", "'api/download/'", "+", "fileId", "r", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ",", "stream", "=", "True", ")", "r", ".", "raise_for_status", "(", ")", "content_length", "=", "int", "(", "r", ".", "headers", "[", "'Content-length'", "]", ")", "pbar", "=", "progbar", "(", "content_length", ")", "for", "chunk", "in", "r", ".", "iter_content", "(", "chunk_size", "=", "CHUNK_SIZE", ")", ":", "data", ".", "write", "(", "chunk", ")", "pbar", ".", "update", "(", "len", "(", "chunk", ")", ")", "pbar", ".", "close", "(", ")", "data", ".", "seek", "(", "0", ")", "return", "data" ]
Returns a dictionary where each key is associated with a list of values.

def dict_of_lists(self):
    result = {}
    for key, value in self._items:
        if key in result:
            result[key].append(value)
        else:
            result[key] = [value]
    return result
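
A worked example of the grouping behavior (derived directly from the loop above over the internal (key, value) pairs):

    # Given internal items [('a', 1), ('b', 2), ('a', 3)],
    # dict_of_lists() returns {'a': [1, 3], 'b': [2]}.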
251,714
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L108-L119
[ "def", "_at", "(", "self", ",", "t", ")", ":", "rITRF", ",", "vITRF", ",", "error", "=", "self", ".", "ITRF_position_velocity_error", "(", "t", ")", "rGCRS", ",", "vGCRS", "=", "ITRF_to_GCRS2", "(", "t", ",", "rITRF", ",", "vITRF", ")", "return", "rGCRS", ",", "vGCRS", ",", "rGCRS", ",", "error" ]
Enqueues a fetch task to the pool of workers. This will raise a RuntimeError if the pool is stopped or in the process of stopping.

def enq(self, task):
    if not self._stop.is_set():
        self._inq.put(task)
    else:
        raise RuntimeError("Attempted to enqueue an operation while "
                           "multi pool was shutdown!")
251,715
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L73-L86
[ "def", "parse_files", "(", "self", ")", ":", "log_re", "=", "self", ".", "log_format_regex", "log_lines", "=", "[", "]", "for", "log_file", "in", "self", ".", "matching_files", "(", ")", ":", "with", "open", "(", "log_file", ")", "as", "f", ":", "matches", "=", "re", ".", "finditer", "(", "log_re", ",", "f", ".", "read", "(", ")", ")", "for", "match", "in", "matches", ":", "log_lines", ".", "append", "(", "match", ".", "groupdict", "(", ")", ")", "return", "log_lines" ]
Starts the worker threads if they are not already started. This method is thread-safe and will be called automatically when executing an operation.

def start(self):
    # Check whether we are already started, skip if we are.
    if not self._started.is_set():
        # If we are not started, try to capture the lock.
        if self._lock.acquire(False):
            # If we got the lock, go ahead and start the worker
            # threads, set the started flag, and release the lock.
            for i in range(self._size):
                name = "riak.client.multi-worker-{0}-{1}".format(
                    self._name, i)
                worker = Thread(target=self._worker_method, name=name)
                worker.daemon = False
                worker.start()
                self._workers.append(worker)
            self._started.set()
            self._lock.release()
        else:
            # We didn't get the lock, so someone else is already
            # starting the worker threads. Wait until they have
            # signaled that the threads are started.
            self._started.wait()
251,716
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L88-L113
[ "def", "update_repodata", "(", "self", ",", "channels", "=", "None", ")", ":", "norm_channels", "=", "self", ".", "conda_get_condarc_channels", "(", "channels", "=", "channels", ",", "normalize", "=", "True", ")", "repodata_urls", "=", "self", ".", "_set_repo_urls_from_channels", "(", "norm_channels", ")", "self", ".", "_check_repos", "(", "repodata_urls", ")" ]
Signals the worker threads to exit and waits on them.

def stop(self):
    if not self.stopped():
        self._stop.set()
        for worker in self._workers:
            worker.join()
251,717
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L115-L122
[ "def", "characterize_local_files", "(", "filedir", ",", "max_bytes", "=", "MAX_FILE_DEFAULT", ")", ":", "file_data", "=", "{", "}", "logging", ".", "info", "(", "'Characterizing files in {}'", ".", "format", "(", "filedir", ")", ")", "for", "filename", "in", "os", ".", "listdir", "(", "filedir", ")", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "filedir", ",", "filename", ")", "file_stats", "=", "os", ".", "stat", "(", "filepath", ")", "creation_date", "=", "arrow", ".", "get", "(", "file_stats", ".", "st_ctime", ")", ".", "isoformat", "(", ")", "file_size", "=", "file_stats", ".", "st_size", "if", "file_size", "<=", "max_bytes", ":", "file_md5", "=", "hashlib", ".", "md5", "(", ")", "with", "open", "(", "filepath", ",", "\"rb\"", ")", "as", "f", ":", "for", "chunk", "in", "iter", "(", "lambda", ":", "f", ".", "read", "(", "4096", ")", ",", "b\"\"", ")", ":", "file_md5", ".", "update", "(", "chunk", ")", "md5", "=", "file_md5", ".", "hexdigest", "(", ")", "file_data", "[", "filename", "]", "=", "{", "'tags'", ":", "guess_tags", "(", "filename", ")", ",", "'description'", ":", "''", ",", "'md5'", ":", "md5", ",", "'creation_date'", ":", "creation_date", ",", "}", "return", "file_data" ]
Ensures well-formedness of a key.

def _check_key(self, key):
    if not len(key) == 2:
        raise TypeError('invalid key: %r' % key)
    elif key[1] not in TYPES:
        raise TypeError('invalid datatype: %s' % key[1])
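
An illustration of the key shape this enforces (assumption: TYPES is the datatype registry from riak.datatypes, so a valid key pairs a field name with a datatype name):

    m._check_key(('followers', 'counter'))   # passes: 2-tuple, known type
    m._check_key(('followers',))             # TypeError: invalid key
    m._check_key(('followers', 'stack'))     # TypeError: invalid datatype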
251,718
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L227-L234
[ "def", "getSrcBlocks", "(", "self", ",", "url", ",", "dataset", "=", "\"\"", ",", "block", "=", "\"\"", ")", ":", "if", "block", ":", "params", "=", "{", "'block_name'", ":", "block", ",", "'open_for_writing'", ":", "0", "}", "elif", "dataset", ":", "params", "=", "{", "'dataset'", ":", "dataset", ",", "'open_for_writing'", ":", "0", "}", "else", ":", "m", "=", "'DBSMigration: Invalid input. Either block or dataset name has to be provided'", "e", "=", "'DBSMigrate/getSrcBlocks: Invalid input. Either block or dataset name has to be provided'", "dbsExceptionHandler", "(", "'dbsException-invalid-input2'", ",", "m", ",", "self", ".", "logger", ".", "exception", ",", "e", ")", "return", "cjson", ".", "decode", "(", "self", ".", "callDBSService", "(", "url", ",", "'blocks'", ",", "params", ",", "{", "}", ")", ")" ]
Whether the map has staged local modifications.

def modified(self):
    if self._removes:
        return True
    for v in self._value:
        if self._value[v].modified:
            return True
    for v in self._updates:
        if self._updates[v].modified:
            return True
    return False
251,719
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L252-L264
[ "def", "get_keyvault", "(", "access_token", ",", "subscription_id", ",", "rgname", ",", "vault_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourcegroups/'", ",", "rgname", ",", "'/providers/Microsoft.KeyVault/vaults/'", ",", "vault_name", ",", "'?api-version='", ",", "KEYVAULT_API", "]", ")", "return", "do_get", "(", "endpoint", ",", "access_token", ")" ]
Change the PB files to use full pathnames for Python 3.x and modify the metaclasses to be version agnostic.

def _format_python2_or_3(self):
    pb_files = set()
    with open(self.source, 'r', buffering=1) as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            _, _, proto = row
            pb_files.add('riak/pb/{0}_pb2.py'.format(proto))
    for im in sorted(pb_files):
        with open(im, 'r', buffering=1) as pbfile:
            contents = 'from six import *\n' + pbfile.read()
            contents = re.sub(r'riak_pb2', r'riak.pb.riak_pb2', contents)
            # Look for this pattern in the protoc-generated file:
            #
            # class RpbCounterGetResp(_message.Message):
            #     __metaclass__ = _reflection.GeneratedProtocolMessageType
            #
            # and convert it to:
            #
            # @add_metaclass(_reflection.GeneratedProtocolMessageType)
            # class RpbCounterGetResp(_message.Message):
            contents = re.sub(
                r'class\s+(\S+)\((\S+)\):\s*\n'
                r'\s+__metaclass__\s+=\s+(\S+)\s*\n',
                r'@add_metaclass(\3)\nclass \1(\2):\n',
                contents)
        with open(im, 'w', buffering=1) as pbfile:
            pbfile.write(contents)
251,720
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/commands.py#L372-L405
[ "def", "setThrowException", "(", "self", ",", "setting", ")", ":", "if", "setting", ":", "self", ".", "_throwException", "=", "True", "self", ".", "_findFailedResponse", "=", "\"ABORT\"", "else", ":", "self", ".", "_throwException", "=", "False", "self", ".", "_findFailedResponse", "=", "\"SKIP\"" ]
Reloads the datatype from Riak.

def reload(self, **params):
    if not self.bucket:
        raise ValueError('bucket property not assigned')
    if not self.key:
        raise ValueError('key property not assigned')
    dtype, value, context = self.bucket._client._fetch_datatype(
        self.bucket, self.key, **params)
    if not dtype == self.type_name:
        raise TypeError("Expected datatype {} but "
                        "got datatype {}".format(self.__class__,
                                                 TYPES[dtype]))
    self.clear()
    self._context = context
    self._set_value(value)
    return self
251,721
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L79-L120
[ "def", "to_fp", "(", "self", ",", "file_pointer", ",", "comments", "=", "None", ")", ":", "# saving formula's internal comments", "for", "c", "in", "self", ".", "comments", ":", "print", "(", "c", ",", "file", "=", "file_pointer", ")", "# saving externally specified comments", "if", "comments", ":", "for", "c", "in", "comments", ":", "print", "(", "c", ",", "file", "=", "file_pointer", ")", "print", "(", "'p cnf'", ",", "self", ".", "nv", ",", "len", "(", "self", ".", "clauses", ")", ",", "file", "=", "file_pointer", ")", "for", "cl", "in", "self", ".", "clauses", ":", "print", "(", "' '", ".", "join", "(", "str", "(", "l", ")", "for", "l", "in", "cl", ")", ",", "'0'", ",", "file", "=", "file_pointer", ")" ]
Sends locally staged mutations to Riak.

def update(self, **params):
    if not self.modified:
        raise ValueError("No operation to perform")
    params.setdefault('return_body', True)
    self.bucket._client.update_datatype(self, **params)
    self.clear()
    return self
251,722
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L133-L163
[ "def", "main", "(", ")", ":", "try", ":", "# Retrieve an AD2 device that has been exposed with ser2sock on localhost:10000.", "device", "=", "AlarmDecoder", "(", "SocketDevice", "(", "interface", "=", "(", "HOSTNAME", ",", "PORT", ")", ")", ")", "# Set up an event handler and open the device", "device", ".", "on_message", "+=", "handle_message", "with", "device", ".", "open", "(", ")", ":", "while", "True", ":", "time", ".", "sleep", "(", "1", ")", "except", "Exception", "as", "ex", ":", "print", "(", "'Exception:'", ",", "ex", ")" ]
Converts a symbolic quorum value into its on-the-wire equivalent.

def encode_quorum(self, rw):
    if rw in QUORUM_TO_PB:
        return QUORUM_TO_PB[rw]
    elif type(rw) is int and rw >= 0:
        return rw
    else:
        return None
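
A hedged illustration (QUORUM_TO_PB maps the symbolic names such as 'all', 'one', and 'quorum' to protobuf wire constants; the exact constants are omitted here):

    codec.encode_quorum('all')   # -> wire constant from QUORUM_TO_PB
    codec.encode_quorum(3)       # -> 3, non-negative ints pass through
    codec.encode_quorum(-1)      # -> None, rejected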
251,723
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L124-L138
[ "def", "extract_string_pairs_in_directory", "(", "directory_path", ",", "extract_func", ",", "filter_func", ")", ":", "result", "=", "{", "}", "for", "root", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "directory_path", ")", ":", "for", "file_name", "in", "filenames", ":", "if", "filter_func", "(", "file_name", ")", ":", "file_path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "file_name", ")", "try", ":", "extract_func", "(", "result", ",", "file_path", ")", "except", "Exception", "as", "e", ":", "print", "\"Error in file \"", "+", "file_name", "print", "e", "return", "result" ]
Decodes the list of siblings from the protobuf representation into the object.

def decode_contents(self, contents, obj):
    obj.siblings = [self.decode_content(c, RiakContent(obj))
                    for c in contents]
    # Invoke sibling-resolution logic
    if len(obj.siblings) > 1 and obj.resolver is not None:
        obj.resolver(obj)
    return obj
251,724
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L154-L170
[ "def", "on_key_pressed", "(", "self", ",", "event", ")", ":", "return", "# TODO", "if", "event", ".", "keysym", "==", "\"Up\"", ":", "self", ".", "manager", ".", "set_joystick", "(", "0.0", ",", "-", "1.0", ",", "0", ")", "elif", "event", ".", "keysym", "==", "\"Down\"", ":", "self", ".", "manager", ".", "set_joystick", "(", "0.0", ",", "1.0", ",", "0", ")", "elif", "event", ".", "keysym", "==", "\"Left\"", ":", "self", ".", "manager", ".", "set_joystick", "(", "-", "1.0", ",", "0.0", ",", "0", ")", "elif", "event", ".", "keysym", "==", "\"Right\"", ":", "self", ".", "manager", ".", "set_joystick", "(", "1.0", ",", "0.0", ",", "0", ")", "elif", "event", ".", "char", "==", "\" \"", ":", "mode", "=", "self", ".", "manager", ".", "get_mode", "(", ")", "if", "mode", "==", "self", ".", "manager", ".", "MODE_DISABLED", ":", "self", ".", "manager", ".", "set_mode", "(", "self", ".", "manager", ".", "MODE_OPERATOR_CONTROL", ")", "else", ":", "self", ".", "manager", ".", "set_mode", "(", "self", ".", "manager", ".", "MODE_DISABLED", ")" ]
Decodes a single sibling from the protobuf representation into a RiakObject.

def decode_content(self, rpb_content, sibling):
    if rpb_content.HasField("deleted") and rpb_content.deleted:
        sibling.exists = False
    else:
        sibling.exists = True
    if rpb_content.HasField("content_type"):
        sibling.content_type = bytes_to_str(rpb_content.content_type)
    if rpb_content.HasField("charset"):
        sibling.charset = bytes_to_str(rpb_content.charset)
    if rpb_content.HasField("content_encoding"):
        sibling.content_encoding = bytes_to_str(
            rpb_content.content_encoding)
    if rpb_content.HasField("vtag"):
        sibling.etag = bytes_to_str(rpb_content.vtag)
    sibling.links = [self.decode_link(link)
                     for link in rpb_content.links]
    if rpb_content.HasField("last_mod"):
        sibling.last_modified = float(rpb_content.last_mod)
        if rpb_content.HasField("last_mod_usecs"):
            sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0
    sibling.usermeta = dict([(bytes_to_str(usermd.key),
                              bytes_to_str(usermd.value))
                             for usermd in rpb_content.usermeta])
    sibling.indexes = set([(bytes_to_str(index.key),
                            decode_index_value(index.key, index.value))
                           for index in rpb_content.indexes])
    sibling.encoded_data = rpb_content.value
    return sibling
251,725
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L172-L213
[ "def", "clean_expired_user_attempts", "(", "attempt_time", ":", "datetime", "=", "None", ")", "->", "int", ":", "if", "settings", ".", "AXES_COOLOFF_TIME", "is", "None", ":", "log", ".", "debug", "(", "'AXES: Skipping clean for expired access attempts because no AXES_COOLOFF_TIME is configured'", ")", "return", "0", "threshold", "=", "get_cool_off_threshold", "(", "attempt_time", ")", "count", ",", "_", "=", "AccessAttempt", ".", "objects", ".", "filter", "(", "attempt_time__lt", "=", "threshold", ")", ".", "delete", "(", ")", "log", ".", "info", "(", "'AXES: Cleaned up %s expired access attempts from database that were older than %s'", ",", "count", ",", "threshold", ")", "return", "count" ]
Fills an RpbContent message with the appropriate data and metadata from a RiakObject.

def encode_content(self, robj, rpb_content):
    if robj.content_type:
        rpb_content.content_type = str_to_bytes(robj.content_type)
    if robj.charset:
        rpb_content.charset = str_to_bytes(robj.charset)
    if robj.content_encoding:
        rpb_content.content_encoding = str_to_bytes(robj.content_encoding)
    for uk in robj.usermeta:
        pair = rpb_content.usermeta.add()
        pair.key = str_to_bytes(uk)
        pair.value = str_to_bytes(robj.usermeta[uk])
    for link in robj.links:
        pb_link = rpb_content.links.add()
        try:
            bucket, key, tag = link
        except ValueError:
            raise RiakError("Invalid link tuple %s" % link)
        pb_link.bucket = str_to_bytes(bucket)
        pb_link.key = str_to_bytes(key)
        if tag:
            pb_link.tag = str_to_bytes(tag)
        else:
            pb_link.tag = str_to_bytes('')
    for field, value in robj.indexes:
        pair = rpb_content.indexes.add()
        pair.key = str_to_bytes(field)
        pair.value = str_to_bytes(str(value))
    # Python 2.x data is stored in a string
    if six.PY2:
        rpb_content.value = str(robj.encoded_data)
    else:
        rpb_content.value = robj.encoded_data
251,726
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L215-L258
[ "def", "event_tracker", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "async", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n Wraps function to provide redis\n tracking\n \"\"\"", "event", "=", "Event", "(", "args", "[", "0", "]", ")", "session", "=", "kwargs", "[", "'session'", "]", "service_name", "=", "session", ".", "name", "await", "track_event", "(", "event", ",", "EventState", ".", "started", ",", "service_name", ")", "await", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "await", "track_event", "(", "event", ",", "EventState", ".", "completed", ",", "service_name", ")", "return", "wrapper" ]
Decodes an RpbLink message into a tuple.

def decode_link(self, link):
    if link.HasField("bucket"):
        bucket = bytes_to_str(link.bucket)
    else:
        bucket = None
    if link.HasField("key"):
        key = bytes_to_str(link.key)
    else:
        key = None
    if link.HasField("tag"):
        tag = bytes_to_str(link.tag)
    else:
        tag = None
    return (bucket, key, tag)
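
The resulting tuple shape, for illustration (the values are hypothetical):

    # A fully populated link decodes to:
    #   ('people', 'bob', 'friend')
    # and any field missing from the message decodes to None, e.g.:
    #   ('people', 'bob', None)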
251,727
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L260-L282
[ "def", "main", "(", "config_file", ")", ":", "if", "not", "(", "os", ".", "path", ".", "exists", "(", "config_file", ")", "and", "os", ".", "path", ".", "isfile", "(", "config_file", ")", ")", ":", "msg", "=", "\"Missing or invalid config file {0}\"", ".", "format", "(", "config_file", ")", "raise", "ValueError", "(", "msg", ")", "print", "(", "\"Loading config file {0}.\"", ".", "format", "(", "config_file", ")", ")", "# Use the appropriate network driver to connect to the device:", "driver", "=", "napalm", ".", "get_network_driver", "(", "\"eos\"", ")", "# Connect:", "device", "=", "driver", "(", "hostname", "=", "\"127.0.0.1\"", ",", "username", "=", "\"vagrant\"", ",", "password", "=", "\"vagrant\"", ",", "optional_args", "=", "{", "\"port\"", ":", "12443", "}", ",", ")", "print", "(", "\"Opening ...\"", ")", "device", ".", "open", "(", ")", "print", "(", "\"Loading replacement candidate ...\"", ")", "device", ".", "load_replace_candidate", "(", "filename", "=", "config_file", ")", "# Note that the changes have not been applied yet. Before applying", "# the configuration you can check the changes:", "print", "(", "\"\\nDiff:\"", ")", "print", "(", "device", ".", "compare_config", "(", ")", ")", "# You can commit or discard the candidate changes.", "try", ":", "choice", "=", "raw_input", "(", "\"\\nWould you like to commit these changes? [yN]: \"", ")", "except", "NameError", ":", "choice", "=", "input", "(", "\"\\nWould you like to commit these changes? [yN]: \"", ")", "if", "choice", "==", "\"y\"", ":", "print", "(", "\"Committing ...\"", ")", "device", ".", "commit_config", "(", ")", "else", ":", "print", "(", "\"Discarding ...\"", ")", "device", ".", "discard_config", "(", ")", "# close the session with the device.", "device", ".", "close", "(", ")", "print", "(", "\"Done.\"", ")" ]
Encodes a dict of bucket properties into the protobuf message.

def encode_bucket_props(self, props, msg):
    for prop in NORMAL_PROPS:
        if prop in props and props[prop] is not None:
            if isinstance(props[prop], six.string_types):
                setattr(msg.props, prop, str_to_bytes(props[prop]))
            else:
                setattr(msg.props, prop, props[prop])
    for prop in COMMIT_HOOK_PROPS:
        if prop in props:
            setattr(msg.props, 'has_' + prop, True)
            self.encode_hooklist(props[prop], getattr(msg.props, prop))
    for prop in MODFUN_PROPS:
        if prop in props and props[prop] is not None:
            self.encode_modfun(props[prop], getattr(msg.props, prop))
    for prop in QUORUM_PROPS:
        if prop in props and props[prop] not in (None, 'default'):
            value = self.encode_quorum(props[prop])
            if value is not None:
                if isinstance(value, six.string_types):
                    setattr(msg.props, prop, str_to_bytes(value))
                else:
                    setattr(msg.props, prop, value)
    if 'repl' in props:
        msg.props.repl = REPL_TO_PB[props['repl']]
    return msg
251,728
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L298-L331
[ "def", "on_recv", "(", "self", ",", "cf", ")", ":", "data", "=", "bytes", "(", "cf", ".", "data", ")", "if", "len", "(", "data", ")", "<", "2", ":", "return", "ae", "=", "0", "if", "self", ".", "extended_rx_addr", "is", "not", "None", ":", "ae", "=", "1", "if", "len", "(", "data", ")", "<", "3", ":", "return", "if", "six", ".", "indexbytes", "(", "data", ",", "0", ")", "!=", "self", ".", "extended_rx_addr", ":", "return", "n_pci", "=", "six", ".", "indexbytes", "(", "data", ",", "ae", ")", "&", "0xf0", "if", "n_pci", "==", "N_PCI_FC", ":", "with", "self", ".", "tx_mutex", ":", "self", ".", "_recv_fc", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_SF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_sf", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_FF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_ff", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_CF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_cf", "(", "data", "[", "ae", ":", "]", ")" ]
Decodes the protobuf bucket properties message into a dict.

def decode_bucket_props(self, msg):
    props = {}
    for prop in NORMAL_PROPS:
        if msg.HasField(prop):
            props[prop] = getattr(msg, prop)
            if isinstance(props[prop], bytes):
                props[prop] = bytes_to_str(props[prop])
    for prop in COMMIT_HOOK_PROPS:
        if getattr(msg, 'has_' + prop):
            props[prop] = self.decode_hooklist(getattr(msg, prop))
    for prop in MODFUN_PROPS:
        if msg.HasField(prop):
            props[prop] = self.decode_modfun(getattr(msg, prop))
    for prop in QUORUM_PROPS:
        if msg.HasField(prop):
            props[prop] = self.decode_quorum(getattr(msg, prop))
    if msg.HasField('repl'):
        props['repl'] = REPL_TO_PY[msg.repl]
    return props
251,729
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L333-L358
[ "def", "on_recv", "(", "self", ",", "cf", ")", ":", "data", "=", "bytes", "(", "cf", ".", "data", ")", "if", "len", "(", "data", ")", "<", "2", ":", "return", "ae", "=", "0", "if", "self", ".", "extended_rx_addr", "is", "not", "None", ":", "ae", "=", "1", "if", "len", "(", "data", ")", "<", "3", ":", "return", "if", "six", ".", "indexbytes", "(", "data", ",", "0", ")", "!=", "self", ".", "extended_rx_addr", ":", "return", "n_pci", "=", "six", ".", "indexbytes", "(", "data", ",", "ae", ")", "&", "0xf0", "if", "n_pci", "==", "N_PCI_FC", ":", "with", "self", ".", "tx_mutex", ":", "self", ".", "_recv_fc", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_SF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_sf", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_FF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_ff", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_CF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_cf", "(", "data", "[", "ae", ":", "]", ")" ]
Encodes a dict with mod and fun keys into a protobuf modfun pair. Used in bucket properties.

def encode_modfun(self, props, msg=None):
    if msg is None:
        msg = riak.pb.riak_pb2.RpbModFun()
    msg.module = str_to_bytes(props['mod'])
    msg.function = str_to_bytes(props['fun'])
    return msg
251,730
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L372-L387
[ "def", "a_capture_show_configuration_failed", "(", "ctx", ")", ":", "result", "=", "ctx", ".", "device", ".", "send", "(", "\"show configuration failed\"", ")", "ctx", ".", "device", ".", "last_command_result", "=", "result", "index", "=", "result", ".", "find", "(", "\"SEMANTIC ERRORS\"", ")", "ctx", ".", "device", ".", "chain", ".", "connection", ".", "emit_message", "(", "result", ",", "log_level", "=", "logging", ".", "ERROR", ")", "if", "index", ">", "0", ":", "raise", "ConfigurationSemanticErrors", "(", "result", ")", "else", ":", "raise", "ConfigurationErrors", "(", "result", ")" ]
Encodes a list of commit hooks into their protobuf equivalent. Used in bucket properties.

def encode_hooklist(self, hooklist, msg):
    for hook in hooklist:
        pbhook = msg.add()
        self.encode_hook(hook, pbhook)
251,731
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L400-L411
[ "def", "is_expired", "(", "self", ",", "max_idle_seconds", ")", ":", "now", "=", "current_time", "(", ")", "return", "(", "self", ".", "expiration_time", "is", "not", "None", "and", "self", ".", "expiration_time", "<", "now", ")", "or", "(", "max_idle_seconds", "is", "not", "None", "and", "self", ".", "last_access_time", "+", "max_idle_seconds", "<", "now", ")" ]
Decodes a protobuf commit hook message into a dict. Used in bucket properties.

def decode_hook(self, hook):
    if hook.HasField('modfun'):
        return self.decode_modfun(hook.modfun)
    else:
        return {'name': bytes_to_str(hook.name)}
251,732
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L413-L425
[ "def", "is_expired", "(", "self", ",", "max_idle_seconds", ")", ":", "now", "=", "current_time", "(", ")", "return", "(", "self", ".", "expiration_time", "is", "not", "None", "and", "self", ".", "expiration_time", "<", "now", ")", "or", "(", "max_idle_seconds", "is", "not", "None", "and", "self", ".", "last_access_time", "+", "max_idle_seconds", "<", "now", ")" ]
Encodes a commit hook dict into the protobuf message. Used in bucket properties.

def encode_hook(self, hook, msg):
    if 'name' in hook:
        msg.name = str_to_bytes(hook['name'])
    else:
        self.encode_modfun(hook, msg.modfun)
    return msg
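
The two hook shapes this accepts, as a sketch (the names are illustrative; msg is assumed to be an RpbCommitHook message as in encode_hooklist above):

    # Named (built-in) hook:
    codec.encode_hook({'name': 'precommit_hook'}, msg)
    # Erlang module/function hook, delegated to encode_modfun above:
    codec.encode_hook({'mod': 'my_module', 'fun': 'my_fun'}, msg)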
251,733
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L427-L442
[ "def", "_ensure_connection", "(", "self", ")", ":", "conn", "=", "self", ".", "connect", "(", ")", "if", "conn", ".", "recycle", "and", "conn", ".", "recycle", "<", "time", ".", "time", "(", ")", ":", "logger", ".", "debug", "(", "'Client session expired after %is. Recycling.'", ",", "self", ".", "_recycle", ")", "self", ".", "close", "(", ")", "conn", "=", "self", ".", "connect", "(", ")", "return", "conn" ]
Encodes a secondary index request into the protobuf message.

def encode_index_req(self, bucket, index, startkey, endkey=None,
                     return_terms=None, max_results=None,
                     continuation=None, timeout=None, term_regex=None,
                     streaming=False):
    req = riak.pb.riak_kv_pb2.RpbIndexReq(
        bucket=str_to_bytes(bucket.name),
        index=str_to_bytes(index))
    self._add_bucket_type(req, bucket.bucket_type)
    if endkey is not None:
        req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range
        req.range_min = str_to_bytes(str(startkey))
        req.range_max = str_to_bytes(str(endkey))
    else:
        req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq
        req.key = str_to_bytes(str(startkey))
    if return_terms is not None:
        req.return_terms = return_terms
    if max_results:
        req.max_results = max_results
    if continuation:
        req.continuation = str_to_bytes(continuation)
    if timeout:
        if timeout == 'infinity':
            req.timeout = 0
        else:
            req.timeout = timeout
    if term_regex:
        req.term_regex = str_to_bytes(term_regex)
    req.stream = streaming
    mc = riak.pb.messages.MSG_CODE_INDEX_REQ
    rc = riak.pb.messages.MSG_CODE_INDEX_RESP
    return Msg(mc, req.SerializeToString(), rc)
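
A hedged usage sketch (assumes bucket is a RiakBucket-like object with name and bucket_type attributes, as the method reads both; the index names and keys are illustrative):

    # Equality query on a binary secondary index:
    msg = codec.encode_index_req(bucket, 'email_bin', 'foo@example.com')
    # Range query with pagination:
    msg = codec.encode_index_req(bucket, 'age_int', 21, endkey=65,
                                 max_results=100)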
251,734
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L444-L501
[ "def", "__isOpenThreadWpanRunning", "(", "self", ")", ":", "print", "'call __isOpenThreadWpanRunning'", "if", "self", ".", "__stripValue", "(", "self", ".", "__sendCommand", "(", "WPANCTL_CMD", "+", "'getprop -v NCP:State'", ")", "[", "0", "]", ")", "==", "'associated'", ":", "print", "'*****OpenThreadWpan is running'", "return", "True", "else", ":", "print", "'*****Wrong OpenThreadWpan state'", "return", "False" ]
Decodes an RpbYokozunaIndex message into a dict.

def decode_search_index(self, index):
    result = {}
    result['name'] = bytes_to_str(index.name)
    if index.HasField('schema'):
        result['schema'] = bytes_to_str(index.schema)
    if index.HasField('n_val'):
        result['n_val'] = index.n_val
    return result
251,735
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L519-L533
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_access", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"Cleaning up\"", ")", "pci_cleanup", "(", "self", ".", "_access", ")", "self", ".", "_access", "=", "None" ]
Fills a TsPutReq message with the appropriate data and metadata from a TsObject.

def encode_timeseries_put(self, tsobj):
    req = riak.pb.riak_ts_pb2.TsPutReq()
    req.table = str_to_bytes(tsobj.table.name)
    if tsobj.columns:
        raise NotImplementedError("columns are not implemented yet")
    if tsobj.rows and isinstance(tsobj.rows, list):
        for row in tsobj.rows:
            tsr = req.rows.add()  # NB: type TsRow
            if not isinstance(row, list):
                raise ValueError("TsObject row must be a list of values")
            for cell in row:
                tsc = tsr.cells.add()  # NB: type TsCell
                self.encode_to_ts_cell(cell, tsc)
    else:
        raise RiakError("TsObject requires a list of rows")
    mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ
    rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP
    return Msg(mc, req.SerializeToString(), rc)
251,736
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L758-L787
[ "def", "setup_recovery", "(", "working_dir", ")", ":", "db", "=", "get_db_state", "(", "working_dir", ")", "bitcoind_session", "=", "get_bitcoind", "(", "new", "=", "True", ")", "assert", "bitcoind_session", "is", "not", "None", "_", ",", "current_block", "=", "virtualchain", ".", "get_index_range", "(", "'bitcoin'", ",", "bitcoind_session", ",", "virtualchain_hooks", ",", "working_dir", ")", "assert", "current_block", ",", "'Failed to connect to bitcoind'", "set_recovery_range", "(", "working_dir", ",", "db", ".", "lastblock", ",", "current_block", "-", "NUM_CONFIRMATIONS", ")", "return", "True" ]
Decodes a TsRow into a list
def decode_timeseries_row ( self , tsrow , tscols = None , convert_timestamp = False ) : row = [ ] for i , cell in enumerate ( tsrow . cells ) : col = None if tscols is not None : col = tscols [ i ] if cell . HasField ( 'varchar_value' ) : if col and not ( col . type == TsColumnType . Value ( 'VARCHAR' ) or col . type == TsColumnType . Value ( 'BLOB' ) ) : raise TypeError ( 'expected VARCHAR or BLOB column' ) else : row . append ( cell . varchar_value ) elif cell . HasField ( 'sint64_value' ) : if col and col . type != TsColumnType . Value ( 'SINT64' ) : raise TypeError ( 'expected SINT64 column' ) else : row . append ( cell . sint64_value ) elif cell . HasField ( 'double_value' ) : if col and col . type != TsColumnType . Value ( 'DOUBLE' ) : raise TypeError ( 'expected DOUBLE column' ) else : row . append ( cell . double_value ) elif cell . HasField ( 'timestamp_value' ) : if col and col . type != TsColumnType . Value ( 'TIMESTAMP' ) : raise TypeError ( 'expected TIMESTAMP column' ) else : dt = cell . timestamp_value if convert_timestamp : dt = datetime_from_unix_time_millis ( cell . timestamp_value ) row . append ( dt ) elif cell . HasField ( 'boolean_value' ) : if col and col . type != TsColumnType . Value ( 'BOOLEAN' ) : raise TypeError ( 'expected BOOLEAN column' ) else : row . append ( cell . boolean_value ) else : row . append ( None ) return row
251,737
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L847-L895
[ "def", "_validate_checksum", "(", "self", ",", "buffer", ")", ":", "self", ".", "_log", ".", "debug", "(", "\"Validating the buffer\"", ")", "if", "len", "(", "buffer", ")", "==", "0", ":", "self", ".", "_log", ".", "debug", "(", "\"Buffer was empty\"", ")", "if", "self", ".", "_conn", ".", "isOpen", "(", ")", ":", "self", ".", "_log", ".", "debug", "(", "'Closing connection'", ")", "self", ".", "_conn", ".", "close", "(", ")", "return", "False", "p0", "=", "hex2int", "(", "buffer", "[", "0", "]", ")", "p1", "=", "hex2int", "(", "buffer", "[", "1", "]", ")", "checksum", "=", "sum", "(", "[", "hex2int", "(", "c", ")", "for", "c", "in", "buffer", "[", ":", "35", "]", "]", ")", "&", "0xFF", "p35", "=", "hex2int", "(", "buffer", "[", "35", "]", ")", "if", "p0", "!=", "165", "or", "p1", "!=", "150", "or", "p35", "!=", "checksum", ":", "self", ".", "_log", ".", "debug", "(", "\"Buffer checksum was not valid\"", ")", "return", "False", "return", "True" ]
Decodes a preflist response
def decode_preflist ( self , item ) : result = { 'partition' : item . partition , 'node' : bytes_to_str ( item . node ) , 'primary' : item . primary } return result
251,738
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L897-L909
[ "def", "init_drivers", "(", "enable_debug_driver", "=", "False", ")", ":", "for", "driver", "in", "DRIVERS", ":", "try", ":", "if", "driver", "!=", "DebugDriver", "or", "enable_debug_driver", ":", "CLASSES", ".", "append", "(", "driver", ")", "except", "Exception", ":", "# pylint: disable=W0703", "continue" ]
Ping the remote server
def ping ( self ) : msg_code = riak . pb . messages . MSG_CODE_PING_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_ping ( ) resp_code , _ = self . _request ( msg , codec ) if resp_code == riak . pb . messages . MSG_CODE_PING_RESP : return True else : return False
251,739
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L107-L118
[ "def", "find_rust_extensions", "(", "*", "directories", ",", "*", "*", "kwargs", ")", ":", "# Get the file used to mark a Rust extension", "libfile", "=", "kwargs", ".", "get", "(", "\"libfile\"", ",", "\"lib.rs\"", ")", "# Get the directories to explore", "directories", "=", "directories", "or", "[", "os", ".", "getcwd", "(", ")", "]", "extensions", "=", "[", "]", "for", "directory", "in", "directories", ":", "for", "base", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "directory", ")", ":", "if", "libfile", "in", "files", ":", "dotpath", "=", "os", ".", "path", ".", "relpath", "(", "base", ")", ".", "replace", "(", "os", ".", "path", ".", "sep", ",", "\".\"", ")", "tomlpath", "=", "os", ".", "path", ".", "join", "(", "base", ",", "\"Cargo.toml\"", ")", "ext", "=", "RustExtension", "(", "dotpath", ",", "tomlpath", ",", "*", "*", "kwargs", ")", "ext", ".", "libfile", "=", "os", ".", "path", ".", "join", "(", "base", ",", "libfile", ")", "extensions", ".", "append", "(", "ext", ")", "return", "extensions" ]
Get information about the server
def get_server_info ( self ) : # NB: can't do it this way due to recursion # codec = self._get_codec(ttb_supported=False) codec = PbufCodec ( ) msg = Msg ( riak . pb . messages . MSG_CODE_GET_SERVER_INFO_REQ , None , riak . pb . messages . MSG_CODE_GET_SERVER_INFO_RESP ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_get_server_info ( resp )
251,740
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L120-L130
[ "def", "parse", "(", "self", ",", "msg", ")", ":", "#init", "dictValues", "=", "HL7Dict", "(", "self", ".", "tersersep", ")", "msg_", "=", "msg", ".", "strip", "(", "'\\r\\n '", ")", "# extracts separator defined in the message itself", "self", ".", "extractSeparators", "(", "dictValues", ",", "msg_", ")", "msg_", "=", "msg_", ".", "replace", "(", "'\\r'", ",", "'\\n'", ")", "lines", "=", "msg_", ".", "split", "(", "'\\n'", ")", "lineNumber", "=", "1", "# build the map of segments", "segmentNameCount", ",", "lineMap", "=", "self", ".", "buildSegmentMap", "(", "lines", ")", "dictValues", ".", "setSegmentsMap", "(", "segmentNameCount", ",", "lineMap", ")", "# Parse each line of the message : 1 line = 1 segment", "for", "line", "in", "lines", ":", "dictValues", ".", "currentLineNumber", "=", "lineNumber", "self", ".", "extractValues", "(", "dictValues", ",", "line", ")", "lineNumber", "+=", "1", "return", "dictValues" ]
Serialize get request and deserialize response
def get ( self , robj , r = None , pr = None , timeout = None , basic_quorum = None , notfound_ok = None , head_only = False ) : msg_code = riak . pb . messages . MSG_CODE_GET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get ( robj , r , pr , timeout , basic_quorum , notfound_ok , head_only ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_get ( robj , resp )
251,741
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L149-L160
[ "def", "TrimBeginningAndEndingSlashes", "(", "path", ")", ":", "if", "path", ".", "startswith", "(", "'/'", ")", ":", "# Returns substring starting from index 1 to end of the string", "path", "=", "path", "[", "1", ":", "]", "if", "path", ".", "endswith", "(", "'/'", ")", ":", "# Returns substring starting from beginning to last but one char in the string", "path", "=", "path", "[", ":", "-", "1", "]", "return", "path" ]
Streams keys from a timeseries table returning an iterator that yields lists of keys .
def ts_stream_keys ( self , table , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_TS_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_timeseries_listkeysreq ( table , timeout ) self . _send_msg ( msg . msg_code , msg . data ) return PbufTsKeyStream ( self , codec , self . _ts_convert_timestamp )
251,742
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L212-L221
[ "def", "cublasSdgmm", "(", "handle", ",", "mode", ",", "m", ",", "n", ",", "A", ",", "lda", ",", "x", ",", "incx", ",", "C", ",", "ldc", ")", ":", "status", "=", "_libcublas", ".", "cublasSdgmm", "(", "handle", ",", "_CUBLAS_SIDE", "[", "mode", "]", ",", "m", ",", "n", ",", "int", "(", "A", ")", ",", "lda", ",", "int", "(", "x", ")", ",", "incx", ",", "int", "(", "C", ")", ",", "ldc", ")", "cublasCheckStatus", "(", "status", ")" ]
Lists all keys within a bucket .
def get_keys ( self , bucket , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) stream = self . stream_keys ( bucket , timeout = timeout ) return codec . decode_get_keys ( stream )
251,743
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L231-L238
[ "def", "simple_parse_file", "(", "filename", ":", "str", ")", "->", "Feed", ":", "pairs", "=", "(", "(", "rss", ".", "parse_rss_file", ",", "_adapt_rss_channel", ")", ",", "(", "atom", ".", "parse_atom_file", ",", "_adapt_atom_feed", ")", ",", "(", "json_feed", ".", "parse_json_feed_file", ",", "_adapt_json_feed", ")", ")", "return", "_simple_parse", "(", "pairs", ",", "filename", ")" ]
Streams keys from a bucket returning an iterator that yields lists of keys .
def stream_keys ( self , bucket , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_stream_keys ( bucket , timeout ) self . _send_msg ( msg . msg_code , msg . data ) return PbufKeyStream ( self , codec )
251,744
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L240-L249
[ "def", "compute_K_analytical", "(", "self", ",", "spacing", ")", ":", "K", "=", "redaK", ".", "compute_K_analytical", "(", "self", ".", "data", ",", "spacing", "=", "spacing", ")", "self", ".", "data", "=", "redaK", ".", "apply_K", "(", "self", ".", "data", ",", "K", ")", "redafixK", ".", "fix_sign_with_K", "(", "self", ".", "data", ")" ]
Serialize bucket listing request and deserialize response
def get_buckets ( self , bucket_type = None , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_BUCKETS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_buckets ( bucket_type , timeout , streaming = False ) resp_code , resp = self . _request ( msg , codec ) return resp . buckets
251,745
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L251-L260
[ "def", "diff_commonSuffix", "(", "self", ",", "text1", ",", "text2", ")", ":", "# Quick check for common null cases.", "if", "not", "text1", "or", "not", "text2", "or", "text1", "[", "-", "1", "]", "!=", "text2", "[", "-", "1", "]", ":", "return", "0", "# Binary search.", "# Performance analysis: https://neil.fraser.name/news/2007/10/09/", "pointermin", "=", "0", "pointermax", "=", "min", "(", "len", "(", "text1", ")", ",", "len", "(", "text2", ")", ")", "pointermid", "=", "pointermax", "pointerend", "=", "0", "while", "pointermin", "<", "pointermid", ":", "if", "(", "text1", "[", "-", "pointermid", ":", "len", "(", "text1", ")", "-", "pointerend", "]", "==", "text2", "[", "-", "pointermid", ":", "len", "(", "text2", ")", "-", "pointerend", "]", ")", ":", "pointermin", "=", "pointermid", "pointerend", "=", "pointermin", "else", ":", "pointermax", "=", "pointermid", "pointermid", "=", "(", "pointermax", "-", "pointermin", ")", "//", "2", "+", "pointermin", "return", "pointermid" ]
Serialize bucket property request and deserialize response
def get_bucket_props ( self , bucket ) : msg_code = riak . pb . messages . MSG_CODE_GET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_bucket_props ( bucket ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_bucket_props ( resp . props )
251,746
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L276-L284
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Serialize set bucket property request and deserialize response
def set_bucket_props ( self , bucket , props ) : if not self . pb_all_bucket_props ( ) : for key in props : if key not in ( 'n_val' , 'allow_mult' ) : raise NotImplementedError ( 'Server only supports n_val and ' 'allow_mult properties over PBC' ) msg_code = riak . pb . messages . MSG_CODE_SET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_set_bucket_props ( bucket , props ) resp_code , resp = self . _request ( msg , codec ) return True
251,747
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L286-L299
[ "def", "fold_path", "(", "path", ",", "width", "=", "30", ")", ":", "assert", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "if", "len", "(", "path", ")", ">", "width", ":", "path", ".", "replace", "(", "\".\"", ",", "\".\\n \"", ")", "return", "path" ]
Clear bucket properties resetting them to their defaults
def clear_bucket_props ( self , bucket ) : if not self . pb_clear_bucket_props ( ) : return False msg_code = riak . pb . messages . MSG_CODE_RESET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_clear_bucket_props ( bucket ) self . _request ( msg , codec ) return True
251,748
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L301-L311
[ "def", "__get_vibration_code", "(", "self", ",", "left_motor", ",", "right_motor", ",", "duration", ")", ":", "inner_event", "=", "struct", ".", "pack", "(", "'2h6x2h2x2H28x'", ",", "0x50", ",", "-", "1", ",", "duration", ",", "0", ",", "int", "(", "left_motor", "*", "65535", ")", ",", "int", "(", "right_motor", "*", "65535", ")", ")", "buf_conts", "=", "ioctl", "(", "self", ".", "_write_device", ",", "1076905344", ",", "inner_event", ")", "return", "int", "(", "codecs", ".", "encode", "(", "buf_conts", "[", "1", ":", "3", "]", ",", "'hex'", ")", ",", "16", ")" ]
Fetch bucket - type properties
def get_bucket_type_props ( self , bucket_type ) : self . _check_bucket_types ( bucket_type ) msg_code = riak . pb . messages . MSG_CODE_GET_BUCKET_TYPE_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_bucket_type_props ( bucket_type ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_bucket_props ( resp . props )
251,749
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L313-L322
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Set bucket - type properties
def set_bucket_type_props ( self , bucket_type , props ) : self . _check_bucket_types ( bucket_type ) msg_code = riak . pb . messages . MSG_CODE_SET_BUCKET_TYPE_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_set_bucket_type_props ( bucket_type , props ) resp_code , resp = self . _request ( msg , codec ) return True
251,750
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L324-L333
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Prints the report of one step of a benchmark .
def print_report ( label , user , system , real ) : print ( "{:<12s} {:12f} {:12f} ( {:12f} )" . format ( label , user , system , real ) )
251,751
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L134-L141
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Runs the next iteration of the benchmark .
def next ( self ) : if self . count == 0 : raise StopIteration elif self . count > 1 : print_rehearsal_header ( ) else : if self . rehearse : gc . collect ( ) print ( "-" * 59 ) print ( ) print_header ( ) self . count -= 1 return self
251,752
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L96-L112
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Adds a RiakObject to the inputs .
def add_object ( self , obj ) : return self . add_bucket_key_data ( obj . _bucket . _name , obj . _key , None )
251,753
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L77-L85
[ "def", "stop", "(", "self", ")", ":", "self", ".", "_hw_virtualization", "=", "False", "yield", "from", "self", ".", "_stop_ubridge", "(", ")", "yield", "from", "self", ".", "_stop_remote_console", "(", ")", "vm_state", "=", "yield", "from", "self", ".", "_get_vm_state", "(", ")", "if", "vm_state", "==", "\"running\"", "or", "vm_state", "==", "\"paused\"", "or", "vm_state", "==", "\"stuck\"", ":", "if", "self", ".", "acpi_shutdown", ":", "# use ACPI to shutdown the VM", "result", "=", "yield", "from", "self", ".", "_control_vm", "(", "\"acpipowerbutton\"", ")", "trial", "=", "0", "while", "True", ":", "vm_state", "=", "yield", "from", "self", ".", "_get_vm_state", "(", ")", "if", "vm_state", "==", "\"poweroff\"", ":", "break", "yield", "from", "asyncio", ".", "sleep", "(", "1", ")", "trial", "+=", "1", "if", "trial", ">=", "120", ":", "yield", "from", "self", ".", "_control_vm", "(", "\"poweroff\"", ")", "break", "self", ".", "status", "=", "\"stopped\"", "log", ".", "debug", "(", "\"ACPI shutdown result: {}\"", ".", "format", "(", "result", ")", ")", "else", ":", "# power off the VM", "result", "=", "yield", "from", "self", ".", "_control_vm", "(", "\"poweroff\"", ")", "self", ".", "status", "=", "\"stopped\"", "log", ".", "debug", "(", "\"Stop result: {}\"", ".", "format", "(", "result", ")", ")", "log", ".", "info", "(", "\"VirtualBox VM '{name}' [{id}] stopped\"", ".", "format", "(", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ")", ")", "yield", "from", "asyncio", ".", "sleep", "(", "0.5", ")", "# give some time for VirtualBox to unlock the VM", "try", ":", "# deactivate the first serial port", "yield", "from", "self", ".", "_modify_vm", "(", "\"--uart1 off\"", ")", "except", "VirtualBoxError", "as", "e", ":", "log", ".", "warn", "(", "\"Could not deactivate the first serial port: {}\"", ".", "format", "(", "e", ")", ")", "for", "adapter_number", "in", "range", "(", "0", ",", "self", ".", "_adapters", ")", ":", "nio", "=", "self", ".", "_ethernet_adapters", "[", "adapter_number", "]", ".", "get_nio", "(", "0", ")", "if", "nio", ":", "yield", "from", "self", ".", "_modify_vm", "(", "\"--nictrace{} off\"", ".", "format", "(", "adapter_number", "+", "1", ")", ")", "yield", "from", "self", ".", "_modify_vm", "(", "\"--cableconnected{} off\"", ".", "format", "(", "adapter_number", "+", "1", ")", ")", "yield", "from", "self", ".", "_modify_vm", "(", "\"--nic{} null\"", ".", "format", "(", "adapter_number", "+", "1", ")", ")", "yield", "from", "super", "(", ")", ".", "stop", "(", ")" ]
Adds all keys in a bucket to the inputs .
def add_bucket ( self , bucket , bucket_type = None ) : if not riak . disable_list_exceptions : raise riak . ListError ( ) self . _input_mode = 'bucket' if isinstance ( bucket , riak . RiakBucket ) : if bucket . bucket_type . is_default ( ) : self . _inputs = { 'bucket' : bucket . name } else : self . _inputs = { 'bucket' : [ bucket . bucket_type . name , bucket . name ] } elif bucket_type is not None and bucket_type != "default" : self . _inputs = { 'bucket' : [ bucket_type , bucket ] } else : self . _inputs = { 'bucket' : bucket } return self
251,754
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L121-L144
[ "def", "softDeactivate", "(", "rh", ")", ":", "rh", ".", "printSysLog", "(", "\"Enter powerVM.softDeactivate, userid: \"", "+", "rh", ".", "userid", ")", "strCmd", "=", "\"echo 'ping'\"", "iucvResults", "=", "execCmdThruIUCV", "(", "rh", ",", "rh", ".", "userid", ",", "strCmd", ")", "if", "iucvResults", "[", "'overallRC'", "]", "==", "0", ":", "# We could talk to the machine, tell it to shutdown nicely.", "strCmd", "=", "\"shutdown -h now\"", "iucvResults", "=", "execCmdThruIUCV", "(", "rh", ",", "rh", ".", "userid", ",", "strCmd", ")", "if", "iucvResults", "[", "'overallRC'", "]", "==", "0", ":", "time", ".", "sleep", "(", "15", ")", "else", ":", "# Shutdown failed. Let CP take down the system", "# after we log the results.", "rh", ".", "printSysLog", "(", "\"powerVM.softDeactivate \"", "+", "rh", ".", "userid", "+", "\" is unreachable. Treating it as already shutdown.\"", ")", "else", ":", "# Could not ping the machine. Treat it as a success", "# after we log the results.", "rh", ".", "printSysLog", "(", "\"powerVM.softDeactivate \"", "+", "rh", ".", "userid", "+", "\" is unreachable. Treating it as already shutdown.\"", ")", "# Tell z/VM to log off the system.", "parms", "=", "[", "\"-T\"", ",", "rh", ".", "userid", "]", "smcliResults", "=", "invokeSMCLI", "(", "rh", ",", "\"Image_Deactivate\"", ",", "parms", ")", "if", "smcliResults", "[", "'overallRC'", "]", "==", "0", ":", "pass", "elif", "(", "smcliResults", "[", "'overallRC'", "]", "==", "8", "and", "smcliResults", "[", "'rc'", "]", "==", "200", "and", "(", "smcliResults", "[", "'rs'", "]", "==", "12", "or", "+", "smcliResults", "[", "'rs'", "]", "==", "16", ")", ")", ":", "# Tolerable error.", "# Machine is already logged off or is logging off.", "rh", ".", "printLn", "(", "\"N\"", ",", "rh", ".", "userid", "+", "\" is already logged off.\"", ")", "else", ":", "# SMAPI API failed.", "rh", ".", "printLn", "(", "\"ES\"", ",", "smcliResults", "[", "'response'", "]", ")", "rh", ".", "updateResults", "(", "smcliResults", ")", "# Use results from invokeSMCLI", "if", "rh", ".", "results", "[", "'overallRC'", "]", "==", "0", "and", "'maxQueries'", "in", "rh", ".", "parms", ":", "# Wait for the system to log off.", "waitResults", "=", "waitForVMState", "(", "rh", ",", "rh", ".", "userid", ",", "'off'", ",", "maxQueries", "=", "rh", ".", "parms", "[", "'maxQueries'", "]", ",", "sleepSecs", "=", "rh", ".", "parms", "[", "'poll'", "]", ")", "if", "waitResults", "[", "'overallRC'", "]", "==", "0", ":", "rh", ".", "printLn", "(", "\"N\"", ",", "\"Userid '\"", "+", "rh", ".", "userid", "+", "\" is in the desired state: off\"", ")", "else", ":", "rh", ".", "updateResults", "(", "waitResults", ")", "rh", ".", "printSysLog", "(", "\"Exit powerVM.softDeactivate, rc: \"", "+", "str", "(", "rh", ".", "results", "[", "'overallRC'", "]", ")", ")", "return", "rh", ".", "results", "[", "'overallRC'", "]" ]
Adds key filters to the inputs .
def add_key_filters ( self , key_filters ) : if self . _input_mode == 'query' : raise ValueError ( 'Key filters are not supported in a query.' ) self . _key_filters . extend ( key_filters ) return self
251,755
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L146-L158
[ "def", "guess_peb_size", "(", "path", ")", ":", "file_offset", "=", "0", "offsets", "=", "[", "]", "f", "=", "open", "(", "path", ",", "'rb'", ")", "f", ".", "seek", "(", "0", ",", "2", ")", "file_size", "=", "f", ".", "tell", "(", ")", "+", "1", "f", ".", "seek", "(", "0", ")", "for", "_", "in", "range", "(", "0", ",", "file_size", ",", "FILE_CHUNK_SZ", ")", ":", "buf", "=", "f", ".", "read", "(", "FILE_CHUNK_SZ", ")", "for", "m", "in", "re", ".", "finditer", "(", "UBI_EC_HDR_MAGIC", ",", "buf", ")", ":", "start", "=", "m", ".", "start", "(", ")", "if", "not", "file_offset", ":", "file_offset", "=", "start", "idx", "=", "start", "else", ":", "idx", "=", "start", "+", "file_offset", "offsets", ".", "append", "(", "idx", ")", "file_offset", "+=", "FILE_CHUNK_SZ", "f", ".", "close", "(", ")", "occurances", "=", "{", "}", "for", "i", "in", "range", "(", "0", ",", "len", "(", "offsets", ")", ")", ":", "try", ":", "diff", "=", "offsets", "[", "i", "]", "-", "offsets", "[", "i", "-", "1", "]", "except", ":", "diff", "=", "offsets", "[", "i", "]", "if", "diff", "not", "in", "occurances", ":", "occurances", "[", "diff", "]", "=", "0", "occurances", "[", "diff", "]", "+=", "1", "most_frequent", "=", "0", "block_size", "=", "None", "for", "offset", "in", "occurances", ":", "if", "occurances", "[", "offset", "]", ">", "most_frequent", ":", "most_frequent", "=", "occurances", "[", "offset", "]", "block_size", "=", "offset", "return", "block_size" ]
Add a single key filter to the inputs .
def add_key_filter ( self , * args ) : if self . _input_mode == 'query' : raise ValueError ( 'Key filters are not supported in a query.' ) self . _key_filters . append ( args ) return self
251,756
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L160-L172
[ "async", "def", "_wait_exponentially", "(", "self", ",", "exception", ",", "max_wait_time", "=", "300", ")", ":", "wait_time", "=", "min", "(", "(", "2", "**", "self", ".", "_connection_attempts", ")", "+", "random", ".", "random", "(", ")", ",", "max_wait_time", ")", "try", ":", "wait_time", "=", "exception", ".", "response", "[", "\"headers\"", "]", "[", "\"Retry-After\"", "]", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "pass", "self", ".", "_logger", ".", "debug", "(", "\"Waiting %s seconds before reconnecting.\"", ",", "wait_time", ")", "await", "asyncio", ".", "sleep", "(", "float", "(", "wait_time", ")", ")" ]
Adds the JavaScript built - in Riak . reduceSort to the query as a reduce phase .
def reduce_sort ( self , js_cmp = None , options = None ) : if options is None : options = dict ( ) if js_cmp : options [ 'arg' ] = js_cmp return self . reduce ( "Riak.reduceSort" , options = options )
251,757
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L448-L466
[ "def", "_hex_ids", "(", "self", ",", "dev_list", ")", ":", "for", "dl", "in", "dev_list", ":", "match", "=", "self", ".", "nlp", ".", "search", "(", "dl", ")", "if", "match", ":", "yield", "match", ".", "group", "(", "\"usbid\"", ")", ",", "_readlink", "(", "dl", ")" ]
Adds the JavaScript built - in Riak . reduceSlice to the query as a reduce phase .
def reduce_slice ( self , start , end , options = None ) : if options is None : options = dict ( ) options [ 'arg' ] = [ start , end ] return self . reduce ( "Riak.reduceSlice" , options = options )
251,758
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L500-L517
[ "def", "_hex_ids", "(", "self", ",", "dev_list", ")", ":", "for", "dl", "in", "dev_list", ":", "match", "=", "self", ".", "nlp", ".", "search", "(", "dl", ")", "if", "match", ":", "yield", "match", ".", "group", "(", "\"usbid\"", ")", ",", "_readlink", "(", "dl", ")" ]
Convert the RiakMapReducePhase to a format that can be output into JSON . Used internally .
def to_array ( self ) : stepdef = { 'keep' : self . _keep , 'language' : self . _language , 'arg' : self . _arg } if self . _language == 'javascript' : if isinstance ( self . _function , list ) : stepdef [ 'bucket' ] = self . _function [ 0 ] stepdef [ 'key' ] = self . _function [ 1 ] elif isinstance ( self . _function , string_types ) : if ( "{" in self . _function ) : stepdef [ 'source' ] = self . _function else : stepdef [ 'name' ] = self . _function elif ( self . _language == 'erlang' and isinstance ( self . _function , list ) ) : stepdef [ 'module' ] = self . _function [ 0 ] stepdef [ 'function' ] = self . _function [ 1 ] elif ( self . _language == 'erlang' and isinstance ( self . _function , string_types ) ) : stepdef [ 'source' ] = self . _function return { self . _type : stepdef }
251,759
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L569-L598
[ "async", "def", "set_kernel_options", "(", "cls", ",", "options", ":", "typing", ".", "Optional", "[", "str", "]", ")", ":", "await", "cls", ".", "set_config", "(", "\"kernel_opts\"", ",", "\"\"", "if", "options", "is", "None", "else", "options", ")" ]
Convert the RiakLinkPhase to a format that can be output into JSON . Used internally .
def to_array ( self ) : stepdef = { 'bucket' : self . _bucket , 'tag' : self . _tag , 'keep' : self . _keep } return { 'link' : stepdef }
251,760
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L626-L634
[ "async", "def", "set_kernel_options", "(", "cls", ",", "options", ":", "typing", ".", "Optional", "[", "str", "]", ")", ":", "await", "cls", ".", "set_config", "(", "\"kernel_opts\"", ",", "\"\"", "if", "options", "is", "None", "else", "options", ")" ]
A conflict - resolution function that resolves by selecting the most recently - modified sibling by timestamp .
def last_written_resolver ( riak_object ) : riak_object . siblings = [ max ( riak_object . siblings , key = lambda x : x . last_modified ) , ]
251,761
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/resolver.py#L31-L40
[ "def", "split_query", "(", "query", ":", "str", ")", "->", "List", "[", "str", "]", ":", "try", ":", "_query", "=", "query", ".", "strip", "(", ")", "except", "(", "ValueError", ",", "AttributeError", ")", ":", "raise", "QueryParserException", "(", "'query is not valid, received instead {}'", ".", "format", "(", "query", ")", ")", "expressions", "=", "_query", ".", "split", "(", "','", ")", "expressions", "=", "[", "exp", ".", "strip", "(", ")", "for", "exp", "in", "expressions", "if", "exp", ".", "strip", "(", ")", "]", "if", "not", "expressions", ":", "raise", "QueryParserException", "(", "'Query is not valid: {}'", ".", "format", "(", "query", ")", ")", "return", "expressions" ]
The default OpenSSL certificate verification callback .
def verify_cb ( conn , cert , errnum , depth , ok ) : if not ok : raise SecurityError ( "Could not verify CA certificate {0}" . format ( cert . get_subject ( ) ) ) return ok
251,762
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/security.py#L27-L34
[ "def", "clear_stalled_files", "(", "self", ")", ":", "# FIXME: put lock in directory?", "CLEAR_AFTER", "=", "self", ".", "config", "[", "\"DELETE_STALLED_AFTER\"", "]", "minimum_age", "=", "time", ".", "time", "(", ")", "-", "CLEAR_AFTER", "for", "user_dir", "in", "self", ".", "UPLOAD_DIR", ".", "iterdir", "(", ")", ":", "if", "not", "user_dir", ".", "is_dir", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-directory in upload dir: %r\"", ",", "bytes", "(", "user_dir", ")", ")", "continue", "for", "content", "in", "user_dir", ".", "iterdir", "(", ")", ":", "if", "not", "content", ".", "is_file", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-file in user upload dir: %r\"", ",", "bytes", "(", "content", ")", ")", "continue", "if", "content", ".", "stat", "(", ")", ".", "st_ctime", "<", "minimum_age", ":", "content", ".", "unlink", "(", ")" ]
Fetches the next page using the same parameters as the original query .
def next_page ( self , timeout = None , stream = None ) : if not self . continuation : raise ValueError ( "Cannot get next index page, no continuation" ) if stream is not None : self . stream = stream args = { 'bucket' : self . bucket , 'index' : self . index , 'startkey' : self . startkey , 'endkey' : self . endkey , 'return_terms' : self . return_terms , 'max_results' : self . max_results , 'continuation' : self . continuation , 'timeout' : timeout , 'term_regex' : self . term_regex } if self . stream : return self . client . stream_index ( * * args ) else : return self . client . get_index ( * * args )
251,763
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/index_page.py#L117-L150
[ "async", "def", "unexpose", "(", "self", ")", ":", "app_facade", "=", "client", ".", "ApplicationFacade", ".", "from_connection", "(", "self", ".", "connection", ")", "log", ".", "debug", "(", "'Unexposing %s'", ",", "self", ".", "name", ")", "return", "await", "app_facade", ".", "Unexpose", "(", "self", ".", "name", ")" ]
Raises an exception if the given timeout is an invalid value .
def _validate_timeout ( timeout , infinity_ok = False ) : if timeout is None : return if timeout == 'infinity' : if infinity_ok : return else : raise ValueError ( 'timeout must be a positive integer ' '("infinity" is not valid)' ) if isinstance ( timeout , six . integer_types ) and timeout > 0 : return raise ValueError ( 'timeout must be a positive integer' )
251,764
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1270-L1288
[ "def", "indexed_file", "(", "self", ",", "f", ")", ":", "filename", ",", "handle", "=", "f", "if", "handle", "is", "None", "and", "filename", "is", "not", "None", ":", "handle", "=", "open", "(", "filename", ")", "if", "(", "handle", "is", "None", "and", "filename", "is", "None", ")", "or", "(", "filename", "!=", "self", ".", "_indexed_filename", ")", "or", "(", "handle", "!=", "self", ".", "_indexed_file_handle", ")", ":", "self", ".", "index", "=", "{", "}", "if", "(", "(", "handle", "is", "not", "None", "or", "filename", "is", "not", "None", ")", "and", "(", "self", ".", "record_iterator", "is", "None", "or", "self", ".", "record_hash_function", "is", "None", ")", ")", ":", "raise", "IndexError", "(", "\"Setting index file failed; reason: iterator \"", "\"(self.record_iterator) or hash function \"", "\"(self.record_hash_function) have to be set first\"", ")", "self", ".", "_indexed_filename", "=", "filename", "self", ".", "_indexed_file_handle", "=", "handle" ]
Streams the list of buckets . This is a generator method that should be iterated over .
def stream_buckets ( self , bucket_type = None , timeout = None ) : if not riak . disable_list_exceptions : raise ListError ( ) _validate_timeout ( timeout ) if bucket_type : bucketfn = self . _bucket_type_bucket_builder else : bucketfn = self . _default_type_bucket_builder def make_op ( transport ) : return transport . stream_buckets ( bucket_type = bucket_type , timeout = timeout ) for bucket_list in self . _stream_with_retry ( make_op ) : bucket_list = [ bucketfn ( bytes_to_str ( name ) , bucket_type ) for name in bucket_list ] if len ( bucket_list ) > 0 : yield bucket_list
251,765
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L72-L125
[ "def", "ActivateCard", "(", "self", ",", "card", ")", ":", "if", "not", "hasattr", "(", "card", ",", "'connection'", ")", ":", "card", ".", "connection", "=", "card", ".", "createConnection", "(", ")", "if", "None", "!=", "self", ".", "parent", ".", "apdutracerpanel", ":", "card", ".", "connection", ".", "addObserver", "(", "self", ".", "parent", ".", "apdutracerpanel", ")", "card", ".", "connection", ".", "connect", "(", ")", "self", ".", "dialogpanel", ".", "OnActivateCard", "(", "card", ")" ]
Queries a secondary index streaming matching keys through an iterator .
def stream_index ( self , bucket , index , startkey , endkey = None , return_terms = None , max_results = None , continuation = None , timeout = None , term_regex = None ) : # TODO FUTURE: implement "retry on connection closed" # as in stream_mapred _validate_timeout ( timeout , infinity_ok = True ) page = IndexPage ( self , bucket , index , startkey , endkey , return_terms , max_results , term_regex ) page . stream = True resource = self . _acquire ( ) transport = resource . object page . results = transport . stream_index ( bucket , index , startkey , endkey , return_terms = return_terms , max_results = max_results , continuation = continuation , timeout = timeout , term_regex = term_regex ) page . results . attach ( resource ) return page
251,766
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L238-L301
[ "def", "location", "(", "args", ")", ":", "fastafile", "=", "args", ".", "fastafile", "pwmfile", "=", "args", ".", "pwmfile", "lwidth", "=", "args", ".", "width", "if", "not", "lwidth", ":", "f", "=", "Fasta", "(", "fastafile", ")", "lwidth", "=", "len", "(", "f", ".", "items", "(", ")", "[", "0", "]", "[", "1", "]", ")", "f", "=", "None", "jobs", "=", "[", "]", "motifs", "=", "pwmfile_to_motifs", "(", "pwmfile", ")", "ids", "=", "[", "motif", ".", "id", "for", "motif", "in", "motifs", "]", "if", "args", ".", "ids", ":", "ids", "=", "args", ".", "ids", ".", "split", "(", "\",\"", ")", "n_cpus", "=", "int", "(", "MotifConfig", "(", ")", ".", "get_default_params", "(", ")", "[", "\"ncpus\"", "]", ")", "pool", "=", "Pool", "(", "processes", "=", "n_cpus", ",", "maxtasksperchild", "=", "1000", ")", "for", "motif", "in", "motifs", ":", "if", "motif", ".", "id", "in", "ids", ":", "outfile", "=", "os", ".", "path", ".", "join", "(", "\"%s_histogram\"", "%", "motif", ".", "id", ")", "jobs", ".", "append", "(", "pool", ".", "apply_async", "(", "motif_localization", ",", "(", "fastafile", ",", "motif", ",", "lwidth", ",", "outfile", ",", "args", ".", "cutoff", ")", ")", ")", "for", "job", "in", "jobs", ":", "job", ".", "get", "(", ")" ]
Lists all keys in a bucket via a stream . This is a generator method which should be iterated over .
def stream_keys ( self , bucket , timeout = None ) : if not riak . disable_list_exceptions : raise ListError ( ) _validate_timeout ( timeout ) def make_op ( transport ) : return transport . stream_keys ( bucket , timeout = timeout ) for keylist in self . _stream_with_retry ( make_op ) : if len ( keylist ) > 0 : if six . PY2 : yield keylist else : yield [ bytes_to_str ( item ) for item in keylist ]
251,767
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L484-L530
[ "def", "log_likelihood", "(", "C", ",", "T", ")", ":", "C", "=", "C", ".", "tocsr", "(", ")", "T", "=", "T", ".", "tocsr", "(", ")", "ind", "=", "scipy", ".", "nonzero", "(", "C", ")", "relT", "=", "np", ".", "array", "(", "T", "[", "ind", "]", ")", "[", "0", ",", ":", "]", "relT", "=", "np", ".", "log", "(", "relT", ")", "relC", "=", "np", ".", "array", "(", "C", "[", "ind", "]", ")", "[", "0", ",", ":", "]", "return", "relT", ".", "dot", "(", "relC", ")" ]
Lists all keys in a time series table via a stream . This is a generator method which should be iterated over .
def ts_stream_keys ( self , table , timeout = None ) : if not riak . disable_list_exceptions : raise ListError ( ) t = table if isinstance ( t , six . string_types ) : t = Table ( self , table ) _validate_timeout ( timeout ) resource = self . _acquire ( ) transport = resource . object stream = transport . ts_stream_keys ( t , timeout ) stream . attach ( resource ) try : for keylist in stream : if len ( keylist ) > 0 : yield keylist finally : stream . close ( )
251,768
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L665-L713
[ "def", "rank_loss", "(", "sentence_emb", ",", "image_emb", ",", "margin", "=", "0.2", ")", ":", "with", "tf", ".", "name_scope", "(", "\"rank_loss\"", ")", ":", "# Normalize first as this is assumed in cosine similarity later.", "sentence_emb", "=", "tf", ".", "nn", ".", "l2_normalize", "(", "sentence_emb", ",", "1", ")", "image_emb", "=", "tf", ".", "nn", ".", "l2_normalize", "(", "image_emb", ",", "1", ")", "# Both sentence_emb and image_emb have size [batch, depth].", "scores", "=", "tf", ".", "matmul", "(", "image_emb", ",", "tf", ".", "transpose", "(", "sentence_emb", ")", ")", "# [batch, batch]", "diagonal", "=", "tf", ".", "diag_part", "(", "scores", ")", "# [batch]", "cost_s", "=", "tf", ".", "maximum", "(", "0.0", ",", "margin", "-", "diagonal", "+", "scores", ")", "# [batch, batch]", "cost_im", "=", "tf", ".", "maximum", "(", "0.0", ",", "margin", "-", "tf", ".", "reshape", "(", "diagonal", ",", "[", "-", "1", ",", "1", "]", ")", "+", "scores", ")", "# [batch, batch]", "# Clear diagonals.", "batch_size", "=", "tf", ".", "shape", "(", "sentence_emb", ")", "[", "0", "]", "empty_diagonal_mat", "=", "tf", ".", "ones_like", "(", "cost_s", ")", "-", "tf", ".", "eye", "(", "batch_size", ")", "cost_s", "*=", "empty_diagonal_mat", "cost_im", "*=", "empty_diagonal_mat", "return", "tf", ".", "reduce_mean", "(", "cost_s", ")", "+", "tf", ".", "reduce_mean", "(", "cost_im", ")" ]
Fetches many keys in parallel via threads .
def multiget ( self , pairs , * * params ) : if self . _multiget_pool : params [ 'pool' ] = self . _multiget_pool return riak . client . multi . multiget ( self , pairs , * * params )
251,769
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1003-L1016
[ "def", "update", "(", "self", ")", ":", "functions", ".", "check_valid_bs_range", "(", "self", ")", "# Here's a trick to find the gregorian date:", "# We find the number of days from earliest nepali date to the current", "# day. We then add the number of days to the earliest english date", "self", ".", "en_date", "=", "values", ".", "START_EN_DATE", "+", "(", "self", "-", "NepDate", "(", "values", ".", "START_NP_YEAR", ",", "1", ",", "1", ")", ")", "return", "self" ]
Stores objects in parallel via threads .
def multiput ( self , objs , * * params ) : if self . _multiput_pool : params [ 'pool' ] = self . _multiput_pool return riak . client . multi . multiput ( self , objs , * * params )
251,770
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1018-L1031
[ "def", "upload_cbn_dir", "(", "dir_path", ",", "manager", ")", ":", "t", "=", "time", ".", "time", "(", ")", "for", "jfg_path", "in", "os", ".", "listdir", "(", "dir_path", ")", ":", "if", "not", "jfg_path", ".", "endswith", "(", "'.jgf'", ")", ":", "continue", "path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "jfg_path", ")", "log", ".", "info", "(", "'opening %s'", ",", "path", ")", "with", "open", "(", "path", ")", "as", "f", ":", "cbn_jgif_dict", "=", "json", ".", "load", "(", "f", ")", "graph", "=", "pybel", ".", "from_cbn_jgif", "(", "cbn_jgif_dict", ")", "out_path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "jfg_path", ".", "replace", "(", "'.jgf'", ",", "'.bel'", ")", ")", "with", "open", "(", "out_path", ",", "'w'", ")", "as", "o", ":", "pybel", ".", "to_bel", "(", "graph", ",", "o", ")", "strip_annotations", "(", "graph", ")", "enrich_pubmed_citations", "(", "manager", "=", "manager", ",", "graph", "=", "graph", ")", "pybel", ".", "to_database", "(", "graph", ",", "manager", "=", "manager", ")", "log", ".", "info", "(", "''", ")", "log", ".", "info", "(", "'done in %.2f'", ",", "time", ".", "time", "(", ")", "-", "t", ")" ]
Fetches the value of a Riak Datatype .
def fetch_datatype ( self , bucket , key , r = None , pr = None , basic_quorum = None , notfound_ok = None , timeout = None , include_context = None ) : dtype , value , context = self . _fetch_datatype ( bucket , key , r = r , pr = pr , basic_quorum = basic_quorum , notfound_ok = notfound_ok , timeout = timeout , include_context = include_context ) return TYPES [ dtype ] ( bucket = bucket , key = key , value = value , context = context )
251,771
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1106-L1143
[ "def", "_remove_persistent_module", "(", "mod", ",", "comment", ")", ":", "if", "not", "mod", "or", "mod", "not", "in", "mod_list", "(", "True", ")", ":", "return", "set", "(", ")", "if", "comment", ":", "__salt__", "[", "'file.comment'", "]", "(", "_LOADER_CONF", ",", "_MODULE_RE", ".", "format", "(", "mod", ")", ")", "else", ":", "__salt__", "[", "'file.sed'", "]", "(", "_LOADER_CONF", ",", "_MODULE_RE", ".", "format", "(", "mod", ")", ",", "''", ")", "return", "set", "(", "[", "mod", "]", ")" ]
Sends an update for a Riak Datatype to the server . This operation is not idempotent and so will not be retried automatically .
def update_datatype ( self , datatype , w = None , dw = None , pw = None , return_body = None , timeout = None , include_context = None ) : _validate_timeout ( timeout ) with self . _transport ( ) as transport : return transport . update_datatype ( datatype , w = w , dw = dw , pw = pw , return_body = return_body , timeout = timeout , include_context = include_context )
251,772
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1145-L1175
[ "def", "xstrip", "(", "filename", ")", ":", "while", "xisabs", "(", "filename", ")", ":", "# strip windows drive with all slashes", "if", "re", ".", "match", "(", "b'\\\\w:[\\\\\\\\/]'", ",", "filename", ")", ":", "filename", "=", "re", ".", "sub", "(", "b'^\\\\w+:[\\\\\\\\/]+'", ",", "b''", ",", "filename", ")", "# strip all slashes", "elif", "re", ".", "match", "(", "b'[\\\\\\\\/]'", ",", "filename", ")", ":", "filename", "=", "re", ".", "sub", "(", "b'^[\\\\\\\\/]+'", ",", "b''", ",", "filename", ")", "return", "filename" ]
Similar to self . _send_recv but doesn't try to initiate a connection thus preventing an infinite loop .
def _non_connect_send_recv ( self , msg_code , data = None ) : self . _non_connect_send_msg ( msg_code , data ) return self . _recv_msg ( )
251,773
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L53-L59
[ "def", "from_pint", "(", "cls", ",", "arr", ",", "unit_registry", "=", "None", ")", ":", "p_units", "=", "[", "]", "for", "base", ",", "exponent", "in", "arr", ".", "_units", ".", "items", "(", ")", ":", "bs", "=", "convert_pint_units", "(", "base", ")", "p_units", ".", "append", "(", "\"%s**(%s)\"", "%", "(", "bs", ",", "Rational", "(", "exponent", ")", ")", ")", "p_units", "=", "\"*\"", ".", "join", "(", "p_units", ")", "if", "isinstance", "(", "arr", ".", "magnitude", ",", "np", ".", "ndarray", ")", ":", "return", "unyt_array", "(", "arr", ".", "magnitude", ",", "p_units", ",", "registry", "=", "unit_registry", ")", "else", ":", "return", "unyt_quantity", "(", "arr", ".", "magnitude", ",", "p_units", ",", "registry", "=", "unit_registry", ")" ]
Similar to self . _send but doesn't try to initiate a connection thus preventing an infinite loop .
def _non_connect_send_msg ( self , msg_code , data ) : try : self . _socket . sendall ( self . _encode_msg ( msg_code , data ) ) except ( IOError , socket . error ) as e : if e . errno == errno . EPIPE : raise ConnectionClosed ( e ) else : raise
251,774
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L65-L76
[ "def", "from_pint", "(", "cls", ",", "arr", ",", "unit_registry", "=", "None", ")", ":", "p_units", "=", "[", "]", "for", "base", ",", "exponent", "in", "arr", ".", "_units", ".", "items", "(", ")", ":", "bs", "=", "convert_pint_units", "(", "base", ")", "p_units", ".", "append", "(", "\"%s**(%s)\"", "%", "(", "bs", ",", "Rational", "(", "exponent", ")", ")", ")", "p_units", "=", "\"*\"", ".", "join", "(", "p_units", ")", "if", "isinstance", "(", "arr", ".", "magnitude", ",", "np", ".", "ndarray", ")", ":", "return", "unyt_array", "(", "arr", ".", "magnitude", ",", "p_units", ",", "registry", "=", "unit_registry", ")", "else", ":", "return", "unyt_quantity", "(", "arr", ".", "magnitude", ",", "p_units", ",", "registry", "=", "unit_registry", ")" ]
Initialize a secure connection to the server .
def _init_security ( self ) : if not self . _starttls ( ) : raise SecurityError ( "Could not start TLS connection" ) # _ssh_handshake() will throw an exception upon failure self . _ssl_handshake ( ) if not self . _auth ( ) : raise SecurityError ( "Could not authorize connection" )
251,775
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L82-L91
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Exchange a STARTTLS message with Riak to initiate secure communications ; return True if Riak responds with a STARTTLS response , False otherwise .
def _starttls ( self ) : resp_code , _ = self . _non_connect_send_recv ( riak . pb . messages . MSG_CODE_START_TLS ) if resp_code == riak . pb . messages . MSG_CODE_START_TLS : return True else : return False
251,776
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L93-L103
[ "def", "max_texture_limit", "(", "self", ")", ":", "max_unit_array", "=", "(", "gl", ".", "GLint", "*", "1", ")", "(", ")", "gl", ".", "glGetIntegerv", "(", "gl", ".", "GL_MAX_TEXTURE_IMAGE_UNITS", ",", "max_unit_array", ")", "return", "max_unit_array", "[", "0", "]" ]
Closes the underlying socket of the PB connection .
def close ( self ) : if self . _socket : if USE_STDLIB_SSL : # NB: Python 2.7.8 and earlier does not have a compatible # shutdown() method due to the SSL lib try : self . _socket . shutdown ( socket . SHUT_RDWR ) except EnvironmentError : # NB: sometimes these exceptions are raised if the initial # connection didn't succeed correctly, or if shutdown() is # called after the connection dies logging . debug ( 'Exception occurred while shutting ' 'down socket.' , exc_info = True ) self . _socket . close ( ) del self . _socket
251,777
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L266-L283
[ "def", "record", "(", "self", ",", "tags", ",", "measurement_map", ",", "timestamp", ",", "attachments", "=", "None", ")", ":", "assert", "all", "(", "vv", ">=", "0", "for", "vv", "in", "measurement_map", ".", "values", "(", ")", ")", "for", "measure", ",", "value", "in", "measurement_map", ".", "items", "(", ")", ":", "if", "measure", "!=", "self", ".", "_registered_measures", ".", "get", "(", "measure", ".", "name", ")", ":", "return", "view_datas", "=", "[", "]", "for", "measure_name", ",", "view_data_list", "in", "self", ".", "_measure_to_view_data_list_map", ".", "items", "(", ")", ":", "if", "measure_name", "==", "measure", ".", "name", ":", "view_datas", ".", "extend", "(", "view_data_list", ")", "for", "view_data", "in", "view_datas", ":", "view_data", ".", "record", "(", "context", "=", "tags", ",", "value", "=", "value", ",", "timestamp", "=", "timestamp", ",", "attachments", "=", "attachments", ")", "self", ".", "export", "(", "view_datas", ")" ]
Delegates a property to the first sibling in a RiakObject raising an error when the object is in conflict .
def content_property ( name , doc = None ) : def _setter ( self , value ) : if len ( self . siblings ) == 0 : # In this case, assume that what the user wants is to # create a new sibling inside an empty object. self . siblings = [ RiakContent ( self ) ] if len ( self . siblings ) != 1 : raise ConflictError ( ) setattr ( self . siblings [ 0 ] , name , value ) def _getter ( self ) : if len ( self . siblings ) == 0 : return if len ( self . siblings ) != 1 : raise ConflictError ( ) return getattr ( self . siblings [ 0 ] , name ) return property ( _getter , _setter , doc = doc )
251,778
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L22-L43
[ "def", "stop", "(", "self", ")", ":", "log", ".", "info", "(", "'Stopping te kafka listener class'", ")", "self", ".", "consumer", ".", "unsubscribe", "(", ")", "self", ".", "consumer", ".", "close", "(", ")" ]
Delegates a method to the first sibling in a RiakObject, raising an error when the object is in conflict.
def content_method(name):
    def _delegate(self, *args, **kwargs):
        if len(self.siblings) != 1:
            raise ConflictError()
        return getattr(self.siblings[0], name).__call__(*args, **kwargs)

    _delegate.__doc__ = getattr(RiakContent, name).__doc__
    return _delegate
251,779
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L46-L58
[ "def", "remove_namespace", "(", "self", ",", "namespace", ")", ":", "params", "=", "(", "namespace", ",", ")", "execute", "=", "self", ".", "cursor", ".", "execute", "execute", "(", "'DELETE FROM gauged_data WHERE namespace = %s'", ",", "params", ")", "execute", "(", "'DELETE FROM gauged_statistics WHERE namespace = %s'", ",", "params", ")", "execute", "(", "'DELETE FROM gauged_keys WHERE namespace = %s'", ",", "params", ")", "self", ".", "remove_cache", "(", "namespace", ")" ]
Store the object in Riak. When this operation completes, the object could contain new metadata and possibly new data if Riak contains a newer version of the object according to the object's vector clock.
def store(self, w=None, dw=None, pw=None, return_body=True,
          if_none_match=False, timeout=None):
    if len(self.siblings) != 1:
        raise ConflictError("Attempting to store an invalid object, "
                            "resolve the siblings first")
    self.client.put(self, w=w, dw=dw, pw=pw,
                    return_body=return_body,
                    if_none_match=if_none_match,
                    timeout=timeout)
    return self
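A hedged usage sketch, assuming a reachable Riak node and the public client API from the riak package; the bucket and key names are invented:

from riak import RiakClient

client = RiakClient()                        # defaults to localhost
bucket = client.bucket('users')              # 'users' is a made-up bucket
obj = bucket.new('alice', data={'name': 'Alice'})
obj.store(w=2, dw=1, return_body=True)       # require 2 write acks, 1 durable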
251,780
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L247-L283
[ "def", "configure_logging", "(", ")", ":", "if", "not", "parse_boolean", "(", "os", ".", "environ", ".", "get", "(", "'DISABLE_TRUSTAR_LOGGING'", ")", ")", ":", "# configure", "dictConfig", "(", "DEFAULT_LOGGING_CONFIG", ")", "# construct error logger", "error_logger", "=", "logging", ".", "getLogger", "(", "\"error\"", ")", "# log all uncaught exceptions", "def", "log_exception", "(", "exc_type", ",", "exc_value", ",", "exc_traceback", ")", ":", "error_logger", ".", "error", "(", "\"Uncaught exception\"", ",", "exc_info", "=", "(", "exc_type", ",", "exc_value", ",", "exc_traceback", ")", ")", "# register logging function as exception hook", "sys", ".", "excepthook", "=", "log_exception" ]
Reload the object from Riak. When this operation completes, the object could contain new metadata and a new value if the object was updated in Riak since it was last retrieved.
def reload(self, r=None, pr=None, timeout=None, basic_quorum=None,
           notfound_ok=None, head_only=False):
    self.client.get(self, r=r, pr=pr, timeout=timeout,
                    head_only=head_only)
    return self
251,781
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L285-L317
[ "def", "sanitize_type", "(", "raw_type", ")", ":", "cleaned", "=", "get_printable", "(", "raw_type", ")", ".", "strip", "(", ")", "for", "bad", "in", "[", "r'__drv_aliasesMem'", ",", "r'__drv_freesMem'", ",", "r'__drv_strictTypeMatch\\(\\w+\\)'", ",", "r'__out_data_source\\(\\w+\\)'", ",", "r'_In_NLS_string_\\(\\w+\\)'", ",", "r'_Frees_ptr_'", ",", "r'_Frees_ptr_opt_'", ",", "r'opt_'", ",", "r'\\(Mem\\) '", "]", ":", "cleaned", "=", "re", ".", "sub", "(", "bad", ",", "''", ",", "cleaned", ")", ".", "strip", "(", ")", "if", "cleaned", "in", "[", "'_EXCEPTION_RECORD *'", ",", "'_EXCEPTION_POINTERS *'", "]", ":", "cleaned", "=", "cleaned", ".", "strip", "(", "'_'", ")", "cleaned", "=", "cleaned", ".", "replace", "(", "'[]'", ",", "'*'", ")", "return", "cleaned" ]
Delete this object from Riak.
def delete(self, r=None, w=None, dw=None, pr=None, pw=None,
           timeout=None):
    self.client.delete(self, r=r, w=w, dw=dw,
                       pr=pr, pw=pw, timeout=timeout)
    self.clear()
    return self
251,782
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L319-L348
[ "def", "configure_app", "(", "*", "*", "kwargs", ")", ":", "sys_args", "=", "sys", ".", "argv", "args", ",", "command", ",", "command_args", "=", "parse_args", "(", "sys_args", "[", "1", ":", "]", ")", "parser", "=", "OptionParser", "(", ")", "parser", ".", "add_option", "(", "'--config'", ",", "metavar", "=", "'CONFIG'", ")", "(", "options", ",", "logan_args", ")", "=", "parser", ".", "parse_args", "(", "args", ")", "config_path", "=", "options", ".", "config", "logan_configure", "(", "config_path", "=", "config_path", ",", "*", "*", "kwargs", ")" ]
Get the encoding function for the provided content type for this bucket.
def get_encoder(self, content_type):
    if content_type in self._encoders:
        return self._encoders[content_type]
    else:
        return self._client.get_encoder(content_type)
251,783
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L88-L100
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Get the decoding function for the provided content type for this bucket.
def get_decoder(self, content_type):
    if content_type in self._decoders:
        return self._decoders[content_type]
    else:
        return self._client.get_decoder(content_type)
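A runnable toy mirroring the two-level lookup above: a bucket-local registry consulted first, then the client-wide one; FakeClient and FakeBucket are invented stand-ins:

import json

class FakeClient(object):
    def __init__(self):
        self._decoders = {'application/json': json.loads}
    def get_decoder(self, content_type):
        return self._decoders.get(content_type)

class FakeBucket(object):
    def __init__(self, client):
        self._client = client
        self._decoders = {}   # empty, so lookups fall through to the client
    def get_decoder(self, content_type):
        if content_type in self._decoders:
            return self._decoders[content_type]
        else:
            return self._client.get_decoder(content_type)

b = FakeBucket(FakeClient())
print(b.get_decoder('application/json')('{"a": 1}'))  # -> {'a': 1}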
251,784
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L116-L128
[ "def", "_simulate_unitary", "(", "self", ",", "op", ":", "ops", ".", "Operation", ",", "data", ":", "_StateAndBuffer", ",", "indices", ":", "List", "[", "int", "]", ")", "->", "None", ":", "result", "=", "protocols", ".", "apply_unitary", "(", "op", ",", "args", "=", "protocols", ".", "ApplyUnitaryArgs", "(", "data", ".", "state", ",", "data", ".", "buffer", ",", "indices", ")", ")", "if", "result", "is", "data", ".", "buffer", ":", "data", ".", "buffer", "=", "data", ".", "state", "data", ".", "state", "=", "result" ]
Retrieves a list of keys belonging to this bucket in parallel.
def multiget(self, keys, r=None, pr=None, timeout=None,
             basic_quorum=None, notfound_ok=None, head_only=False):
    bkeys = [(self.bucket_type.name, self.name, key) for key in keys]
    return self._client.multiget(bkeys, r=r, pr=pr, timeout=timeout,
                                 basic_quorum=basic_quorum,
                                 notfound_ok=notfound_ok,
                                 head_only=head_only)
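Continuing the hedged sketch from the store example above, a parallel fetch of several keys from the same invented bucket:

objs = bucket.multiget(['alice', 'bob', 'carol'], r=2, notfound_ok=True)
for obj in objs:
    print(obj.key, obj.data)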
251,785
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L238-L268
[ "def", "realtime_observations", "(", "cls", ",", "buoy", ",", "data_type", "=", "'txt'", ")", ":", "endpoint", "=", "cls", "(", ")", "parsers", "=", "{", "'txt'", ":", "endpoint", ".", "_parse_met", ",", "'drift'", ":", "endpoint", ".", "_parse_drift", ",", "'cwind'", ":", "endpoint", ".", "_parse_cwind", ",", "'spec'", ":", "endpoint", ".", "_parse_spec", ",", "'ocean'", ":", "endpoint", ".", "_parse_ocean", ",", "'srad'", ":", "endpoint", ".", "_parse_srad", ",", "'dart'", ":", "endpoint", ".", "_parse_dart", ",", "'supl'", ":", "endpoint", ".", "_parse_supl", ",", "'rain'", ":", "endpoint", ".", "_parse_rain", "}", "if", "data_type", "not", "in", "parsers", ":", "raise", "KeyError", "(", "'Data type must be txt, drift, cwind, spec, ocean, srad, dart,'", "'supl, or rain for parsed realtime data.'", ")", "raw_data", "=", "endpoint", ".", "raw_buoy_data", "(", "buoy", ",", "data_type", "=", "data_type", ")", "return", "parsers", "[", "data_type", "]", "(", "raw_data", ")" ]
Streams the list of buckets under this bucket type. This is a generator method that should be iterated over.
def stream_buckets(self, timeout=None):
    return self._client.stream_buckets(bucket_type=self,
                                       timeout=timeout)
251,786
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L712-L729
[ "def", "ActivateCard", "(", "self", ",", "card", ")", ":", "if", "not", "hasattr", "(", "card", ",", "'connection'", ")", ":", "card", ".", "connection", "=", "card", ".", "createConnection", "(", ")", "if", "None", "!=", "self", ".", "parent", ".", "apdutracerpanel", ":", "card", ".", "connection", ".", "addObserver", "(", "self", ".", "parent", ".", "apdutracerpanel", ")", "card", ".", "connection", ".", "connect", "(", ")", "self", ".", "dialogpanel", ".", "OnActivateCard", "(", "card", ")" ]
Increases the value by the argument.
def incr(self, d):
    with self.lock:
        self.p = self.value() + d
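A runnable, simplified stand-in for the surrounding class: the real value() in riak/node.py presumably does more work (the time decay its module suggests); here it just returns the raw number so the lock-guarded read-modify-write in incr() stands out:

import threading

class DecayingSketch(object):
    def __init__(self):
        self.p = 0.0
        self.lock = threading.Lock()
    def value(self):
        # simplified: no decay applied, and no lock taken, so calling it
        # from inside incr()'s critical section cannot deadlock
        return self.p
    def incr(self, d):
        with self.lock:
            self.p = self.value() + d

e = DecayingSketch()
e.incr(1.0)
e.incr(0.5)
print(e.value())  # -> 1.5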
251,787
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/node.py#L46-L54
[ "def", "load_market_data", "(", "trading_day", "=", "None", ",", "trading_days", "=", "None", ",", "bm_symbol", "=", "'SPY'", ",", "environ", "=", "None", ")", ":", "if", "trading_day", "is", "None", ":", "trading_day", "=", "get_calendar", "(", "'XNYS'", ")", ".", "day", "if", "trading_days", "is", "None", ":", "trading_days", "=", "get_calendar", "(", "'XNYS'", ")", ".", "all_sessions", "first_date", "=", "trading_days", "[", "0", "]", "now", "=", "pd", ".", "Timestamp", ".", "utcnow", "(", ")", "# we will fill missing benchmark data through latest trading date", "last_date", "=", "trading_days", "[", "trading_days", ".", "get_loc", "(", "now", ",", "method", "=", "'ffill'", ")", "]", "br", "=", "ensure_benchmark_data", "(", "bm_symbol", ",", "first_date", ",", "last_date", ",", "now", ",", "# We need the trading_day to figure out the close prior to the first", "# date so that we can compute returns for the first date.", "trading_day", ",", "environ", ",", ")", "tc", "=", "ensure_treasury_data", "(", "bm_symbol", ",", "first_date", ",", "last_date", ",", "now", ",", "environ", ",", ")", "# combine dt indices and reindex using ffill then bfill", "all_dt", "=", "br", ".", "index", ".", "union", "(", "tc", ".", "index", ")", "br", "=", "br", ".", "reindex", "(", "all_dt", ",", "method", "=", "'ffill'", ")", ".", "fillna", "(", "method", "=", "'bfill'", ")", "tc", "=", "tc", ".", "reindex", "(", "all_dt", ",", "method", "=", "'ffill'", ")", ".", "fillna", "(", "method", "=", "'bfill'", ")", "benchmark_returns", "=", "br", "[", "br", ".", "index", ".", "slice_indexer", "(", "first_date", ",", "last_date", ")", "]", "treasury_curves", "=", "tc", "[", "tc", ".", "index", ".", "slice_indexer", "(", "first_date", ",", "last_date", ")", "]", "return", "benchmark_returns", ",", "treasury_curves" ]
Returns a random client identifier.
def make_random_client_id(self):
    if PY2:
        return ('py_%s' %
                base64.b64encode(str(random.randint(1, 0x40000000))))
    else:
        return ('py_%s' %
                base64.b64encode(bytes(str(random.randint(1, 0x40000000)),
                                       'ascii')))
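A runnable Python 3 rendition of the non-PY2 branch; note that b64encode returns bytes, so the interpolated id carries a bytes repr, exactly as the code above would produce:

import base64
import random

client_id = 'py_%s' % base64.b64encode(
    bytes(str(random.randint(1, 0x40000000)), 'ascii'))
print(client_id)  # e.g. py_b'MTIzNDU2Nzg5'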
251,788
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L42-L52
[ "def", "get_max_devices_per_port_for_storage_bus", "(", "self", ",", "bus", ")", ":", "if", "not", "isinstance", "(", "bus", ",", "StorageBus", ")", ":", "raise", "TypeError", "(", "\"bus can only be an instance of type StorageBus\"", ")", "max_devices_per_port", "=", "self", ".", "_call", "(", "\"getMaxDevicesPerPortForStorageBus\"", ",", "in_p", "=", "[", "bus", "]", ")", "return", "max_devices_per_port" ]
Fetches an object.
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None,
        notfound_ok=None, head_only=False):
    raise NotImplementedError
251,789
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L70-L75
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'watson_vision_combined'", ",", "'V3'", ",", "'delete_classifier'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "url", "=", "'/v3/classifiers/{0}'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "classifier_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'DELETE'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "accept_json", "=", "True", ")", "return", "response" ]
Stores an object.
def put(self, robj, w=None, dw=None, pw=None, return_body=None,
        if_none_match=None, timeout=None):
    raise NotImplementedError
251,790
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L77-L82
[ "def", "pvd_factory", "(", "sys_ident", ",", "vol_ident", ",", "set_size", ",", "seqnum", ",", "log_block_size", ",", "vol_set_ident", ",", "pub_ident_str", ",", "preparer_ident_str", ",", "app_ident_str", ",", "copyright_file", ",", "abstract_file", ",", "bibli_file", ",", "vol_expire_date", ",", "app_use", ",", "xa", ")", ":", "# type: (bytes, bytes, int, int, int, bytes, bytes, bytes, bytes, bytes, bytes, bytes, float, bytes, bool) -> PrimaryOrSupplementaryVD", "pvd", "=", "PrimaryOrSupplementaryVD", "(", "VOLUME_DESCRIPTOR_TYPE_PRIMARY", ")", "pvd", ".", "new", "(", "0", ",", "sys_ident", ",", "vol_ident", ",", "set_size", ",", "seqnum", ",", "log_block_size", ",", "vol_set_ident", ",", "pub_ident_str", ",", "preparer_ident_str", ",", "app_ident_str", ",", "copyright_file", ",", "abstract_file", ",", "bibli_file", ",", "vol_expire_date", ",", "app_use", ",", "xa", ",", "1", ",", "b''", ")", "return", "pvd" ]
Deletes an object.
def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None,
           pw=None, timeout=None):
    raise NotImplementedError
251,791
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L84-L89
[ "def", "pvd_factory", "(", "sys_ident", ",", "vol_ident", ",", "set_size", ",", "seqnum", ",", "log_block_size", ",", "vol_set_ident", ",", "pub_ident_str", ",", "preparer_ident_str", ",", "app_ident_str", ",", "copyright_file", ",", "abstract_file", ",", "bibli_file", ",", "vol_expire_date", ",", "app_use", ",", "xa", ")", ":", "# type: (bytes, bytes, int, int, int, bytes, bytes, bytes, bytes, bytes, bytes, bytes, float, bytes, bool) -> PrimaryOrSupplementaryVD", "pvd", "=", "PrimaryOrSupplementaryVD", "(", "VOLUME_DESCRIPTOR_TYPE_PRIMARY", ")", "pvd", ".", "new", "(", "0", ",", "sys_ident", ",", "vol_ident", ",", "set_size", ",", "seqnum", ",", "log_block_size", ",", "vol_set_ident", ",", "pub_ident_str", ",", "preparer_ident_str", ",", "app_ident_str", ",", "copyright_file", ",", "abstract_file", ",", "bibli_file", ",", "vol_expire_date", ",", "app_use", ",", "xa", ",", "1", ",", "b''", ")", "return", "pvd" ]
Updates a counter by the given value.
def update_counter(self, bucket, key, value, w=None, dw=None, pw=None,
                   returnvalue=False):
    raise NotImplementedError
251,792
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L285-L290
[ "def", "Run", "(", "self", ")", ":", "self", ".", "_GetArgs", "(", ")", "goodlogging", ".", "Log", ".", "Info", "(", "\"CLEAR\"", ",", "\"Using database: {0}\"", ".", "format", "(", "self", ".", "_databasePath", ")", ")", "self", ".", "_db", "=", "database", ".", "RenamerDB", "(", "self", ".", "_databasePath", ")", "if", "self", ".", "_dbPrint", "or", "self", ".", "_dbUpdate", ":", "goodlogging", ".", "Log", ".", "Seperator", "(", ")", "self", ".", "_db", ".", "PrintAllTables", "(", ")", "if", "self", ".", "_dbUpdate", ":", "goodlogging", ".", "Log", ".", "Seperator", "(", ")", "self", ".", "_db", ".", "ManualUpdateTables", "(", ")", "self", ".", "_GetDatabaseConfig", "(", ")", "if", "self", ".", "_enableExtract", ":", "goodlogging", ".", "Log", ".", "Seperator", "(", ")", "extractFileList", "=", "[", "]", "goodlogging", ".", "Log", ".", "Info", "(", "\"CLEAR\"", ",", "\"Parsing source directory for compressed files\"", ")", "goodlogging", ".", "Log", ".", "IncreaseIndent", "(", ")", "extract", ".", "GetCompressedFilesInDir", "(", "self", ".", "_sourceDir", ",", "extractFileList", ",", "self", ".", "_ignoredDirsList", ")", "goodlogging", ".", "Log", ".", "DecreaseIndent", "(", ")", "goodlogging", ".", "Log", ".", "Seperator", "(", ")", "extract", ".", "Extract", "(", "extractFileList", ",", "self", ".", "_supportedFormatsList", ",", "self", ".", "_archiveDir", ",", "self", ".", "_skipUserInputExtract", ")", "goodlogging", ".", "Log", ".", "Seperator", "(", ")", "tvFileList", "=", "[", "]", "goodlogging", ".", "Log", ".", "Info", "(", "\"CLEAR\"", ",", "\"Parsing source directory for compatible files\"", ")", "goodlogging", ".", "Log", ".", "IncreaseIndent", "(", ")", "self", ".", "_GetSupportedFilesInDir", "(", "self", ".", "_sourceDir", ",", "tvFileList", ",", "self", ".", "_supportedFormatsList", ",", "self", ".", "_ignoredDirsList", ")", "goodlogging", ".", "Log", ".", "DecreaseIndent", "(", ")", "tvRenamer", "=", "renamer", ".", "TVRenamer", "(", "self", ".", "_db", ",", "tvFileList", ",", "self", ".", "_archiveDir", ",", "guideName", "=", "'EPGUIDES'", ",", "tvDir", "=", "self", ".", "_tvDir", ",", "inPlaceRename", "=", "self", ".", "_inPlaceRename", ",", "forceCopy", "=", "self", ".", "_crossSystemCopyEnabled", ",", "skipUserInput", "=", "self", ".", "_skipUserInputRename", ")", "tvRenamer", ".", "Run", "(", ")" ]
Fetches a Riak Datatype.
def fetch_datatype(self, bucket, key, r=None, pr=None,
                   basic_quorum=None, notfound_ok=None,
                   timeout=None, include_context=None):
    raise NotImplementedError
251,793
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L292-L297
[ "def", "_load_state", "(", "self", ",", "context", ")", ":", "try", ":", "state", "=", "cookie_to_state", "(", "context", ".", "cookie", ",", "self", ".", "config", "[", "\"COOKIE_STATE_NAME\"", "]", ",", "self", ".", "config", "[", "\"STATE_ENCRYPTION_KEY\"", "]", ")", "except", "SATOSAStateError", "as", "e", ":", "msg_tmpl", "=", "'Failed to decrypt state {state} with {error}'", "msg", "=", "msg_tmpl", ".", "format", "(", "state", "=", "context", ".", "cookie", ",", "error", "=", "str", "(", "e", ")", ")", "satosa_logging", "(", "logger", ",", "logging", ".", "WARNING", ",", "msg", ",", "None", ")", "state", "=", "State", "(", ")", "finally", ":", "context", ".", "state", "=", "state" ]
Updates a Riak Datatype by sending local operations to the server.
def update_datatype(self, datatype, w=None, dw=None, pw=None,
                    return_body=None, timeout=None,
                    include_context=None):
    raise NotImplementedError
251,794
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L299-L304
[ "def", "set_azure_secret_access_key", "(", "config_fpath", ",", "container", ",", "az_secret_access_key", ")", ":", "key", "=", "AZURE_KEY_PREFIX", "+", "container", "return", "write_config_value_to_file", "(", "key", ",", "az_secret_access_key", ",", "config_fpath", ")" ]
Emulates a search request via MapReduce. Used in the case where the transport supports MapReduce but has no native search capability.
def _search_mapred_emu(self, index, query):
    phases = []
    if not self.phaseless_mapred():
        phases.append({'language': 'erlang',
                       'module': 'riak_kv_mapreduce',
                       'function': 'reduce_identity',
                       'keep': True})
    mr_result = self.mapred({'module': 'riak_search',
                             'function': 'mapred_search',
                             'arg': [index, query]},
                            phases)
    result = {'num_found': len(mr_result),
              'max_score': 0.0,
              'docs': []}
    for bucket, key, data in mr_result:
        if u'score' in data and data[u'score'][0] > result['max_score']:
            result['max_score'] = data[u'score'][0]
        result['docs'].append({u'id': key})
    return result
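A data-only illustration of the request shape the emulation sends; the index and query strings are invented:

inputs = {'module': 'riak_search',
          'function': 'mapred_search',
          'arg': ['users_idx', 'name_s:Alice']}   # invented index/query
identity_phase = {'language': 'erlang',
                  'module': 'riak_kv_mapreduce',
                  'function': 'reduce_identity',
                  'keep': True}
# transport.mapred(inputs, [identity_phase]) would return (bucket, key,
# data) triples, which the method folds into num_found/max_score/docs.
print(inputs, identity_phase)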
251,795
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L313-L336
[ "def", "hash_blockquotes", "(", "text", ",", "hashes", ",", "markdown_obj", ")", ":", "def", "sub", "(", "match", ")", ":", "block", "=", "match", ".", "group", "(", "1", ")", ".", "strip", "(", ")", "block", "=", "re", ".", "sub", "(", "r'(?:(?<=\\n)|(?<=\\A))> ?'", ",", "''", ",", "block", ")", "block", "=", "markdown_obj", ".", "convert", "(", "block", ")", "block", "=", "'<blockquote>{}</blockquote>'", ".", "format", "(", "block", ")", "hashed", "=", "hash_text", "(", "block", ",", "'blockquote'", ")", "hashes", "[", "hashed", "]", "=", "block", "return", "'\\n\\n'", "+", "hashed", "+", "'\\n\\n'", "return", "re_blockquote", ".", "sub", "(", "sub", ",", "text", ")" ]
Emulates a secondary index request via MapReduce. Used in the case where the transport supports MapReduce but has no native secondary index query capability.
def _get_index_mapred_emu(self, bucket, index, startkey, endkey=None):
    phases = []
    if not self.phaseless_mapred():
        phases.append({'language': 'erlang',
                       'module': 'riak_kv_mapreduce',
                       'function': 'reduce_identity',
                       'keep': True})
    if endkey:
        result = self.mapred({'bucket': bucket,
                              'index': index,
                              'start': startkey,
                              'end': endkey},
                             phases)
    else:
        result = self.mapred({'bucket': bucket,
                              'index': index,
                              'key': startkey},
                             phases)
    return [key for resultbucket, key in result]
251,796
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L339-L362
[ "def", "weld_variance", "(", "array", ",", "weld_type", ")", ":", "weld_obj_mean", "=", "weld_mean", "(", "array", ",", "weld_type", ")", "obj_id", ",", "weld_obj", "=", "create_weld_object", "(", "array", ")", "weld_obj_mean_id", "=", "get_weld_obj_id", "(", "weld_obj", ",", "weld_obj_mean", ")", "weld_template", "=", "_weld_variance_code", "weld_obj", ".", "weld_code", "=", "weld_template", ".", "format", "(", "array", "=", "obj_id", ",", "type", "=", "weld_type", ",", "mean", "=", "weld_obj_mean_id", ")", "return", "weld_obj" ]
Parse the body of an object response and populate the object.
def _parse_body(self, robj, response, expected_statuses):
    # If no response given, then return.
    if response is None:
        return None

    status, headers, data = response

    # Check if the server is down (status == 0)
    if not status:
        m = 'Could not contact Riak Server: http://{0}:{1}!'.format(
            self._node.host, self._node.http_port)
        raise RiakError(m)

    # Make sure expected code came back
    self.check_http_code(status, expected_statuses)

    if 'x-riak-vclock' in headers:
        robj.vclock = VClock(headers['x-riak-vclock'], 'base64')

    # If 404 (Not Found), then clear the object.
    if status == 404:
        robj.siblings = []
        return None
    # If 201 (Created), extract the location and set the key on the object.
    elif status == 201:
        robj.key = headers['location'].strip().split('/')[-1]
    # If 300 (Siblings), apply the siblings to the object
    elif status == 300:
        ctype, params = parse_header(headers['content-type'])
        if ctype == 'multipart/mixed':
            if six.PY3:
                data = bytes_to_str(data)
            boundary = re.compile('\r?\n--%s(?:--)?\r?\n' %
                                  re.escape(params['boundary']))
            parts = [message_from_string(p)
                     for p in re.split(boundary, data)[1:-1]]
            robj.siblings = [self._parse_sibling(RiakContent(robj),
                                                 part.items(),
                                                 part.get_payload())
                             for part in parts]

            # Invoke sibling-resolution logic
            if robj.resolver is not None:
                robj.resolver(robj)

            return robj
        else:
            raise Exception('unexpected sibling response format: {0}'.
                            format(ctype))

    robj.siblings = [self._parse_sibling(RiakContent(robj),
                                         headers.items(),
                                         data)]

    return robj
251,797
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L46-L104
[ "def", "point_in_triangle", "(", "p", ",", "v1", ",", "v2", ",", "v3", ")", ":", "def", "_test", "(", "p1", ",", "p2", ",", "p3", ")", ":", "return", "(", "p1", "[", "0", "]", "-", "p3", "[", "0", "]", ")", "*", "(", "p2", "[", "1", "]", "-", "p3", "[", "1", "]", ")", "-", "(", "p2", "[", "0", "]", "-", "p3", "[", "0", "]", ")", "*", "(", "p1", "[", "1", "]", "-", "p3", "[", "1", "]", ")", "b1", "=", "_test", "(", "p", ",", "v1", ",", "v2", ")", "<", "0.0", "b2", "=", "_test", "(", "p", ",", "v2", ",", "v3", ")", "<", "0.0", "b3", "=", "_test", "(", "p", ",", "v3", ",", "v1", ")", "<", "0.0", "return", "(", "b1", "==", "b2", ")", "and", "(", "b2", "==", "b3", ")" ]
Parses a single sibling out of a response.
def _parse_sibling(self, sibling, headers, data):
    sibling.exists = True

    # Parse the headers...
    for header, value in headers:
        header = header.lower()
        if header == 'content-type':
            sibling.content_type, sibling.charset = \
                self._parse_content_type(value)
        elif header == 'etag':
            sibling.etag = value
        elif header == 'link':
            sibling.links = self._parse_links(value)
        elif header == 'last-modified':
            sibling.last_modified = mktime_tz(parsedate_tz(value))
        elif header.startswith('x-riak-meta-'):
            metakey = header.replace('x-riak-meta-', '')
            sibling.usermeta[metakey] = value
        elif header.startswith('x-riak-index-'):
            field = header.replace('x-riak-index-', '')
            reader = csv.reader([value], skipinitialspace=True)
            for line in reader:
                for token in line:
                    token = decode_index_value(field, token)
                    sibling.add_index(field, token)
        elif header == 'x-riak-deleted':
            sibling.exists = False

    sibling.encoded_data = data
    return sibling
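A runnable snippet isolating the x-riak-index-* parsing step: csv.reader with skipinitialspace splits a comma-separated header value cleanly:

import csv

value = 'val1, val2, val3'   # e.g. the value of an x-riak-index-*_bin header
for line in csv.reader([value], skipinitialspace=True):
    print(line)  # -> ['val1', 'val2', 'val3']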
251,798
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L106-L140
[ "def", "_OpenPathSpec", "(", "self", ",", "path_specification", ",", "ascii_codepage", "=", "'cp1252'", ")", ":", "if", "not", "path_specification", ":", "return", "None", "file_entry", "=", "self", ".", "_file_system", ".", "GetFileEntryByPathSpec", "(", "path_specification", ")", "if", "file_entry", "is", "None", ":", "return", "None", "file_object", "=", "file_entry", ".", "GetFileObject", "(", ")", "if", "file_object", "is", "None", ":", "return", "None", "registry_file", "=", "dfwinreg_regf", ".", "REGFWinRegistryFile", "(", "ascii_codepage", "=", "ascii_codepage", ")", "try", ":", "registry_file", ".", "Open", "(", "file_object", ")", "except", "IOError", "as", "exception", ":", "logger", ".", "warning", "(", "'Unable to open Windows Registry file with error: {0!s}'", ".", "format", "(", "exception", ")", ")", "file_object", ".", "close", "(", ")", "return", "None", "return", "registry_file" ]
Convert the link tuple to a link header string. Used internally.
def _to_link_header(self, link):
    try:
        bucket, key, tag = link
    except ValueError:
        raise RiakError("Invalid link tuple %s" % link)
    tag = tag if tag is not None else bucket
    url = self.object_path(bucket, key)
    header = '<%s>; riaktag="%s"' % (url, tag)
    return header
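A data-only illustration of the produced header; object_path belongs to the HTTP transport, so a plausible path is hard-coded here rather than computed:

bucket, key, tag = ('users', 'alice', None)     # invented link tuple
tag = tag if tag is not None else bucket
url = '/buckets/%s/keys/%s' % (bucket, key)     # assumed path layout
print('<%s>; riaktag="%s"' % (url, tag))
# -> </buckets/users/keys/alice>; riaktag="users"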
251,799
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L142-L153
[ "def", "write_config_file", "(", "self", ",", "params", ",", "path", ")", ":", "cfgp", "=", "ConfigParser", "(", ")", "cfgp", ".", "add_section", "(", "params", "[", "'name'", "]", ")", "for", "p", "in", "params", ":", "if", "p", "==", "'name'", ":", "continue", "cfgp", ".", "set", "(", "params", "[", "'name'", "]", ",", "p", ",", "params", "[", "p", "]", ")", "f", "=", "open", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'experiment.cfg'", ")", ",", "'w'", ")", "cfgp", ".", "write", "(", "f", ")", "f", ".", "close", "(", ")" ]