Column summary:

  query       string    lengths 5 to 1.23k
  positive    string    lengths 53 to 15.2k
  id_         int64     values 0 to 252k
  task_name   string    lengths 87 to 242
  negative    sequence  lengths 20 to 553
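The columns above map onto one record per example. The sketch below shows the shape of a single row and one way to iterate over an export; the file name, the abridged values, and the JSON-lines format are assumptions for illustration, not part of the dataset itself.

# Shape of one row (values abridged; only the column names come from the summary above).
record = {
    "query": "Normalizes a JSON search response ...",                        # natural-language docstring
    "positive": "def _normalize_json_search_response(self, json): ...",      # matching code, stored as a string
    "id_": 251800,                                                           # integer row id
    "task_name": "https://github.com/basho/riak-python-client/... (abridged)",  # source URL
    "negative": ["def", "subtract", "(", "self", ")", ":"],                  # tokenized non-matching code
}

# Iterating over a hypothetical JSON-lines export of the same rows:
import json

with open("train.jsonl") as f:
    for line in f:
        row = json.loads(line)
        print(row["id_"], row["task_name"])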
Normalizes a JSON search response so that PB and HTTP have the same return value
def _normalize_json_search_response(self, json):
    result = {}
    if 'facet_counts' in json:
        result['facet_counts'] = json[u'facet_counts']
    if 'grouped' in json:
        result['grouped'] = json[u'grouped']
    if 'stats' in json:
        result['stats'] = json[u'stats']
    if u'response' in json:
        result['num_found'] = json[u'response'][u'numFound']
        result['max_score'] = float(json[u'response'][u'maxScore'])
        docs = []
        for doc in json[u'response'][u'docs']:
            resdoc = {}
            if u'_yz_rk' in doc:
                # Is this a Riak 2.0 result?
                resdoc = doc
            else:
                # Riak Search 1.0 Legacy assumptions about format
                resdoc[u'id'] = doc[u'id']
                if u'fields' in doc:
                    for k, v in six.iteritems(doc[u'fields']):
                        resdoc[k] = v
            docs.append(resdoc)
        result['docs'] = docs
    return result
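A hand-made example of the mapping the codec performs; the response dict below follows the field names used in the code above (numFound, maxScore, docs, fields, _yz_rk) but is invented for illustration, not captured from a live cluster.

raw = {
    u'response': {
        u'numFound': 2,
        u'maxScore': u'1.5',
        u'docs': [
            {u'id': u'doc1', u'fields': {u'title_s': u'first'}},   # legacy (Riak Search 1.0) doc
            {u'_yz_rk': u'doc2', u'score': u'0.5'},                # Riak 2.0 doc, passed through as-is
        ],
    },
}

# Called on a codec instance, _normalize_json_search_response(raw) would return:
# {'num_found': 2,
#  'max_score': 1.5,
#  'docs': [{u'id': u'doc1', u'title_s': u'first'},
#           {u'_yz_rk': u'doc2', u'score': u'0.5'}]}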
251,800
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L223-L252
[ "def", "subtract", "(", "self", ")", ":", "if", "self", ".", "moc", "is", "None", ":", "raise", "CommandError", "(", "'No MOC information present for subtraction'", ")", "filename", "=", "self", ".", "params", ".", "pop", "(", ")", "self", ".", "moc", "-=", "MOC", "(", "filename", "=", "filename", ")" ]
Normalizes an XML search response so that PB and HTTP have the same return value
def _normalize_xml_search_response(self, xml):
    target = XMLSearchResult()
    parser = ElementTree.XMLParser(target=target)
    parser.feed(xml)
    return parser.close()
251,801
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L254-L262
[ "def", "make_timestamp_columns", "(", ")", ":", "return", "(", "Column", "(", "'created_at'", ",", "DateTime", ",", "default", "=", "func", ".", "utcnow", "(", ")", ",", "nullable", "=", "False", ")", ",", "Column", "(", "'updated_at'", ",", "DateTime", ",", "default", "=", "func", ".", "utcnow", "(", ")", ",", "onupdate", "=", "func", ".", "utcnow", "(", ")", ",", "nullable", "=", "False", ")", ",", ")" ]
Set TCP_NODELAY on socket
def connect(self):
    HTTPConnection.connect(self)
    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
251,802
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/__init__.py#L52-L57
[ "def", "tag_pos_volume", "(", "line", ")", ":", "def", "tagger", "(", "match", ")", ":", "groups", "=", "match", ".", "groupdict", "(", ")", "try", ":", "year", "=", "match", ".", "group", "(", "'year'", ")", "except", "IndexError", ":", "# Extract year from volume name", "# which should always include the year", "g", "=", "re", ".", "search", "(", "re_pos_year_num", ",", "match", ".", "group", "(", "'volume_num'", ")", ",", "re", ".", "UNICODE", ")", "year", "=", "g", ".", "group", "(", "0", ")", "if", "year", ":", "groups", "[", "'year'", "]", "=", "' <cds.YR>(%s)</cds.YR>'", "%", "year", ".", "strip", "(", ")", ".", "strip", "(", "'()'", ")", "else", ":", "groups", "[", "'year'", "]", "=", "''", "return", "'<cds.JOURNAL>PoS</cds.JOURNAL>'", "' <cds.VOL>%(volume_name)s%(volume_num)s</cds.VOL>'", "'%(year)s'", "' <cds.PG>%(page)s</cds.PG>'", "%", "groups", "for", "p", "in", "re_pos", ":", "line", "=", "p", ".", "sub", "(", "tagger", ",", "line", ")", "return", "line" ]
Performs the passed function with retries against the given pool.
def _with_retries(self, pool, fn):
    skip_nodes = []

    def _skip_bad_nodes(transport):
        return transport._node not in skip_nodes

    retry_count = self.retries - 1
    first_try = True
    current_try = 0
    while True:
        try:
            with pool.transaction(_filter=_skip_bad_nodes,
                                  yield_resource=True) as resource:
                transport = resource.object
                try:
                    return fn(transport)
                except (IOError, HTTPException, ConnectionClosed) as e:
                    resource.errored = True
                    if _is_retryable(e):
                        transport._node.error_rate.incr(1)
                        skip_nodes.append(transport._node)
                        if first_try:
                            continue
                        else:
                            raise BadResource(e)
                    else:
                        raise
        except BadResource as e:
            if current_try < retry_count:
                resource.errored = True
                current_try += 1
                continue
            else:
                # Re-raise the inner exception
                raise e.args[0]
        finally:
            first_try = False
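A sketch of how this retry helper might be driven; inside the client it is normally reached through retry decorators rather than called directly, and the 'client' object and the ping call below are assumptions made purely for illustration.

def _ping(transport):
    # Any transport-level operation can be wrapped this way; ping is just a stand-in.
    return transport.ping()

# 'client' is assumed to be an already-constructed RiakClient; _http_pool is the
# same pool that _choose_pool() below can select.
client._with_retries(client._http_pool, _ping)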
251,803
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/transport.py#L143-L188
[ "def", "segment", "(", "f", ",", "output", ",", "target_duration", ",", "mpegts", ")", ":", "try", ":", "target_duration", "=", "int", "(", "target_duration", ")", "except", "ValueError", ":", "exit", "(", "'Error: Invalid target duration.'", ")", "try", ":", "mpegts", "=", "int", "(", "mpegts", ")", "except", "ValueError", ":", "exit", "(", "'Error: Invalid MPEGTS value.'", ")", "WebVTTSegmenter", "(", ")", ".", "segment", "(", "f", ",", "output", ",", "target_duration", ",", "mpegts", ")" ]
Selects a connection pool according to the default protocol and the passed one.
def _choose_pool(self, protocol=None):
    if not protocol:
        protocol = self.protocol
    if protocol == 'http':
        pool = self._http_pool
    elif protocol == 'tcp' or protocol == 'pbc':
        pool = self._tcp_pool
    else:
        raise ValueError("invalid protocol %s" % protocol)
    if pool is None or self._closed:
        # NB: GH-500, this can happen if client is closed
        raise RuntimeError("Client is closed.")
    return pool
251,804
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/transport.py#L190-L210
[ "def", "setOverlayTransformAbsolute", "(", "self", ",", "ulOverlayHandle", ",", "eTrackingOrigin", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformAbsolute", "pmatTrackingOriginToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTrackingOrigin", ",", "byref", "(", "pmatTrackingOriginToOverlayTransform", ")", ")", "return", "result", ",", "pmatTrackingOriginToOverlayTransform" ]
Default encoder for JSON datatypes, which returns UTF-8 encoded JSON instead of the default bloated \uXXXX-escaped ASCII strings.
def default_encoder(obj):
    if isinstance(obj, bytes):
        return json.dumps(bytes_to_str(obj),
                          ensure_ascii=False).encode("utf-8")
    else:
        return json.dumps(obj, ensure_ascii=False).encode("utf-8")
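A minimal standard-library-only sketch of the difference this encoder makes; the sample dict is invented.

import json

data = {"greeting": "héllo"}

# Default json.dumps escapes non-ASCII characters as \uXXXX ASCII sequences.
print(json.dumps(data))
# {"greeting": "h\u00e9llo"}

# With ensure_ascii=False (what default_encoder does) the text stays readable
# and is then encoded to UTF-8 bytes.
print(json.dumps(data, ensure_ascii=False).encode("utf-8"))
# b'{"greeting": "h\xc3\xa9llo"}'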
251,805
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/__init__.py#L37-L46
[ "def", "load_cert_chain", "(", "self", ",", "certfile", ",", "keyfile", "=", "None", ")", ":", "self", ".", "_certfile", "=", "certfile", "self", ".", "_keyfile", "=", "keyfile" ]
Iterate through all of the connections and close each one.
def close(self):
    if not self._closed:
        self._closed = True
        self._stop_multi_pools()
        if self._http_pool is not None:
            self._http_pool.clear()
            self._http_pool = None
        if self._tcp_pool is not None:
            self._tcp_pool.clear()
            self._tcp_pool = None
251,806
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/__init__.py#L319-L331
[ "def", "download_storyitem", "(", "self", ",", "item", ":", "StoryItem", ",", "target", ":", "str", ")", "->", "bool", ":", "date_local", "=", "item", ".", "date_local", "dirname", "=", "_PostPathFormatter", "(", "item", ")", ".", "format", "(", "self", ".", "dirname_pattern", ",", "target", "=", "target", ")", "filename", "=", "dirname", "+", "'/'", "+", "self", ".", "format_filename", "(", "item", ",", "target", "=", "target", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ",", "exist_ok", "=", "True", ")", "downloaded", "=", "False", "if", "not", "item", ".", "is_video", "or", "self", ".", "download_video_thumbnails", "is", "True", ":", "url", "=", "item", ".", "url", "downloaded", "=", "self", ".", "download_pic", "(", "filename", "=", "filename", ",", "url", "=", "url", ",", "mtime", "=", "date_local", ")", "if", "item", ".", "is_video", "and", "self", ".", "download_videos", "is", "True", ":", "downloaded", "|=", "self", ".", "download_pic", "(", "filename", "=", "filename", ",", "url", "=", "item", ".", "video_url", ",", "mtime", "=", "date_local", ")", "# Save caption if desired", "metadata_string", "=", "_ArbitraryItemFormatter", "(", "item", ")", ".", "format", "(", "self", ".", "storyitem_metadata_txt_pattern", ")", ".", "strip", "(", ")", "if", "metadata_string", ":", "self", ".", "save_caption", "(", "filename", "=", "filename", ",", "mtime", "=", "item", ".", "date_local", ",", "caption", "=", "metadata_string", ")", "# Save metadata as JSON if desired.", "if", "self", ".", "save_metadata", "is", "not", "False", ":", "self", ".", "save_metadata_json", "(", "filename", ",", "item", ")", "self", ".", "context", ".", "log", "(", ")", "return", "downloaded" ]
Create security credentials if necessary.
def _create_credentials(self, n):
    if not n:
        return n
    elif isinstance(n, SecurityCreds):
        return n
    elif isinstance(n, dict):
        return SecurityCreds(**n)
    else:
        raise TypeError("%s is not a valid security configuration"
                        % repr(n))
251,807
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/__init__.py#L355-L367
[ "def", "fetch_result", "(", "self", ")", ":", "results", "=", "self", ".", "soup", ".", "find_all", "(", "'div'", ",", "{", "'class'", ":", "'container container-small'", "}", ")", "href", "=", "None", "is_match", "=", "False", "i", "=", "0", "while", "i", "<", "len", "(", "results", ")", "and", "not", "is_match", ":", "result", "=", "results", "[", "i", "]", "anchor", "=", "result", ".", "find", "(", "'a'", ",", "{", "'rel'", ":", "'bookmark'", "}", ")", "is_match", "=", "self", ".", "_filter_results", "(", "result", ",", "anchor", ")", "href", "=", "anchor", "[", "'href'", "]", "i", "+=", "1", "try", ":", "page", "=", "get_soup", "(", "href", ")", "except", "(", "Exception", ")", ":", "page", "=", "None", "# Return page if search is successful", "if", "href", "and", "page", ":", "return", "page", "else", ":", "raise", "PageNotFoundError", "(", "PAGE_ERROR", ")" ]
Use the appropriate connection class; optionally with security.
def _connect(self):
    timeout = None
    if self._options is not None and 'timeout' in self._options:
        timeout = self._options['timeout']
    if self._client._credentials:
        self._connection = self._connection_class(
            host=self._node.host,
            port=self._node.http_port,
            credentials=self._client._credentials,
            timeout=timeout)
    else:
        self._connection = self._connection_class(
            host=self._node.host,
            port=self._node.http_port,
            timeout=timeout)
    # Forces the population of stats and resources before any
    # other requests are made.
    self.server_version
251,808
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/connection.py#L65-L86
[ "def", "delete_annotation", "(", "self", ",", "term_ilx_id", ":", "str", ",", "annotation_type_ilx_id", ":", "str", ",", "annotation_value", ":", "str", ")", "->", "dict", ":", "term_data", "=", "self", ".", "get_entity", "(", "term_ilx_id", ")", "if", "not", "term_data", "[", "'id'", "]", ":", "exit", "(", "'term_ilx_id: '", "+", "term_ilx_id", "+", "' does not exist'", ")", "anno_data", "=", "self", ".", "get_entity", "(", "annotation_type_ilx_id", ")", "if", "not", "anno_data", "[", "'id'", "]", ":", "exit", "(", "'annotation_type_ilx_id: '", "+", "annotation_type_ilx_id", "+", "' does not exist'", ")", "entity_annotations", "=", "self", ".", "get_annotation_via_tid", "(", "term_data", "[", "'id'", "]", ")", "annotation_id", "=", "''", "for", "annotation", "in", "entity_annotations", ":", "if", "str", "(", "annotation", "[", "'tid'", "]", ")", "==", "str", "(", "term_data", "[", "'id'", "]", ")", ":", "if", "str", "(", "annotation", "[", "'annotation_tid'", "]", ")", "==", "str", "(", "anno_data", "[", "'id'", "]", ")", ":", "if", "str", "(", "annotation", "[", "'value'", "]", ")", "==", "str", "(", "annotation_value", ")", ":", "annotation_id", "=", "annotation", "[", "'id'", "]", "break", "if", "not", "annotation_id", ":", "print", "(", "'''WARNING: Annotation you wanted to delete does not exist '''", ")", "return", "None", "url", "=", "self", ".", "base_url", "+", "'term/edit-annotation/{annotation_id}'", ".", "format", "(", "annotation_id", "=", "annotation_id", ")", "data", "=", "{", "'tid'", ":", "' '", ",", "# for delete", "'annotation_tid'", ":", "' '", ",", "# for delete", "'value'", ":", "' '", ",", "# for delete", "'term_version'", ":", "' '", ",", "'annotation_term_version'", ":", "' '", ",", "}", "output", "=", "self", ".", "post", "(", "url", "=", "url", ",", "data", "=", "data", ",", ")", "# check output", "return", "output" ]
Add in the requisite HTTP Authentication Headers
def _security_auth_headers(self, username, password, headers):
    userColonPassword = username + ":" + password
    b64UserColonPassword = base64.b64encode(
        str_to_bytes(userColonPassword)).decode("ascii")
    headers['Authorization'] = 'Basic %s' % b64UserColonPassword
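A standalone sketch of the header value this produces; the credentials are dummies chosen for illustration.

import base64

username, password = "riakuser", "secret"
token = base64.b64encode((username + ":" + password).encode("utf-8")).decode("ascii")
headers = {'Authorization': 'Basic %s' % token}
print(headers)
# {'Authorization': 'Basic cmlha3VzZXI6c2VjcmV0'}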
251,809
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/connection.py#L101-L115
[ "def", "get_ymal_data", "(", "data", ")", ":", "try", ":", "format_data", "=", "yaml", ".", "load", "(", "data", ")", "except", "yaml", ".", "YAMLError", ",", "e", ":", "msg", "=", "\"Yaml format error: {}\"", ".", "format", "(", "unicode", "(", "str", "(", "e", ")", ",", "\"utf-8\"", ")", ")", "logging", ".", "error", "(", "msg", ")", "sys", ".", "exit", "(", "1", ")", "if", "not", "check_config", "(", "format_data", ")", ":", "sys", ".", "exit", "(", "1", ")", "return", "format_data" ]
Queries a timeseries table.
def query(self, query, interpolations=None):
    return self._client.ts_query(self, query, interpolations)
251,810
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/table.py#L94-L102
[ "def", "friend", "(", "self", ",", "note", "=", "None", ",", "_unfriend", "=", "False", ")", ":", "self", ".", "reddit_session", ".", "evict", "(", "self", ".", "reddit_session", ".", "config", "[", "'friends'", "]", ")", "# Requests through password auth use /api/friend", "# Requests through oauth use /api/v1/me/friends/{username}", "if", "not", "self", ".", "reddit_session", ".", "is_oauth_session", "(", ")", ":", "modifier", "=", "_modify_relationship", "(", "'friend'", ",", "unlink", "=", "_unfriend", ")", "data", "=", "{", "'note'", ":", "note", "}", "if", "note", "else", "{", "}", "return", "modifier", "(", "self", ".", "reddit_session", ".", "user", ",", "self", ",", "*", "*", "data", ")", "url", "=", "self", ".", "reddit_session", ".", "config", "[", "'friend_v1'", "]", ".", "format", "(", "user", "=", "self", ".", "name", ")", "# This endpoint wants the data to be a string instead of an actual", "# dictionary, although it is not required to have any content for adds.", "# Unfriending does require the 'id' key.", "if", "_unfriend", ":", "data", "=", "{", "'id'", ":", "self", ".", "name", "}", "else", ":", "# We cannot send a null or empty note string.", "data", "=", "{", "'note'", ":", "note", "}", "if", "note", "else", "{", "}", "data", "=", "dumps", "(", "data", ")", "method", "=", "'DELETE'", "if", "_unfriend", "else", "'PUT'", "return", "self", ".", "reddit_session", ".", "request_json", "(", "url", ",", "data", "=", "data", ",", "method", "=", "method", ")" ]
Determines the platform-specific config directory location for ue4cli
def getConfigDirectory():
    if platform.system() == 'Windows':
        return os.path.join(os.environ['APPDATA'], 'ue4cli')
    else:
        return os.path.join(os.environ['HOME'], '.config', 'ue4cli')
251,811
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/ConfigurationManager.py#L10-L17
[ "def", "clean", "(", "self", ",", "timeout", "=", "60", ")", ":", "self", ".", "refresh", "(", ")", "tds", "=", "self", "[", "'maxTotalDataSizeMB'", "]", "ftp", "=", "self", "[", "'frozenTimePeriodInSecs'", "]", "was_disabled_initially", "=", "self", ".", "disabled", "try", ":", "if", "(", "not", "was_disabled_initially", "and", "self", ".", "service", ".", "splunk_version", "<", "(", "5", ",", ")", ")", ":", "# Need to disable the index first on Splunk 4.x,", "# but it doesn't work to disable it on 5.0.", "self", ".", "disable", "(", ")", "self", ".", "update", "(", "maxTotalDataSizeMB", "=", "1", ",", "frozenTimePeriodInSecs", "=", "1", ")", "self", ".", "roll_hot_buckets", "(", ")", "# Wait until event count goes to 0.", "start", "=", "datetime", ".", "now", "(", ")", "diff", "=", "timedelta", "(", "seconds", "=", "timeout", ")", "while", "self", ".", "content", ".", "totalEventCount", "!=", "'0'", "and", "datetime", ".", "now", "(", ")", "<", "start", "+", "diff", ":", "sleep", "(", "1", ")", "self", ".", "refresh", "(", ")", "if", "self", ".", "content", ".", "totalEventCount", "!=", "'0'", ":", "raise", "OperationError", "(", "\"Cleaning index %s took longer than %s seconds; timing out.\"", "%", "(", "self", ".", "name", ",", "timeout", ")", ")", "finally", ":", "# Restore original values", "self", ".", "update", "(", "maxTotalDataSizeMB", "=", "tds", ",", "frozenTimePeriodInSecs", "=", "ftp", ")", "if", "(", "not", "was_disabled_initially", "and", "self", ".", "service", ".", "splunk_version", "<", "(", "5", ",", ")", ")", ":", "# Re-enable the index if it was originally enabled and we messed with it.", "self", ".", "enable", "(", ")", "return", "self" ]
Sets the config data value for the specified dictionary key
def setConfigKey(key, value):
    configFile = ConfigurationManager._configFile()
    return JsonDataManager(configFile).setKey(key, value)
251,812
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/ConfigurationManager.py#L28-L33
[ "def", "set_recovery_range", "(", "working_dir", ",", "start_block", ",", "end_block", ")", ":", "recovery_range_path", "=", "os", ".", "path", ".", "join", "(", "working_dir", ",", "'.recovery'", ")", "with", "open", "(", "recovery_range_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'{}\\n{}\\n'", ".", "format", "(", "start_block", ",", "end_block", ")", ")", "f", ".", "flush", "(", ")", "os", ".", "fsync", "(", "f", ".", "fileno", "(", ")", ")" ]
Clears any cached data we have stored about specific engine versions
def clearCache():
    if os.path.exists(CachedDataManager._cacheDir()) == True:
        shutil.rmtree(CachedDataManager._cacheDir())
251,813
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/CachedDataManager.py#L11-L16
[ "def", "setup_sighandlers", "(", "self", ")", ":", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "signal", ".", "SIG_IGN", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "signal", ".", "SIG_IGN", ")", "signal", ".", "signal", "(", "signal", ".", "SIGPROF", ",", "self", ".", "on_sigprof", ")", "signal", ".", "signal", "(", "signal", ".", "SIGABRT", ",", "self", ".", "stop", ")", "signal", ".", "siginterrupt", "(", "signal", ".", "SIGPROF", ",", "False", ")", "signal", ".", "siginterrupt", "(", "signal", ".", "SIGABRT", ",", "False", ")", "LOGGER", ".", "debug", "(", "'Signal handlers setup'", ")" ]
Retrieves the cached data value for the specified engine version hash and dictionary key
def getCachedDataKey(engineVersionHash, key):
    cacheFile = CachedDataManager._cacheFileForHash(engineVersionHash)
    return JsonDataManager(cacheFile).getKey(key)
251,814
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/CachedDataManager.py#L19-L24
[ "def", "norm_coefs", "(", "self", ")", ":", "sum_coefs", "=", "self", ".", "sum_coefs", "self", ".", "ar_coefs", "/=", "sum_coefs", "self", ".", "ma_coefs", "/=", "sum_coefs" ]
Sets the cached data value for the specified engine version hash and dictionary key
def setCachedDataKey(engineVersionHash, key, value):
    cacheFile = CachedDataManager._cacheFileForHash(engineVersionHash)
    return JsonDataManager(cacheFile).setKey(key, value)
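A usage sketch for the pair of cache helpers above; the import path is assumed from the repository layout shown in the URLs, and the hash, key, and value are placeholders.

from ue4cli.CachedDataManager import CachedDataManager

engine_hash = 'abc123'  # hypothetical engine version hash
CachedDataManager.setCachedDataKey(engine_hash, 'compiler_flags', ['-O2'])
print(CachedDataManager.getCachedDataKey(engine_hash, 'compiler_flags'))
# ['-O2']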
251,815
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/CachedDataManager.py#L27-L32
[ "def", "norm_coefs", "(", "self", ")", ":", "sum_coefs", "=", "self", ".", "sum_coefs", "self", ".", "ar_coefs", "/=", "sum_coefs", "self", ".", "ma_coefs", "/=", "sum_coefs" ]
Writes data to a file
def writeFile(filename, data):
    with open(filename, 'wb') as f:
        f.write(data.encode('utf-8'))
251,816
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/Utility.py#L34-L39
[ "def", "get_user_last_submissions", "(", "self", ",", "limit", "=", "5", ",", "request", "=", "None", ")", ":", "if", "request", "is", "None", ":", "request", "=", "{", "}", "request", ".", "update", "(", "{", "\"username\"", ":", "self", ".", "_user_manager", ".", "session_username", "(", ")", "}", ")", "# Before, submissions were first sorted by submission date, then grouped", "# and then resorted by submission date before limiting. Actually, grouping", "# and pushing, keeping the max date, followed by result filtering is much more", "# efficient", "data", "=", "self", ".", "_database", ".", "submissions", ".", "aggregate", "(", "[", "{", "\"$match\"", ":", "request", "}", ",", "{", "\"$group\"", ":", "{", "\"_id\"", ":", "{", "\"courseid\"", ":", "\"$courseid\"", ",", "\"taskid\"", ":", "\"$taskid\"", "}", ",", "\"submitted_on\"", ":", "{", "\"$max\"", ":", "\"$submitted_on\"", "}", ",", "\"submissions\"", ":", "{", "\"$push\"", ":", "{", "\"_id\"", ":", "\"$_id\"", ",", "\"result\"", ":", "\"$result\"", ",", "\"status\"", ":", "\"$status\"", ",", "\"courseid\"", ":", "\"$courseid\"", ",", "\"taskid\"", ":", "\"$taskid\"", ",", "\"submitted_on\"", ":", "\"$submitted_on\"", "}", "}", ",", "}", "}", ",", "{", "\"$project\"", ":", "{", "\"submitted_on\"", ":", "1", ",", "\"submissions\"", ":", "{", "# This could be replaced by $filter if mongo v3.2 is set as dependency", "\"$setDifference\"", ":", "[", "{", "\"$map\"", ":", "{", "\"input\"", ":", "\"$submissions\"", ",", "\"as\"", ":", "\"submission\"", ",", "\"in\"", ":", "{", "\"$cond\"", ":", "[", "{", "\"$eq\"", ":", "[", "\"$submitted_on\"", ",", "\"$$submission.submitted_on\"", "]", "}", ",", "\"$$submission\"", ",", "False", "]", "}", "}", "}", ",", "[", "False", "]", "]", "}", "}", "}", ",", "{", "\"$sort\"", ":", "{", "\"submitted_on\"", ":", "pymongo", ".", "DESCENDING", "}", "}", ",", "{", "\"$limit\"", ":", "limit", "}", "]", ")", "return", "[", "item", "[", "\"submissions\"", "]", "[", "0", "]", "for", "item", "in", "data", "]" ]
Applies the supplied list of replacements to a file
def patchFile(filename, replacements):
    patched = Utility.readFile(filename)
    # Perform each of the replacements in the supplied dictionary
    for key in replacements:
        patched = patched.replace(key, replacements[key])
    Utility.writeFile(filename, patched)
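A self-contained sketch of the read/replace/write cycle the helper performs, using only the standard library; the file name and placeholder token are invented.

import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'Template.txt')
with open(path, 'w') as f:
    f.write('Project: %PROJECT_NAME%')

replacements = {'%PROJECT_NAME%': 'MyGame'}

# The same replace loop the helper applies:
patched = open(path).read()
for key in replacements:
    patched = patched.replace(key, replacements[key])
with open(path, 'w') as f:
    f.write(patched)

print(open(path).read())
# Project: MyGame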
251,817
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/Utility.py#L42-L52
[ "def", "not_storable", "(", "_type", ")", ":", "return", "Storable", "(", "_type", ",", "handlers", "=", "StorableHandler", "(", "poke", "=", "fake_poke", ",", "peek", "=", "fail_peek", "(", "_type", ")", ")", ")" ]
Escapes a filesystem path for use as a command-line argument
def escapePathForShell(path):
    if platform.system() == 'Windows':
        return '"{}"'.format(path.replace('"', '""'))
    else:
        return shellescape.quote(path)
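A quick illustration of the two quoting strategies; the paths are invented, and shlex.quote stands in here as a standard-library analogue of the shellescape.quote call used on non-Windows platforms.

import shlex

windows_path = 'C:\\Program Files\\Epic Games\\UE_4.27'
print('"{}"'.format(windows_path.replace('"', '""')))
# "C:\Program Files\Epic Games\UE_4.27"

posix_path = '/opt/unreal engine/UE_4.27'
print(shlex.quote(posix_path))
# '/opt/unreal engine/UE_4.27'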
251,818
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/Utility.py#L62-L69
[ "def", "getTemplates", "(", "fnames", ",", "blend", "=", "True", ")", ":", "if", "not", "blend", ":", "newhdrs", "=", "blendheaders", ".", "getSingleTemplate", "(", "fnames", "[", "0", "]", ")", "newtab", "=", "None", "else", ":", "# apply rules to create final version of headers, plus table", "newhdrs", ",", "newtab", "=", "blendheaders", ".", "get_blended_headers", "(", "inputs", "=", "fnames", ")", "cleanTemplates", "(", "newhdrs", "[", "1", "]", ",", "newhdrs", "[", "2", "]", ",", "newhdrs", "[", "3", "]", ")", "return", "newhdrs", ",", "newtab" ]
Joins the supplied list of strings after removing any empty strings from the list
def join(delim, items, quotes=False):
    transform = lambda s: s
    if quotes == True:
        transform = lambda s: s if ' ' not in s else '"{}"'.format(s)
    stripped = list([transform(i) for i in items if len(i) > 0])
    if len(stripped) > 0:
        return delim.join(stripped)
    return ''
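A brief usage sketch; the import path is assumed from the repository layout above and the argument values are invented.

from ue4cli.Utility import Utility

args = ['-project=Demo', '', 'C:\\With Spaces\\Demo.uproject']

print(Utility.join(' ', args))
# -project=Demo C:\With Spaces\Demo.uproject

print(Utility.join(' ', args, quotes=True))
# -project=Demo "C:\With Spaces\Demo.uproject"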
251,819
https://github.com/adamrehn/ue4cli/blob/f1c34502c96059e36757b7433da7e98760a75a6f/ue4cli/Utility.py#L72-L83
[ "def", "not_storable", "(", "_type", ")", ":", "return", "Storable", "(", "_type", ",", "handlers", "=", "StorableHandler", "(", "poke", "=", "fake_poke", ",", "peek", "=", "fail_peek", "(", "_type", ")", ")", ")" ]