{"query":"Helper which expand_dims is_accepted then applies tf . where .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if not is_namedtuple_like ( arg_1 ) : return _Func_base_case ( arg_0 , arg_1 , arg_2 , arg_3 = arg_3 ) if not isinstance ( arg_1 , type ( arg_2 ) ) : raise TypeError ( 'Type of `accepted` ({}) must be identical to ' 'type of `rejected` ({})' . format ( type ( arg_1 ) . __name__ , type ( arg_2 ) . __name__ ) ) return type ( arg_1 ) ( ** dict ( [ ( arg_4 , Func ( arg_0 , getattr ( arg_1 , arg_4 ) , getattr ( arg_2 , arg_4 ) , arg_3 = arg_3 ) ) for arg_4 in arg_1 . _fields ] ) )","id_":251820,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/mcmc\/internal\/util.py#L114-L129","negative":"Collects memory stats for specified Python program."} {"query":"Is event consistent with the given evidence?","positive":"def Func ( arg_0 , arg_1 ) : return every ( lambda ( k , v ) : arg_1 . get ( k , v ) == v , arg_0 . items ( ) )","id_":251821,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/probability.py#L414-L417","negative":"Execute the cloud_harness task."} {"query":"compute similarities in appended files","positive":"def Func ( arg_0 ) : arg_1 = defaultdict ( list ) for arg_2 , arg_3 , arg_4 , arg_5 , arg_6 in arg_0 . _iter_sims ( ) : arg_7 = arg_1 [ arg_2 ] for arg_8 in arg_7 : if ( arg_3 , arg_4 ) in arg_8 or ( arg_5 , arg_6 ) in arg_8 : arg_8 . add ( ( arg_3 , arg_4 ) ) arg_8 . add ( ( arg_5 , arg_6 ) ) break else : arg_7 . append ( { ( arg_3 , arg_4 ) , ( arg_5 , arg_6 ) } ) arg_9 = [ ] for arg_2 , arg_10 in arg_1 . items ( ) : for arg_8 in arg_10 : arg_9 . append ( ( arg_2 , arg_8 ) ) arg_9 . sort ( ) arg_9 . reverse ( ) return arg_9","id_":251822,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/similar.py#L73-L91","negative":"Stop and remove a worker"} {"query":"Print a status message about the logger .","positive":"def Func ( arg_0 ) : if arg_0 . logfile is None : print 'Logging has not been activated.' else : arg_1 = arg_0 . log_active and 'active' or 'temporarily suspended' print 'Filename :' , arg_0 . logfname print 'Mode :' , arg_0 . logmode print 'Output logging :' , arg_0 . log_output print 'Raw input log :' , arg_0 . log_raw_input print 'Timestamping :' , arg_0 . timestamp print 'State :' , arg_1","id_":251823,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/logger.py#L155-L166","negative":"Adds all parameters to `traj`"} {"query":"Cancel all started queries that have not yet completed","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . service . jobs ( ) if ( arg_0 . running_job_id and not arg_0 . poll_job_complete ( arg_0 . running_job_id ) ) : arg_0 . log . info ( 'Attempting to cancel job : %s, %s' , arg_0 . project_id , arg_0 . running_job_id ) if arg_0 . location : arg_1 . cancel ( projectId = arg_0 . project_id , jobId = arg_0 . running_job_id , location = arg_0 . location ) . execute ( num_retries = arg_0 . num_retries ) else : arg_1 . cancel ( projectId = arg_0 . project_id , jobId = arg_0 . running_job_id ) . execute ( num_retries = arg_0 . num_retries ) else : arg_0 . log . info ( 'No running BigQuery jobs to cancel.' 
) return arg_2 = 12 arg_3 = 0 arg_4 = False while arg_3 < arg_2 and not arg_4 : arg_3 = arg_3 + 1 arg_4 = arg_0 . poll_job_complete ( arg_0 . running_job_id ) if arg_4 : arg_0 . log . info ( 'Job successfully canceled: %s, %s' , arg_0 . project_id , arg_0 . running_job_id ) elif arg_3 == arg_2 : arg_0 . log . info ( \"Stopping polling due to timeout. Job with id %s \" \"has not completed cancel and may or may not finish.\" , arg_0 . running_job_id ) else : arg_0 . log . info ( 'Waiting for canceled job with id %s to finish.' , arg_0 . running_job_id ) time . sleep ( 5 )","id_":251824,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/bigquery_hook.py#L1298-L1339","negative":"Save a vectorized image to file."} {"query":"Fill the entire strip with RGB color tuple","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0 , arg_3 = - 1 ) : arg_2 = max ( arg_2 , 0 ) if arg_3 < 0 or arg_3 >= arg_0 . numLEDs : arg_3 = arg_0 . numLEDs - 1 for arg_4 in range ( arg_2 , arg_3 + 1 ) : arg_0 . _set_base ( arg_4 , arg_1 )","id_":251825,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/layout\/layout.py#L205-L211","negative":"This function will create the VM directory where a repo will be mounted, if it\n doesn't exist. If wait_for_server is set, it will wait up to 10 seconds for\n the nfs server to start, by retrying mounts that fail with 'Connection Refused'.\n\n If wait_for_server is not set, it will attempt to run the mount command once"} {"query":"Generates a set of input record","positive":"def Func ( arg_0 , arg_1 = 400 , arg_2 = 42 ) : arg_3 = [ ] for arg_4 in xrange ( arg_0 ) : arg_5 = np . zeros ( arg_1 , dtype = realDType ) for arg_4 in range ( 0 , arg_2 ) : arg_6 = np . random . random_integers ( 0 , arg_1 - 1 , 1 ) [ 0 ] arg_5 [ arg_6 ] = 1 while abs ( arg_5 . sum ( ) - arg_2 ) > 0.1 : arg_6 = np . random . random_integers ( 0 , arg_1 - 1 , 1 ) [ 0 ] arg_5 [ arg_6 ] = 1 arg_3 . append ( arg_5 ) return arg_3","id_":251826,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/examples\/opf\/tools\/sp_plotter.py#L92-L117","negative":"Returns a dictionary with all the past baking statuses of a single book."} {"query":"Perform an action on the thing .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_1 not in arg_0 . available_actions : return None arg_3 = arg_0 . available_actions [ arg_1 ] if 'input' in arg_3 [ 'metadata' ] : try : validate ( arg_2 , arg_3 [ 'metadata' ] [ 'input' ] ) except ValidationError : return None arg_4 = arg_3 [ 'class' ] ( arg_0 , arg_2 = arg_2 ) arg_4 . set_href_prefix ( arg_0 . href_prefix ) arg_0 . action_notify ( arg_4 ) arg_0 . actions [ arg_1 ] . append ( arg_4 ) return arg_4","id_":251827,"task_name":"https:\/\/github.com\/mozilla-iot\/webthing-python\/blob\/65d467c89ed79d0bbc42b8b3c8f9e5a320edd237\/webthing\/thing.py#L322-L346","negative":"Get the decryption for col."} {"query":"Remove memory of state variables set in the command processor","positive":"def Func ( arg_0 ) : arg_0 . stack = [ ] arg_0 . curindex = 0 arg_0 . curframe = None arg_0 . thread_name = None arg_0 . 
frame_thread_name = None return","id_":251828,"task_name":"https:\/\/github.com\/rocky\/python3-trepan\/blob\/14e91bc0acce090d67be145b1ac040cab92ac5f3\/trepan\/processor\/cmdproc.py#L469-L476","negative":"Extracts start row from the bookmark information"} {"query":"Split a covariance matrix into block - diagonal marginals of given sizes .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 0 arg_3 = [ ] for arg_4 in arg_1 : arg_5 = arg_2 + arg_4 arg_3 . append ( arg_0 [ ... , arg_2 : arg_5 , arg_2 : arg_5 ] ) arg_2 = arg_5 return arg_3","id_":251829,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/sts\/decomposition.py#L29-L37","negative":"APEv2 tag value factory.\n\n Use this if you need to specify the value's type manually. Binary\n and text data are automatically detected by APEv2.__setitem__."} {"query":"Hydrate Generated Python AST nodes with line numbers and column offsets if they exist in the node environment .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 = False ) -> arg_1 : if arg_2 . line is not None : arg_0 . node . lineno = arg_2 . line if arg_4 : for arg_8 in arg_0 . dependencies : arg_8 . lineno = arg_2 . line if arg_2 . col is not None : arg_0 . node . col_offset = arg_2 . col if arg_4 : for arg_8 in arg_0 . dependencies : arg_8 . col_offset = arg_2 . col return arg_0","id_":251830,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/generator.py#L343-L362","negative":"Returns an aggregator connection."} {"query":"Return the entire source file and starting line number for an object .","positive":"def Func ( arg_0 ) : arg_1 = getsourcefile ( arg_0 ) or getfile ( arg_0 ) arg_2 = None if inspect . isframe ( arg_0 ) : arg_2 = arg_0 . f_globals else : arg_3 = getmodule ( arg_0 , arg_1 ) if arg_3 : arg_2 = arg_3 . __dict__ arg_4 = linecache . getlines ( arg_1 , arg_2 ) if not arg_4 : raise IOError ( 'could not get source code' ) if ismodule ( arg_0 ) : return arg_4 , 0 if isclass ( arg_0 ) : arg_5 = arg_0 . __name__ arg_6 = re . compile ( r'^(\\s*)class\\s*' + arg_5 + r'\\b' ) arg_7 = [ ] for arg_8 in range ( len ( arg_4 ) ) : arg_9 = arg_6 . match ( arg_4 [ arg_8 ] ) if arg_9 : if arg_4 [ arg_8 ] [ 0 ] == 'c' : return arg_4 , arg_8 arg_7 . append ( ( arg_9 . group ( 1 ) , arg_8 ) ) if arg_7 : arg_7 . sort ( ) return arg_4 , arg_7 [ 0 ] [ 1 ] else : raise IOError ( 'could not find class definition' ) if ismethod ( arg_0 ) : arg_0 = arg_0 . im_func if isfunction ( arg_0 ) : arg_0 = arg_0 . func_code if istraceback ( arg_0 ) : arg_0 = arg_0 . tb_frame if isframe ( arg_0 ) : arg_0 = arg_0 . f_code if iscode ( arg_0 ) : if not hasattr ( arg_0 , 'co_firstlineno' ) : raise IOError ( 'could not find function definition' ) arg_6 = re . compile ( r'^(\\s*def\\s)|(.*(? 0 : if arg_10 ( arg_4 [ arg_11 ] ) : break arg_11 -= 1 return arg_4 , arg_11 raise IOError ( 'could not find code object' )","id_":251831,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/ultratb.py#L138-L211","negative":"Adjust contrast of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n contrast_factor (float): How much to adjust the contrast. Can be any\n non negative number. 
0 gives a solid gray image, 1 gives the\n original image while 2 increases the contrast by a factor of 2.\n\n Returns:\n PIL Image: Contrast adjusted image."} {"query":"Return a dictionary consisting of the key itself","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . db . search ( Query ( ) . name == arg_1 ) if not arg_2 : return { } return arg_2 [ 0 ]","id_":251832,"task_name":"https:\/\/github.com\/nir0s\/ghost\/blob\/77da967a4577ca4cf100cfe34e87b39ad88bf21c\/ghost.py#L631-L647","negative":"Decimal adjusts AL after subtraction.\n\n Adjusts the result of the subtraction of two packed BCD values to create a packed BCD result.\n The AL register is the implied source and destination operand. If a decimal borrow is detected,\n the CF and AF flags are set accordingly. This instruction is not valid in 64-bit mode.\n\n The SF, ZF, and PF flags are set according to the result.::\n\n IF (AL AND 0FH) > 9 OR AF = 1\n THEN\n AL = AL - 6;\n CF = CF OR BorrowFromLastSubtraction; (* CF OR borrow from AL = AL - 6 *)\n AF = 1;\n ELSE\n AF = 0;\n FI;\n IF ((AL > 99H) or OLD_CF = 1)\n THEN\n AL = AL - 60H;\n CF = 1;\n\n :param cpu: current CPU."} {"query":"Read the user configuration","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 : arg_0 . _config_filename = arg_1 else : try : import appdirs except ImportError : raise Exception ( \"Missing dependency for determining config path. Please install \" \"the 'appdirs' Python module.\" ) arg_0 . _config_filename = appdirs . user_config_dir ( _LIBRARY_NAME , \"ProfitBricks\" ) + \".ini\" if not arg_0 . _config : arg_0 . _config = configparser . ConfigParser ( ) arg_0 . _config . optionxform = str arg_0 . _config . read ( arg_0 . _config_filename )","id_":251833,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L80-L96","negative":"Regenerates the primary or secondary access key for the specified\n storage account.\n\n service_name:\n Name of the storage service account.\n key_type:\n Specifies which key to regenerate. Valid values are:\n Primary, Secondary"} {"query":"Compare vectors . Borrowed from A . Parish .","positive":"def Func ( arg_0 , arg_1 ) : if norm ( arg_0 ) > 0 and norm ( arg_1 ) > 0 : return dot ( arg_0 , arg_1 ) \/ ( norm ( arg_0 ) * norm ( arg_1 ) ) else : return 0.0","id_":251834,"task_name":"https:\/\/github.com\/carawarner\/pantheon\/blob\/7e8718f4397eaa389fb3d5dc04fa01c7cb556512\/pantheon\/process.py#L22-L27","negative":"Enable the joint motors in this skeleton.\n\n This method sets the maximum force that can be applied by each joint to\n attain the desired target velocities. It also enables torque feedback\n for all joint motors.\n\n Parameters\n ----------\n max_force : float\n The maximum force that each joint is allowed to apply to attain its\n target velocity."} {"query":"Retrieve an estimated time correction offset for the given stream .","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : arg_3 = c_int ( ) arg_4 = lib . lsl_Func ( arg_0 . obj , c_double ( arg_1 ) , byref ( arg_3 ) ) handle_error ( arg_3 ) return arg_4","id_":251835,"task_name":"https:\/\/github.com\/labstreaminglayer\/liblsl-Python\/blob\/1ff6fe2794f8dba286b7491d1f7a4c915b8a0605\/pylsl\/pylsl.py#L739-L764","negative":"Slugify foreign key"} {"query":"This will output the nginx HTTP config string for specific port spec","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = \"\\t server {\\n\" arg_2 += \"\\t \\t {}\\n\" . 
format ( _nginx_max_file_size_string ( ) ) arg_2 += \"\\t \\t {}\\n\" . format ( _nginx_listen_string ( arg_0 ) ) arg_2 += \"\\t \\t {}\\n\" . format ( _nginx_server_name_string ( arg_0 ) ) arg_2 += _nginx_location_spec ( arg_0 , arg_1 ) arg_2 += _custom_502_page ( ) arg_2 += \"\\t }\\n\" return arg_2","id_":251836,"task_name":"https:\/\/github.com\/gamechanger\/dusty\/blob\/dc12de90bb6945023d6f43a8071e984313a1d984\/dusty\/compiler\/nginx\/__init__.py#L38-L47","negative":"How much of inner is in outer by volume"} {"query":"Create a callable that will invoke the given remote function . The stub will return a deferred even if the remote function does not .","positive":"def Func ( arg_0 , arg_1 ) : assert arg_0 . _opened , \"RPC System is not opened\" logging . debug ( \"Func(%s)\" % repr ( arg_1 ) ) arg_2 = urlparse . urlparse ( arg_1 ) arg_3 = arg_2 . scheme arg_4 = arg_2 . path . split ( \"\/\" ) if arg_3 != \"anycall\" : raise ValueError ( \"Not an anycall URL: %s\" % repr ( arg_1 ) ) if len ( arg_4 ) != 3 or arg_4 [ 0 ] != \"\" or arg_4 [ 1 ] != \"functions\" : raise ValueError ( \"Not an URL for a remote function: %s\" % repr ( arg_1 ) ) try : arg_5 = uuid . UUID ( arg_4 [ 2 ] ) except ValueError : raise ValueError ( \"Not a valid URL for a remote function: %s\" % repr ( arg_1 ) ) return _RPCFunctionStub ( arg_2 . netloc , arg_5 , arg_0 )","id_":251837,"task_name":"https:\/\/github.com\/pydron\/anycall\/blob\/43add96660258a14b24aa8e8413dffb1741b72d7\/anycall\/rpc.py#L224-L244","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Given a frame and a compiled function code find the corresponding function object within the frame .","positive":"def Func ( arg_0 , arg_1 ) : def find_code ( arg_2 , arg_3 = 0 ) : if arg_3 > 3 : return for arg_4 in arg_2 : if arg_4 is None : continue arg_5 = None if hasattr ( arg_4 , \"__code__\" ) and arg_4 . __code__ == arg_1 : arg_5 = arg_4 elif isinstance ( arg_4 , type ) or isinstance ( arg_4 , ModuleType ) : try : arg_5 = find_code ( ( getattr ( arg_4 , n , None ) for n in dir ( arg_4 ) ) , arg_3 + 1 ) except Exception : continue elif isinstance ( arg_4 , ( list , tuple , set ) ) : arg_5 = find_code ( arg_4 , arg_3 + 1 ) elif isinstance ( arg_4 , dict ) : arg_5 = find_code ( arg_4 . values ( ) , arg_3 + 1 ) if arg_5 : return arg_5 return find_code ( arg_0 . f_locals . values ( ) ) or find_code ( arg_0 . f_globals . values ( ) )","id_":251838,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/utils\/debugging.py#L268-L304","negative":"Required to allow using custom USER_DETAILS_SERIALIZER in\n JWTSerializer. Defining it here to avoid circular imports"} {"query":"Convert unixtime to unixtime on GTFS start - of - day .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . set_current_process_time_zone ( ) arg_1 = time . mktime ( time . 
localtime ( arg_1 ) [ : 3 ] + ( 12 , 00 , 0 , 0 , 0 , - 1 ) ) - 43200 set_process_timezone ( arg_2 ) return arg_1","id_":251839,"task_name":"https:\/\/github.com\/CxAalto\/gtfspy\/blob\/bddba4b74faae6c1b91202f19184811e326547e5\/gtfspy\/gtfs.py#L1087-L1111","negative":"Population parameter vals == average member parameter vals"} {"query":"Use this if you want to clone an existing contact and replace its data with new user input in one step .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_1 = arg_0 ( arg_1 . address_book , arg_1 . filename , arg_1 . supported_private_objects , None , arg_3 ) arg_1 . _process_user_input ( arg_2 ) return arg_1","id_":251840,"task_name":"https:\/\/github.com\/scheibler\/khard\/blob\/0f69430c2680f1ff5f073a977a3c5b753b96cc17\/khard\/carddav_object.py#L117-L126","negative":"Poll the queues that the worker can use to communicate with the \n supervisor, until all the workers are done and all the queues are \n empty. Handle messages as they appear."} {"query":"Associate a notification template from this workflow .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : return arg_0 . _assoc ( 'notification_templates_%s' % arg_3 , arg_1 , arg_2 )","id_":251841,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/resources\/workflow.py#L315-L334","negative":"Enumerate all possible resonance forms and return them as a list.\n\n :param mol: The input molecule.\n :type mol: rdkit.Chem.rdchem.Mol\n :return: A list of all possible resonance forms of the molecule.\n :rtype: list of rdkit.Chem.rdchem.Mol"} {"query":"L1 and L2 rules","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = True arg_3 = arg_0 [ 'chars' ] for arg_4 in arg_3 [ : : - 1 ] : if arg_4 [ 'orig' ] in ( 'B' , 'S' ) : arg_4 [ 'level' ] = arg_0 [ 'base_level' ] arg_2 = True elif arg_2 and arg_4 [ 'orig' ] in ( 'BN' , 'WS' ) : arg_4 [ 'level' ] = arg_0 [ 'base_level' ] else : arg_2 = False arg_5 = len ( arg_3 ) arg_6 = arg_11 = 0 arg_7 = 0 arg_8 = EXPLICIT_LEVEL_LIMIT for arg_9 in range ( arg_5 ) : arg_4 = arg_3 [ arg_9 ] arg_10 = arg_4 [ 'level' ] if arg_10 > arg_7 : arg_7 = arg_10 if arg_10 % 2 and arg_10 < arg_8 : arg_8 = arg_10 if arg_4 [ 'orig' ] == 'B' or arg_9 == arg_5 - 1 : arg_11 = arg_9 if arg_4 [ 'orig' ] == 'B' : arg_11 -= 1 reverse_contiguous_sequence ( arg_3 , arg_6 , arg_11 , arg_7 , arg_8 ) arg_6 = arg_9 + 1 arg_7 = 0 arg_8 = EXPLICIT_LEVEL_LIMIT if arg_1 : debug_storage ( arg_0 )","id_":251842,"task_name":"https:\/\/github.com\/MeirKriheli\/python-bidi\/blob\/a0e265bb465c1b7ad628487991e33b5ebe364641\/bidi\/algorithm.py#L517-L577","negative":"Return the class corresponding to the given temporalImp string"} {"query":"Find a webhook by name .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . webhooks . list ( ) : if arg_2 . name == arg_1 : print ( \"Deleting Webhook:\" , arg_2 . name , arg_2 . targetUrl ) arg_0 . webhooks . delete ( arg_2 . id )","id_":251843,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/examples\/ngrokwebhook.py#L95-L100","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"Split the extension from a pathname .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . rfind ( arg_1 ) if arg_2 : arg_5 = arg_0 . rfind ( arg_2 ) arg_4 = max ( arg_4 , arg_5 ) arg_6 = arg_0 . 
rfind ( arg_3 ) if arg_6 > arg_4 : arg_7 = arg_4 + 1 while arg_7 < arg_6 : if arg_0 [ arg_7 ] != arg_3 : return arg_0 [ : arg_6 ] , arg_0 [ arg_6 : ] arg_7 += 1 return arg_0 , ''","id_":251844,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/genericpath.py#L93-L113","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"For lens data compute the unmasked blurred image of every unmasked unblurred image of every galaxy in each \\ plane . To do this this function iterates over all planes and then galaxies to extract their unmasked unblurred \\ images .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return [ arg_3 . unmasked_blurred_image_of_galaxies_from_psf ( arg_1 , arg_2 ) for arg_3 in arg_0 ]","id_":251845,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/lens\/util\/lens_fit_util.py#L152-L174","negative":"Return a AzureDLFileSystem object."} {"query":"Minimize using Hessian - informed proximal gradient descent .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = 1 , arg_6 = 1 , arg_7 = None , arg_8 = None ) : arg_9 = [ arg_1 , arg_3 , arg_4 , arg_5 , arg_6 , arg_2 , arg_7 , ] , with tf . compat . v1 . name_scope ( arg_8 , 'Func' , arg_9 ) : def _loop_cond ( arg_1 , arg_10 , arg_11 ) : del arg_1 return tf . logical_and ( arg_11 < arg_5 , tf . logical_not ( arg_10 ) ) def _loop_body ( arg_1 , arg_10 , arg_11 ) : arg_12 , arg_13 , arg_14 = arg_0 ( arg_1 ) arg_1 , arg_10 , arg_15 = Func_one_step ( gradient_unregularized_loss = arg_12 , hessian_unregularized_loss_outer = arg_13 , hessian_unregularized_loss_middle = arg_14 , arg_1 = arg_1 , arg_3 = arg_3 , arg_4 = arg_4 , maximum_full_sweeps = arg_6 , arg_2 = arg_2 , arg_7 = arg_7 ) return arg_1 , arg_10 , arg_11 + 1 return tf . while_loop ( cond = _loop_cond , body = _loop_body , loop_vars = [ arg_1 , tf . zeros ( [ ] , np . bool , arg_8 = 'converged' ) , tf . zeros ( [ ] , np . int32 , arg_8 = 'iter' ) , ] )","id_":251846,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/proximal_hessian_sparse.py#L471-L590","negative":"This method is called before first step of simulation."} {"query":"Returns the lowest bit num from a given bit pattern . Returns None if no bits set .","positive":"def Func ( arg_0 ) : if arg_0 == 0 : return None arg_1 = 0 while ( arg_0 & 1 ) == 0 : arg_0 = arg_0 >> 1 arg_1 += 1 if arg_1 > 7 : arg_1 = 0 break return arg_1","id_":251847,"task_name":"https:\/\/github.com\/piface\/pifacecommon\/blob\/006bca14c18d43ba2d9eafaa84ef83b512c51cf6\/pifacecommon\/core.py#L19-L46","negative":"Gets back all response headers."} {"query":"Set the focus to position or raise IndexError .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _focus_position = arg_1 arg_0 . _modified ( ) try : arg_0 . next_position ( arg_1 ) except IndexError : arg_0 . _is_scrolling = False else : arg_0 . 
_is_scrolling = True","id_":251848,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/ui\/__main__.py#L848-L859","negative":"Convert a ChaLearn Cause effect pairs challenge format into numpy.ndarray.\n\n :param filename: path of the file to read or DataFrame containing the data\n :type filename: str or pandas.DataFrame\n :param scale: Scale the data\n :type scale: bool\n :param kwargs: parameters to be passed to pandas.read_csv\n :return: Dataframe composed of (SampleID, a (numpy.ndarray) , b (numpy.ndarray))\n :rtype: pandas.DataFrame"} {"query":"Update the information for an institute","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = None , arg_9 = None ) : arg_9 = arg_9 or False arg_10 = arg_0 . institute ( arg_1 ) if not arg_10 : raise IntegrityError ( \"Institute {} does not exist in database\" . format ( arg_1 ) ) arg_11 = { } arg_12 = arg_10 if arg_2 : arg_13 = arg_0 . user ( arg_2 ) if not arg_13 : raise IntegrityError ( \"user {} does not exist in database\" . format ( arg_2 ) ) LOG . info ( \"Updating sanger recipients for institute: {0} with {1}\" . format ( arg_1 , arg_2 ) ) arg_11 [ '$push' ] = { 'sanger_recipients' : arg_6 } if arg_6 : LOG . info ( \"Removing sanger recipient {0} from institute: {1}\" . format ( arg_6 , arg_1 ) ) arg_11 [ '$pull' ] = { 'sanger_recipients' : arg_6 } if arg_3 : LOG . info ( \"Updating coverage cutoff for institute: {0} to {1}\" . format ( arg_1 , arg_3 ) ) arg_11 [ '$set' ] = { 'coverage_cutoff' : arg_3 } if arg_4 : LOG . info ( \"Updating frequency cutoff for institute: {0} to {1}\" . format ( arg_1 , arg_4 ) ) if not '$set' in arg_11 : arg_11 [ '$set' ] = { } arg_11 [ '$set' ] = { 'frequency_cutoff' : arg_4 } if arg_5 : LOG . info ( \"Updating display name for institute: {0} to {1}\" . format ( arg_1 , arg_5 ) ) if not '$set' in arg_11 : arg_11 [ '$set' ] = { } arg_11 [ '$set' ] = { 'display_name' : arg_5 } if arg_7 : if arg_8 : arg_8 = list ( arg_8 ) arg_14 = { } if arg_9 : arg_14 = arg_10 . get ( 'phenotype_groups' , PHENOTYPE_GROUPS ) for arg_15 , arg_16 in enumerate ( arg_7 ) : arg_17 = arg_0 . hpo_term ( arg_16 ) if not arg_17 : raise IntegrityError ( \"Term {} does not exist\" . format ( arg_16 ) ) arg_18 = arg_17 [ 'hpo_id' ] arg_19 = arg_17 [ 'description' ] arg_20 = None if arg_8 : arg_20 = arg_8 [ arg_15 ] arg_14 [ arg_16 ] = { 'name' : arg_19 , 'abbr' : arg_20 } arg_11 [ '$set' ] = { 'phenotype_groups' : arg_14 } if arg_11 : if not '$set' in arg_11 : arg_11 [ '$set' ] = { } arg_11 [ '$set' ] [ 'updated_at' ] = datetime . now ( ) arg_12 = arg_0 . institute_collection . find_one_and_update ( { '_id' : arg_1 } , arg_11 , return_document = pymongo . ReturnDocument . AFTER ) LOG . info ( \"Institute updated\" ) return arg_12","id_":251849,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/institute.py#L38-L129","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Execute the wrapped code accepting a prompt optionally responding to the prompt .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : with arg_0 . driver . 
accept_modal ( \"prompt\" , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 ) : yield","id_":251850,"task_name":"https:\/\/github.com\/elliterate\/capybara.py\/blob\/0c6ae449cc37e4445ec3cd6af95674533beedc6c\/capybara\/session.py#L549-L564","negative":"Removes a NIC from the load balancer.\n\n :param datacenter_id: The unique ID of the data center.\n :type datacenter_id: ``str``\n\n :param loadbalancer_id: The unique ID of the load balancer.\n :type loadbalancer_id: ``str``\n\n :param nic_id: The unique ID of the NIC.\n :type nic_id: ``str``"} {"query":"Cut a clip from a video .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = 'info' , arg_7 = False , ** arg_8 ) : arg_9 = { 'log_level' : arg_6 } if arg_4 is None : arg_9 [ 'vcodec' ] = 'copy' if arg_5 is None : arg_9 [ 'acodec' ] = 'copy' if arg_2 : arg_9 [ 'ss' ] = arg_2 else : arg_2 = 0 if arg_3 : arg_9 [ 't' ] = arg_3 - arg_2 convert_video ( arg_0 , arg_1 , arg_7 , ** arg_9 )","id_":251851,"task_name":"https:\/\/github.com\/open-mmlab\/mmcv\/blob\/0d77f61450aab4dde8b8585a577cc496acb95d7f\/mmcv\/video\/processing.py#L91-L123","negative":"Returns an aggregator connection."} {"query":"Computes the FOLLOW set for every non - terminal in the grammar .","positive":"def Func ( arg_0 ) : arg_0 . _follow [ arg_0 . start_symbol ] . add ( END_OF_INPUT ) while True : arg_1 = False for arg_2 , arg_3 in arg_0 . nonterminals . items ( ) : for arg_4 in arg_3 : for arg_5 , arg_6 in enumerate ( arg_4 . rhs ) : if arg_6 not in arg_0 . nonterminals : continue arg_7 = arg_0 . first ( arg_4 . rhs [ arg_5 + 1 : ] ) arg_8 = arg_7 - set ( [ EPSILON ] ) if EPSILON in arg_7 or arg_5 == ( len ( arg_4 . rhs ) - 1 ) : arg_8 |= arg_0 . _follow [ arg_2 ] if arg_8 - arg_0 . _follow [ arg_6 ] : arg_0 . _follow [ arg_6 ] |= arg_8 arg_1 = True if not arg_1 : break","id_":251852,"task_name":"https:\/\/github.com\/mtomwing\/purplex\/blob\/4072109e1d4395826983cd9d95ead2c1dfc1184e\/purplex\/grammar.py#L132-L158","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Register the Selenium specific driver implementation .","positive":"def Func ( ) : FuncDriver ( ISelenium , Selenium , class_implements = [ Firefox , Chrome , Ie , Edge , Opera , Safari , BlackBerry , PhantomJS , Android , Remote , EventFiringWebDriver , ] , )","id_":251853,"task_name":"https:\/\/github.com\/mozilla\/PyPOM\/blob\/1e7d7ac6e19ec2dac0ea04bad5f3daadbe0c43b8\/src\/pypom\/selenium_driver.py#L121-L143","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Return a new Streamlet by outer join_streamlet with this streamlet","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : from heronpy . streamlet . impl . joinbolt import JoinStreamlet , JoinBolt arg_4 = JoinStreamlet ( JoinBolt . OUTER , arg_2 , arg_3 , arg_0 , arg_1 ) arg_0 . _add_child ( arg_4 ) arg_1 . 
_add_child ( arg_4 ) return arg_4","id_":251854,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heronpy\/streamlet\/streamlet.py#L188-L197","negative":"Open the editor at the given filename, linenumber, column and\n show an error message. This is used for correcting syntax errors.\n The current implementation only has special support for the VIM editor,\n and falls back on the 'editor' hook if VIM is not used.\n\n Call ip.set_hook('fix_error_editor',youfunc) to use your own function,"} {"query":"Retrieve the Engine - level model params from a Swarm model","positive":"def Func ( arg_0 ) : arg_1 = ClientJobsDAO . get ( ) ( arg_2 , arg_3 ) = arg_1 . modelsGetFields ( arg_0 , [ \"jobId\" , \"genDescription\" ] ) ( arg_4 , ) = arg_1 . jobGetFields ( arg_2 , [ \"genBaseDescription\" ] ) arg_5 = tempfile . mkdtemp ( ) try : arg_6 = os . path . join ( arg_5 , \"base.py\" ) with open ( arg_6 , mode = \"wb\" ) as f : f . write ( arg_4 ) arg_7 = os . path . join ( arg_5 , \"description.py\" ) with open ( arg_7 , mode = \"wb\" ) as f : f . write ( arg_3 ) arg_8 = helpers . getExperimentDescriptionInterfaceFromModule ( helpers . loadExperimentDescriptionScriptFromDir ( arg_5 ) ) return json . dumps ( dict ( modelConfig = arg_8 . getModelDescription ( ) , inferenceArgs = arg_8 . getModelControl ( ) . get ( \"inferenceArgs\" , None ) ) ) finally : shutil . rmtree ( arg_5 , ignore_errors = True )","id_":251855,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/api.py#L73-L119","negative":"Return an existing or new ConversationWidget."} {"query":"Returns a list of degree vectors one for each input and hidden layer .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = \"left-to-right\" , arg_3 = \"equal\" ) : arg_2 = _create_input_order ( arg_0 , arg_2 ) arg_4 = [ arg_2 ] if arg_1 is None : arg_1 = [ ] for arg_5 in arg_1 : if isinstance ( arg_3 , six . string_types ) : if arg_3 == \"random\" : arg_4 . append ( np . random . randint ( low = min ( np . min ( arg_4 [ - 1 ] ) , arg_0 - 1 ) , high = arg_0 , size = arg_5 ) ) elif arg_3 == \"equal\" : arg_6 = min ( np . min ( arg_4 [ - 1 ] ) , arg_0 - 1 ) arg_4 . append ( np . maximum ( arg_6 , np . ceil ( np . arange ( 1 , arg_5 + 1 ) * ( arg_0 - 1 ) \/ float ( arg_5 + 1 ) ) . astype ( np . int32 ) ) ) else : raise ValueError ( 'Invalid hidden order: \"{}\".' . format ( arg_3 ) ) return arg_4","id_":251856,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/bijectors\/masked_autoregressive.py#L903-L954","negative":"Override of clean method to perform additional validation"} {"query":"Ensure client is authorized to use the response type requested .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , * arg_5 , ** arg_6 ) : if arg_2 not in ( 'code' , 'token' ) : return False if hasattr ( arg_3 , 'allowed_response_types' ) : return arg_2 in arg_3 . 
allowed_response_types return True","id_":251857,"task_name":"https:\/\/github.com\/lepture\/flask-oauthlib\/blob\/9e6f152a5bb360e7496210da21561c3e6d41b0e1\/flask_oauthlib\/provider\/oauth2.py#L984-L997","negative":"Converts py_zipkin's annotations dict to protobuf.\n\n :param annotations: annotations dict.\n :type annotations: dict\n :return: corresponding protobuf's list of annotations.\n :rtype: list"} {"query":"Decrypt a message using this double ratchet session .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if arg_3 == None : arg_3 = arg_0 . __ad arg_4 = arg_0 . __decryptSavedMessage ( arg_1 , arg_2 , arg_3 ) if arg_4 : return arg_4 if arg_0 . triggersStep ( arg_2 . dh_pub ) : arg_0 . __saveMessageKeys ( arg_2 . pn ) arg_0 . step ( arg_2 . dh_pub ) arg_0 . __saveMessageKeys ( arg_2 . n ) return arg_0 . __decrypt ( arg_1 , arg_0 . __skr . nextDecryptionKey ( ) , arg_2 , arg_3 )","id_":251858,"task_name":"https:\/\/github.com\/Syndace\/python-doubleratchet\/blob\/d4497af73044e0084efa3e447276ee9d6a6eb66a\/doubleratchet\/ratchets\/doubleratchet.py#L110-L154","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Returns dozscale and particle list of update","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = False arg_3 = [ ] for arg_4 in listify ( arg_1 ) : arg_5 , arg_6 = arg_0 . _p2i ( arg_4 ) arg_3 . append ( arg_6 ) arg_2 = arg_2 or arg_5 == 'zscale' arg_3 = set ( arg_3 ) return arg_2 , arg_3","id_":251859,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/comp\/objs.py#L746-L755","negative":"Read a varint from file, parse it, and return the decoded integer."} {"query":"Asynchronous connection handler . Processes each line from the socket .","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : arg_0 . shell . stdout . write ( arg_0 . shell . prompt ) arg_3 = arg_0 . shell . stdin . readline ( ) if not len ( arg_3 ) : arg_3 = 'EOF' return False else : arg_3 = arg_3 . rstrip ( '\\r\\n' ) arg_3 = arg_0 . shell . precmd ( arg_3 ) arg_4 = arg_0 . shell . onecmd ( arg_3 ) arg_4 = arg_0 . shell . postcmd ( arg_4 , arg_3 ) arg_0 . shell . stdout . flush ( ) arg_0 . shell . postloop ( ) if arg_4 : arg_0 . shell = None arg_1 . close ( ) return not arg_4","id_":251860,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/sbio\/socket_server.py#L93-L114","negative":"Operates on item_dict\n\n Promotes the resource_name and resource_type fields to the\n top-level of the serialization so they can be printed as columns.\n Also makes a copies name field to type, which is a default column."} {"query":"Generate a training batch for the Skip - Gram model .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 0 ) : if arg_1 % arg_2 != 0 : raise Exception ( \"batch_size should be able to be divided by num_skips.\" ) if arg_2 > 2 * arg_3 : raise Exception ( \"num_skips <= 2 * skip_window\" ) arg_5 = np . ndarray ( shape = ( arg_1 ) , dtype = np . int32 ) arg_6 = np . ndarray ( shape = ( arg_1 , 1 ) , dtype = np . int32 ) arg_7 = 2 * arg_3 + 1 arg_8 = collections . deque ( maxlen = arg_7 ) for arg_9 in range ( arg_7 ) : arg_8 . append ( arg_0 [ arg_4 ] ) arg_4 = ( arg_4 + 1 ) % len ( arg_0 ) for arg_10 in range ( arg_1 \/\/ arg_2 ) : arg_11 = arg_3 arg_12 = [ arg_3 ] for arg_13 in range ( arg_2 ) : while arg_11 in arg_12 : arg_11 = random . randint ( 0 , arg_7 - 1 ) arg_12 . 
append ( arg_11 ) arg_5 [ arg_10 * arg_2 + arg_13 ] = arg_8 [ arg_3 ] arg_6 [ arg_10 * arg_2 + arg_13 , 0 ] = arg_8 [ arg_11 ] arg_8 . append ( arg_0 [ arg_4 ] ) arg_4 = ( arg_4 + 1 ) % len ( arg_0 ) return arg_5 , arg_6 , arg_4","id_":251861,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/nlp.py#L52-L125","negative":"Return a Fmt representation of Translator for pretty-printing"} {"query":"Will delete the entities that match at the time the query is executed .","positive":"def Func ( arg_0 , arg_1 = 100 ) : from . columns import MODELS_REFERENCED if not arg_0 . _model . _no_fk or arg_0 . _model . _namespace in MODELS_REFERENCED : raise QueryError ( \"Can't Func entities of models with foreign key relationships\" ) arg_2 = [ ] arg_3 = 0 for arg_4 in arg_0 . iter_result ( pagesize = arg_1 ) : arg_2 . append ( arg_4 ) arg_3 += 1 if arg_3 >= arg_1 : session . Func ( arg_2 ) del arg_2 [ : ] arg_3 = 0 if arg_2 : session . Func ( arg_2 )","id_":251862,"task_name":"https:\/\/github.com\/josiahcarlson\/rom\/blob\/8b5607a856341df85df33422accc30ba9294dbdb\/rom\/query.py#L707-L735","negative":"Allocate or reallocate a floating IP.\n\n :param context: neutron api request context.\n :param content: dictionary describing the floating ip, with keys\n as listed in the RESOURCE_ATTRIBUTE_MAP object in\n neutron\/api\/v2\/attributes.py. All keys will be populated.\n\n :returns: Dictionary containing details for the new floating IP. If values\n are declared in the fields parameter, then only those keys will be\n present."} {"query":"Returns all of the items from queryset where the user has a product invoking that item s condition in one of their carts .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = Q ( enabling_products__productitem__cart__user = arg_2 ) arg_4 = commerce . Cart . STATUS_RELEASED arg_5 = commerce . Cart . STATUS_PAID arg_6 = commerce . Cart . STATUS_ACTIVE arg_7 = Q ( enabling_products__productitem__cart__status = arg_4 ) arg_8 = ~ ( Q ( enabling_products__productitem__cart__status = arg_5 ) | Q ( enabling_products__productitem__cart__status = arg_6 ) ) arg_1 = arg_1 . filter ( arg_3 ) arg_1 = arg_1 . exclude ( arg_7 & arg_8 ) return arg_1","id_":251863,"task_name":"https:\/\/github.com\/chrisjrn\/registrasion\/blob\/461d5846c6f9f3b7099322a94f5d9911564448e4\/registrasion\/controllers\/conditions.py#L173-L194","negative":"Creates a tempfile and starts the given editor, returns the data afterwards."} {"query":"Invokes a contract with given parameters and returns the result .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : arg_4 = encode_invocation_params ( arg_2 ) arg_5 = arg_0 . _call ( JSONRPCMethods . INVOKE . value , [ arg_1 , arg_4 , ] , ** arg_3 ) return decode_invocation_result ( arg_5 )","id_":251864,"task_name":"https:\/\/github.com\/ellmetha\/neojsonrpc\/blob\/e369b633a727482d5f9e310f0c3337ae5f7265db\/neojsonrpc\/client.py#L258-L275","negative":"Generates and writes the media pages for all media in the gallery"} {"query":"show basic info about ABF class variables .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = False ) : arg_3 = \"\\n### ABF INFO ###\\n\" arg_4 = { } for arg_5 in sorted ( dir ( arg_0 ) ) : if arg_5 in [ 'cm' , 'evIs' , 'colormap' , 'dataX' , 'dataY' , 'protoX' , 'protoY' ] : continue if \"_\" in arg_5 : continue arg_6 = getattr ( arg_0 , arg_5 ) if type ( arg_6 ) is list and len ( arg_6 ) > 5 : continue arg_7 = str ( type ( arg_6 ) ) . 
split ( \"'\" ) [ 1 ] if \"method\" in arg_7 or \"neo.\" in arg_7 : continue if arg_5 in [ \"header\" , \"MT\" ] : continue arg_3 += \"%s <%s> %s\\n\" % ( arg_5 , arg_7 , arg_6 ) arg_4 [ arg_5 ] = arg_6 if arg_1 : print ( ) for arg_8 in arg_3 . split ( \"\\n\" ) : if len ( arg_8 ) < 3 : continue print ( \" \" , arg_8 ) print ( ) if arg_2 : return arg_4 return arg_3","id_":251865,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/doc\/oldcode\/swhlab\/core\/abf.py#L136-L165","negative":"Add one or more files or URLs to the manifest.\n If files contains a glob, it is expanded.\n\n All files are uploaded to SolveBio. The Upload\n object is used to fill the manifest."} {"query":"Returns true if the host is reachable . In some cases it may not be reachable a tunnel must be used .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . hostportlist : try : socket . create_connection ( arg_1 , StateManager . TIMEOUT_SECONDS ) return True except : LOG . info ( \"StateManager %s Unable to connect to host: %s port %i\" % ( arg_0 . name , arg_1 [ 0 ] , arg_1 [ 1 ] ) ) continue return False","id_":251866,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/statemgrs\/src\/python\/statemanager.py#L86-L99","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Return formatted traceback .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = 5 ) : arg_6 = arg_0 . structured_traceback ( arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) return arg_0 . stb2Func ( arg_6 )","id_":251867,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/ultratb.py#L406-L413","negative":"Remove a contact from the roster.\n\n :Parameters:\n - `jid`: contact's jid\n - `callback`: function to call when the request succeeds. It should\n accept a single argument - a `RosterItem` describing the\n requested change\n - `error_callback`: function to call when the request fails. It\n should accept a single argument - an error stanza received\n (`None` in case of timeout)\n :Types:\n - `jid`: `JID`"} {"query":"Prompts the user to save an SVG document to disk .","positive":"def Func ( arg_0 , arg_1 = None ) : if isinstance ( arg_0 , unicode ) : arg_0 = arg_0 . encode ( 'utf-8' ) arg_2 = QtGui . QFileDialog ( arg_1 , 'Save SVG Document' ) arg_2 . setAcceptMode ( QtGui . QFileDialog . AcceptSave ) arg_2 . setDefaultSuffix ( 'svg' ) arg_2 . setNameFilter ( 'SVG document (*.svg)' ) if arg_2 . exec_ ( ) : arg_3 = arg_2 . selectedFiles ( ) [ 0 ] arg_4 = open ( arg_3 , 'w' ) try : arg_4 . write ( arg_0 ) finally : arg_4 . close ( ) return arg_3 return None","id_":251868,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/svg.py#L8-L39","negative":"Selects an open lswitch for a network.\n\n Note that it does not select the most full switch, but merely one with\n ports available."} {"query":"Decorator to log the shapes of input and output dataframes","positive":"def Func ( arg_0 ) : def decorator ( arg_1 ) : @ wraps ( arg_1 ) def wrapper ( * arg_2 , ** arg_3 ) : arg_4 = _get_dfs_shapes ( * arg_2 , ** arg_3 ) arg_5 = arg_1 ( * arg_2 , ** arg_3 ) arg_6 = _get_dfs_shapes ( arg_5 ) _Func ( arg_0 , arg_1 . 
__name__ , arg_4 , arg_6 ) return arg_5 return wrapper return decorator","id_":251869,"task_name":"https:\/\/github.com\/ToucanToco\/toucan-data-sdk\/blob\/c3ca874e1b64f4bdcc2edda750a72d45d1561d8a\/toucan_data_sdk\/utils\/decorators.py#L139-L155","negative":"Handle the retrieval of the code"} {"query":"Check if run_id is stored in DynamoDB table . Return True if run_id is stored or False otherwise .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . get_item ( TableName = arg_1 , Key = { DYNAMODB_RUNID_ATTRIBUTE : { 'S' : arg_2 } } ) return arg_3 . get ( 'Item' ) is not None","id_":251870,"task_name":"https:\/\/github.com\/snowplow\/snowplow-python-analytics-sdk\/blob\/0ddca91e3f6d8bed88627fa557790aa4868bdace\/snowplow_analytics_sdk\/run_manifests.py#L230-L247","negative":"Decorator for methods accepting old_path and new_path."} {"query":"Helper method to parse a full or partial path and return a full path as well as a dict containing path parts .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : from solvebio . resource . vault import Vault arg_3 = arg_2 . pop ( 'client' , None ) or arg_0 . _client or client if not arg_1 : raise Exception ( 'Invalid path: ' , 'Full path must be in one of the following formats: ' '\"vault:\/path\", \"domain:vault:\/path\", or \"~\/path\"' ) arg_4 = arg_2 . get ( 'vault' ) or arg_1 try : arg_5 , arg_6 = Vault . Func ( arg_4 , client = arg_3 ) except Exception as err : raise Exception ( 'Could not determine vault from \"{0}\": {1}' . format ( arg_4 , err ) ) if arg_2 . get ( 'path' ) : arg_1 = '{0}:\/{1}' . format ( arg_5 , arg_2 [ 'path' ] ) arg_7 = arg_0 . PATH_RE . match ( arg_1 ) if arg_7 : arg_8 = arg_7 . groupdict ( ) [ 'path' ] else : raise Exception ( 'Cannot find a valid object path in \"{0}\". ' 'Full path must be in one of the following formats: ' '\"vault:\/path\", \"domain:vault:\/path\", or \"~\/path\"' . format ( arg_1 ) ) arg_8 = re . sub ( '\/\/+' , '\/' , arg_8 ) if arg_8 != '\/' : arg_8 = arg_8 . rstrip ( '\/' ) arg_6 [ 'path' ] = arg_8 arg_1 = '{domain}:{vault}:{path}' . format ( ** arg_6 ) arg_6 [ 'full_path' ] = arg_1 return arg_1 , arg_6","id_":251871,"task_name":"https:\/\/github.com\/solvebio\/solvebio-python\/blob\/b29614643043afd19c1d8074e8f25c6700d51a73\/solvebio\/resource\/object.py#L46-L135","negative":"Compute the yticks labels of this grid_stack, used for plotting the y-axis ticks when visualizing an image \\"} {"query":"Returns the lowest indices in each string in a column where the provided substring is fully contained between within a sample . If the substring is not found - 1 is returned . It is the same as str . find .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0 , arg_3 = None ) : return str_find ( arg_0 , arg_1 , arg_2 , arg_3 )","id_":251872,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/functions.py#L808-L840","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Handle selection of item in listing .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . _acceptButton . setEnabled ( True ) del arg_0 . _selected [ : ] arg_3 = arg_0 . _filesystemWidget . model ( ) . item ( arg_1 ) arg_0 . _selected . append ( arg_3 . 
path )","id_":251873,"task_name":"https:\/\/github.com\/4degrees\/riffle\/blob\/e5a0d908df8c93ff1ee7abdda8875fd1667df53d\/source\/riffle\/browser.py#L139-L144","negative":"Decorator for methods accepting old_path and new_path."} {"query":"Construct activity endpoint from host athlete name and filename","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return '{host}{athlete}\/activity\/{filename}' . format ( host = arg_0 . host , arg_1 = quote_plus ( arg_1 ) , arg_2 = arg_2 )","id_":251874,"task_name":"https:\/\/github.com\/AartGoossens\/goldencheetahlib\/blob\/ebe57de7d94280674c8440a81f53ac02f0b4eb43\/goldencheetahlib\/client.py#L141-L152","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Program main .","positive":"def Func ( ) : arg_0 = parse_options ( ) arg_1 , arg_2 = create_output ( get_status ( arg_0 ) , arg_0 ) sys . stdout . write ( arg_1 ) sys . exit ( arg_2 )","id_":251875,"task_name":"https:\/\/github.com\/vint21h\/nagios-check-supervisord\/blob\/a40a542499197a4b5658bd6cc3b34326fe8d0ada\/check_supervisord.py#L241-L249","negative":"Revoke the token and remove the cookie."} {"query":"Calls MuTect to perform variant analysis","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 ) : arg_10 = arg_0 . fileStore . getLocalTempDir ( ) arg_11 = [ arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_7 , arg_6 , arg_8 , arg_9 ] arg_12 = [ 'normal.bam' , 'normal.bai' , 'tumor.bam' , 'tumor.bai' , 'ref.fasta' , 'ref.fasta.fai' , 'ref.dict' , 'cosmic.vcf' , 'dbsnp.vcf' ] for arg_13 , arg_14 in zip ( arg_11 , arg_12 ) : arg_0 . fileStore . readGlobalFile ( arg_13 , os . path . join ( arg_10 , arg_14 ) ) arg_15 = [ '--analysis_type' , 'MuTect' , '--reference_sequence' , 'ref.fasta' , '--cosmic' , '\/data\/cosmic.vcf' , '--dbsnp' , '\/data\/dbsnp.vcf' , '--input_file:normal' , '\/data\/normal.bam' , '--input_file:tumor' , '\/data\/tumor.bam' , '--tumor_lod' , str ( 10 ) , '--initial_tumor_lod' , str ( 4.0 ) , '--out' , 'mutect.out' , '--coverage_file' , 'mutect.cov' , '--vcf' , 'mutect.vcf' ] dockerCall ( arg_0 = arg_0 , workDir = arg_10 , arg_15 = arg_15 , tool = 'quay.io\/ucsc_cgl\/mutect:1.1.7--e8bf09459cf0aecb9f55ee689c2b2d194754cbd3' ) arg_16 = [ 'mutect.vcf' , 'mutect.cov' , 'mutect.out' ] arg_17 = [ os . path . join ( arg_10 , x ) for x in arg_16 ] tarball_files ( 'mutect.tar.gz' , file_paths = arg_17 , output_dir = arg_10 ) return arg_0 . fileStore . writeGlobalFile ( os . path . 
join ( arg_10 , 'mutect.tar.gz' ) )","id_":251876,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/tools\/mutation_callers.py#L9-L50","negative":"Renew the message lock.\n\n This will maintain the lock on the message to ensure\n it is not returned to the queue to be reprocessed. In order to complete (or otherwise settle)\n the message, the lock must be maintained. Messages received via ReceiveAndDelete mode are not\n locked, and therefore cannot be renewed. This operation can also be performed as an asynchronous\n background task by registering the message with an `azure.servicebus.aio.AutoLockRenew` instance.\n This operation is only available for non-sessionful messages.\n\n :raises: TypeError if the message is sessionful.\n :raises: ~azure.servicebus.common.errors.MessageLockExpired is message lock has already expired.\n :raises: ~azure.servicebus.common.errors.SessionLockExpired if session lock has already expired.\n :raises: ~azure.servicebus.common.errors.MessageAlreadySettled is message has already been settled."} {"query":"Apply some HTML highlighting to the contents . This can t be done in the","positive":"def Func ( arg_0 ) : arg_0 = escape ( arg_0 ) arg_0 = arg_0 . replace ( ' <iterator object>' , \" <this object can be used in a 'for' loop<\/var>><\/small>\" ) arg_0 = arg_0 . replace ( ' <dynamic item>' , ' <this object may have extra field names<\/var>><\/small>' ) arg_0 = arg_0 . replace ( ' <dynamic attribute>' , ' <this object may have extra field names<\/var>><\/small>' ) arg_0 = RE_PROXY . sub ( '\\g<1><proxy object<\/var>><\/small>' , arg_0 ) arg_0 = RE_FUNCTION . sub ( '\\g<1><object method<\/var>><\/small>' , arg_0 ) arg_0 = RE_GENERATOR . sub ( \"\\g<1><generator, use 'for' to traverse it<\/var>><\/small>\" , arg_0 ) arg_0 = RE_OBJECT_ADDRESS . sub ( '\\g<1><\\g<2> object<\/var>><\/small>' , arg_0 ) arg_0 = RE_MANAGER . sub ( '\\g<1><manager, use .all<\/kbd> to traverse it<\/var>><\/small>' , arg_0 ) arg_0 = RE_CLASS_REPR . sub ( '\\g<1><\\g<2> class<\/var>><\/small>' , arg_0 ) arg_0 = RE_REQUEST_FIELDNAME . sub ( '\\g<1>:\\n \\g<2><\/strong>: ' , arg_0 ) arg_0 = RE_REQUEST_CLEANUP1 . sub ( '\\g<1>' , arg_0 ) arg_0 = RE_REQUEST_CLEANUP2 . sub ( ')' , arg_0 ) return mark_safe ( arg_0 )","id_":251877,"task_name":"https:\/\/github.com\/edoburu\/django-debugtools\/blob\/5c609c00fa9954330cd135fc62a1e18b8e7fea8a\/debugtools\/formatter.py#L129-L152","negative":"Removes the specfied course from the specified organization"} {"query":"Construct the point record by reading the points from the stream","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_2 . dtype arg_5 = bytearray ( arg_1 . read ( arg_3 * arg_4 . itemsize ) ) try : arg_6 = np . frombuffer ( arg_5 , dtype = arg_4 , arg_3 = arg_3 ) except ValueError : arg_7 = arg_3 * arg_4 . itemsize if len ( arg_5 ) % arg_4 . itemsize != 0 : arg_8 = arg_7 - len ( arg_5 ) raise_not_enough_bytes_error ( arg_7 , arg_8 , len ( arg_5 ) , arg_4 , ) else : arg_9 = len ( arg_5 ) \/\/ arg_4 . itemsize logger . critical ( \"Expected {} points, there are {} ({} missing)\" . format ( arg_3 , arg_9 , arg_3 - arg_9 ) ) arg_6 = np . 
frombuffer ( arg_5 , dtype = arg_4 , arg_3 = arg_9 ) return arg_0 ( arg_6 , arg_2 )","id_":251878,"task_name":"https:\/\/github.com\/tmontaigu\/pylas\/blob\/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06\/pylas\/point\/record.py#L253-L282","negative":"Checks if a data is csr, csc, bsr, or dia Scipy sparse matrix"} {"query":"Returns the coach ID for the team s OC in a given year .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_0 . _year_info_pq ( arg_1 , 'Offensive Coordinator' ) ( 'a' ) if arg_2 : return arg_2 . attr [ 'href' ] except ValueError : return None","id_":251879,"task_name":"https:\/\/github.com\/mdgoldberg\/sportsref\/blob\/09f11ac856a23c96d666d1d510bb35d6f050b5c3\/sportsref\/nfl\/teams.py#L283-L294","negative":"Click the right mouse button without modifiers pressed.\n\n Parameters: coordinates to click on scren (tuple (x, y))\n Returns: None"} {"query":"Flattens a nested dictionary .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { } for arg_3 , arg_4 in arg_0 . items ( ) : if isinstance ( arg_4 , dict ) : arg_5 = Func ( arg_4 , arg_1 ) for arg_6 , arg_7 in arg_5 . items ( ) : arg_8 = arg_3 + arg_1 + arg_6 arg_2 [ arg_8 ] = arg_7 else : arg_2 [ arg_3 ] = arg_4 return arg_2","id_":251880,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/utils\/helpful_functions.py#L27-L43","negative":"230v power off"} {"query":"Parse a datetime to a unix timestamp .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 : return _Func_formats . get ( arg_1 , lambda v : Func_fmt ( v , arg_1 ) ) ( arg_0 ) arg_2 = len ( arg_0 ) if 19 <= arg_2 <= 24 and arg_0 [ 3 ] == \" \" : try : return Func_d_b_Y_H_M_S ( arg_0 ) except ( KeyError , ValueError , OverflowError ) : pass if 30 <= arg_2 <= 31 : try : return Func_a__d_b_Y_H_M_S_z ( arg_0 ) except ( KeyError , ValueError , OverflowError ) : pass if arg_2 == 14 : try : return Func_YmdHMS ( arg_0 ) except ( ValueError , OverflowError ) : pass try : return Func_epoch ( arg_0 ) except ValueError : pass return Func_any ( arg_0 )","id_":251881,"task_name":"https:\/\/github.com\/greenbender\/pynntp\/blob\/991a76331cdf5d8f9dbf5b18f6e29adc80749a2f\/nntp\/date.py#L316-L374","negative":"Compares and exchanges.\n\n Compares the value in the AL, AX, EAX or RAX register (depending on the\n size of the operand) with the first operand (destination operand). If\n the two values are equal, the second operand (source operand) is loaded\n into the destination operand. Otherwise, the destination operand is\n loaded into the AL, AX, EAX or RAX register.\n\n The ZF flag is set if the values in the destination operand and\n register AL, AX, or EAX are equal; otherwise it is cleared. The CF, PF,\n AF, SF, and OF flags are set according to the results of the comparison\n operation::\n\n (* accumulator = AL, AX, EAX or RAX, depending on whether *)\n (* a byte, word, a doubleword or a 64bit comparison is being performed*)\n IF accumulator == DEST\n THEN\n ZF = 1\n DEST = SRC\n ELSE\n ZF = 0\n accumulator = DEST\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Return an argument list node that takes only self .","positive":"def Func ( arg_0 ) : return ast . arguments ( args = [ ast . 
arg ( arg = \"self\" ) ] , defaults = [ ] , kw_defaults = [ ] , kwonlyargs = [ ] , )","id_":251882,"task_name":"https:\/\/github.com\/Julian\/Ivoire\/blob\/5b8218cffa409ed733cf850a6fde16fafb8fc2af\/ivoire\/transform.py#L141-L152","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Stop this client .","positive":"def Func ( arg_0 ) : arg_0 . t . Func ( ) arg_0 . factory . FuncTrying ( ) arg_0 . connector . disconnect ( )","id_":251883,"task_name":"https:\/\/github.com\/calston\/tensor\/blob\/7c0c99708b5dbff97f3895f705e11996b608549d\/tensor\/outputs\/riemann.py#L125-L130","negative":"Semver tag triggered deployment helper"} {"query":"Return byte - size of a memoryview or buffer .","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , memoryview ) : if PY3 : return arg_0 . nbytes else : arg_1 = arg_0 . itemsize for arg_2 in arg_0 . shape : arg_1 *= arg_2 return arg_1 else : return len ( arg_0 )","id_":251884,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/executors\/serialize\/serialize.py#L38-L52","negative":"Attempts to fetch streams repeatedly\n until some are returned or limit hit."} {"query":"Create a new DataItem .","positive":"def Func ( arg_0 ) : models . DataItem . create ( content = '' . join ( random . choice ( string . ascii_uppercase + string . digits ) for arg_1 in range ( 20 ) ) ) return muffin . HTTPFound ( '\/' )","id_":251885,"task_name":"https:\/\/github.com\/klen\/muffin-peewee\/blob\/8e893e3ea1dfc82fbcfc6efe784308c8d4e2852e\/example\/views.py#L22-L27","negative":"Stop and remove a worker"} {"query":"Creates a db file with the core schema .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = False , ** arg_3 ) : if arg_0 . fname and arg_0 . fname . exists ( ) : raise ValueError ( 'db file already exists, use force=True to overFunc' ) with arg_0 . connection ( ) as db : for arg_4 in arg_0 . tables : db . execute ( arg_4 . sql ( translate = arg_0 . translate ) ) db . execute ( 'PRAGMA foreign_keys = ON;' ) db . commit ( ) arg_5 = defaultdict ( list ) for arg_6 in arg_0 . tables : if arg_6 . name not in arg_3 : continue arg_7 , arg_8 = [ ] , [ ] arg_9 = { c . name : c for c in arg_6 . columns } for arg_10 , arg_11 in enumerate ( arg_3 [ arg_6 . name ] ) : arg_12 = arg_11 [ arg_6 . primary_key [ 0 ] ] if arg_6 . primary_key and len ( arg_6 . primary_key ) == 1 else None arg_13 = [ ] for arg_14 , arg_15 in arg_11 . items ( ) : if arg_14 in arg_6 . many_to_many : assert arg_12 arg_16 = arg_6 . many_to_many [ arg_14 ] arg_17 = tuple ( [ arg_16 . name ] + [ c . name for c in arg_16 . columns ] ) for arg_18 in arg_15 : arg_19 , arg_20 = arg_0 . association_table_context ( arg_6 , arg_14 , arg_18 ) arg_5 [ arg_17 ] . append ( ( arg_12 , arg_19 , arg_20 ) ) else : arg_21 = arg_9 [ arg_14 ] if isinstance ( arg_15 , list ) : arg_15 = ( arg_21 . separator or ';' ) . join ( arg_21 . convert ( arg_18 ) for arg_18 in arg_15 ) else : arg_15 = arg_21 . convert ( arg_15 ) if arg_15 is not None else None if arg_10 == 0 : arg_8 . append ( arg_21 . name ) arg_13 . append ( arg_15 ) arg_7 . append ( tuple ( arg_13 ) ) insert ( db , arg_0 . translate , arg_6 . name , arg_8 , * arg_7 ) for arg_17 , arg_7 in arg_5 . items ( ) : insert ( db , arg_0 . translate , arg_17 [ 0 ] , arg_17 [ 1 : ] , * arg_7 ) db . 
commit ( )","id_":251886,"task_name":"https:\/\/github.com\/cldf\/csvw\/blob\/181c94b6c599575945e52d370a415f12f3433eab\/src\/csvw\/db.py#L373-L424","negative":"r'''Method to calculate heat capacity of a solid at temperature `T`\n with a given method.\n\n This method has no exception handling; see `T_dependent_property`\n for that.\n\n Parameters\n ----------\n T : float\n Temperature at which to calculate heat capacity, [K]\n method : str\n Name of the method to use\n\n Returns\n -------\n Cp : float\n Heat capacity of the solid at T, [J\/mol\/K]"} {"query":"Check if a PE_PE is globally defined i . e . not inside a C_C","positive":"def Func ( arg_0 ) : if type ( arg_0 ) . __name__ != 'PE_PE' : arg_0 = one ( arg_0 ) . PE_PE [ 8001 ] ( ) if one ( arg_0 ) . C_C [ 8003 ] ( ) : return False arg_0 = one ( arg_0 ) . EP_PKG [ 8000 ] . PE_PE [ 8001 ] ( ) if not arg_0 : return True return Func ( arg_0 )","id_":251887,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/bridgepoint\/ooaofooa.py#L89-L103","negative":"Asynchronously request a URL and get the encoded text content of the\n body.\n\n Parameters\n ----------\n url : `str`\n URL to download.\n session : `aiohttp.ClientSession`\n An open aiohttp session.\n\n Returns\n -------\n content : `str`\n Content downloaded from the URL."} {"query":"Defers an operator overload to attr .","positive":"def Func ( arg_0 ) : @ functools . wraps ( arg_0 ) def func ( arg_1 , * arg_2 ) : return arg_0 ( arg_1 . value , * arg_2 ) return func","id_":251888,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/edward2\/random_variable.py#L32-L44","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Read encoded contents from specified path or return default .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 'utf8' ) : if not arg_0 : return arg_1 try : with io . open ( arg_0 , mode = 'r' , arg_2 = arg_2 ) as contents : return contents . Func ( ) except IOError : if arg_1 is not None : return arg_1 raise","id_":251889,"task_name":"https:\/\/github.com\/jazzband\/django-ddp\/blob\/1e1954b06fe140346acea43582515991685e4e01\/dddp\/views.py#L37-L47","negative":"format a table"} {"query":"Make sure the value evaluates to boolean True .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 is None : raise ValueError ( \"Value for environment variable '{evar_name}' can't \" \"be empty.\" . format ( evar_name = arg_1 . name ) ) return arg_0","id_":251890,"task_name":"https:\/\/github.com\/gtaylor\/evarify\/blob\/37cec29373c820eda96939633e2067d55598915b\/evarify\/filters\/python_basics.py#L100-L113","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"S20 unit to generic address","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = [ \"nvm_addr Func\" , arg_0 . envs [ \"DEV_PATH\" ] , \"%d %d %d %d\" % ( arg_1 , arg_2 , arg_3 , arg_4 ) ] arg_6 , arg_7 , arg_8 = cij . ssh . command ( arg_5 , shell = True ) if arg_6 : raise RuntimeError ( \"cij.liblight.Func: cmd fail\" ) return int ( re . 
findall ( r\"val: ([0-9a-fx]+)\" , arg_7 ) [ 0 ] , 16 )","id_":251891,"task_name":"https:\/\/github.com\/refenv\/cijoe\/blob\/21d7b2ed4ff68e0a1457e7df2db27f6334f1a379\/deprecated\/modules\/cij\/liblight.py#L110-L118","negative":"Get events from this conversation.\n\n Makes a request to load historical events if necessary.\n\n Args:\n event_id (str): (optional) If provided, return events preceding\n this event, otherwise return the newest events.\n max_events (int): Maximum number of events to return. Defaults to\n 50.\n\n Returns:\n List of :class:`.ConversationEvent` instances, ordered\n newest-first.\n\n Raises:\n KeyError: If ``event_id`` does not correspond to a known event.\n .NetworkError: If the events could not be requested."} {"query":"Dynamically adjust the reader max_in_flight . Set to 0 to immediately disable a Reader","positive":"def Func ( arg_0 , arg_1 ) : assert isinstance ( arg_1 , int ) arg_0 . max_in_flight = arg_1 if arg_1 == 0 : for arg_2 in itervalues ( arg_0 . conns ) : if arg_2 . rdy > 0 : logger . debug ( '[%s:%s] rdy: %d -> 0' , arg_2 . id , arg_0 . name , arg_2 . rdy ) arg_0 . _send_rdy ( arg_2 , 0 ) arg_0 . total_rdy = 0 else : arg_0 . need_rdy_redistributed = True arg_0 . _redistribute_rdy_state ( )","id_":251892,"task_name":"https:\/\/github.com\/nsqio\/pynsq\/blob\/48bf62d65ea63cddaa401efb23187b95511dbc84\/nsq\/reader.py#L596-L610","negative":"Prepare sys.path for running the linter checks.\n\n Within this context, each of the given arguments is importable.\n Paths are added to sys.path in corresponding order to the arguments.\n We avoid adding duplicate directories to sys.path.\n `sys.path` is reset to its original value upon exiting this context."} {"query":"Read a notebook from base64 .","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : try : return reads ( b64decode ( arg_0 ) . decode ( 'utf-8' ) , arg_1 = arg_1 ) except Exception as e : raise CorruptedFile ( e )","id_":251893,"task_name":"https:\/\/github.com\/quantopian\/pgcontents\/blob\/ed36268b7917332d16868208e1e565742a8753e1\/pgcontents\/api_utils.py#L116-L123","negative":"Enroll a single user in any number of courses using a particular course mode.\n\n Args:\n enterprise_customer: The EnterpriseCustomer which is sponsoring the enrollment\n user: The user who needs to be enrolled in the course\n course_mode: The mode with which the enrollment should be created\n *course_ids: An iterable containing any number of course IDs to eventually enroll the user in.\n\n Returns:\n Boolean: Whether or not enrollment succeeded for all courses specified"} {"query":"Returns a Raster from layer features .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ImageDriver ( 'MEM' ) arg_3 = arg_2 . raster ( arg_2 . ShortName , arg_1 . size ) arg_3 . affine = arg_1 . affine arg_5 = arg_1 . sref if not arg_5 . srid : arg_5 = SpatialReference ( 4326 ) arg_3 . sref = arg_5 arg_6 = MemoryLayer ( arg_5 , arg_0 . GetGeomType ( ) ) arg_6 . load ( arg_0 ) arg_7 = gdal . RasterizeLayer ( arg_3 . ds , ( 1 , ) , arg_6 . layer , options = [ 'ATTRIBUTE=%s' % arg_6 . id ] ) arg_6 . close ( ) return arg_3","id_":251894,"task_name":"https:\/\/github.com\/bkg\/greenwich\/blob\/57ec644dadfe43ce0ecf2cfd32a2de71e0c8c141\/greenwich\/raster.py#L63-L82","negative":"Do format for NVMe device"} {"query":"Given a variable and one of its attributes determine if the attribute is accessible inside of a Django template and return True or False accordingly","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 . 
startswith ( '_' ) : return False try : arg_2 = getattr ( arg_0 , arg_1 ) except : return False if isroutine ( arg_2 ) : if getattr ( arg_2 , 'alters_data' , False ) : return False else : try : arg_3 = getargspec ( arg_2 ) arg_4 = len ( arg_3 . args ) if arg_3 . args else 0 arg_5 = len ( arg_3 . defaults ) if arg_3 . defaults else 0 if arg_4 - arg_5 > 1 : return False except TypeError : pass return True","id_":251895,"task_name":"https:\/\/github.com\/calebsmith\/django-template-debug\/blob\/f3d52638da571164d63e5c8331d409b0743c628f\/template_debug\/utils.py#L79-L108","negative":"Called by the PDFLite object to prompt creating\r\n the font objects."} {"query":"Cast an arbitrary object or sequence to a number type","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , LiteralWrapper ) : arg_1 = arg_0 . obj elif isinstance ( arg_0 , Iterable ) and not isinstance ( arg_0 , str ) : arg_1 = next ( arg_0 , None ) else : arg_1 = arg_0 if arg_1 is None : yield 0 elif isinstance ( arg_1 , str ) : yield float ( arg_1 ) elif isinstance ( arg_1 , node ) : yield float ( strval ( arg_1 ) ) elif isinstance ( arg_1 , int ) or isinstance ( arg_1 , float ) : yield arg_1 else : raise RuntimeError ( 'Unknown type for number conversion: {}' . format ( arg_1 ) )","id_":251896,"task_name":"https:\/\/github.com\/uogbuji\/amara3-xml\/blob\/88c18876418cffc89bb85b4a3193e5002b6b39a6\/pylib\/uxml\/uxpath\/ast.py#L112-L132","negative":"Lists all temple templates and packages associated with those templates\n\n If ``template`` is None, returns the available templates for the configured\n Github org.\n\n If ``template`` is a Github path to a template, returns all projects spun\n up with that template.\n\n ``ls`` uses the github search API to find results.\n\n Note that the `temple.constants.TEMPLE_ENV_VAR` is set to 'ls' for the duration of this\n function.\n\n Args:\n github_user (str): The github user or org being searched.\n template (str, optional): The template git repo path. If provided, lists\n all projects that have been created with the provided template. Note\n that the template path is the SSH path\n (e.g. git@github.com:CloverHealth\/temple.git)\n\n Returns:\n dict: A dictionary of repository information keyed on the SSH Github url\n\n Raises:\n `InvalidGithubUserError`: When ``github_user`` is invalid"} {"query":"Find a config in our children so we can fill in variables in our other children with its data .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = None arg_3 = None if 'config' in arg_1 : if type ( arg_1 [ 'config' ] ) == str : arg_1 [ 'config' ] = ConfigFile ( arg_1 [ 'config' ] ) elif isinstance ( arg_1 [ 'config' ] , Config ) : arg_1 [ 'config' ] = arg_1 [ 'config' ] elif type ( arg_1 [ 'config' ] ) == dict : arg_1 [ 'config' ] = Config ( data = arg_1 [ 'config' ] ) else : raise TypeError ( \"Don't know how to turn {} into a Config\" . format ( type ( arg_1 [ 'config' ] ) ) ) arg_2 = arg_1 [ 'config' ] for arg_4 in arg_1 : if isinstance ( arg_1 [ arg_4 ] , Config ) : arg_3 = arg_1 [ arg_4 ] for arg_4 in arg_1 : if isinstance ( arg_1 [ arg_4 ] , Directory ) : for arg_5 in arg_1 [ arg_4 ] . _children : if arg_5 == 'config' and not arg_2 : arg_2 = arg_1 [ arg_4 ] . _children [ arg_5 ] if isinstance ( arg_1 [ arg_4 ] . _children [ arg_5 ] , Config ) : arg_3 = arg_1 [ arg_4 ] . 
_children [ arg_5 ] if arg_2 : return arg_2 else : return arg_3","id_":251897,"task_name":"https:\/\/github.com\/snare\/scruffy\/blob\/0fedc08cfdb6db927ff93c09f25f24ce5a04c541\/scruffy\/env.py#L61-L99","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Create a new empty Basilisp Python module . Modules are created for each Namespace when it is created .","positive":"def Func ( arg_0 : arg_1 , arg_2 = None ) -> types . ModuleType : arg_3 = types . ModuleType ( arg_0 , arg_2 = arg_2 ) arg_3 . __loader__ = None arg_3 . __package__ = None arg_3 . __spec__ = None arg_3 . __basilisp_bootstrapped__ = False return arg_3","id_":251898,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L118-L126","negative":"Return the thresholded z-scored `icc`."} {"query":"Allows staff to make manual payments or refunds on an invoice .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = \"Func\" arg_3 = InvoiceController . for_id_or_404 ( arg_1 ) arg_4 = forms . ManualPaymentForm ( arg_0 . POST or None , prefix = arg_2 , ) if arg_0 . POST and arg_4 . is_valid ( ) : arg_4 . instance . invoice = arg_3 . invoice arg_4 . instance . entered_by = arg_0 . user arg_4 . save ( ) arg_3 . update_status ( ) arg_4 = forms . ManualPaymentForm ( prefix = arg_2 ) arg_8 = { \"invoice\" : arg_3 . invoice , \"form\" : arg_4 , } return render ( arg_0 , \"registrasion\/Func.html\" , arg_8 )","id_":251899,"task_name":"https:\/\/github.com\/chrisjrn\/registrasion\/blob\/461d5846c6f9f3b7099322a94f5d9911564448e4\/registrasion\/views.py#L784-L826","negative":"Add missing row to a df base on a reference column\n\n ---\n\n ### Parameters\n\n *mandatory :*\n - `id_cols` (*list of str*): names of the columns used to create each group\n - `reference_col` (*str*): name of the column used to identify missing rows\n\n *optional :*\n - `complete_index` (*list* or *dict*): [A, B, C] a list of values used to add missing rows.\n It can also be a dict to declare a date range.\n By default, use all values of reference_col.\n - `method` (*str*): by default all missing rows are added. The possible values are :\n - `\"between\"` : add missing rows having their value between min and max values for each group,\n - `\"between_and_after\"` : add missing rows having their value bigger than min value for each group.\n - `\"between_and_before\"` : add missing rows having their value smaller than max values for each group.\n - `cols_to_keep` (*list of str*): name of other columns to keep, linked to the reference_col.\n\n ---\n\n ### Example\n\n **Input**\n\n YEAR | MONTH | NAME\n :---:|:---:|:--:\n 2017|1|A\n 2017|2|A\n 2017|3|A\n 2017|1|B\n 2017|3|B\n\n ```cson\n add_missing_row:\n id_cols: ['NAME']\n reference_col: 'MONTH'\n ```\n\n **Output**\n\n YEAR | MONTH | NAME\n :---:|:---:|:--:\n 2017|1|A\n 2017|2|A\n 2017|3|A\n 2017|1|B\n 2017|2|B\n 2017|3|B"} {"query":"A generator to convert raw text segments without xml to a list of words without any markup . 
Additionally dates are replaced by 7777 for normalization .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = True ) : return sent_tokenize ( remove_dates ( _remove_urls ( arg_0 ) ) , arg_1 , arg_2 )","id_":251900,"task_name":"https:\/\/github.com\/JonathanRaiman\/ciseau\/blob\/f72d1c82d85eeb3d3ac9fac17690041725402175\/ciseau\/wiki_markup_processing.py#L118-L140","negative":"Checks if name_node has corresponding assign statement in same scope"} {"query":"Method builds a payload out of the passed arguments .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None ) : arg_5 = { 'event_type' : unicode ( arg_1 ) , 'tenant_id' : unicode ( arg_0 . used_by_tenant_id ) , 'ip_address' : unicode ( arg_0 . address_readable ) , 'ip_version' : int ( arg_0 . version ) , 'ip_type' : unicode ( arg_0 . address_type ) , 'id' : unicode ( arg_0 . id ) } if arg_1 == IP_EXISTS : if arg_3 is None or arg_4 is None : raise ValueError ( 'IP_BILL: {} start_time\/end_time cannot be empty' . format ( arg_1 ) ) arg_5 . update ( { 'startTime' : unicode ( convert_timestamp ( arg_3 ) ) , 'endTime' : unicode ( convert_timestamp ( arg_4 ) ) } ) elif arg_1 in [ IP_ADD , IP_DEL , IP_ASSOC , IP_DISASSOC ] : if arg_2 is None : raise ValueError ( 'IP_BILL: {}: event_time cannot be NULL' . format ( arg_1 ) ) arg_5 . update ( { 'eventTime' : unicode ( convert_timestamp ( arg_2 ) ) , 'subnet_id' : unicode ( arg_0 . subnet_id ) , 'network_id' : unicode ( arg_0 . network_id ) , 'public' : True if arg_0 . network_id == PUBLIC_NETWORK_ID else False , } ) else : raise ValueError ( 'IP_BILL: bad event_type: {}' . format ( arg_1 ) ) return arg_5","id_":251901,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/billing.py#L154-L208","negative":"Unregister an extension code. For testing only."} {"query":"Import settings from the given file system path to given settings instance .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = QtCore . QSettings ( arg_1 , QtCore . QSettings . IniFormat ) for arg_3 in arg_2 . allKeys ( ) : arg_0 [ arg_3 ] = arg_2 . value ( arg_3 )","id_":251902,"task_name":"https:\/\/github.com\/williballenthin\/ida-settings\/blob\/ddfeab5bd0b6f6f177d0d50f8078c585602b1d9e\/ida_settings\/ida_settings.py#L823-L832","negative":"Sets the review comment. Raises CardinalityError if\n already set. OrderError if no reviewer defined before.\n Raises SPDXValueError if comment is not free form text."} {"query":"Returns arrays of long short and gross sector exposures of an algorithm s positions","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 ) : arg_4 = arg_2 . keys ( ) arg_5 = [ ] arg_6 = [ ] arg_7 = [ ] arg_8 = [ ] arg_9 = arg_0 . drop ( 'cash' , axis = 'columns' ) arg_10 = arg_9 [ arg_9 > 0 ] . sum ( axis = 'columns' ) arg_11 = arg_9 [ arg_9 < 0 ] . abs ( ) . sum ( axis = 'columns' ) arg_12 = arg_9 . abs ( ) . sum ( axis = 'columns' ) for arg_13 in arg_4 : arg_14 = arg_9 [ arg_1 == arg_13 ] arg_15 = arg_14 [ arg_14 > 0 ] . sum ( axis = 'columns' ) . divide ( arg_10 ) arg_16 = arg_14 [ arg_14 < 0 ] . sum ( axis = 'columns' ) . divide ( arg_11 ) arg_17 = arg_14 . abs ( ) . sum ( axis = 'columns' ) . divide ( arg_12 ) arg_18 = arg_15 . subtract ( arg_16 ) arg_5 . append ( arg_15 ) arg_6 . append ( arg_16 ) arg_7 . append ( arg_17 ) arg_8 . 
append ( arg_18 ) return arg_5 , arg_6 , arg_7 , arg_8","id_":251903,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/risk.py#L119-L171","negative":"Perform dimensionality reduction on X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape (n_samples, n_features)\n New data.\n\n Returns\n -------\n X_new : array, shape (n_samples, n_components)\n Reduced version of X. This will always be a dense array."} {"query":"Apply an operation to the output of the circuit .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None ) : arg_2 = arg_2 or [ ] arg_3 = arg_3 or [ ] arg_5 = arg_0 . _bits_in_condition ( arg_4 ) arg_5 . extend ( arg_3 ) arg_0 . _check_condition ( arg_1 . name , arg_4 ) arg_0 . _check_bits ( arg_2 , arg_0 . output_map ) arg_0 . _check_bits ( arg_5 , arg_0 . output_map ) arg_0 . _add_op_node ( arg_1 , arg_2 , arg_3 , arg_4 ) arg_6 = [ arg_2 , arg_5 ] for arg_7 in itertools . chain ( * arg_6 ) : arg_8 = list ( arg_0 . _multi_graph . predecessors ( arg_0 . output_map [ arg_7 ] ) ) if len ( arg_8 ) != 1 : raise DAGCircuitError ( \"output node has multiple in-edges\" ) arg_0 . _multi_graph . add_edge ( arg_8 [ 0 ] , arg_0 . _id_to_node [ arg_0 . _max_node_id ] , name = \"%s[%s]\" % ( arg_7 [ 0 ] . name , arg_7 [ 1 ] ) , wire = arg_7 ) arg_0 . _multi_graph . remove_edge ( arg_8 [ 0 ] , arg_0 . output_map [ arg_7 ] ) arg_0 . _multi_graph . add_edge ( arg_0 . _id_to_node [ arg_0 . _max_node_id ] , arg_0 . output_map [ arg_7 ] , name = \"%s[%s]\" % ( arg_7 [ 0 ] . name , arg_7 [ 1 ] ) , wire = arg_7 ) return arg_0 . _id_to_node [ arg_0 . _max_node_id ]","id_":251904,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/dagcircuit\/dagcircuit.py#L313-L357","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Walk the knitting pattern in a right - to - left fashion .","positive":"def Func ( arg_0 ) : arg_1 = { } arg_2 = [ ] Func = [ ] for arg_4 in arg_0 . rows : arg_5 = arg_4 . rows_before [ : ] if arg_5 : arg_1 [ arg_4 ] = arg_5 else : arg_2 . append ( arg_4 ) assert arg_2 while arg_2 : arg_4 = arg_2 . pop ( 0 ) Func . append ( arg_4 ) assert arg_4 not in arg_1 for arg_6 in reversed ( arg_4 . rows_after ) : arg_7 = arg_1 [ arg_6 ] arg_7 . remove ( arg_4 ) if not arg_7 : del arg_1 [ arg_6 ] arg_2 . insert ( 0 , arg_6 ) assert not arg_1 , \"everything is walked\" return Func","id_":251905,"task_name":"https:\/\/github.com\/fossasia\/knittingpattern\/blob\/8e608896b0ab82fea1ca9fbfa2b4ee023d8c8027\/knittingpattern\/walk.py#L4-L35","negative":"Verify course ID and retrieve course details."} {"query":"This just reads a pickled fake LC .","positive":"def Func ( arg_0 ) : try : with open ( arg_0 , 'rb' ) as infd : arg_1 = pickle . load ( infd ) except UnicodeDecodeError : with open ( arg_0 , 'rb' ) as infd : arg_1 = pickle . load ( infd , encoding = 'latin1' ) return arg_1","id_":251906,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/fakelcs\/recovery.py#L84-L109","negative":"Init openstack neutron mq\n\n 1. Check if enable listening neutron notification\n 2. 
Create consumer\n\n :param mq: class ternya.mq.MQ"} {"query":"Execute a Perceval job on RQ .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None , arg_6 = arg_7 ) : arg_8 = rq . get_current_job ( ) arg_9 = PercevalJob ( arg_8 . id , arg_3 , arg_0 , arg_4 , arg_8 . connection , arg_2 ) logger . debug ( \"Running job #%s (task: %s) (%s) (cat:%s)\" , arg_9 . job_id , arg_3 , arg_0 , arg_4 ) if not arg_9 . has_archiving ( ) and arg_5 : raise AttributeError ( \"archive attributes set but archive is not supported\" ) arg_10 = True arg_11 = False arg_12 = 0 while arg_10 : try : arg_9 . run ( arg_1 , arg_5 = arg_5 , arg_11 = arg_11 ) except AttributeError as e : raise e except Exception as e : logger . debug ( \"Error running job %s (%s) - %s\" , arg_9 . job_id , arg_0 , str ( e ) ) arg_12 += 1 if not arg_9 . has_resuming ( ) or arg_12 >= arg_6 : logger . error ( \"Cancelling job #%s (task: %s) (%s)\" , arg_9 . job_id , arg_3 , arg_0 ) raise e logger . warning ( \"Resuming job #%s (task: %s) (%s) due to a failure (n %s, max %s)\" , arg_9 . job_id , arg_3 , arg_0 , arg_12 , arg_6 ) arg_11 = True else : arg_10 = False arg_13 = arg_9 . result logger . debug ( \"Job #%s (task: %s) completed (%s) - %s items (%s) fetched\" , arg_13 . job_id , arg_3 , arg_13 . backend , str ( arg_13 . nitems ) , arg_13 . category ) return arg_13","id_":251907,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-kingarthur\/blob\/9d6a638bee68d5e5c511f045eeebf06340fd3252\/arthur\/jobs.py#L244-L315","negative":"Given a Dusty repo object, clone the remote into Dusty's local repos\n directory if it does not already exist."} {"query":"For a 2D array and mask map the values of all unmasked pixels to a 1D array .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = mask_util . total_regular_pixels_from_mask ( arg_0 ) arg_3 = np . zeros ( shape = arg_2 ) arg_4 = 0 for arg_5 in range ( arg_0 . shape [ 0 ] ) : for arg_6 in range ( arg_0 . shape [ 1 ] ) : if not arg_0 [ arg_5 , arg_6 ] : arg_3 [ arg_4 ] = arg_1 [ arg_5 , arg_6 ] arg_4 += 1 return arg_3","id_":251908,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/data\/array\/util\/mapping_util.py#L147-L193","negative":"Add all filenames in the given list to the parser's set."} {"query":"GBM model demo .","positive":"def Func ( arg_0 = True , arg_1 = True , arg_2 = False ) : def demo_body ( arg_3 ) : arg_3 ( ) h2o . init ( ) arg_3 ( ) arg_4 = h2o . load_dataset ( \"prostate\" ) arg_3 ( ) arg_4 . describe ( ) arg_3 ( ) arg_5 , arg_6 = arg_4 . split_frame ( ratios = [ 0.70 ] ) arg_3 ( ) arg_5 [ \"CAPSULE\" ] = arg_5 [ \"CAPSULE\" ] . asfactor ( ) arg_6 [ \"CAPSULE\" ] = arg_6 [ \"CAPSULE\" ] . asfactor ( ) arg_3 ( ) from h2o . estimators import H2OGradientBoostingEstimator arg_7 = H2OGradientBoostingEstimator ( distribution = \"bernoulli\" , ntrees = 10 , max_depth = 8 , min_rows = 10 , learn_rate = 0.2 ) arg_7 . train ( x = [ \"AGE\" , \"RACE\" , \"PSA\" , \"VOL\" , \"GLEASON\" ] , y = \"CAPSULE\" , training_frame = arg_5 ) arg_3 ( ) arg_7 . show ( ) arg_3 ( ) arg_8 = arg_7 . predict ( arg_6 ) arg_8 . show ( ) arg_3 ( ) from h2o . tree import H2OTree , H2ONode arg_9 = H2OTree ( arg_7 , 0 , \"0\" ) len ( arg_9 ) arg_9 . left_children arg_9 . right_children arg_9 . root_node . show ( ) arg_3 ( ) arg_10 = arg_7 . model_performance ( arg_6 ) arg_10 . 
show ( ) _run_demo ( demo_body , arg_0 , arg_1 , arg_2 )","id_":251909,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/demos.py#L20-L85","negative":"Start monitoring of the alarm status.\n\n Send command to satel integra to start sending updates. Read in a\n loop and call respective callbacks when received messages."} {"query":"Write variable header","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . write ( struct . pack ( 'b3xI' , etypes [ 'miUINT32' ] [ 'n' ] , 8 ) ) arg_0 . write ( struct . pack ( 'b3x4x' , mclasses [ arg_1 [ 'mclass' ] ] ) ) write_elements ( arg_0 , 'miINT32' , arg_1 [ 'dims' ] ) write_elements ( arg_0 , 'miINT8' , asbytes ( arg_1 [ 'name' ] ) , is_name = True )","id_":251910,"task_name":"https:\/\/github.com\/nephics\/mat4py\/blob\/6c1a2ad903937437cc5f24f3c3f5aa2c5a77a1c1\/mat4py\/savemat.py#L170-L182","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Context manager to execute a code block in a directory .","positive":"def Func ( arg_0 , arg_1 = True ) : arg_2 = os . getcwd ( ) try : try : os . chdir ( arg_0 ) logger . debug ( \"Working in {directory!r}...\" . format ( ** vars ( ) ) ) except OSError as err : if arg_1 and err . errno == errno . ENOENT : os . makedirs ( arg_0 ) os . chdir ( arg_0 ) logger . info ( \"Working in {directory!r} (newly created)...\" . format ( ** vars ( ) ) ) else : logger . exception ( \"Failed to start working in {directory!r}.\" . format ( ** vars ( ) ) ) raise yield os . getcwd ( ) finally : os . chdir ( arg_2 )","id_":251911,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/utilities.py#L411-L435","negative":"Given configuration initiate a SigningService instance\n\n :param config: The signing service configuration\n :param entity_id: The entity identifier\n :return: A SigningService instance"} {"query":"Updates the profile s config entry with values set in each attr by the user . This will overwrite existing values .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 : if arg_2 in arg_1 . data [ arg_0 . profile ] and arg_2 is not \"auth\" : arg_1 . data [ arg_0 . profile ] [ arg_2 ] = getattr ( arg_0 , arg_2 )","id_":251912,"task_name":"https:\/\/github.com\/trp07\/messages\/blob\/7789ebc960335a59ea5d319fceed3dd349023648\/messages\/_config.py#L136-L147","negative":"Returns how the result count compares to the query options.\n\n The return value is negative if too few results were found, zero if enough were found, and\n positive if too many were found.\n\n Returns:\n int: -1, 0, or 1."} {"query":"Add a client authenticator class to CLIENT_MECHANISMS_D CLIENT_MECHANISMS and optionally to SECURE_CLIENT_MECHANISMS","positive":"def Func ( arg_0 , arg_1 ) : arg_2 [ arg_1 ] = arg_0 arg_3 = sorted ( arg_2 . items ( ) , key = _key_func , reverse = True ) arg_4 [ : ] = [ k for ( k , v ) in arg_3 ] arg_5 [ : ] = [ k for ( k , v ) in arg_3 if v . 
_pyxmpp_sasl_secure ]","id_":251913,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/sasl\/core.py#L444-L453","negative":"Returns the bounds of the index\n\n :param coordinate_interleaved: If True, the coordinates are turned\n in the form [xmin, ymin, ..., kmin, xmax, ymax, ..., kmax],\n otherwise they are returned as\n [xmin, xmax, ymin, ymax, ..., ..., kmin, kmax]. If not specified,\n the :attr:`interleaved` member of the index is used, which\n defaults to True."} {"query":"check if dependency program is there","positive":"def Func ( arg_0 ) : return _subprocess . call ( \"type \" + arg_0 , shell = True , stdout = _subprocess . PIPE , stderr = _subprocess . PIPE ) == 0","id_":251914,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/__init__.py#L145-L150","negative":"Perform dimensionality reduction on X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape (n_samples, n_features)\n New data.\n\n Returns\n -------\n X_new : array, shape (n_samples, n_components)\n Reduced version of X. This will always be a dense array."} {"query":"Place the next queen at the given row .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_1 . index ( None ) arg_4 = arg_1 [ : ] arg_4 [ arg_3 ] = arg_2 return arg_4","id_":251915,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/search.py#L564-L569","negative":"Resets builder's state for building new documents.\n Must be called between usage with different documents."} {"query":"Intercept downstream websocket . accept message and thus allow this upsteam application to accept websocket frames .","positive":"async def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = not arg_0 . applications_accepting_frames arg_0 . applications_accepting_frames . add ( arg_2 ) if arg_3 : await arg_0 . accept ( )","id_":251916,"task_name":"https:\/\/github.com\/hishnash\/channelsmultiplexer\/blob\/3fa08bf56def990b3513d25e403f85357487b373\/channelsmultiplexer\/demultiplexer.py#L176-L185","negative":"Take a string and return the corresponding module"} {"query":"Validate that the name is a valid GCS bucket .","positive":"def Func ( arg_0 ) : if not arg_0 . startswith ( 'gs:\/\/' ) : raise ValueError ( 'Invalid bucket path \"%s\". Must start with \"gs:\/\/\".' % arg_0 ) arg_1 = arg_0 [ len ( 'gs:\/\/' ) : ] if not re . search ( r'^\\w[\\w_\\.-]{1,61}\\w$' , arg_1 ) : raise ValueError ( 'Invalid bucket name: %s' % arg_0 )","id_":251917,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/lib\/job_model.py#L116-L123","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Compute the mappings between a pixelization s pixels and the unmasked sub - grid pixels . These mappings \\ are determined after the regular - grid is used to determine the pixelization .","positive":"def Func ( arg_0 ) : Func = [ [ ] for _ in range ( arg_0 . pixels ) ] for arg_2 , arg_3 in enumerate ( arg_0 . sub_to_pix ) : Func [ arg_3 ] . 
append ( arg_2 ) return Func","id_":251918,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/model\/inversion\/mappers.py#L110-L121","negative":"Returns the coach ID for the team's OC in a given year.\n\n :year: An int representing the year.\n :returns: A string containing the coach ID of the OC."} {"query":"Creates a plane with a specified number of vertices on it sides but no vertices on the interior .","positive":"def Func ( arg_0 , arg_1 = 1.0 , arg_2 = 1 , arg_3 = 1 , arg_4 = False , arg_5 = None ) : arg_1 = util . make_list ( arg_1 , 2 ) grid ( arg_0 , arg_1 = [ arg_2 + arg_3 - 1 , 1 ] , arg_2 = ( arg_2 + arg_3 - 1 ) , arg_3 = 1 ) if ml_script1 . ml_version == '1.3.4BETA' : arg_6 = 'and' else : arg_6 = '&&' if arg_0 . ml_version == '1.3.4BETA' : transform . vert_function ( arg_0 , x_func = 'if((y>0) and (x<%s),0,x)' % ( arg_3 ) , y_func = 'if((y>0) and (x<%s),(x+1)*%s,y)' % ( arg_3 , arg_1 [ 1 ] \/ arg_3 ) ) transform . vert_function ( arg_0 , x_func = 'if((y>0) and (x>=%s),(x-%s+1)*%s,x)' % ( arg_3 , arg_3 , arg_1 [ 0 ] \/ arg_2 ) , y_func = 'if((y>0) and (x>=%s),%s,y)' % ( arg_3 , arg_1 [ 1 ] ) ) transform . vert_function ( arg_0 , x_func = 'if((y<.00001) and (x>%s),%s,x)' % ( arg_2 , arg_1 [ 0 ] ) , y_func = 'if((y<.00001) and (x>%s),(x-%s)*%s,y)' % ( arg_2 , arg_2 , arg_1 [ 1 ] \/ arg_3 ) ) transform . vert_function ( arg_0 , x_func = 'if((y<.00001) and (x<=%s) and (x>0),(x)*%s,x)' % ( arg_2 , arg_1 [ 0 ] \/ arg_2 ) , y_func = 'if((y<.00001) and (x<=%s) and (x>0),0,y)' % ( arg_2 ) ) else : transform . vert_function ( arg_0 , x_func = '((y>0) && (x<{yseg}) ? 0 : x)' . format ( yseg = arg_3 ) , y_func = '((y>0) && (x<%s) ? (x+1)*%s : y)' % ( arg_3 , arg_1 [ 1 ] \/ arg_3 ) ) transform . vert_function ( arg_0 , x_func = '((y>0) && (x>=%s) ? (x-%s+1)*%s : x)' % ( arg_3 , arg_3 , arg_1 [ 0 ] \/ arg_2 ) , y_func = '((y>0) && (x>=%s) ? %s : y)' % ( arg_3 , arg_1 [ 1 ] ) ) transform . vert_function ( arg_0 , x_func = '((y<.00001) && (x>%s) ? %s : x)' % ( arg_2 , arg_1 [ 0 ] ) , y_func = '((y<.00001) && (x>%s) ? (x-%s)*%s : y)' % ( arg_2 , arg_2 , arg_1 [ 1 ] \/ arg_3 ) ) transform . vert_function ( arg_0 , x_func = '((y<.00001) && (x<=%s) && (x>0) ? (x)*%s : x)' % ( arg_2 , arg_1 [ 0 ] \/ arg_2 ) , y_func = '((y<.00001) && (x<=%s) && (x>0) ? 0 : y)' % ( arg_2 ) ) if arg_4 : transform . translate ( arg_0 , [ - arg_1 [ 0 ] \/ 2 , - arg_1 [ 1 ] \/ 2 ] ) if arg_5 is not None : vert_color . function ( arg_0 , arg_5 = arg_5 ) return None","id_":251919,"task_name":"https:\/\/github.com\/3DLIRIOUS\/MeshLabXML\/blob\/177cce21e92baca500f56a932d66bd9a33257af8\/meshlabxml\/create.py#L510-L583","negative":"Gets back all response headers."} {"query":"Given a ChangeSet POST it to the Route53 API .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = xml_generators . change_resource_record_set_writer ( connection = arg_0 , arg_1 = arg_1 , arg_2 = arg_2 ) arg_4 = arg_0 . _send_request ( path = 'hostedzone\/%s\/rrset' % arg_1 . hosted_zone_id , data = arg_3 , method = 'POST' , ) arg_5 = arg_4 . find ( '.\/{*}ChangeInfo' ) if arg_5 is None : arg_6 = arg_4 . find ( '.\/{*}Error' ) . find ( '.\/{*}Message' ) . 
text raise Route53Error ( arg_6 ) return parse_change_info ( arg_5 )","id_":251920,"task_name":"https:\/\/github.com\/gtaylor\/python-route53\/blob\/b9fc7e258a79551c9ed61e4a71668b7f06f9e774\/route53\/connection.py#L278-L312","negative":"Based on values in the exclude_dictionary generate a list of term queries that\n will filter out unwanted results."} {"query":"Return the first element of a binned expression where the values each bin are sorted by order_expression .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = [ ] , arg_4 = None , arg_5 = arg_6 , arg_7 = False , arg_8 = False , arg_9 = False , arg_10 = None ) : return arg_0 . _compute_agg ( 'Func' , arg_1 , arg_3 , arg_4 , arg_5 , arg_7 , arg_8 , arg_9 , arg_10 , extra_expressions = [ arg_2 ] ) logger . debug ( \"count(%r, binby=%r, limits=%r)\" , arg_1 , arg_3 , arg_4 ) logger . debug ( \"count(%r, binby=%r, limits=%r)\" , arg_1 , arg_3 , arg_4 ) arg_1 = _ensure_strings_from_expressions ( arg_1 ) arg_2 = _ensure_string_from_expression ( arg_2 ) arg_3 = _ensure_strings_from_expressions ( arg_3 ) arg_11 , [ arg_12 , ] = vaex . utils . listify ( arg_1 ) @ delayed def finish ( * arg_13 ) : arg_13 = np . asarray ( arg_13 ) return vaex . utils . unlistify ( arg_11 , arg_13 ) arg_14 = vaex . utils . progressbars ( arg_10 ) arg_4 = arg_0 . limits ( arg_3 , arg_4 , arg_8 = True , arg_5 = arg_5 ) arg_15 = [ arg_0 . _Func_calculation ( arg_1 , arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_7 = arg_7 , arg_9 = arg_9 , arg_14 = arg_14 ) for arg_1 in arg_12 ] arg_16 = finish ( * arg_15 ) return arg_0 . _delay ( arg_8 , arg_16 )","id_":251921,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/dataframe.py#L625-L665","negative":"Clean up stats file, if configured to do so."} {"query":"Makes a striplog of all unions .","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , arg_0 . __class__ ) : arg_2 = \"You can only Func striplogs with each other.\" raise StriplogError ( arg_2 ) arg_3 = [ ] for arg_4 in deepcopy ( arg_0 ) : for arg_5 in arg_1 : if arg_4 . any_overlaps ( arg_5 ) : arg_4 = arg_4 . Func ( arg_5 ) arg_3 . append ( arg_4 ) return Striplog ( arg_3 )","id_":251922,"task_name":"https:\/\/github.com\/agile-geoscience\/striplog\/blob\/8033b673a151f96c29802b43763e863519a3124c\/striplog\/striplog.py#L1823-L1843","negative":"Get the filesystem path to a file that contains OpenSSL-compatible CA certs.\n\n On OS X and Windows, there are extracted from the system certificate store\n and cached in a file on the filesystem. This path should not be writable\n by other users, otherwise they could inject CA certs into the trust list.\n\n :param temp_dir:\n The temporary directory to cache the CA certs in on OS X and Windows.\n Needs to have secure permissions so other users can not modify the\n contents.\n\n :param cache_length:\n The number of hours to cache the CA certs on OS X and Windows\n\n :param cert_callback:\n A callback that is called once for each certificate in the trust store.\n It should accept two parameters: an asn1crypto.x509.Certificate object,\n and a reason. 
The reason will be None if the certificate is being\n exported, otherwise it will be a unicode string of the reason it won't.\n This is only called on Windows and OS X when passed to this function.\n\n :raises:\n oscrypto.errors.CACertsError - when an error occurs exporting\/locating certs\n\n :return:\n The full filesystem path to a CA certs file"} {"query":"Helper for wheel_color that distributes colors over length and allows shifting position .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return wheel_color ( ( arg_0 * len ( _WHEEL ) \/ arg_1 ) + arg_2 )","id_":251923,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/colors\/wheel.py#L31-L34","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"Create a nice image name from the url .","positive":"def Func ( arg_0 ) : arg_1 = r'https?:\/\/|[^\\w]' arg_2 = '_' return re . sub ( arg_1 , arg_2 , arg_0 ) . strip ( '_' )","id_":251924,"task_name":"https:\/\/github.com\/DistilledLtd\/heimdall\/blob\/7568c915a2e5bce759750d5456b39ea3498a6683\/heimdall\/heimdall.py#L91-L96","negative":"Re-enable the FTDI drivers for the current platform."} {"query":"Return the number of ports .","positive":"def Func ( arg_0 , arg_1 = None ) : LOG . info ( \"Func for tenant %s filters %s\" % ( arg_0 . tenant_id , arg_1 ) ) return db_api . port_count_all ( arg_0 , join_security_groups = True , ** arg_1 )","id_":251925,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/plugin_modules\/ports.py#L578-L597","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Formats answers depending on fmt .","positive":"def Func ( arg_0 , arg_1 = 'obj' ) : arg_2 = ( 'obj' , 'array' , 'plain' ) if arg_1 not in arg_2 : eprint ( \"Error: '{}' not in {}\" . format ( arg_1 , arg_2 ) ) return def stringify ( arg_3 ) : if type ( arg_3 ) in ( list , tuple ) : return ', ' . join ( str ( arg_4 ) for arg_4 in arg_3 ) return arg_3 if arg_1 == 'obj' : return json . dumps ( arg_0 . answers ) elif arg_1 == 'array' : arg_5 = [ [ k , v ] for k , v in arg_0 . answers . items ( ) ] return json . dumps ( arg_5 ) elif arg_1 == 'plain' : arg_5 = '\\n' . join ( '{}: {}' . format ( k , stringify ( v ) ) for k , v in arg_0 . answers . items ( ) ) return arg_5","id_":251926,"task_name":"https:\/\/github.com\/kylebebak\/questionnaire\/blob\/ed92642e8a2a0198da198acbcde2707f1d528585\/questionnaire\/__init__.py#L241-L261","negative":"Awake one process waiting to receive data on fd"} {"query":"Deletes the specified file from the given S3 bucket .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = S3Connection ( arg_0 . access_key_id , arg_0 . access_key_secret ) arg_4 = arg_3 . get_bucket ( arg_2 ) if type ( arg_1 ) . __name__ == 'Key' : arg_1 = '\/' + arg_1 . name arg_5 = arg_0 . _get_s3_path ( arg_1 ) arg_6 = Key ( arg_4 ) arg_6 . key = arg_5 try : arg_4 . 
delete_key ( arg_6 ) except S3ResponseError : pass","id_":251927,"task_name":"https:\/\/github.com\/Jaza\/s3-saver\/blob\/81dc4447d76c2fc0b0238fb96fa70e879612e355\/s3_saver.py#L59-L75","negative":"Generates a JSON string with the params to be used\n when sending CONNECT to the server.\n\n ->> CONNECT {\"lang\": \"python3\"}"} {"query":"Hamiltonian Monte Carlo TransitionOperator .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 , arg_6 : arg_7 , arg_8 : arg_9 = None , arg_10 : arg_3 = None , arg_11 : arg_12 = None , arg_13 : arg_14 [ [ arg_15 , arg_16 ] , arg_17 ] = lambda * arg_18 : ( ) , arg_19 = None , ) -> Tuple [ arg_1 , HamiltonianMonteCarloExtra ] : arg_20 = arg_0 . state arg_21 = arg_0 . state_grads arg_22 = arg_0 . target_log_prob arg_23 = arg_0 . state_extra if arg_10 is None : def arg_10 ( * arg_8 ) : return tf . add_n ( [ tf . reduce_sum ( input_tensor = tf . square ( arg_24 ) , axis = - 1 ) \/ 2. for arg_24 in tf . nest . flatten ( arg_8 ) ] ) , ( ) if arg_11 is None : def arg_11 ( * arg_8 ) : arg_25 = tf . nest . map_structure ( lambda arg_24 : tf . random . normal ( tf . shape ( input = arg_24 ) , dtype = arg_24 . dtype ) , arg_8 ) if len ( arg_25 ) == 1 : return arg_25 [ 0 ] else : return arg_25 if arg_8 is None : arg_8 = call_fn ( arg_11 , tf . nest . map_structure ( tf . zeros_like , arg_20 ) ) if arg_22 is None : arg_22 , arg_23 , arg_21 = call_and_grads ( arg_2 , arg_20 ) arg_26 , arg_27 = call_fn ( arg_10 , arg_8 ) arg_28 = - arg_22 + arg_26 arg_29 = arg_1 ( arg_20 = arg_20 , arg_21 = arg_21 , arg_23 = arg_23 , arg_22 = arg_22 ) def leapfrog_wrapper ( arg_30 , arg_22 , arg_23 ) : del arg_22 del arg_23 arg_30 , arg_31 = leapfrog_step ( arg_30 , arg_4 = arg_4 , arg_2 = arg_2 , arg_10 = arg_10 ) return [ arg_30 , arg_31 . target_log_prob , arg_31 . state_extra ] , arg_31 def leapfrog_trace_wrapper_fn ( arg_18 , arg_31 ) : return arg_13 ( arg_18 [ 0 ] , arg_31 ) arg_32 = ( arg_15 ( arg_20 , arg_21 , arg_8 ) , arg_22 , arg_23 ) [ [ arg_30 , arg_22 , arg_23 ] , arg_27 ] , arg_33 = trace ( arg_32 , leapfrog_wrapper , arg_6 , trace_fn = leapfrog_trace_wrapper_fn ) arg_26 , arg_27 = call_fn ( arg_10 , arg_30 . momentum ) arg_34 = - arg_22 + arg_26 arg_35 = arg_1 ( arg_20 = arg_30 . state , arg_21 = arg_30 . state_grads , arg_22 = arg_22 , arg_23 = arg_23 ) arg_36 = arg_34 - arg_28 arg_0 , arg_37 , arg_27 = metropolis_hastings_step ( arg_29 , arg_35 , arg_36 , arg_19 = arg_19 ) arg_0 = arg_0 return arg_0 , HamiltonianMonteCarloExtra ( arg_37 = arg_37 , proposed_hmc_state = arg_35 , log_accept_ratio = - arg_36 , arg_33 = arg_33 )","id_":251928,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/experimental\/fun_mcmc\/fun_mcmc_lib.py#L361-L517","negative":"Decompress and unpickle."} {"query":"Shrinks the trajectory and removes all exploration ranges from the parameters . Only possible if the trajectory has not been stored to disk before or was loaded as new .","positive":"def Func ( arg_0 , arg_1 = False ) : if arg_0 . _stored and not arg_1 : raise TypeError ( 'Your trajectory is already stored to disk or database, shrinking is ' 'not allowed.' ) for arg_2 in arg_0 . _explored_parameters . values ( ) : arg_2 . f_unlock ( ) try : arg_2 . _shrink ( ) except Exception as exc : arg_0 . _logger . error ( 'Could not shrink `%s` because of:`%s`' % ( arg_2 . v_full_name , repr ( exc ) ) ) arg_0 . _explored_parameters = { } arg_0 . _run_information = { } arg_0 . _single_run_ids = { } arg_0 . 
_add_run_info ( 0 ) arg_0 . _test_run_addition ( 1 )","id_":251929,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/trajectory.py#L696-L729","negative":"This method fixes a bug in Python's SGMLParser."} {"query":"Returns ON - OFF for all Stokes parameters given a cross_pols noise diode measurement","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = Waterfall ( arg_0 , max_load = 150 ) arg_4 = arg_3 . populate_freqs ( ) arg_5 = arg_3 . header [ 'tsamp' ] arg_6 = arg_3 . data arg_3 = None arg_7 , arg_8 , arg_9 , arg_10 = get_stokes ( arg_6 , arg_1 ) arg_11 , arg_12 = foldcal ( arg_7 , arg_5 , ** arg_2 ) arg_13 , arg_14 = foldcal ( arg_8 , arg_5 , ** arg_2 ) arg_15 , arg_16 = foldcal ( arg_9 , arg_5 , ** arg_2 ) arg_17 , arg_18 = foldcal ( arg_10 , arg_5 , ** arg_2 ) arg_19 = arg_12 - arg_11 arg_20 = arg_14 - arg_13 arg_21 = arg_16 - arg_15 arg_22 = arg_18 - arg_17 return arg_19 , arg_20 , arg_21 , arg_22 , arg_4","id_":251930,"task_name":"https:\/\/github.com\/UCBerkeleySETI\/blimpy\/blob\/b8822d3e3e911944370d84371a91fa0c29e9772e\/blimpy\/calib_utils\/calib_plots.py#L7-L32","negative":"Resolves VirtualEnvironments in CPENV_HOME"} {"query":"Find Vars mapped by the given Symbol input or None if no Vars are mapped by that Symbol .","positive":"def Func ( arg_0 , arg_1 : arg_1 . Symbol ) -> Optional [ Var ] : arg_3 = arg_0 . interns . entry ( arg_1 , None ) if arg_3 is None : return arg_0 . refers . entry ( arg_1 , None ) return arg_3","id_":251931,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L472-L478","negative":"This runs the pyeebls.eebls function using the given inputs.\n\n Parameters\n ----------\n\n times,mags : np.array\n The input magnitude time-series to search for transits.\n\n nfreq : int\n The number of frequencies to use when searching for transits.\n\n freqmin : float\n The minimum frequency of the period-search -> max period that will be\n used for the search.\n\n stepsize : float\n The step-size in frequency to use to generate a frequency-grid.\n\n nbins : int\n The number of phase bins to use.\n\n minduration : float\n The minimum fractional transit duration that will be considered.\n\n maxduration : float\n The maximum fractional transit duration that will be considered.\n\n Returns\n -------\n\n dict\n Returns a dict of the form::\n\n {\n 'power': the periodogram power array,\n 'bestperiod': the best period found,\n 'bestpower': the highest peak of the periodogram power,\n 'transdepth': transit depth found by eebls.f,\n 'transduration': transit duration found by eebls.f,\n 'transingressbin': transit ingress bin found by eebls.f,\n 'transegressbin': transit egress bin found by eebls.f,\n }"} {"query":"Convert this unnormalized batch to an instance of Batch .","positive":"def Func ( arg_0 ) : assert all ( [ arg_2 is None for arg_1 , arg_2 in arg_0 . __dict__ . items ( ) if arg_1 . endswith ( \"_aug\" ) ] ) , \"Expected UnnormalizedBatch to not contain any augmented data \" \"before normalization, but at least one '*_aug' attribute was \" \"already set.\" arg_3 = nlib . normalize_images ( arg_0 . images_unaug ) arg_4 = None if arg_3 is not None : arg_4 = [ image . shape for image in arg_3 ] return Batch ( images = arg_3 , heatmaps = nlib . normalize_heatmaps ( arg_0 . heatmaps_unaug , arg_4 ) , segmentation_maps = nlib . normalize_segmentation_maps ( arg_0 . segmentation_maps_unaug , arg_4 ) , keypoints = nlib . 
normalize_keypoints ( arg_0 . keypoints_unaug , arg_4 ) , bounding_boxes = nlib . normalize_bounding_boxes ( arg_0 . bounding_boxes_unaug , arg_4 ) , polygons = nlib . normalize_polygons ( arg_0 . polygons_unaug , arg_4 ) , line_strings = nlib . normalize_line_strings ( arg_0 . line_strings_unaug , arg_4 ) , data = arg_0 . data )","id_":251932,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/batches.py#L180-L223","negative":"Copy a path from inside a Dusty container to a path on the\n local filesystem. The path on the local filesystem must be\n wrist-accessible by the user specified in mac_username."} {"query":"Evaluate model on splitted 10 percent testing set","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 , arg_4 = prepare_feature ( arg_0 , option = 'test' ) arg_5 = arg_1 . predict ( [ arg_2 , arg_3 ] ) arg_5 = ( arg_5 . ravel ( ) > 0.5 ) . astype ( int ) arg_6 = f1_score ( arg_4 , arg_5 ) arg_7 = precision_score ( arg_4 , arg_5 ) arg_8 = recall_score ( arg_4 , arg_5 ) return arg_6 , arg_7 , arg_8","id_":251933,"task_name":"https:\/\/github.com\/rkcosmos\/deepcut\/blob\/9a2729071d01972af805acede85d7aa9e7a6da30\/deepcut\/train.py#L204-L217","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Creates a FloatingIP in a region without assigning it to a specific Droplet .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = arg_0 . get_data ( 'floating_ips\/' , type = POST , params = { 'region' : arg_0 . region_slug } ) if arg_3 : arg_0 . ip = arg_3 [ 'floating_ip' ] [ 'ip' ] arg_0 . region = arg_3 [ 'floating_ip' ] [ 'region' ] return arg_0","id_":251934,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/FloatingIP.py#L61-L80","negative":"This will return a single tr element, with all tds already populated."} {"query":"Runs a network in an experimental run .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_0 . _Func ( arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , pre_run = False )","id_":251935,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/brian2\/network.py#L305-L381","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Creates a reference to the parent document to allow for partial - tree validation .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . document != arg_1 : arg_0 . document = arg_1 for arg_3 in arg_0 . children : if not isinstance ( arg_3 , dom_tag ) : return arg_3 . Func ( arg_1 )","id_":251936,"task_name":"https:\/\/github.com\/Knio\/dominate\/blob\/1eb88f9fd797658eef83568a548e2ef9b546807d\/dominate\/dom_tag.py#L169-L179","negative":"Converts given J, H, Ks mags to an SDSS r magnitude value.\n\n Parameters\n ----------\n\n jmag,hmag,kmag : float\n 2MASS J, H, Ks mags of the object.\n\n Returns\n -------\n\n float\n The converted SDSS r band magnitude."} {"query":"Returns the dictionary of CORS specific app configurations .","positive":"def Func ( arg_0 ) : arg_1 = getattr ( arg_0 , 'config' , { } ) return dict ( ( arg_2 . lower ( ) . replace ( 'cors_' , '' ) , arg_1 . get ( arg_2 ) ) for arg_2 in CONFIG_OPTIONS if arg_1 . 
get ( arg_2 ) is not None )","id_":251937,"task_name":"https:\/\/github.com\/ashleysommer\/sanic-cors\/blob\/f3d68def8cf859398b3c83e4109d815f1f038ea2\/sanic_cors\/core.py#L318-L326","negative":"Regenerates the primary or secondary access key for the specified\n storage account.\n\n service_name:\n Name of the storage service account.\n key_type:\n Specifies which key to regenerate. Valid values are:\n Primary, Secondary"} {"query":"Parse the value of the state parameter .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = arg_0 . rsplit ( ':' , 1 ) if xsrfutil . validate_token ( xsrf_secret_key ( ) , arg_3 , arg_1 . user_id ( ) , action_id = arg_2 ) : return arg_2 else : return None","id_":251938,"task_name":"https:\/\/github.com\/googleapis\/oauth2client\/blob\/50d20532a748f18e53f7d24ccbe6647132c979a9\/oauth2client\/contrib\/appengine.py#L450-L467","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"run a build meaning creating a build . Retry if there is failure","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . _get_project ( ) bot . custom ( 'PROJECT' , arg_4 , \"CYAN\" ) bot . custom ( 'BUILD ' , arg_1 [ 'steps' ] [ 0 ] [ 'name' ] , \"CYAN\" ) arg_5 = arg_0 . _build_service . projects ( ) . builds ( ) . create ( body = arg_1 , projectId = arg_4 ) . execute ( ) arg_6 = arg_5 [ 'metadata' ] [ 'build' ] [ 'id' ] arg_7 = arg_5 [ 'metadata' ] [ 'build' ] [ 'status' ] bot . log ( \"build %s: %s\" % ( arg_6 , arg_7 ) ) arg_8 = time . time ( ) while arg_7 not in [ 'COMPLETE' , 'FAILURE' , 'SUCCESS' ] : time . sleep ( 15 ) arg_5 = arg_0 . _build_service . projects ( ) . builds ( ) . get ( id = arg_6 , projectId = arg_4 ) . execute ( ) arg_6 = arg_5 [ 'id' ] arg_7 = arg_5 [ 'status' ] bot . log ( \"build %s: %s\" % ( arg_6 , arg_7 ) ) arg_9 = time . time ( ) bot . log ( 'Total build time: %s seconds' % ( round ( arg_9 - arg_8 , 2 ) ) ) if arg_7 == 'SUCCESS' : arg_10 = 'SREGISTRY_GOOGLE_STORAGE_PRIVATE' arg_11 = arg_2 . blob ( arg_5 [ 'artifacts' ] [ 'objects' ] [ 'paths' ] [ 0 ] ) if arg_0 . _get_and_update_setting ( arg_10 ) == None : arg_11 . make_public ( ) arg_5 [ 'public_url' ] = arg_11 . public_url update_blob_metadata ( arg_11 , arg_5 , arg_1 , arg_2 , arg_3 ) arg_5 [ 'media_link' ] = arg_11 . media_link arg_5 [ 'size' ] = arg_11 . size arg_5 [ 'file_hash' ] = arg_11 . md5_hash return arg_5","id_":251939,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/google_build\/build.py#L174-L222","negative":"Returns a list of the dicom files within root_path\n\n Parameters\n ----------\n root_path: str\n Path to the directory to be recursively searched for DICOM files.\n\n Returns\n -------\n dicoms: set\n Set of DICOM absolute file paths"} {"query":"Handle a POST request .","positive":"def Func ( arg_0 , arg_1 = '0' ) : arg_2 = arg_0 . get_thing ( arg_1 ) if arg_2 is None : arg_0 . set_status ( 404 ) return try : arg_3 = json . loads ( arg_0 . request . body . decode ( ) ) except ValueError : arg_0 . set_status ( 400 ) return arg_4 = { } for arg_5 , arg_6 in arg_3 . items ( ) : arg_7 = None if 'input' in arg_6 : arg_7 = arg_6 [ 'input' ] arg_8 = arg_2 . perform_action ( arg_5 , arg_7 ) if arg_8 : arg_4 . update ( arg_8 . as_action_description ( ) ) tornado . 
ioloop . IOLoop . current ( ) . spawn_callback ( perform_action , arg_8 , ) arg_0 . set_status ( 201 ) arg_0 . write ( json . dumps ( arg_4 ) )","id_":251940,"task_name":"https:\/\/github.com\/mozilla-iot\/webthing-python\/blob\/65d467c89ed79d0bbc42b8b3c8f9e5a320edd237\/webthing\/server.py#L417-L451","negative":"Returns a new random state.\n\n Parameters\n ----------\n seed : None or int, optional\n Optional seed value to use.\n The same datatypes are allowed as for ``numpy.random.RandomState(seed)``.\n\n fully_random : bool, optional\n Whether to use numpy's random initialization for the\n RandomState (used if set to True). If False, a seed is sampled from\n the global random state, which is a bit faster and hence the default.\n\n Returns\n -------\n numpy.random.RandomState\n The new random state."} {"query":"Return adjusted start and stop index as tuple .","positive":"def Func ( arg_0 , arg_1 : arg_2 [ arg_3 , arg_4 ] ) -> ( arg_4 , arg_4 ) : arg_5 , arg_6 = arg_0 . _span if isinstance ( arg_1 , arg_4 ) : if arg_1 < 0 : arg_1 += arg_6 - arg_5 if arg_1 < 0 : raise IndexError ( 'index out of range' ) elif arg_1 >= arg_6 - arg_5 : raise IndexError ( 'index out of range' ) arg_7 = arg_5 + arg_1 return arg_7 , arg_7 + 1 if arg_1 . step is not None : raise NotImplementedError ( 'step is not implemented for string setter.' ) arg_7 , arg_8 = arg_1 . start or 0 , arg_1 . stop if arg_7 < 0 : arg_7 += arg_6 - arg_5 if arg_7 < 0 : raise IndexError ( 'start index out of range' ) if arg_8 is None : arg_8 = arg_6 - arg_5 elif arg_8 < 0 : arg_8 += arg_6 - arg_5 if arg_7 > arg_8 : raise IndexError ( 'stop index out of range or start is after the stop' ) return arg_7 + arg_5 , arg_8 + arg_5","id_":251941,"task_name":"https:\/\/github.com\/5j9\/wikitextparser\/blob\/1347425814361d7955342c53212edbb27f0ff4b5\/wikitextparser\/_wikitext.py#L171-L202","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"r Take pauli string to construct pauli .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = np . zeros ( len ( arg_1 ) , dtype = np . bool ) arg_3 = np . zeros ( len ( arg_1 ) , dtype = np . bool ) for arg_4 , arg_5 in enumerate ( arg_1 ) : if arg_5 == 'X' : arg_3 [ - arg_4 - 1 ] = True elif arg_5 == 'Z' : arg_2 [ - arg_4 - 1 ] = True elif arg_5 == 'Y' : arg_2 [ - arg_4 - 1 ] = True arg_3 [ - arg_4 - 1 ] = True elif arg_5 != 'I' : raise QiskitError ( \"Pauli string must be only consisted of 'I', 'X', \" \"'Y' or 'Z' but you have {}.\" . format ( arg_5 ) ) return arg_0 ( arg_2 = arg_2 , arg_3 = arg_3 )","id_":251942,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/pauli.py#L82-L110","negative":"Setup coverage related extensions."} {"query":"Creates a generator that does all the work .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . bundle . group_transactions ( ) arg_2 = arg_0 . bundle . hash arg_3 = len ( arg_0 . bundle ) - 1 arg_4 = 0 arg_5 = 0 for arg_6 in arg_1 : for arg_7 in arg_6 : arg_4 += arg_7 . value if arg_7 . bundle_hash != arg_2 : yield 'Transaction {i} has invalid bundle hash.' . format ( i = arg_5 , ) if arg_7 . current_index != arg_5 : yield ( 'Transaction {i} has invalid current index value ' '(expected {i}, actual {actual}).' . format ( actual = arg_7 . current_index , i = arg_5 , ) ) if arg_7 . 
last_index != arg_3 : yield ( 'Transaction {i} has invalid last index value ' '(expected {expected}, actual {actual}).' . format ( actual = arg_7 . last_index , expected = arg_3 , i = arg_5 , ) ) arg_5 += 1 if arg_4 != 0 : yield ( 'Bundle has invalid balance ' '(expected 0, actual {actual}).' . format ( actual = arg_4 , ) ) if not arg_0 . _errors : arg_8 = [ ] for arg_6 in arg_1 : if arg_6 [ 0 ] . value >= 0 : continue arg_9 = True for arg_10 , arg_7 in enumerate ( arg_6 ) : if ( arg_10 > 0 ) and ( arg_7 . value != 0 ) : yield ( 'Transaction {i} has invalid value ' '(expected 0, actual {actual}).' . format ( actual = arg_7 . value , i = arg_7 . current_index , ) ) arg_9 = False continue if arg_9 : arg_8 . append ( arg_6 ) if arg_8 : for arg_11 in arg_0 . _get_bundle_signature_errors ( arg_8 ) : yield arg_11","id_":251943,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/transaction\/validator.py#L68-L186","negative":"display connection info, and store ports"} {"query":"NamePrefix = string","positive":"def Func ( arg_0 = None , ** arg_1 ) : arg_2 = arg_0 . Func ( ** arg_1 ) if not arg_2 . get ( \"Rules\" ) : arg_2 . update ( { \"Rules\" : [ ] } ) return arg_2","id_":251944,"task_name":"https:\/\/github.com\/Netflix-Skunkworks\/cloudaux\/blob\/c4b0870c3ac68b1c69e71d33cf78b6a8bdf437ea\/cloudaux\/aws\/events.py#L9-L17","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Process post - publication events coming out of the database .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = arg_0 . module_ident , arg_0 . ident_hash arg_4 = get_current_registry ( ) . celery_app arg_1 . execute ( 'SELECT result_id::text ' 'FROM document_baking_result_associations ' 'WHERE module_ident = %s' , ( arg_2 , ) ) for arg_5 in arg_1 . fetchall ( ) : arg_6 = arg_4 . AsyncResult ( arg_5 [ 0 ] ) . state if arg_6 in ( 'QUEUED' , 'STARTED' , 'RETRY' ) : logger . debug ( 'Already queued module_ident={} ident_hash={}' . format ( arg_2 , arg_3 ) ) return logger . debug ( 'Queued for processing module_ident={} ident_hash={}' . format ( arg_2 , arg_3 ) ) arg_7 = _get_recipe_ids ( arg_2 , arg_1 ) update_module_state ( arg_1 , arg_2 , 'processing' , arg_7 [ 0 ] ) arg_1 . connection . commit ( ) arg_8 = 'cnxpublishing.subscribers.baking_processor' arg_9 = arg_4 . tasks [ arg_8 ] arg_5 = arg_9 . delay ( arg_2 , arg_3 ) arg_9 . backend . store_result ( arg_5 . id , None , 'QUEUED' ) track_baking_proc_state ( arg_5 , arg_2 , arg_1 )","id_":251945,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/subscribers.py#L32-L64","negative":"Asset minifier\n Uses default minifier in bundle if it's not defined\n\n :rtype: static_bundle.minifiers.DefaultMinifier|None"} {"query":"Return a dict of keys that differ with another config object .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = set ( arg_0 . keys ( ) + arg_1 . keys ( ) ) arg_3 = { } for arg_4 in arg_2 : if arg_0 . get ( arg_4 ) != arg_1 . get ( arg_4 ) : arg_3 [ arg_4 ] = ( arg_0 . get ( arg_4 ) , arg_1 . 
get ( arg_4 ) ) return arg_3","id_":251946,"task_name":"https:\/\/github.com\/napalm-automation\/napalm-yang\/blob\/998e8a933171d010b8544bcc5dc448e2b68051e2\/interactive_demo\/ansible\/callback\/selective.py#L63-L70","negative":"Parse the conservation predictors\n\n Args:\n variant(dict): A variant dictionary\n\n Returns:\n conservations(dict): A dictionary with the conservations"} {"query":"Returns a normalized unit number i . e . integers Raises exception X10InvalidUnitNumber if unit number appears to be invalid","positive":"def Func ( arg_0 ) : try : try : arg_0 = int ( arg_0 ) except ValueError : raise X10InvalidUnitNumber ( '%r not a valid unit number' % arg_0 ) except TypeError : raise X10InvalidUnitNumber ( '%r not a valid unit number' % arg_0 ) if not ( 1 <= arg_0 <= 16 ) : raise X10InvalidUnitNumber ( '%r not a valid unit number' % arg_0 ) return arg_0","id_":251947,"task_name":"https:\/\/github.com\/clach04\/x10_any\/blob\/5b90a543b127ab9e6112fd547929b5ef4b8f0cbc\/x10_any\/__init__.py#L83-L96","negative":"This method will iterate over all process in the pipeline and\n populate the nextflow configuration files with the directives\n of each process in the pipeline."} {"query":"Checks the Scrabble score of a single word .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0 ) : arg_3 = 0 arg_4 = 0 arg_5 = [ ] arg_6 = list ( arg_1 ) for arg_7 in arg_0 : if arg_7 in arg_6 : arg_4 += 1 arg_3 += letter_score ( arg_7 ) arg_6 . remove ( arg_7 ) else : arg_5 . append ( letter_score ( arg_7 ) ) for arg_8 in sorted ( arg_5 , reverse = True ) : if arg_2 > 0 : arg_3 += arg_8 arg_2 -= 1 if arg_4 > 6 : arg_3 += 50 return arg_3","id_":251948,"task_name":"https:\/\/github.com\/a-tal\/nagaram\/blob\/2edcb0ef8cb569ebd1c398be826472b4831d6110\/nagaram\/scrabble.py#L34-L69","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Returns the Message object for this message .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> list : if arg_1 [ 1 : ] . isdigit ( ) : arg_1 = arg_1 . upper ( ) for arg_3 in ( arg_0 . _alternative_names , arg_0 . _messages_definitions ) : try : return [ arg_3 [ arg_1 ] ] except KeyError : pass arg_4 = \"No such message id or symbol '{msgid_or_symbol}'.\" . 
format ( arg_1 = arg_1 ) raise UnknownMessageError ( arg_4 )","id_":251949,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/message\/message_store.py#L162-L180","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"quit command when several threads are involved .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = threading . enumerate ( ) arg_3 = threading . currentThread ( ) for arg_4 in arg_2 : if arg_4 != arg_3 : ctype_async_raise ( arg_4 , Mexcept . DebuggerQuit ) pass pass raise Mexcept . DebuggerQuit","id_":251950,"task_name":"https:\/\/github.com\/rocky\/python3-trepan\/blob\/14e91bc0acce090d67be145b1ac040cab92ac5f3\/trepan\/processor\/command\/quit.py#L89-L98","negative":"Accept a publication request at form value 'epub"} {"query":"Output profiler report .","positive":"def Func ( arg_0 , arg_1 ) : log . debug ( 'printing profiler Func' ) arg_0 . prof . close ( ) arg_2 = stats . load ( arg_0 . pfile ) arg_2 . sort_stats ( arg_0 . sort ) arg_3 = hasattr ( arg_2 , 'stream' ) if arg_3 : arg_4 = arg_2 . stream arg_2 . stream = arg_1 else : arg_4 = arg_5 . stdout arg_5 . stdout = arg_1 try : if arg_0 . restrict : log . debug ( 'setting profiler restriction to %s' , arg_0 . restrict ) arg_2 . print_stats ( * arg_0 . restrict ) else : arg_2 . print_stats ( ) finally : if arg_3 : arg_2 . stream = arg_4 else : arg_5 . stdout = arg_4","id_":251951,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/plugins\/prof.py#L97-L125","negative":"Sets the player's paused state."} {"query":"Read response payload and decode .","positive":"async def Func ( arg_0 , * , arg_1 : arg_2 [ arg_3 ] = None , arg_4 : arg_3 = 'strict' ) -> arg_3 : return await arg_0 . _aws_Func ( arg_1 = arg_1 , arg_4 = arg_4 )","id_":251952,"task_name":"https:\/\/github.com\/howie6879\/ruia\/blob\/2dc5262fc9c3e902a8faa7d5fa2f046f9d9ee1fa\/ruia\/response.py#L136-L141","negative":"Clean up stats file, if configured to do so."} {"query":"This method will remove any stored records within the range from start to end . Noninclusive of end .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = None ) : arg_3 = arg_0 . htm_prediction_model . _getAnomalyClassifier ( ) arg_4 = arg_3 . getSelf ( ) . _knn arg_5 = numpy . array ( arg_3 . getSelf ( ) . getParameter ( 'categoryRecencyList' ) ) if arg_2 is None : arg_2 = arg_5 . max ( ) + 1 arg_6 = numpy . logical_and ( arg_5 >= arg_1 , arg_5 < arg_2 ) arg_7 = arg_5 [ arg_6 ] arg_8 = arg_4 . _numPatterns arg_4 . removeIds ( arg_7 . tolist ( ) ) assert arg_4 . _numPatterns == arg_8 - len ( arg_7 )","id_":251953,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/frameworks\/opf\/htm_prediction_model_classifier_helper.py#L365-L391","negative":"Load a configuration module and return a Config"} {"query":"Generic method for a resource s Update endpoint .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_0 . data . Func ( arg_0 . item_Func ( arg_0 . api , arg_0 . id , ** arg_1 ) ) return arg_0 . 
data","id_":251954,"task_name":"https:\/\/github.com\/attm2x\/m2x-python\/blob\/df83f590114692b1f96577148b7ba260065905bb\/m2x\/v2\/resource.py#L17-L34","negative":"Factory for loading the ccd data from .fits files, as well as computing properties like the noise-map,\n exposure-time map, etc. from the ccd-data.\n\n This factory also includes a number of routines for converting the ccd-data from units not supported by PyAutoLens \\\n (e.g. adus, electrons) to electrons per second.\n\n Parameters\n ----------\n lens_name\n image_path : str\n The path to the image .fits file containing the image (e.g. '\/path\/to\/image.fits')\n pixel_scale : float\n The size of each pixel in arc seconds.\n image_hdu : int\n The hdu the image is contained in the .fits file specified by *image_path*. \n image_hdu : int\n The hdu the image is contained in the .fits file that *image_path* points too.\n resized_ccd_shape : (int, int) | None\n If input, the ccd arrays that are image sized, e.g. the image, noise-maps) are resized to these dimensions.\n resized_ccd_origin_pixels : (int, int) | None\n If the ccd arrays are resized, this defines a new origin (in pixels) around which recentering occurs.\n resized_ccd_origin_arcsec : (float, float) | None\n If the ccd arrays are resized, this defines a new origin (in arc-seconds) around which recentering occurs.\n psf_path : str\n The path to the psf .fits file containing the psf (e.g. '\/path\/to\/psf.fits') \n psf_hdu : int\n The hdu the psf is contained in the .fits file specified by *psf_path*.\n resized_psf_shape : (int, int) | None\n If input, the psf is resized to these dimensions.\n renormalize_psf : bool\n If True, the PSF is renoralized such that all elements sum to 1.0.\n noise_map_path : str\n The path to the noise_map .fits file containing the noise_map (e.g. '\/path\/to\/noise_map.fits') \n noise_map_hdu : int\n The hdu the noise_map is contained in the .fits file specified by *noise_map_path*.\n noise_map_from_image_and_background_noise_map : bool\n If True, the noise-map is computed from the observed image and background noise-map \\\n (see NoiseMap.from_image_and_background_noise_map).\n convert_noise_map_from_weight_map : bool\n If True, the noise-map loaded from the .fits file is converted from a weight-map to a noise-map (see \\\n *NoiseMap.from_weight_map).\n convert_noise_map_from_inverse_noise_map : bool\n If True, the noise-map loaded from the .fits file is converted from an inverse noise-map to a noise-map (see \\\n *NoiseMap.from_inverse_noise_map).\n background_noise_map_path : str\n The path to the background_noise_map .fits file containing the background noise-map \\ \n (e.g. '\/path\/to\/background_noise_map.fits') \n background_noise_map_hdu : int\n The hdu the background_noise_map is contained in the .fits file specified by *background_noise_map_path*.\n convert_background_noise_map_from_weight_map : bool\n If True, the bacground noise-map loaded from the .fits file is converted from a weight-map to a noise-map (see \\\n *NoiseMap.from_weight_map).\n convert_background_noise_map_from_inverse_noise_map : bool\n If True, the background noise-map loaded from the .fits file is converted from an inverse noise-map to a \\\n noise-map (see *NoiseMap.from_inverse_noise_map).\n poisson_noise_map_path : str\n The path to the poisson_noise_map .fits file containing the Poisson noise-map \\\n (e.g. 
'\/path\/to\/poisson_noise_map.fits') \n poisson_noise_map_hdu : int\n The hdu the poisson_noise_map is contained in the .fits file specified by *poisson_noise_map_path*.\n poisson_noise_map_from_image : bool\n If True, the Poisson noise-map is estimated using the image.\n convert_poisson_noise_map_from_weight_map : bool\n If True, the Poisson noise-map loaded from the .fits file is converted from a weight-map to a noise-map (see \\\n *NoiseMap.from_weight_map).\n convert_poisson_noise_map_from_inverse_noise_map : bool\n If True, the Poisson noise-map loaded from the .fits file is converted from an inverse noise-map to a \\\n noise-map (see *NoiseMap.from_inverse_noise_map).\n exposure_time_map_path : str\n The path to the exposure_time_map .fits file containing the exposure time map \\ \n (e.g. '\/path\/to\/exposure_time_map.fits') \n exposure_time_map_hdu : int\n The hdu the exposure_time_map is contained in the .fits file specified by *exposure_time_map_path*.\n exposure_time_map_from_single_value : float\n The exposure time of the ccd imaging, which is used to compute the exposure-time map as a single value \\\n (see *ExposureTimeMap.from_single_value*).\n exposure_time_map_from_inverse_noise_map : bool\n If True, the exposure-time map is computed from the background noise_map map \\\n (see *ExposureTimeMap.from_background_noise_map*)\n background_sky_map_path : str\n The path to the background_sky_map .fits file containing the background sky map \\\n (e.g. '\/path\/to\/background_sky_map.fits').\n background_sky_map_hdu : int\n The hdu the background_sky_map is contained in the .fits file specified by *background_sky_map_path*.\n convert_from_electrons : bool\n If True, the input unblurred_image_1d are in units of electrons and all converted to electrons \/ second using the exposure \\\n time map.\n gain : float\n The image gain, used for convert from ADUs.\n convert_from_adus : bool\n If True, the input unblurred_image_1d are in units of adus and all converted to electrons \/ second using the exposure \\\n time map and gain."} {"query":"Yields each node of object graph in postorder .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None ) : arg_5 = Node ( value = arg_0 , arg_1 = arg_1 , arg_2 = arg_2 , arg_4 = arg_4 , arg_3 = arg_3 ) if isinstance ( arg_0 , list ) : arg_6 = len ( arg_0 ) for arg_7 , arg_8 in enumerate ( arg_0 ) : for arg_9 in Func ( arg_8 , arg_5 , None , arg_7 + 1 , arg_6 ) : yield arg_9 elif isinstance ( arg_0 , collections . Mapping ) : for arg_10 in arg_0 : for arg_9 in Func ( arg_0 [ arg_10 ] , arg_5 , arg_10 ) : yield arg_9 yield arg_5","id_":251955,"task_name":"https:\/\/github.com\/mwhooker\/jsonselect\/blob\/c64aa9ea930de0344797ff87b04c753c8fc096a6\/jsonselect\/jsonselect.py#L98-L114","negative":"Parses the API response and raises appropriate errors if\n raise_errors was set to True"} {"query":"PUTs the object and returns the results . This is used to create or overwrite objects . X - Object - Meta - xxx can optionally be sent to be stored with the object . Content - Type Content - Encoding and other standard HTTP headers can often also be set depending on the Swift cluster .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = None , arg_6 = False ) : arg_7 = arg_0 . _object_path ( arg_1 , arg_2 ) return arg_0 . 
request ( 'PUT' , arg_7 , arg_3 , arg_4 , arg_5 = arg_5 , arg_6 = arg_6 )","id_":251956,"task_name":"https:\/\/github.com\/gholt\/swiftly\/blob\/5bcc1c65323b1caf1f85adbefd9fc4988c072149\/swiftly\/client\/client.py#L604-L640","negative":"Wrapper function for TUN and serial port monitoring\n\n Wraps the necessary functions to loop over until self._isRunning\n threading.Event() is set(). This checks for data on the TUN\/serial\n interfaces and then sends data over the appropriate interface. This\n function is automatically run when Threading.start() is called on the\n Monitor class."} {"query":"Get item with min key of tree raises ValueError if tree is empty .","positive":"def Func ( arg_0 ) : if arg_0 . is_empty ( ) : raise ValueError ( \"Tree is empty\" ) arg_1 = arg_0 . _root while arg_1 . left is not None : arg_1 = arg_1 . left return arg_1 . key , arg_1 . value","id_":251957,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/external\/poly_point_isect_py2py3.py#L844-L851","negative":"write lines, one by one, separated by \\n to device"} {"query":"A function to construct a hierarchical dictionary representing the different citation layers of a text","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = OrderedDict ( ) arg_3 = [ x for x in arg_1 ] for arg_4 , arg_5 in arg_0 : arg_6 = arg_4 . split ( '-' ) [ 0 ] arg_7 = [ '%{}|{}%' . format ( arg_3 [ i ] . name , v ) for i , v in enumerate ( arg_6 . split ( '.' ) ) ] arg_8 ( arg_2 , arg_7 [ : - 1 ] ) [ arg_5 ] = arg_4 return arg_2","id_":251958,"task_name":"https:\/\/github.com\/Capitains\/flask-capitains-nemo\/blob\/8d91f2c05b925a6c8ea8c997baf698c87257bc58\/flask_nemo\/filters.py#L55-L71","negative":"Set the value,bounds,free,errors based on corresponding kwargs\n\n The invokes hooks for type-checking and bounds-checking that\n may be implemented by sub-classes."} {"query":"Creates a shift","positive":"def Func ( arg_0 , arg_1 = { } ) : arg_2 = \"\/2\/shifts\/\" arg_3 = arg_1 arg_4 = arg_0 . _post_resource ( arg_2 , arg_3 ) arg_5 = arg_0 . shift_from_json ( arg_4 [ \"shift\" ] ) return arg_5","id_":251959,"task_name":"https:\/\/github.com\/uw-it-cte\/uw-restclients-wheniwork\/blob\/0d3ca09d5bbe808fec12e5f943596570d33a1731\/uw_wheniwork\/shifts.py#L52-L64","negative":"Prune the cache"} {"query":"Returns a given table for the given user .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = True ) : arg_1 = arg_1 or [ ] arg_3 = boto . connect_dynamodb ( * arg_1 ) Func = arg_3 . get_table ( arg_0 ) return Table ( Func = Func , arg_2 = arg_2 )","id_":251960,"task_name":"https:\/\/github.com\/kenneth-reitz\/dynamo\/blob\/e24276a7e68d868857fd1d0deabccd001920e0c2\/dynamo.py#L113-L119","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"clearRedisPools - Disconnect all managed connection pools and clear the connectiobn_pool attribute on all stored managed connection pools .","positive":"def Func ( ) : global RedisPools global _redisManagedConnectionParams for arg_0 in RedisPools . values ( ) : try : arg_0 . disconnect ( ) except : pass for arg_1 in _redisManagedConnectionParams . values ( ) : for arg_2 in arg_1 : if 'connection_pool' in arg_2 : del arg_2 [ 'connection_pool' ] RedisPools . clear ( ) _redisManagedConnectionParams . 
clear ( )","id_":251961,"task_name":"https:\/\/github.com\/kata198\/indexedredis\/blob\/f9c85adcf5218dac25acb06eedc63fc2950816fa\/IndexedRedis\/__init__.py#L119-L146","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Gets valid user credentials from storage .","positive":"def Func ( arg_0 ) : with arg_0 . AUTHENTICATION_LOCK : log . info ( 'Starting authentication for %s' , arg_0 . target ) arg_1 = oauth2client . file . Storage ( arg_0 . credentials_path ) arg_2 = arg_1 . get ( ) if not arg_2 or arg_2 . invalid : log . info ( \"No valid login. Starting OAUTH flow.\" ) arg_3 = oauth2client . client . flow_from_clientsecrets ( arg_0 . client_secret_path , arg_0 . SCOPES ) arg_3 . user_agent = arg_0 . APPLICATION_NAME arg_5 = oauth2client . tools . argparser . parse_args ( [ ] ) arg_2 = oauth2client . tools . run_flow ( arg_3 , arg_1 , arg_5 ) log . info ( 'Storing credentials to %r' , arg_0 . credentials_path ) return arg_2","id_":251962,"task_name":"https:\/\/github.com\/ralphbean\/bugwarrior\/blob\/b2a5108f7b40cb0c437509b64eaa28f941f7ac8b\/bugwarrior\/services\/gmail.py#L120-L140","negative":"Get a single publication."} {"query":"Propagate clk clock and reset rst signal to all subcomponents","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . clk arg_2 = arg_0 . rst for arg_3 in arg_0 . _units : _tryConnect ( arg_1 , arg_3 , 'clk' ) _tryConnect ( ~ arg_2 , arg_3 , 'rst_n' ) _tryConnect ( arg_2 , arg_3 , 'rst' )","id_":251963,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/interfaces\/utils.py#L57-L67","negative":"validate source directory names in components"} {"query":"Set callback which will be called when new service appeared online and sent Hi message","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None ) : arg_0 . _remoteServiceHelloCallback = arg_1 arg_0 . _remoteServiceHelloCallbackTypesFilter = arg_2 arg_0 . 
_remoteServiceHelloCallbackScopesFilter = arg_3","id_":251964,"task_name":"https:\/\/github.com\/andreikop\/python-ws-discovery\/blob\/a7b852cf43115c6f986e509b1870d6963e76687f\/wsdiscovery\/daemon.py#L291-L303","negative":"If ctx is a cairocffi Context convert it to a PyCairo Context\n otherwise return the original context\n\n :param ctx:\n :return:"} {"query":"Init project .","positive":"def Func ( arg_0 ) : return [ ( ( '--yes' , ) , dict ( action = 'store_true' , help = 'clean .git repo' ) ) , ( ( '--variable' , '-s' ) , dict ( nargs = '+' , help = 'set extra variable,format is name:value' ) ) , ( ( '--skip-builtin' , ) , dict ( action = 'store_true' , help = 'skip replace builtin variable' ) ) , ]","id_":251965,"task_name":"https:\/\/github.com\/wangwenpei\/cliez\/blob\/d6fe775544cd380735c56c8a4a79bc2ad22cb6c4\/cliez\/components\/init.py#L264-L274","negative":"Revoke the token and remove the cookie."} {"query":"Draw all bounding boxes onto a given image .","positive":"def Func ( arg_0 , arg_1 , arg_2 = ( 0 , 255 , 0 ) , arg_3 = 1.0 , arg_4 = 1 , arg_5 = True , arg_6 = False , arg_7 = None ) : arg_1 = np . copy ( arg_1 ) if arg_5 else arg_1 for arg_8 in arg_0 . bounding_boxes : arg_1 = arg_8 . Func ( arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = False , arg_6 = arg_6 , arg_7 = arg_7 ) return arg_1","id_":251966,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/bbs.py#L954-L1005","negative":"Returns a string contains start time and the secondary training job status message.\n\n :param job_description: Returned response from DescribeTrainingJob call\n :type job_description: dict\n :param prev_description: Previous job description from DescribeTrainingJob call\n :type prev_description: dict\n\n :return: Job status string to be printed."} {"query":"Initialize and run command line interface .","positive":"def Func ( ) : arg_0 = create_parser ( ) arg_1 = arg_0 . parse_args ( ) check_arguments ( arg_1 , arg_0 ) run ( arg_0 , arg_1 )","id_":251967,"task_name":"https:\/\/github.com\/RRZE-HPC\/kerncraft\/blob\/c60baf8043e4da8d8d66da7575021c2f4c6c78af\/kerncraft\/kerncraft.py#L308-L320","negative":"Called by the PDFLite object to prompt creating\r\n the font objects."} {"query":"Sets the autocommit flag on the connection","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_0 . supports_autocommit and arg_2 : arg_0 . log . warn ( ( \"%s connection doesn't support \" \"autocommit but autocommit activated.\" ) , getattr ( arg_0 , arg_0 . conn_name_attr ) ) arg_1 . autocommit = arg_2","id_":251968,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/dbapi_hook.py#L168-L177","negative":"Add one or more files or URLs to the manifest.\n If files contains a glob, it is expanded.\n\n All files are uploaded to SolveBio. The Upload\n object is used to fill the manifest."} {"query":"Jumps short if RCX register is 0 .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . PC = Operators . ITEBV ( arg_0 . address_bit_size , arg_0 . RCX == 0 , arg_1 . read ( ) , arg_0 . PC )","id_":251969,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L3225-L3232","negative":"Get merge notes"} {"query":"Gets an array from datasets .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_1 in arg_0 . keys ( ) : arg_3 = arg_0 [ arg_1 ] . 
value if arg_2 is None : return arg_3 else : if _np . shape ( arg_3 ) == _np . shape ( arg_2 ) : return arg_3 return arg_2","id_":251970,"task_name":"https:\/\/github.com\/joelfrederico\/SciSalt\/blob\/7bf57c49c7dde0a8b0aa337fbd2fbd527ce7a67f\/scisalt\/h5\/h5.py#L28-L44","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Opens an SSH connection .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = '' , arg_3 = None , arg_4 = None ) : from burlap . common import get_hosts_for_site if arg_3 is not None : arg_0 . dryrun = arg_3 arg_5 = arg_0 . local_renderer if arg_5 . genv . SITE != arg_5 . genv . default_site : arg_6 = get_hosts_for_site ( ) if arg_6 : arg_5 . genv . host_string = arg_6 [ 0 ] arg_5 . env . SITE = arg_5 . genv . SITE or arg_5 . genv . default_site if int ( arg_1 ) : arg_5 . env . Func_default_options . append ( '-X' ) if 'host_string' not in arg_0 . genv or not arg_0 . genv . host_string : if 'available_sites' in arg_0 . genv and arg_5 . env . SITE not in arg_5 . genv . available_sites : raise Exception ( 'No host_string set. Unknown site %s.' % arg_5 . env . SITE ) else : raise Exception ( 'No host_string set.' ) if '@' in arg_5 . genv . host_string : arg_5 . env . Func_host_string = arg_5 . genv . host_string else : arg_5 . env . Func_host_string = '{user}@{host_string}' if arg_2 : arg_5 . env . Func_interactive_cmd_str = arg_2 else : arg_5 . env . Func_interactive_cmd_str = arg_5 . format ( arg_4 or arg_5 . env . Func_interactive_cmd ) arg_5 . env . Func_default_options_str = ' ' . join ( arg_5 . env . Func_default_options ) if arg_0 . is_local : arg_0 . vprint ( 'Using direct local.' ) arg_13 = '{Func_interactive_cmd_str}' elif arg_5 . genv . key_filename : arg_0 . vprint ( 'Using key filename.' ) arg_14 = arg_5 . env . Func_host_string . split ( ':' ) [ - 1 ] if arg_14 . isdigit ( ) : arg_5 . env . Func_host_string = arg_5 . env . Func_host_string . split ( ':' ) [ 0 ] + ( ' -p %s' % arg_14 ) arg_13 = 'ssh -t {Func_default_options_str} -i {key_filename} {Func_host_string} \"{Func_interactive_cmd_str}\"' elif arg_5 . genv . password : arg_0 . vprint ( 'Using password.' ) arg_13 = 'ssh -t {Func_default_options_str} {Func_host_string} \"{Func_interactive_cmd_str}\"' else : arg_0 . vprint ( 'Using nothing.' ) arg_13 = 'ssh -t {Func_default_options_str} {Func_host_string} \"{Func_interactive_cmd_str}\"' arg_5 . local ( arg_13 )","id_":251971,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/debug.py#L196-L251","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Collect the learner completion data from the course certificate .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . certificates_api is None : arg_0 . certificates_api = CertificatesApiClient ( arg_0 . user ) arg_3 = arg_1 . course_id arg_4 = arg_1 . enterprise_customer_user . user . username try : arg_5 = arg_0 . certificates_api . get_course_certificate ( arg_3 , arg_4 ) arg_6 = arg_5 . get ( 'created_date' ) if arg_6 : arg_6 = parse_datetime ( arg_6 ) else : arg_6 = timezone . 
now ( ) arg_7 = arg_5 . get ( 'is_passing' ) arg_8 = arg_0 . grade_passing if arg_7 else arg_0 . grade_failing except HttpNotFoundError : arg_6 = None arg_8 = arg_0 . grade_incomplete arg_7 = False return arg_6 , arg_8 , arg_7","id_":251972,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/integrated_channel\/exporters\/learner_data.py#L173-L215","negative":"Restricts a function in n-dimensions to a given direction.\n\n Suppose f: R^n -> R. Then given a point x0 and a vector p0 in R^n, the\n restriction of the function along that direction is defined by:\n\n ```None\n g(t) = f(x0 + t * p0)\n ```\n\n This function performs this restriction on the given function. In addition, it\n also computes the gradient of the restricted function along the restriction\n direction. This is equivalent to computing `dg\/dt` in the definition above.\n\n Args:\n value_and_gradients_function: Callable accepting a single real `Tensor`\n argument of shape `[..., n]` and returning a tuple of a real `Tensor` of\n shape `[...]` and a real `Tensor` of shape `[..., n]`. The multivariate\n function whose restriction is to be computed. The output values of the\n callable are the function value and the gradients at the input argument.\n position: `Tensor` of real dtype and shape consumable by\n `value_and_gradients_function`. Corresponds to `x0` in the definition\n above.\n direction: `Tensor` of the same dtype and shape as `position`. The direction\n along which to restrict the function. Note that the direction need not\n be a unit vector.\n\n Returns:\n restricted_value_and_gradients_func: A callable accepting a tensor of shape\n broadcastable to `[...]` and same dtype as `position` and returning a\n namedtuple of `Tensors`. The input tensor is the parameter along the\n direction labelled `t` above. The return value contains fields:\n x: A real `Tensor` of shape `[...]`. The input value `t` where the line\n function was evaluated, after any necessary broadcasting.\n f: A real `Tensor` of shape `[...]` containing the value of the\n function at the point `position + t * direction`.\n df: A real `Tensor` of shape `[...]` containing the derivative at\n `position + t * direction`.\n full_gradient: A real `Tensor` of shape `[..., n]`, the full gradient\n of the original `value_and_gradients_function`."} {"query":"A command line auto complete similar behavior . Find all item with same prefix of this one .","positive":"def Func ( arg_0 , arg_1 = False ) : arg_2 = arg_0 . basename arg_3 = arg_0 . basename . lower ( ) if arg_1 : def match ( arg_4 ) : return arg_4 . startswith ( arg_2 ) else : def match ( arg_4 ) : return arg_4 . lower ( ) . startswith ( arg_3 ) arg_5 = list ( ) if arg_0 . is_dir ( ) : arg_5 . append ( arg_0 ) for arg_6 in arg_0 . sort_by_abspath ( arg_0 . select ( recursive = False ) ) : arg_5 . append ( arg_6 ) else : arg_7 = arg_0 . parent if arg_7 . is_dir ( ) : for arg_6 in arg_0 . sort_by_abspath ( arg_7 . select ( recursive = False ) ) : if match ( arg_6 . basename ) : arg_5 . 
append ( arg_6 ) else : raise ValueError ( \"'%s' directory does not exist!\" % arg_7 ) return arg_5","id_":251973,"task_name":"https:\/\/github.com\/MacHu-GWU\/pathlib_mate-project\/blob\/f9fb99dd7cc9ea05d1bec8b9ce8f659e8d97b0f1\/pathlib_mate\/mate_tool_box.py#L57-L87","negative":"Parses the API response and raises appropriate errors if\n raise_errors was set to True"} {"query":"Toggle back and forth between a name and a tuple representation .","positive":"def Func ( arg_0 ) : arg_1 = ',' in arg_0 or arg_0 . startswith ( '0x' ) or arg_0 . startswith ( '#' ) arg_2 = name_to_color ( arg_0 ) return color_to_name ( arg_2 ) if arg_1 else str ( arg_2 )","id_":251974,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/colors\/names.py#L69-L82","negative":"Return a list of published courses for the passed account ID."} {"query":"Build an xsd complexType out of a S_SDT .","positive":"def Func ( arg_0 ) : arg_1 = nav_one ( arg_0 ) . S_DT [ 17 ] ( ) arg_2 = ET . Element ( 'xs:complexType' , name = arg_1 . name ) arg_3 = lambda selected : not nav_one ( selected ) . S_MBR [ 46 , 'succeeds' ] ( ) arg_4 = nav_any ( arg_0 ) . S_MBR [ 44 ] ( arg_3 ) while arg_4 : arg_1 = nav_one ( arg_4 ) . S_DT [ 45 ] ( ) arg_5 = get_type_name ( arg_1 ) ET . SubElement ( arg_2 , 'xs:attribute' , name = arg_4 . name , type = arg_5 ) arg_4 = nav_one ( arg_4 ) . S_MBR [ 46 , 'precedes' ] ( ) return arg_2","id_":251975,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/bridgepoint\/gen_xsd_schema.py#L126-L142","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Read a WAV file and returns the data and sample rate","positive":"def Func ( arg_0 ) : from scipy . io . wavfile import read as Func arg_1 , arg_2 = Func ( arg_0 ) return arg_2 , arg_1","id_":251976,"task_name":"https:\/\/github.com\/cokelaer\/spectrum\/blob\/bad6c32e3f10e185098748f67bb421b378b06afe\/src\/spectrum\/io.py#L5-L16","negative":"Register unit object on interface level object"} {"query":"fills the duplicates array from the multi_muscle_align tmp files","positive":"def Func ( arg_0 ) : arg_1 = glob . glob ( os . path . join ( arg_0 . tmpdir , \"duples_*.tmp.npy\" ) ) arg_1 . sort ( key = lambda x : int ( x . rsplit ( \"_\" , 1 ) [ - 1 ] [ : - 8 ] ) ) arg_2 = h5py . File ( arg_0 . clust_database , 'r+' ) arg_3 = arg_2 [ \"duplicates\" ] arg_4 = 0 for arg_5 in arg_1 : arg_6 = int ( arg_5 . rsplit ( \"_\" , 1 ) [ - 1 ] [ : - 8 ] ) arg_7 = np . load ( arg_5 ) arg_3 [ arg_4 : arg_6 ] = arg_7 arg_4 += arg_6 - arg_4 LOGGER . info ( \"all duplicates: %s\" , arg_3 [ : ] . sum ( ) ) arg_2 . close ( )","id_":251977,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/cluster_across.py#L718-L742","negative":"REBINDING state."} {"query":"Collects and returns a list of values from the given iterable . If the n parameter is not specified collects all values from the iterable .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 : return list ( itertools . 
islice ( arg_0 , arg_1 ) ) else : return list ( arg_0 )","id_":251978,"task_name":"https:\/\/github.com\/mshroyer\/pointfree\/blob\/a25ecb3f0cd583e0730ecdde83018e5089711854\/pointfree.py#L649-L675","negative":"Propagate \"clk\" clock and reset \"rst\" signal to all subcomponents"} {"query":"Custom serialization functionality for working with advanced data types .","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , np . integer ) : return int ( arg_0 ) elif isinstance ( arg_0 , np . floating ) : return float ( arg_0 ) elif isinstance ( arg_0 , np . ndarray ) : return arg_0 . tolist ( ) elif isinstance ( arg_0 , list ) : return [ Func ( arg_1 ) for arg_1 in arg_0 ] elif isinstance ( arg_0 , Observation ) : return { arg_2 : Func ( arg_3 ) for arg_2 , arg_3 in six . iteritems ( arg_0 . _asdict ( ) ) } return arg_0","id_":251979,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/core.py#L2081-L2104","negative":"List all events occurring at or after a timestamp."} {"query":"Creates and connects the underlying text widget .","positive":"def Func ( arg_0 ) : if arg_0 . custom_control : arg_1 = arg_0 . custom_control ( ) elif arg_0 . kind == 'plain' : arg_1 = QtGui . QPlainTextEdit ( ) elif arg_0 . kind == 'rich' : arg_1 = QtGui . QTextEdit ( ) arg_1 . setAcceptRichText ( False ) arg_1 . installEventFilter ( arg_0 ) arg_1 . viewport ( ) . installEventFilter ( arg_0 ) arg_1 . customContextMenuRequested . connect ( arg_0 . _custom_context_menu_requested ) arg_1 . copyAvailable . connect ( arg_0 . copy_available ) arg_1 . redoAvailable . connect ( arg_0 . redo_available ) arg_1 . undoAvailable . connect ( arg_0 . undo_available ) arg_2 = arg_1 . document ( ) . documentLayout ( ) arg_2 . documentSizeChanged . disconnect ( ) arg_2 . documentSizeChanged . connect ( arg_0 . _adjust_scrollbars ) arg_1 . setAttribute ( QtCore . Qt . WA_InputMethodEnabled , True ) arg_1 . setContextMenuPolicy ( QtCore . Qt . CustomContextMenu ) arg_1 . setReadOnly ( True ) arg_1 . setUndoRedoEnabled ( False ) arg_1 . setVerticalScrollBarPolicy ( QtCore . Qt . ScrollBarAlwaysOn ) return arg_1","id_":251980,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/console_widget.py#L996-L1034","negative":"Load roster from an XML file.\n\n Can be used before the connection is started to load saved\n roster copy, for efficient retrieval of versioned roster.\n\n :Parameters:\n - `source`: file name or a file object\n :Types:\n - `source`: `str` or file-like object"} {"query":"Create a ByteParser on demand .","positive":"def Func ( arg_0 ) : if not arg_0 . _byte_parser : arg_0 . _byte_parser = ByteParser ( text = arg_0 . text , filename = arg_0 . filename ) return arg_0 . _byte_parser","id_":251981,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/parser.py#L69-L74","negative":"For all DAG IDs in the SimpleDagBag, look for task instances in the\n old_states and set them to new_state if the corresponding DagRun\n does not exist or exists but is not in the running state. 
This\n normally should not happen, but it can if the state of DagRuns are\n changed manually.\n\n :param old_states: examine TaskInstances in this state\n :type old_state: list[airflow.utils.state.State]\n :param new_state: set TaskInstances to this state\n :type new_state: airflow.utils.state.State\n :param simple_dag_bag: TaskInstances associated with DAGs in the\n simple_dag_bag and with states in the old_state will be examined\n :type simple_dag_bag: airflow.utils.dag_processing.SimpleDagBag"} {"query":"Wraps a function with reporting to errors backend","positive":"def Func ( arg_0 = None , arg_1 = None , arg_2 = True , arg_3 = True ) : if arg_0 : return flawless . client . client . _Func_with_error_decorator ( arg_0 = arg_0 , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 ) else : return functools . partial ( flawless . client . client . _Func_with_error_decorator , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 )","id_":251982,"task_name":"https:\/\/github.com\/shopkick\/flawless\/blob\/c54b63ca1991c153e6f75080536f6df445aacc64\/flawless\/client\/decorators.py#L20-L41","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."} {"query":"A text - based job status checker","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , arg_3 = False , arg_4 = arg_5 . stdout ) : arg_7 = arg_0 . status ( ) arg_8 = arg_7 . value arg_9 = arg_8 arg_10 = len ( arg_8 ) if not arg_3 : print ( '\\r%s: %s' % ( 'Job Status' , arg_8 ) , end = '' , file = arg_4 ) while arg_7 . name not in [ 'DONE' , 'CANCELLED' , 'ERROR' ] : time . sleep ( arg_1 ) arg_7 = arg_0 . status ( ) arg_8 = arg_7 . value if arg_7 . name == 'QUEUED' : arg_8 += ' (%s)' % arg_0 . queue_position ( ) if not arg_2 : arg_1 = max ( arg_0 . queue_position ( ) , 2 ) else : if not arg_2 : arg_1 = 2 if len ( arg_8 ) < arg_10 : arg_8 += ' ' * ( arg_10 - len ( arg_8 ) ) elif len ( arg_8 ) > arg_10 : arg_10 = len ( arg_8 ) if arg_8 != arg_9 and not arg_3 : print ( '\\r%s: %s' % ( 'Job Status' , arg_8 ) , end = '' , file = arg_4 ) arg_9 = arg_8 if not arg_3 : print ( '' , file = arg_4 )","id_":251983,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/tools\/monitor\/job_monitor.py#L22-L64","negative":"Show the variables window."} {"query":"Return True if process is 64 bit . Return False if process is 32 bit . Return None if unknown maybe caused by having no acess right to the process .","positive":"def Func ( arg_0 : arg_1 ) -> bool : try : arg_2 = ctypes . windll . ntdll . ZwWow64ReadVirtualMemory64 except Exception as ex : return False try : arg_3 = ctypes . windll . kernel32 . IsWow64Process arg_3 . argtypes = ( ctypes . c_void_p , ctypes . POINTER ( ctypes . c_int ) ) except Exception as ex : return False arg_5 = ctypes . windll . kernel32 . OpenProcess ( 0x1000 , 0 , arg_0 ) if arg_5 : arg_6 = ctypes . c_int32 ( ) if arg_3 ( arg_5 , ctypes . byref ( arg_6 ) ) : ctypes . windll . kernel32 . CloseHandle ( ctypes . c_void_p ( arg_5 ) ) return False if arg_6 . value else True else : ctypes . windll . kernel32 . CloseHandle ( ctypes . 
c_void_p ( arg_5 ) )","id_":251984,"task_name":"https:\/\/github.com\/yinkaisheng\/Python-UIAutomation-for-Windows\/blob\/2cc91060982cc8b777152e698d677cc2989bf263\/uiautomation\/uiautomation.py#L2328-L2350","negative":"List all events occurring at or after a timestamp."} {"query":"Returns an environment name for the given cname","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . get_environments ( ) for arg_3 in arg_2 : if arg_3 [ 'Status' ] != 'Terminated' and 'CNAME' in arg_3 and arg_3 [ 'CNAME' ] and arg_3 [ 'CNAME' ] . lower ( ) . startswith ( arg_1 . lower ( ) + '.' ) : return arg_3 [ 'EnvironmentName' ] return None","id_":251985,"task_name":"https:\/\/github.com\/briandilley\/ebs-deploy\/blob\/4178c9c1282a9025fb987dab3470bea28c202e10\/ebs_deploy\/__init__.py#L379-L390","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Copy or move a contact to a different address book .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = choose_vcard_from_list ( \"Select contact to %s\" % arg_0 . title ( ) , arg_1 ) if arg_3 is None : print ( \"Found no contact\" ) sys . exit ( 1 ) else : print ( \"%s contact %s from address book %s\" % ( arg_0 . title ( ) , arg_3 , arg_3 . address_book ) ) if len ( arg_2 ) == 1 and arg_2 [ 0 ] == arg_3 . address_book : print ( \"The address book %s already contains the contact %s\" % ( arg_2 [ 0 ] , arg_3 ) ) sys . exit ( 1 ) else : arg_4 = [ abook for abook in arg_2 if abook != arg_3 . address_book ] arg_5 = choose_address_book_from_list ( \"Select target address book\" , arg_4 ) if arg_5 is None : print ( \"Error: address book list is empty\" ) sys . exit ( 1 ) arg_6 = choose_vcard_from_list ( \"Select target contact which to overwrite\" , get_contact_list_by_user_selection ( [ arg_5 ] , arg_3 . get_full_name ( ) , True ) ) if arg_6 is None : copy_contact ( arg_3 , arg_5 , arg_0 == \"move\" ) else : if arg_3 == arg_6 : print ( \"Target contact: %s\" % arg_6 ) if arg_0 == \"move\" : copy_contact ( arg_3 , arg_5 , True ) else : print ( \"The selected contacts are already identical\" ) else : print ( \"The address book %s already contains the contact %s\\n\\n\" \"Source\\n\\n%s\\n\\nTarget\\n\\n%s\\n\\n\" \"Possible actions:\\n\" \" a: %s anyway\\n\" \" m: Merge from source into target contact\\n\" \" o: Overwrite target contact\\n\" \" q: Quit\" % ( arg_6 . address_book , arg_3 , arg_3 . print_vcard ( ) , arg_6 . print_vcard ( ) , \"Move\" if arg_0 == \"move\" else \"Copy\" ) ) while True : arg_7 = input ( \"Your choice: \" ) if arg_7 . lower ( ) == \"a\" : copy_contact ( arg_3 , arg_5 , arg_0 == \"move\" ) break if arg_7 . lower ( ) == \"o\" : copy_contact ( arg_3 , arg_5 , arg_0 == \"move\" ) arg_6 . delete_vcard_file ( ) break if arg_7 . lower ( ) == \"m\" : merge_existing_contacts ( arg_3 , arg_6 , arg_0 == \"move\" ) break if arg_7 . lower ( ) in [ \"\" , \"q\" ] : print ( \"Canceled\" ) break","id_":251986,"task_name":"https:\/\/github.com\/scheibler\/khard\/blob\/0f69430c2680f1ff5f073a977a3c5b753b96cc17\/khard\/khard.py#L1313-L1399","negative":"Returns a list of two actions per gcs bucket to mount."} {"query":"Sleep for the time specified in the exception . If not specified wait for 60 seconds .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = int ( arg_1 . response . headers . get ( 'Retry-After' , 60 ) ) arg_0 . log . info ( \"Hit Zendesk API rate limit. Pausing for %s seconds\" , arg_2 ) time . 
sleep ( arg_2 )","id_":251987,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/zendesk_hook.py#L39-L50","negative":"This function returns a dictionary representation of a docker-compose.yml file, based on assembled_specs from\n the spec_assembler, and port_specs from the port_spec compiler"} {"query":"Select file path by criterion .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = True ) : for arg_4 in arg_0 . select ( arg_1 , arg_3 ) : if arg_4 . is_file ( ) : yield arg_4","id_":251988,"task_name":"https:\/\/github.com\/MacHu-GWU\/pathlib_mate-project\/blob\/f9fb99dd7cc9ea05d1bec8b9ce8f659e8d97b0f1\/pathlib_mate\/mate_path_filters.py#L87-L96","negative":"Send the message via HTTP POST, default is json-encoded."} {"query":"True if something has been written to the storage . Note that if a slot has been erased from the storage this function may lose any meaning .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _world_state [ arg_1 ] [ 'storage' ] arg_3 = arg_2 . array while not isinstance ( arg_3 , ArrayVariable ) : if isinstance ( arg_3 , ArrayStore ) : return True arg_3 = arg_3 . array return False","id_":251989,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/evm.py#L2190-L2202","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."} {"query":"flush the line to stdout","positive":"def Func ( arg_0 , arg_1 ) : sys . stdout . write ( arg_1 ) sys . stdout . Func ( )","id_":251990,"task_name":"https:\/\/github.com\/Jaymon\/captain\/blob\/4297f32961d423a10d0f053bc252e29fbe939a47\/captain\/client.py#L87-L91","negative":"Wrapper around json.loads.\n\n Wraps errors in custom exception with a snippet of the data in the message."} {"query":"Recursively merges the contents of two dictionaries into a new dictionary .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = dict ( arg_0 ) for arg_4 , arg_5 in iteritems ( arg_1 ) : if isinstance ( arg_3 . get ( arg_4 ) , dict ) : arg_3 [ arg_4 ] = Func ( arg_3 [ arg_4 ] , arg_5 ) elif arg_2 and isinstance ( arg_3 . get ( arg_4 ) , list ) : arg_3 [ arg_4 ] = merge_list ( arg_3 [ arg_4 ] , arg_5 ) else : arg_3 [ arg_4 ] = arg_5 return arg_3","id_":251991,"task_name":"https:\/\/github.com\/jayclassless\/tidypy\/blob\/3c3497ca377fbbe937103b77b02b326c860c748f\/src\/tidypy\/util.py#L43-L72","negative":"The pods that hold the response to a simple, discrete query."} {"query":"Appends content to a local file .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 , arg_4 = None , arg_5 = False ) : _AssertIsLocal ( arg_0 ) assert isinstance ( arg_1 , six . text_type ) ^ arg_5 , 'Must always receive unicode contents, unless binary=True' if not arg_5 : arg_1 = _HandleContentsEol ( arg_1 , arg_2 ) arg_1 = arg_1 . encode ( arg_4 or sys . getfilesystemencoding ( ) ) arg_6 = open ( arg_0 , 'ab' ) try : arg_6 . write ( arg_1 ) finally : arg_6 . 
close ( )","id_":251992,"task_name":"https:\/\/github.com\/zerotk\/easyfs\/blob\/140923db51fb91d5a5847ad17412e8bce51ba3da\/zerotk\/easyfs\/_easyfs.py#L729-L773","negative":"Iterates over a generator looking for things that match."} {"query":"A decorator factory to check if prerequisites are satisfied .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'Prerequisites \"{}\" are required in method \"{}\" but not ' 'found, please install them first.' ) : def wrap ( arg_3 ) : @ functools . wraps ( arg_3 ) def wrapped_func ( * arg_4 , ** arg_5 ) : arg_6 = [ arg_0 ] if isinstance ( arg_0 , str ) else arg_0 arg_7 = [ ] for arg_8 in arg_6 : if not arg_1 ( arg_8 ) : arg_7 . append ( arg_8 ) if arg_7 : print ( arg_2 . format ( ', ' . join ( arg_7 ) , arg_3 . __name__ ) ) raise RuntimeError ( 'Prerequisites not meet.' ) else : return arg_3 ( * arg_4 , ** arg_5 ) return wrapped_func return wrap","id_":251993,"task_name":"https:\/\/github.com\/open-mmlab\/mmcv\/blob\/0d77f61450aab4dde8b8585a577cc496acb95d7f\/mmcv\/utils\/misc.py#L138-L173","negative":"This function adds the given stream to the logger, but does not check with a ConnectorDB database\n to make sure that the stream exists. Use at your own risk."} {"query":"Undo the scale transformation .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : for arg_4 in range ( arg_1 . ncol ) : arg_1 [ arg_4 ] = arg_0 . means [ arg_4 ] + arg_0 . stds [ arg_4 ] * arg_1 [ arg_4 ] return arg_1","id_":251994,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/transforms\/preprocessing.py#L78-L89","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Fetch a gene panel .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = { 'panel_name' : arg_1 } if arg_2 : LOG . info ( \"Fetch gene panel {0}, version {1} from database\" . format ( arg_1 , arg_2 ) ) arg_3 [ 'version' ] = arg_2 return arg_0 . panel_collection . find_one ( arg_3 ) else : LOG . info ( \"Fetching gene panels %s from database\" , arg_1 ) arg_4 = arg_0 . panel_collection . find ( arg_3 ) . sort ( 'version' , - 1 ) if arg_4 . count ( ) > 0 : return arg_4 [ 0 ] else : LOG . info ( \"No gene panel found\" ) return None","id_":251995,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/panel.py#L202-L228","negative":"Updates the target temperature on the NuHeat API\n\n :param temperature: The desired temperature in NuHeat format\n :param permanent: Permanently hold the temperature. If set to False, the schedule will\n resume at the next programmed event"} {"query":"Return the list of all completed swarms .","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 , arg_3 in arg_0 . _state [ 'swarms' ] . iteritems ( ) : if arg_3 [ 'status' ] == 'completed' : arg_1 . append ( arg_2 ) return arg_1","id_":251996,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/hypersearch\/hs_state.py#L442-L454","negative":"Creates and connects the underlying text widget."} {"query":"Get the document title in the specified markup format .","positive":"def Func ( arg_0 , arg_1 = 'html5' , arg_2 = True , arg_3 = False , arg_4 = True , arg_5 = None ) : if arg_0 . 
title is None : return None arg_6 = convert_lsstdoc_tex ( arg_0 . title , arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 ) return arg_6","id_":251997,"task_name":"https:\/\/github.com\/lsst-sqre\/lsst-projectmeta-kit\/blob\/ac8d4ff65bb93d8fdeb1b46ae6eb5d7414f1ae14\/lsstprojectmeta\/tex\/lsstdoc.py#L282-L315","negative":"Fetch the events pages of a given group."} {"query":"Verify that base_url specifies a protocol and network location .","positive":"def Func ( arg_0 ) : arg_1 = urllib . parse . urlparse ( arg_0 ) if arg_1 . scheme and arg_1 . netloc : return arg_1 . geturl ( ) else : arg_2 = \"base_url must contain a valid scheme (protocol \" \"specifier) and network location (hostname)\" raise ValueError ( arg_2 )","id_":251998,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/webexteamssdk\/utils.py#L89-L97","negative":"Delete latex comments from TeX source.\n\n Parameters\n ----------\n tex_source : str\n TeX source content.\n\n Returns\n -------\n tex_source : str\n TeX source without comments."} {"query":"Update template config for specified template name .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . _json_encode ( arg_1 ) arg_4 = arg_0 . _default_headers ( ) if arg_2 is not None : arg_4 [ \"If-Match\" ] = arg_2 return arg_0 . _request ( arg_0 . name , ok_status = None , arg_3 = arg_3 , arg_4 = arg_4 , method = \"PUT\" )","id_":251999,"task_name":"https:\/\/github.com\/gaqzi\/py-gocd\/blob\/6fe5b62dea51e665c11a343aba5fc98e130c5c63\/gocd\/api\/template_config.py#L37-L56","negative":"Read attribute from sysfs and return as string"} {"query":"If state_name or im_name is None picks them interactively through Tk and then sets with or without the full path .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = os . getcwd ( ) if ( arg_0 is None ) or ( arg_1 is None ) : arg_4 = tk . Tk ( ) arg_4 . withdraw ( ) if arg_0 is None : arg_0 = tkfd . askopenfilename ( initialdir = arg_3 , title = 'Select pre-featured state' ) os . chdir ( os . path . dirname ( arg_0 ) ) if arg_1 is None : arg_1 = tkfd . askopenfilename ( initialdir = arg_3 , title = 'Select new image' ) if ( not arg_2 ) and ( os . path . dirname ( arg_1 ) != '' ) : arg_5 = os . path . dirname ( arg_1 ) os . chdir ( arg_5 ) arg_1 = os . path . basename ( arg_1 ) else : os . chdir ( arg_3 ) return arg_0 , arg_1","id_":252000,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/runner.py#L583-L617","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"This function intercepts the mouse s right click and its position .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 . button == 3 : if arg_0 . ui . tabWidget . currentIndex ( ) == TabWidget . NORMAL_MODE : arg_0 . pos = QtGui . QCursor ( ) . pos ( ) arg_0 . graphic_context_menu ( arg_0 . 
pos )","id_":252001,"task_name":"https:\/\/github.com\/marrabld\/planarradpy\/blob\/5095d1cb98d4f67a7c3108c9282f2d59253e89a8\/gui\/gui_mainLayout.py#L854-L861","negative":"Creates a layer from its config.\n\n This method is the reverse of `get_config`, capable of instantiating the\n same layer from the config dictionary.\n\n Args:\n config: A Python dictionary, typically the output of `get_config`.\n\n Returns:\n layer: A layer instance."} {"query":"Return the appropriate URL prefix to prepend to requests based on the host provided in settings .","positive":"def Func ( arg_0 , arg_1 = True ) : arg_2 = settings . host if ':\/\/' not in arg_2 : arg_2 = 'https:\/\/%s' % arg_2 . strip ( '\/' ) elif arg_2 . startswith ( 'http:\/\/' ) and settings . verify_ssl : raise exc . TowerCLIError ( 'Can not verify ssl with non-https protocol. Change the ' 'verify_ssl configuration setting to continue.' ) arg_3 = urlparse ( arg_2 ) if arg_3 [ 0 ] not in [ 'http' , 'https' ] : raise exc . ConnectionError ( 'URL must be http(s), {} is not valid' . format ( arg_3 [ 0 ] ) ) arg_4 = urljoin ( arg_2 , '\/api\/' ) if arg_1 : arg_4 = urljoin ( arg_4 , \"{}\/\" . format ( CUR_API_VERSION ) ) return arg_4","id_":252002,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/api.py#L184-L206","negative":"Generator that reads a block of data from the server.\n\n It first attempts to read from the internal buffer. If there is not\n enough data in the internal buffer it then requests more data from the\n server and adds it to the buffer.\n\n Args:\n length: An optional amount of data to retrieve. A length of 0 (the\n default) will retrieve a least one buffer of data.\n\n Yields:\n A block of data when enough data becomes available.\n\n Note:\n If a length of 0 is supplied then the size of the yielded buffer can\n vary. If there is data in the internal buffer it will yield all of\n that data otherwise it will yield the the data returned by a recv\n on the socket."} {"query":"Returns the stability for the population averaged over multiple time steps","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = len ( arg_0 ) if arg_1 is None : arg_1 = arg_2 - 1 arg_3 = range ( arg_2 - 1 ) else : arg_3 = numpy . random . randint ( 0 , arg_2 - 1 , arg_1 ) arg_4 = 0.0 for arg_5 in arg_3 : arg_6 = checkMatch ( arg_0 [ arg_5 ] , arg_0 [ arg_5 + 1 ] , sparse = False ) if arg_6 [ 1 ] != 0 : arg_4 += float ( arg_6 [ 0 ] ) \/ arg_6 [ 1 ] return arg_4 \/ arg_1","id_":252003,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/fdrutilities.py#L1123-L1156","negative":"Gets the FileDescriptor for the file containing the specified symbol.\n\n Args:\n symbol: The name of the symbol to search for.\n\n Returns:\n A FileDescriptor that contains the specified symbol.\n\n Raises:\n KeyError: if the file can not be found in the pool."} {"query":"Return an iterable of possible completions matching the given prefix from the list of referred Vars .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> Iterable [ arg_2 ] : return map ( lambda entry : f\"{entry[0].name}\" , filter ( Namespace . __completion_matcher ( arg_1 ) , [ ( arg_3 , arg_4 ) for arg_3 , arg_4 in arg_0 . 
refers ] ) , )","id_":252004,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L662-L670","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Deletes sent MailerMessage records","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_1 = getattr ( settings , 'MAILQUEUE_CLEAR_OFFSET' , defaults . MAILQUEUE_CLEAR_OFFSET ) if type ( arg_1 ) is int : arg_1 = datetime . timedelta ( hours = arg_1 ) arg_2 = timezone . now ( ) - arg_1 arg_0 . filter ( sent = True , last_attempt__lte = arg_2 ) . delete ( )","id_":252005,"task_name":"https:\/\/github.com\/dstegelman\/django-mail-queue\/blob\/c9429d53454b117cde2e7a8cb912c8f5ae8394af\/mailqueue\/models.py#L28-L37","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Take control of QTM .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = \"takecontrol %s\" % arg_1 return await asyncio . wait_for ( arg_0 . _protocol . send_command ( arg_2 ) , timeout = arg_0 . _timeout )","id_":252006,"task_name":"https:\/\/github.com\/qualisys\/qualisys_python_sdk\/blob\/127d7eeebc2b38b5cafdfa5d1d0198437fedd274\/qtm\/qrt.py#L184-L192","negative":"List all events occurring at or after a timestamp."} {"query":"Return the ElementTree of the SVG content in filepath with the font content embedded .","positive":"def Func ( arg_0 , arg_1 ) : with open ( arg_0 , 'r' ) as svgf : arg_2 = etree . parse ( svgf ) if not arg_1 : return arg_2 arg_3 = FontFaceGroup ( ) for arg_4 in arg_1 : arg_3 . append ( FontFace ( arg_4 ) ) for arg_5 in arg_2 . iter ( ) : if arg_5 . tag . split ( \"}\" ) [ 1 ] == 'svg' : break arg_5 . insert ( 0 , arg_3 . xml_elem ) return arg_2","id_":252007,"task_name":"https:\/\/github.com\/PythonSanSebastian\/docstamp\/blob\/b43808f2e15351b0b2f0b7eade9c7ef319c9e646\/docstamp\/svg_fonts.py#L97-L117","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Takes an object a key and a value and produces a new object that is a copy of the original but with value as the new value of key .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_0 . _lens_Func except AttributeError : arg_3 = copy . copy ( arg_0 ) arg_3 [ arg_1 ] = arg_2 return arg_3 else : return arg_0 . _lens_Func ( arg_1 , arg_2 )","id_":252008,"task_name":"https:\/\/github.com\/ingolemo\/python-lenses\/blob\/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf\/lenses\/hooks\/hook_funcs.py#L43-L77","negative":"Evaluates traits and returns a list containing the description of traits which are not true.\n Notice that if LAZY_EVALUATION is set to False all traits are evaluated before returning. Use this option\n only for debugging purposes."} {"query":"create our session object","positive":"def Func ( arg_0 ) : default_secure ( arg_0 . config ) arg_0 . session = Session ( config = arg_0 . config , username = u'kernel' )","id_":252009,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/zmq\/kernelapp.py#L267-L270","negative":"APEv2 tag value factory.\n\n Use this if you need to specify the value's type manually. 
Binary\n and text data are automatically detected by APEv2.__setitem__."} {"query":"Return a color interpolated from the Palette .","positive":"def Func ( arg_0 , arg_1 = 0 ) : arg_2 = len ( arg_0 ) if arg_2 == 1 : return arg_0 [ 0 ] arg_3 = arg_1 if arg_0 . length and arg_0 . autoscale : arg_3 *= len ( arg_0 ) arg_3 \/= arg_0 . length arg_3 *= arg_0 . scale arg_3 += arg_0 . offset if not arg_0 . continuous : if not arg_0 . serpentine : return arg_0 [ int ( arg_3 % arg_2 ) ] arg_4 = ( 2 * arg_2 ) - 2 arg_3 %= arg_4 if arg_3 < arg_2 : return arg_0 [ int ( arg_3 ) ] else : return arg_0 [ int ( arg_4 - arg_3 ) ] if arg_0 . serpentine : arg_3 %= ( 2 * arg_2 ) if arg_3 > arg_2 : arg_3 = ( 2 * arg_2 ) - arg_3 else : arg_3 %= arg_2 arg_3 *= arg_2 - 1 arg_3 \/= arg_2 arg_5 = int ( arg_3 ) arg_6 = arg_3 - arg_5 if not arg_6 : return arg_0 [ arg_5 ] arg_7 , arg_8 , arg_9 = arg_0 [ arg_5 ] arg_10 , arg_11 , arg_12 = arg_0 [ ( arg_5 + 1 ) % len ( arg_0 ) ] arg_13 , arg_14 , arg_15 = arg_10 - arg_7 , arg_11 - arg_8 , arg_12 - arg_9 return arg_7 + arg_6 * arg_13 , arg_8 + arg_6 * arg_14 , arg_9 + arg_6 * arg_15","id_":252010,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/colors\/palette.py#L56-L117","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"For all DAG IDs in the SimpleDagBag look for task instances in the old_states and set them to new_state if the corresponding DagRun does not exist or exists but is not in the running state . This normally should not happen but it can if the state of DagRuns are changed manually .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None ) : arg_5 = 0 arg_6 = arg_4 . query ( models . TaskInstance ) . outerjoin ( models . DagRun , and_ ( models . TaskInstance . dag_id == models . DagRun . dag_id , models . TaskInstance . execution_date == models . DagRun . execution_date ) ) . filter ( models . TaskInstance . dag_id . in_ ( arg_1 . dag_ids ) ) . filter ( models . TaskInstance . state . in_ ( arg_2 ) ) . filter ( or_ ( models . DagRun . state != State . RUNNING , models . DagRun . state . is_ ( None ) ) ) if arg_0 . using_sqlite : arg_7 = arg_6 . with_for_update ( ) . all ( ) for arg_8 in arg_7 : arg_8 . set_state ( arg_3 , arg_4 = arg_4 ) arg_5 += 1 else : arg_9 = arg_6 . subquery ( ) arg_5 = arg_4 . query ( models . TaskInstance ) . filter ( and_ ( models . TaskInstance . dag_id == arg_9 . c . dag_id , models . TaskInstance . task_id == arg_9 . c . task_id , models . TaskInstance . execution_date == arg_9 . c . execution_date ) ) . update ( { models . TaskInstance . state : arg_3 } , synchronize_session = False ) arg_4 . commit ( ) if arg_5 > 0 : arg_0 . log . warning ( \"Set %s task instances to state=%s as their associated DagRun was not in RUNNING state\" , arg_5 , arg_3 )","id_":252011,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/jobs.py#L957-L1012","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. {'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"See if span tag has italic style and wrap with em tag .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . get ( 'style' ) if arg_2 and 'font-style:italic' in arg_2 : arg_1 . wrap ( arg_0 . soup . 
new_tag ( 'em' ) )","id_":252012,"task_name":"https:\/\/github.com\/nprapps\/copydoc\/blob\/e1ab09b287beb0439748c319cf165cbc06c66624\/copydoc.py#L110-L116","negative":"Parses ldapdomaindump files and stores hosts and users in elasticsearch."} {"query":"Transform SVG file to PNG file","positive":"def Func ( arg_0 , arg_1 , arg_2 = 150 , arg_3 = None ) : return inkscape_export ( arg_0 , arg_1 , export_flag = \"-e\" , arg_2 = arg_2 , arg_3 = arg_3 )","id_":252013,"task_name":"https:\/\/github.com\/PythonSanSebastian\/docstamp\/blob\/b43808f2e15351b0b2f0b7eade9c7ef319c9e646\/docstamp\/inkscape.py#L98-L102","negative":"Fetch the events pages of a given group."} {"query":"Call the API with a GET request .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : return arg_0 . call_api ( \"GET\" , arg_1 , arg_2 = arg_2 , ** arg_3 )","id_":252014,"task_name":"https:\/\/github.com\/hirmeos\/entity-fishing-client-python\/blob\/cd5c6e10c6c4e653669e11d735d5773766986bda\/nerd\/client.py#L143-L158","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Implementation of the Context Likelihood or Relatedness Network algorithm .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = np . zeros ( arg_0 . shape ) arg_3 = [ [ 0 , 0 ] for arg_4 in range ( arg_0 . shape [ 0 ] ) ] for arg_4 in range ( arg_0 . shape [ 0 ] ) : arg_5 = np . mean ( arg_0 [ arg_4 , : ] ) arg_6 = np . std ( arg_0 [ arg_4 , : ] ) arg_3 [ arg_4 ] = [ arg_5 , arg_6 ] for arg_4 in range ( arg_0 . shape [ 0 ] ) : for arg_7 in range ( arg_4 + 1 , arg_0 . shape [ 0 ] ) : arg_8 = np . max ( [ 0 , ( arg_0 [ arg_4 , arg_7 ] - arg_3 [ arg_4 ] [ 0 ] ) \/ arg_3 [ arg_4 ] [ 0 ] ] ) arg_9 = np . max ( [ 0 , ( arg_0 [ arg_4 , arg_7 ] - arg_3 [ arg_7 ] [ 0 ] ) \/ arg_3 [ arg_7 ] [ 0 ] ] ) arg_2 [ arg_4 , arg_7 ] = np . sqrt ( arg_8 ** 2 + arg_9 ** 2 ) arg_2 [ arg_7 , arg_4 ] = arg_2 [ arg_4 , arg_7 ] return arg_2","id_":252015,"task_name":"https:\/\/github.com\/Diviyan-Kalainathan\/CausalDiscoveryToolbox\/blob\/be228b078ba9eb76c01b3ccba9a1c0ad9e9e5ed1\/cdt\/utils\/graph.py#L139-L173","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Sets the review comment . Raises CardinalityError if already set . OrderError if no reviewer defined before . Raises SPDXValueError if comment is not free form text .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if len ( arg_1 . reviews ) != 0 : if not arg_0 . review_comment_set : arg_0 . review_comment_set = True if validations . validate_review_comment ( arg_2 ) : arg_1 . reviews [ - 1 ] . comment = str_from_text ( arg_2 ) return True else : raise SPDXValueError ( 'ReviewComment::Comment' ) else : raise CardinalityError ( 'ReviewComment' ) else : raise OrderError ( 'ReviewComment' )","id_":252016,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/tagvaluebuilders.py#L383-L399","negative":"Return list of GATT characteristics that have been discovered for this\n service."} {"query":"Get a list of device management request device statuses . Get an individual device mangaement request device status .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None ) : if arg_2 is None or arg_3 is None : arg_4 = MgmtRequests . mgmtRequestStatus % ( arg_1 ) arg_5 = arg_0 . _apiClient . 
get ( arg_4 ) if arg_5 . status_code == 200 : return arg_5 . json ( ) else : raise ApiException ( arg_5 ) else : arg_4 = MgmtRequests . mgmtRequestSingleDeviceStatus % ( arg_1 , arg_2 , arg_3 ) arg_5 = arg_0 . _apiClient . get ( arg_4 ) if arg_5 . status_code == 200 : return arg_5 . json ( ) else : raise ApiException ( arg_5 )","id_":252017,"task_name":"https:\/\/github.com\/ibm-watson-iot\/iot-python\/blob\/195f05adce3fba4ec997017e41e02ebd85c0c4cc\/src\/wiotp\/sdk\/api\/mgmt\/requests.py#L87-L107","negative":"Upload a file to s3.\n\n Args:\n info (ExpectationExecutionInfo): Must expose a boto3 S3 client as its `s3` resource.\n\n Returns:\n (str, str):\n The bucket and key to which the file was uploaded."} {"query":"Returns a data frame with Sample data and state .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . samples . keys ( ) arg_1 . sort ( ) arg_2 . options . display . max_rows = len ( arg_0 . samples ) arg_6 = arg_2 . DataFrame ( [ arg_0 . samples [ i ] . Func for i in arg_1 ] , index = arg_1 ) . dropna ( axis = 1 , how = 'all' ) for arg_7 in arg_6 : if arg_7 not in [ \"hetero_est\" , \"error_est\" ] : arg_6 [ arg_7 ] = np . nan_to_num ( arg_6 [ arg_7 ] ) . astype ( int ) return arg_6","id_":252018,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/core\/assembly.py#L244-L257","negative":"Build the markdown release notes for Hugo.\n\n Inserts the required TOML header with specific values and adds a break\n for long release notes.\n\n Parameters\n ----------\n filename : str, path\n The release notes file.\n tag : str\n The tag, following semantic versioning, of the current release.\n bump : {\"major\", \"minor\", \"patch\", \"alpha\", \"beta\"}\n The type of release."} {"query":"return an iterator on stripped lines starting from a given index if specified else 0","positive":"def Func ( arg_0 , arg_1 = 0 ) : arg_2 = arg_1 if arg_1 : arg_3 = arg_0 . _stripped_lines [ arg_1 : ] else : arg_3 = arg_0 . _stripped_lines for arg_4 in arg_3 : yield arg_2 , arg_4 arg_2 += 1","id_":252019,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/similar.py#L235-L247","negative":"Export as a ``cryptography`` certificate signing request.\n\n :rtype: ``cryptography.x509.CertificateSigningRequest``\n\n .. versionadded:: 17.1.0"} {"query":"A replacement for signal . signal which chains the signal behind the debugger s handler","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = lookup_signame ( arg_1 ) if arg_3 is None : arg_0 . dbgr . intf [ - 1 ] . errmsg ( ( \"%s is not a signal number\" \" I know about.\" ) % arg_1 ) return False arg_0 . sigs [ arg_3 ] . pass_along = True if arg_0 . check_and_adjust_sighandler ( arg_3 , arg_0 . sigs ) : arg_0 . sigs [ arg_3 ] . old_handler = arg_2 return True return False","id_":252020,"task_name":"https:\/\/github.com\/rocky\/python3-trepan\/blob\/14e91bc0acce090d67be145b1ac040cab92ac5f3\/trepan\/lib\/sighandler.py#L221-L235","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Set parameter error estimate","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 is None : arg_0 . __errors__ = None return arg_0 . 
__errors__ = [ asscalar ( e ) for e in arg_1 ]","id_":252021,"task_name":"https:\/\/github.com\/kadrlica\/pymodeler\/blob\/f426c01416fd4b8fc3afeeb6d3b5d1cb0cb8f8e3\/pymodeler\/parameter.py#L620-L625","negative":"Returns any parameters needed for Akamai HD player verification.\n\n Algorithm originally documented by KSV, source:\n http:\/\/stream-recorder.com\/forum\/showpost.php?p=43761&postcount=13"} {"query":"The centers for the KMeans model .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _model_json [ \"output\" ] arg_2 = arg_1 [ \"centers\" ] . cell_values Func = [ list ( cval [ 1 : ] ) for cval in arg_2 ] return Func","id_":252022,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/model\/clustering.py#L143-L148","negative":"Edit a block.\n\n If no number is given, use the last block executed.\n\n This edits the in-memory copy of the demo, it does NOT modify the\n original source file. If you want to do that, simply open the file in\n an editor and use reload() when you make changes to the file. This\n method is meant to let you change a block during a demonstration for\n explanatory purposes, without damaging your original script."} {"query":"Add pop - up information to a point on the graph .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = len ( arg_3 ) * arg_0 . font_size * 0.6 + 10 arg_5 = arg_1 + [ 5 , - 5 ] [ int ( arg_1 + arg_4 > arg_0 . width ) ] arg_6 = [ 'start' , 'end' ] [ arg_1 + arg_4 > arg_0 . width ] arg_7 = 'fill: #000; text-anchor: %s;' % arg_6 arg_8 = 'label-%s' % arg_0 . _w3c_name ( arg_3 ) arg_9 = { 'x' : str ( arg_5 ) , 'y' : str ( arg_2 - arg_0 . font_size ) , 'visibility' : 'hidden' , 'style' : arg_7 , 'text' : arg_3 , 'id' : arg_8 , } etree . SubElement ( arg_0 . foreground , 'text' , arg_9 ) arg_10 = ( \"document.getElementById('{id}').setAttribute('visibility', {val})\" ) arg_9 = { 'cx' : str ( arg_1 ) , 'cy' : str ( arg_2 ) , 'r' : str ( 10 ) , 'style' : 'opacity: 0;' , 'onmouseover' : arg_10 . format ( val = 'visible' , arg_8 = arg_8 ) , 'onmouseout' : arg_10 . format ( val = 'hidden' , arg_8 = arg_8 ) , } etree . SubElement ( arg_0 . foreground , 'circle' , arg_9 )","id_":252023,"task_name":"https:\/\/github.com\/jaraco\/svg.charts\/blob\/23053497b3f1af4e760f355050107ae3bc05909d\/svg\/charts\/graph.py#L230-L261","negative":"Gets back all response headers."} {"query":"print paver options .","positive":"def Func ( ) : arg_0 = json . dumps ( environment . options , indent = 4 , sort_keys = True , skipkeys = True , cls = MyEncoder ) print ( arg_0 )","id_":252024,"task_name":"https:\/\/github.com\/eykd\/paved\/blob\/f04f8a4248c571f3d5ce882b325884a3e5d80203\/paved\/paved.py#L56-L67","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Increases or decreases the brightness in the layer . The given value is a percentage to increase or decrease the image brightness for example 0 . 8 means brightness at 80% .","positive":"def Func ( arg_0 , arg_1 = 1.0 ) : arg_2 = ImageEnhance . Brightness ( arg_0 . img ) arg_0 . img = arg_2 . 
enhance ( arg_1 )","id_":252025,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/photobot\/__init__.py#L563-L574","negative":"Find the necessary file for the given test case.\n\n Args:\n device(napalm device connection): for which device\n filename(str): file to find\n path(str): where to find it relative to where the module is installed"} {"query":"Returns the scaled x positions of the points as doubles","positive":"def Func ( arg_0 ) : return scale_dimension ( arg_0 . X , arg_0 . header . Func_scale , arg_0 . header . Func_offset )","id_":252026,"task_name":"https:\/\/github.com\/tmontaigu\/pylas\/blob\/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06\/pylas\/lasdatas\/base.py#L60-L63","negative":"scan through the java output text and extract the bad java messages that may or may not happened when\n unit tests are run. It will not record any bad java messages that are stored in g_ok_java_messages.\n\n :return: none"} {"query":"Load the hgnc aliases to the mongo database .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : LOG . info ( \"Running scout update Func\" ) arg_3 = arg_0 . obj [ 'adapter' ] arg_2 = arg_2 or arg_0 . obj . get ( 'omim_api_key' ) if not arg_2 : LOG . warning ( \"Please provide a omim api key to load the omim gene panel\" ) arg_0 . abort ( ) try : arg_4 = fetch_mim_files ( arg_2 , mim2Func = True , morbidmap = True , genemap2 = True ) except Exception as err : LOG . warning ( err ) arg_0 . abort ( ) LOG . warning ( \"Dropping all gene information\" ) arg_3 . drop_Func ( arg_1 ) LOG . info ( \"Genes dropped\" ) LOG . warning ( \"Dropping all transcript information\" ) arg_3 . drop_transcripts ( arg_1 ) LOG . info ( \"transcripts dropped\" ) arg_5 = fetch_hpo_Func ( ) if arg_1 : arg_6 = [ arg_1 ] else : arg_6 = [ '37' , '38' ] arg_7 = fetch_hgnc ( ) arg_8 = fetch_exac_constraint ( ) for arg_1 in arg_6 : arg_9 = fetch_ensembl_Func ( arg_1 = arg_1 ) arg_10 = load_hgnc_Func ( arg_3 = arg_3 , ensembl_lines = arg_9 , arg_7 = arg_7 , arg_8 = arg_8 , mim2gene_lines = arg_4 [ 'mim2Func' ] , genemap_lines = arg_4 [ 'genemap2' ] , hpo_lines = arg_5 , arg_1 = arg_1 , ) arg_9 = { } for arg_11 in arg_10 : arg_12 = arg_11 [ 'ensembl_id' ] arg_9 [ arg_12 ] = arg_11 arg_13 = fetch_ensembl_transcripts ( arg_1 = arg_1 ) arg_14 = load_transcripts ( arg_3 , arg_13 , arg_1 , arg_9 ) arg_3 . update_indexes ( ) LOG . info ( \"Genes, transcripts and Exons loaded\" )","id_":252027,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/commands\/update\/genes.py#L42-L106","negative":"Determine the Fitch profile for a single character of the node's sequence.\n The profile is essentially the intersection between the children's\n profiles or, if the former is empty, the union of the profiles.\n\n Parameters\n ----------\n\n node : PhyloTree.Clade:\n Internal node which the profiles are to be determined\n\n pos : int\n Position in the node's sequence which the profiles should\n be determinedf for.\n\n Returns\n -------\n state : numpy.array\n Fitch profile for the character at position pos of the given node."} {"query":"Hivy formated logger","positive":"def Func ( arg_0 = 'debug' , arg_1 = None ) : arg_1 = arg_1 or settings . LOG [ 'file' ] arg_0 = arg_0 . upper ( ) arg_2 = [ logbook . NullHandler ( ) ] if arg_1 == 'stdout' : arg_2 . append ( logbook . StreamHandler ( sys . stdout , format_string = settings . LOG [ 'format' ] , arg_0 = arg_0 ) ) else : arg_2 . append ( logbook . 
FileHandler ( arg_1 , format_string = settings . LOG [ 'format' ] , arg_0 = arg_0 ) ) arg_3 = settings . LOG [ 'sentry_dns' ] if arg_3 : arg_2 . append ( SentryHandler ( arg_3 , arg_0 = 'ERROR' ) ) return logbook . NestedSetup ( arg_2 )","id_":252028,"task_name":"https:\/\/github.com\/hivetech\/dna\/blob\/50ad00031be29765b2576fa407d35a36e0608de9\/python\/dna\/logging.py#L31-L54","negative":"Modify an existing lock's timeout.\n\n token:\n Valid lock token.\n timeout:\n Suggested lifetime in seconds (-1 for infinite).\n The real expiration time may be shorter than requested!\n Returns:\n Lock dictionary.\n Raises ValueError, if token is invalid."} {"query":"Execute the Discord webhook call","positive":"def Func ( arg_0 ) : arg_1 = { } if arg_0 . proxy : arg_1 = { 'https' : arg_0 . proxy } arg_2 = arg_0 . _build_discord_payload ( ) arg_0 . run ( endpoint = arg_0 . webhook_endpoint , data = arg_2 , headers = { 'Content-type' : 'application\/json' } , extra_options = { 'proxies' : arg_1 } )","id_":252029,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/discord_webhook_hook.py#L126-L140","negative":"Prune the cache"} {"query":"Parse this node and all children returning the connected task spec .","positive":"def Func ( arg_0 ) : try : arg_0 . task = arg_0 . create_task ( ) arg_0 . task . documentation = arg_0 . parser . _parse_documentation ( arg_0 . node , xpath = arg_0 . xpath , task_parser = arg_0 ) arg_3 = arg_0 . process_xpath ( '.\/\/bpmn:boundaryEvent[@attachedToRef=\"%s\"]' % arg_0 . get_id ( ) ) if arg_3 : arg_4 = _BoundaryEventParent ( arg_0 . spec , '%s.BoundaryEventParent' % arg_0 . get_id ( ) , arg_0 . task , lane = arg_0 . task . lane ) arg_0 . process_parser . parsed_nodes [ arg_0 . node . get ( 'id' ) ] = arg_4 arg_4 . connect_outgoing ( arg_0 . task , '%s.FromBoundaryEventParent' % arg_0 . get_id ( ) , None , None ) for arg_9 in arg_3 : arg_10 = arg_0 . process_parser . Func ( arg_9 ) arg_4 . connect_outgoing ( arg_10 , '%s.FromBoundaryEventParent' % arg_9 . get ( 'id' ) , None , None ) else : arg_0 . process_parser . parsed_nodes [ arg_0 . node . get ( 'id' ) ] = arg_0 . task arg_11 = [ ] arg_12 = arg_0 . process_xpath ( '.\/\/bpmn:sequenceFlow[@sourceRef=\"%s\"]' % arg_0 . get_id ( ) ) if len ( arg_12 ) > 1 and not arg_0 . handles_multiple_outgoing ( ) : raise ValidationException ( 'Multiple outgoing flows are not supported for ' 'tasks of type' , arg_7 = arg_0 . node , filename = arg_0 . process_parser . filename ) for arg_13 in arg_12 : arg_14 = arg_13 . get ( 'targetRef' ) arg_15 = one ( arg_0 . process_xpath ( '.\/\/*[@id=\"%s\"]' % arg_14 ) ) arg_16 = arg_0 . process_parser . Func ( arg_15 ) arg_11 . append ( ( arg_16 , arg_15 , arg_13 ) ) if arg_11 : arg_17 = arg_0 . node . get ( 'default' ) if not arg_17 : ( arg_16 , arg_15 , arg_13 ) = arg_11 [ 0 ] arg_17 = arg_13 . get ( 'id' ) for ( arg_16 , arg_15 , arg_13 ) in arg_11 : arg_0 . connect_outgoing ( arg_16 , arg_15 , arg_13 , arg_13 . get ( 'id' ) == arg_17 ) return arg_4 if arg_3 else arg_0 . task except ValidationException : raise except Exception as ex : arg_18 = sys . exc_info ( ) arg_19 = \"\" . join ( traceback . format_exception ( arg_18 [ 0 ] , arg_18 [ 1 ] , arg_18 [ 2 ] ) ) LOG . error ( \"%r\\n%s\" , ex , arg_19 ) raise ValidationException ( \"%r\" % ( ex ) , arg_7 = arg_0 . node , filename = arg_0 . process_parser . 
filename )","id_":252030,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/bpmn\/parser\/TaskParser.py#L58-L129","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"Verifies a signature on a certificate request .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = _lib . NETSCAPE_SPKI_Func ( arg_0 . _spki , arg_1 . _pkey ) if arg_2 <= 0 : _raise_current_error ( ) return True","id_":252031,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/crypto.py#L2552-L2567","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"Checks if the string value ends with another string .","positive":"def Func ( arg_0 ) : def ends_with ( arg_1 ) : validate ( text , arg_1 ) if not arg_1 . Func ( arg_0 ) : raise ValueError ( \"'{0}' does not end with '{1}'\" . format ( arg_1 , arg_0 ) ) return True return ends_with","id_":252032,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/plugin\/api\/validate.py#L132-L140","negative":"Returns a decorator to swallow a requests exception for modules that\n are not accessible without logging in, and turn it into an Unavailable\n exception."} {"query":"Very simple patterns . Each pattern has numOnes consecutive bits on . The amount of overlap between consecutive patterns is configurable via the patternOverlap parameter .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0 ) : assert ( arg_2 < arg_0 ) arg_3 = arg_0 - arg_2 arg_4 = arg_3 * arg_1 + arg_2 arg_5 = [ ] for arg_6 in xrange ( arg_1 ) : arg_7 = numpy . zeros ( arg_4 , dtype = 'float32' ) arg_8 = arg_6 * arg_3 arg_9 = arg_8 + arg_0 arg_7 [ arg_8 : arg_9 ] = 1 arg_5 . append ( arg_7 ) return arg_5","id_":252033,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/examples\/tm\/tm_overlapping_sequences.py#L80-L109","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"Bump a development version .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = False ) : arg_3 = config . load ( ) arg_4 = scm_provider ( arg_3 . project_root , commit = False , arg_0 = arg_0 ) if not arg_4 . workdir_is_clean ( ) : notify . warning ( \"You have uncommitted changes, will create a time-stamped version!\" ) arg_5 = arg_4 . pep440_dev_version ( arg_1 = arg_1 , non_local = arg_2 ) arg_6 = arg_3 . rootjoin ( 'setup.cfg' ) if not arg_5 : notify . info ( \"Working directory contains a release version!\" ) elif os . path . exists ( arg_6 ) : with io . open ( arg_6 , encoding = 'utf-8' ) as handle : arg_7 = handle . readlines ( ) arg_8 = False for arg_9 , arg_10 in enumerate ( arg_7 ) : if re . match ( r\"#? *tag_build *= *.*\" , arg_10 ) : arg_11 , arg_12 = arg_7 [ arg_9 ] . split ( '=' , 1 ) arg_7 [ arg_9 ] = '{}= {}\\n' . format ( arg_11 , arg_5 ) arg_8 = True if arg_8 : notify . info ( \"Rewriting 'setup.cfg'...\" ) with io . open ( arg_6 , 'w' , encoding = 'utf-8' ) as handle : handle . write ( '' . join ( arg_7 ) ) else : notify . 
warning ( \"No 'tag_build' setting found in 'setup.cfg'!\" ) else : notify . warning ( \"Cannot rewrite 'setup.cfg', none found!\" ) if os . path . exists ( arg_6 ) : arg_13 = shell . capture ( \"python setup.py egg_info\" , echo = True if arg_1 else None ) for arg_10 in arg_13 . splitlines ( ) : if arg_10 . endswith ( 'PKG-INFO' ) : arg_14 = arg_10 . split ( None , 1 ) [ 1 ] with io . open ( arg_14 , encoding = 'utf-8' ) as handle : notify . info ( '\\n' . join ( arg_9 for arg_9 in handle . readlines ( ) if arg_9 . startswith ( 'Version:' ) ) . strip ( ) ) arg_0 . run ( \"python setup.py -q develop\" , echo = True if arg_1 else None )","id_":252034,"task_name":"https:\/\/github.com\/jhermann\/rituals\/blob\/1534f50d81e19bbbe799e2eba0acdefbce047c06\/src\/rituals\/acts\/releasing.py#L126-L170","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Remove self from the containing DiscoItems object .","positive":"def Func ( arg_0 ) : if arg_0 . disco is None : return arg_0 . xmlnode . unlinkNode ( ) arg_1 = arg_0 . xmlnode . ns ( ) arg_2 = arg_0 . xmlnode . newNs ( arg_1 . getContent ( ) , None ) arg_0 . xmlnode . replaceNs ( arg_1 , arg_2 ) common_root . addChild ( arg_0 . xmlnode ( ) ) arg_0 . disco = None","id_":252035,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/disco.py#L120-L129","negative":"Return the most recent timestamp in the operation."} {"query":"Plots total amount of stocks with an active position either short or long . Displays daily total daily average per month and all - time daily average .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'best' , arg_3 = None , ** arg_4 ) : if arg_3 is None : arg_3 = plt . gca ( ) arg_1 = arg_1 . copy ( ) . drop ( 'cash' , axis = 'columns' ) arg_5 = arg_1 . replace ( 0 , np . nan ) . count ( axis = 1 ) arg_6 = arg_5 . resample ( '1M' ) . mean ( ) arg_5 . plot ( color = 'steelblue' , alpha = 0.6 , lw = 0.5 , arg_3 = arg_3 , ** arg_4 ) arg_6 . plot ( color = 'orangered' , lw = 2 , arg_3 = arg_3 , ** arg_4 ) arg_3 . axhline ( arg_5 . values . mean ( ) , color = 'steelblue' , ls = '--' , lw = 3 ) arg_3 . set_xlim ( ( arg_0 . index [ 0 ] , arg_0 . index [ - 1 ] ) ) arg_7 = arg_3 . legend ( [ 'Daily holdings' , 'Average daily holdings, by month' , 'Average daily holdings, overall' ] , loc = arg_2 , frameon = True , framealpha = 0.5 ) arg_7 . get_frame ( ) . set_edgecolor ( 'black' ) arg_3 . set_title ( 'Total holdings' ) arg_3 . set_ylabel ( 'Holdings' ) arg_3 . set_xlabel ( '' ) return arg_3","id_":252036,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/plotting.py#L286-L343","negative":"Create a new shell stream."} {"query":"Start procedure to validate variant using other techniques .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 , arg_6 = institute_and_case ( store , arg_0 , arg_1 ) arg_7 = store . variant ( arg_2 ) arg_8 = store . user ( current_user . email ) arg_9 = request . form . get ( 'verification_comment' ) try : controllers . 
variant_verification ( store = store , mail = mail , arg_5 = arg_5 , arg_6 = arg_6 , arg_8 = arg_8 , arg_9 = arg_9 , arg_7 = arg_7 , sender = current_app . config [ 'MAIL_USERNAME' ] , variant_url = request . referrer , arg_4 = arg_4 , url_builder = url_for ) except controllers . MissingVerificationRecipientError : flash ( 'No verification recipients added to institute.' , 'danger' ) return redirect ( request . referrer )","id_":252037,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/variants\/views.py#L368-L382","negative":"Converts stderr string to a list."} {"query":"Returns the name of the offensive scheme the team ran in the given year .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _year_info_pq ( arg_1 , 'Offensive Scheme' ) . text ( ) arg_3 = re . search ( r'Offensive Scheme[:\\s]*(.+)\\s*' , arg_2 , re . I ) if arg_3 : return arg_3 . group ( 1 ) else : return None","id_":252038,"task_name":"https:\/\/github.com\/mdgoldberg\/sportsref\/blob\/09f11ac856a23c96d666d1d510bb35d6f050b5c3\/sportsref\/nfl\/teams.py#L322-L334","negative":"Combines all masks from a list of arrays, and logically ors them into a single mask"} {"query":"Search for a tournament by its name","positive":"def Func ( arg_0 , arg_1 : arg_2 , ** arg_3 : arg_4 ) : arg_5 = arg_0 . api . TOURNAMENT arg_3 [ 'name' ] = arg_1 return arg_0 . _get_model ( arg_5 , PartialTournament , ** arg_3 )","id_":252039,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/official_api\/client.py#L417-L431","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Make a variable a list if it is not already","positive":"def Func ( arg_0 , arg_1 = 1 ) : if not isinstance ( arg_0 , list ) : if isinstance ( arg_0 , tuple ) : arg_0 = list ( arg_0 ) else : arg_0 = [ arg_0 ] for arg_2 in range ( 1 , arg_1 ) : arg_0 . append ( arg_0 [ 0 ] ) return arg_0","id_":252040,"task_name":"https:\/\/github.com\/3DLIRIOUS\/MeshLabXML\/blob\/177cce21e92baca500f56a932d66bd9a33257af8\/meshlabxml\/util.py#L90-L104","negative":"Configure the Outstation's database of input point definitions.\n\n Configure two Analog points (group\/variation 30.1) at indexes 1 and 2.\n Configure two Binary points (group\/variation 1.2) at indexes 1 and 2."} {"query":"Consumes a boolean value .","positive":"def Func ( arg_0 ) : try : arg_1 = ParseBool ( arg_0 . token ) except ValueError as e : raise arg_0 . _ParseError ( str ( e ) ) arg_0 . 
NextToken ( ) return arg_1","id_":252041,"task_name":"https:\/\/github.com\/ibelie\/typy\/blob\/3616845fb91459aacd8df6bf82c5d91f4542bee7\/typy\/google\/protobuf\/text_format.py#L976-L990","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Logs single dialog utterance to current dialog log file .","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_4 , arg_5 : arg_6 [ arg_7 ] = None ) : if isinstance ( arg_1 , arg_4 ) : pass elif isinstance ( arg_1 , RichMessage ) : arg_1 = arg_1 . json ( ) elif isinstance ( arg_1 , ( list , dict ) ) : arg_1 = jsonify_data ( arg_1 ) else : arg_1 = arg_4 ( arg_1 ) arg_5 = arg_4 ( arg_5 ) if not isinstance ( arg_5 , arg_4 ) else arg_5 if arg_0 . log_file . tell ( ) >= arg_0 . log_max_size * 1024 : arg_0 . log_file . close ( ) arg_0 . log_file = arg_0 . _getFunc_file ( ) else : try : arg_9 = { } arg_9 [ 'timestamp' ] = arg_0 . _get_timestamp_utc_str ( ) arg_9 [ 'dialog_id' ] = arg_5 arg_9 [ 'direction' ] = arg_3 arg_9 [ 'message' ] = arg_1 arg_10 = json . dumps ( arg_9 , ensure_ascii = arg_0 . config [ 'ensure_ascii' ] ) arg_0 . log_file . write ( f'{log_str}\\n' ) except IOError : log . error ( 'Failed to write dialog log.' )","id_":252042,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/agent\/dialog_logger.py#L78-L110","negative":"Delete a space."} {"query":"Encodes data to slip protocol and then sends over serial port","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = sliplib . Driver ( ) arg_3 = arg_2 . Func ( arg_1 ) arg_4 = arg_0 . _serialPort . write ( arg_3 ) return arg_4","id_":252043,"task_name":"https:\/\/github.com\/FaradayRF\/faradayio\/blob\/6cf3af88bb4a83e5d2036e5cbdfaf8f0f01500bb\/faradayio\/faraday.py#L32-L56","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. 
seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Draws a circle at point x0 y0 with radius r of the specified RGB color","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None ) : arg_5 = 1 - arg_3 arg_6 = 1 arg_7 = - 2 * arg_3 arg_8 = 0 arg_9 = arg_3 arg_0 ( arg_1 , arg_2 + arg_3 , arg_4 ) arg_0 ( arg_1 , arg_2 - arg_3 , arg_4 ) arg_0 ( arg_1 + arg_3 , arg_2 , arg_4 ) arg_0 ( arg_1 - arg_3 , arg_2 , arg_4 ) while arg_8 < arg_9 : if arg_5 >= 0 : arg_9 -= 1 arg_7 += 2 arg_5 += arg_7 arg_8 += 1 arg_6 += 2 arg_5 += arg_6 arg_0 ( arg_1 + arg_8 , arg_2 + arg_9 , arg_4 ) arg_0 ( arg_1 - arg_8 , arg_2 + arg_9 , arg_4 ) arg_0 ( arg_1 + arg_8 , arg_2 - arg_9 , arg_4 ) arg_0 ( arg_1 - arg_8 , arg_2 - arg_9 , arg_4 ) arg_0 ( arg_1 + arg_9 , arg_2 + arg_8 , arg_4 ) arg_0 ( arg_1 - arg_9 , arg_2 + arg_8 , arg_4 ) arg_0 ( arg_1 + arg_9 , arg_2 - arg_8 , arg_4 ) arg_0 ( arg_1 - arg_9 , arg_2 - arg_8 , arg_4 )","id_":252044,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/layout\/matrix_drawing.py#L13-L44","negative":"Convert this unnormalized batch to an instance of Batch.\n\n As this method is intended to be called before augmentation, it\n assumes that none of the ``*_aug`` attributes is yet set.\n It will produce an AssertionError otherwise.\n\n The newly created Batch's ``*_unaug`` attributes will match the ones\n in this batch, just in normalized form.\n\n Returns\n -------\n imgaug.augmentables.batches.Batch\n The batch, with ``*_unaug`` attributes being normalized."} {"query":"Reboots the server .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . _perform_request ( url = '\/datacenters\/%s\/servers\/%s\/reboot' % ( arg_1 , arg_2 ) , method = 'POST-ACTION' ) return arg_3","id_":252045,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1579-L1596","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Aggregates results of a query into buckets defined by the agg_def parameter . The aggregations are represented by dicts containing a name key and a terms key holding a list of the aggregation buckets . Each bucket element is a dict containing a term key containing the term used for this bucket a count key containing the count of items that match this bucket and an aggregations key containing any child aggregations .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = 10 , arg_7 = arg_8 ) : arg_9 = load_wkt ( arg_1 ) . __geo_interface__ arg_10 = str ( arg_2 ) arg_11 = { \"count\" : arg_6 , \"aggs\" : arg_10 } if arg_3 : arg_11 [ 'query' ] = arg_3 if arg_4 : arg_11 [ 'start_date' ] = arg_4 if arg_5 : arg_11 [ 'end_date' ] = arg_5 arg_12 = arg_0 . aggregations_by_index_url % arg_7 if arg_7 else arg_0 . aggregations_url arg_13 = arg_0 . gbdx_connection . post ( arg_12 , arg_11 = arg_11 , json = arg_9 ) arg_13 . raise_for_status ( ) return arg_13 . 
json ( object_pairs_hook = OrderedDict ) [ 'aggregations' ]","id_":252046,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/vectors.py#L256-L296","negative":"Clear all configuration properties from in-memory cache, but do NOT\n alter the custom configuration file. Used in unit-testing."} {"query":"Parse the incoming grid .","positive":"def Func ( arg_0 ) : try : arg_1 = NEWLINE_RE . split ( arg_0 ) if len ( arg_1 ) < 2 : raise ZincParseException ( 'Malformed grid received' , arg_0 , 1 , 1 ) arg_2 = arg_1 . pop ( 0 ) arg_3 = arg_1 . pop ( 0 ) arg_4 = VERSION_RE . match ( arg_2 ) if arg_4 is None : raise ZincParseException ( 'Could not determine version from %r' % arg_2 , arg_0 , 1 , 1 ) arg_5 = Version ( arg_4 . group ( 1 ) ) try : arg_6 = hs_gridMeta [ arg_5 ] . parseString ( arg_2 , parseAll = True ) [ 0 ] except pp . ParseException as pe : raise ZincParseException ( 'Failed to parse grid metadata: %s' % pe , arg_0 , 1 , pe . col ) except : LOG . debug ( 'Failed to parse grid meta: %r' , arg_2 ) raise try : arg_7 = hs_cols [ arg_5 ] . parseString ( arg_3 , parseAll = True ) [ 0 ] except pp . ParseException as pe : raise ZincParseException ( 'Failed to parse column metadata: %s' % reformat_exception ( pe , 2 ) , arg_0 , 2 , pe . col ) except : LOG . debug ( 'Failed to parse column meta: %r' , arg_3 ) raise arg_8 = hs_row [ arg_5 ] def _parse_row ( arg_9 ) : ( arg_10 , arg_11 ) = arg_9 arg_12 = arg_10 + 3 try : return dict ( zip ( arg_7 . keys ( ) , arg_8 . parseString ( arg_11 , parseAll = True ) [ 0 ] . asList ( ) ) ) except pp . ParseException as pe : raise ZincParseException ( 'Failed to parse row: %s' % reformat_exception ( pe , arg_12 ) , arg_0 , arg_12 , pe . col ) except : LOG . debug ( 'Failed to parse row: %r' , arg_11 ) raise arg_13 = Grid ( arg_5 = arg_6 . pop ( 'ver' ) , metadata = arg_6 , columns = list ( arg_7 . items ( ) ) ) arg_13 . extend ( map ( _parse_row , filter ( lambda gp : bool ( gp [ 1 ] ) , enumerate ( arg_1 ) ) ) ) return arg_13 except : LOG . debug ( 'Failing grid: %r' , arg_0 ) raise","id_":252047,"task_name":"https:\/\/github.com\/vrtsystems\/hszinc\/blob\/d52a7c6b5bc466f3c1a77b71814c8c0776aba995\/hszinc\/zincparser.py#L497-L572","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Poll the queues that the worker can use to communicate with the supervisor until all the workers are done and all the queues are empty . Handle messages as they appear .","positive":"def Func ( arg_0 ) : import time arg_1 = lambda : ( multiprocessing . active_children ( ) or not arg_0 . log_queue . empty ( ) or not arg_0 . exception_queue . empty ( ) ) try : while arg_1 ( ) : try : arg_2 = arg_0 . log_queue . get_nowait ( ) arg_3 = logging . getLogger ( arg_2 . name ) arg_3 . handle ( arg_2 ) except queue . Empty : pass try : arg_4 = arg_0 . exception_queue . get_nowait ( ) except queue . Empty : pass else : raise arg_4 time . sleep ( 1 \/ arg_0 . frame_rate ) arg_0 . elapsed_time += 1 \/ arg_0 . frame_rate if arg_0 . time_limit and arg_0 . elapsed_time > arg_0 . time_limit : raise RuntimeError ( \"timeout\" ) finally : for arg_5 in multiprocessing . active_children ( ) : arg_5 . 
terminate ( )","id_":252048,"task_name":"https:\/\/github.com\/kxgames\/kxg\/blob\/a68c01dc4aa1abf6b3780ba2c65a7828282566aa\/kxg\/quickstart.py#L143-L192","negative":"Return an open file-object to the index file"}
{"query":"Calculate the temperature of the package given the specified enthalpy using a secant algorithm .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = list ( ) arg_2 . append ( arg_0 . _T ) arg_2 . append ( arg_0 . _T + 10.0 ) arg_3 = list ( ) arg_3 . append ( arg_0 . _calculate_H ( arg_2 [ 0 ] ) - arg_1 ) arg_3 . append ( arg_0 . _calculate_H ( arg_2 [ 1 ] ) - arg_1 ) for arg_4 in range ( 2 , 50 ) : arg_2 . append ( arg_2 [ arg_4 - 1 ] - arg_3 [ arg_4 - 1 ] * ( ( arg_2 [ arg_4 - 1 ] - arg_2 [ arg_4 - 2 ] ) \/ ( arg_3 [ arg_4 - 1 ] - arg_3 [ arg_4 - 2 ] ) ) ) arg_3 . append ( arg_0 . _calculate_H ( arg_2 [ arg_4 ] ) - arg_1 ) if abs ( arg_3 [ arg_4 - 1 ] ) < 1.0e-5 : break return arg_2 [ len ( arg_2 ) - 1 ]","id_":252049,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/modelling\/process\/materials\/thermo.py#L706-L733","negative":"Inform the widget about the encoding of the underlying character stream."}
{"query":"Move an existing key to the beginning or end of this ordered bidict .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : arg_3 = arg_0 . _fwdm [ arg_1 ] arg_3 . prv . nxt = arg_3 . nxt arg_3 . nxt . prv = arg_3 . prv arg_6 = arg_0 . _sntl if arg_2 : arg_2 = arg_6 . prv arg_3 . prv = arg_2 arg_3 . nxt = arg_6 arg_6 . prv = arg_2 . nxt = arg_3 else : arg_7 = arg_6 . nxt arg_3 . prv = arg_6 arg_3 . nxt = arg_7 arg_6 . nxt = arg_7 . prv = arg_3","id_":252050,"task_name":"https:\/\/github.com\/jab\/bidict\/blob\/1a1ba9758651aed9c4f58384eff006d2e2ad6835\/bidict\/_orderedbidict.py#L61-L81","negative":"Print informations about PyFunceble and the date of generation of a file\n into a given path, if doesn't exist."}
{"query":"Write the data encoding the ProtocolVersion struct to a stream .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_1_0 ) : arg_6 = utils . BytearrayStream ( ) if arg_0 . _major : arg_0 . _major . Func ( arg_6 , arg_2 = arg_2 ) else : raise ValueError ( \"Invalid struct missing the major protocol version number.\" ) if arg_0 . _minor : arg_0 . _minor . Func ( arg_6 , arg_2 = arg_2 ) else : raise ValueError ( \"Invalid struct missing the minor protocol version number.\" ) arg_0 . length = arg_6 . length ( ) super ( ProtocolVersion , arg_0 ) . Func ( arg_1 , arg_2 = arg_2 ) arg_1 . Func ( arg_6 . buffer )","id_":252051,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/messages\/contents.py#L146-L182","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."}
{"query":"add some stats entries to the statistic dictionary raise an AssertionError if there is a key conflict","positive":"def Func ( arg_0 , ** arg_1 ) : for arg_2 , arg_3 in arg_1 . items ( ) : if arg_2 [ - 1 ] == \"_\" : arg_2 = arg_2 [ : - 1 ] assert arg_2 not in arg_0 . stats arg_0 . stats [ arg_2 ] = arg_3 return arg_0 . stats","id_":252052,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/reporters\/reports_handler_mix_in.py#L70-L79","negative":"Send a completion status call to Degreed using the client.\n\n Args:\n payload: The learner completion data payload to send to Degreed"}
{"query":"r This function handles the retrieval of a chemical s triple pressure . Lookup is based on CASRNs . Will automatically select a data source to use if no Method is provided ; returns None if the data is not available .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = None ) : def list_methods ( ) : arg_3 = [ ] if arg_0 in Staveley_data . index and not np . isnan ( Staveley_data . at [ arg_0 , 'Pt' ] ) : arg_3 . append ( STAVELEY ) if Tt ( arg_0 ) and VaporPressure ( arg_0 = arg_0 ) . T_dependent_property ( T = Tt ( arg_0 ) ) : arg_3 . append ( DEFINITION ) arg_3 . append ( NONE ) return arg_3 if arg_1 : return list_methods ( ) if not arg_2 : arg_2 = list_methods ( ) [ 0 ] if arg_2 == STAVELEY : Func = Staveley_data . at [ arg_0 , 'Pt' ] elif arg_2 == DEFINITION : Func = VaporPressure ( arg_0 = arg_0 ) . T_dependent_property ( T = Tt ( arg_0 ) ) elif arg_2 == NONE : Func = None else : raise Exception ( 'Failure in in function' ) return Func","id_":252053,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/triple.py#L125-L193","negative":"Multiply tensor of matrices by vectors assuming values stored are logs."}
{"query":"Resizes the window to the given dimensions .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . driver . resize_window_to ( arg_0 . handle , arg_1 , arg_2 )","id_":252054,"task_name":"https:\/\/github.com\/elliterate\/capybara.py\/blob\/0c6ae449cc37e4445ec3cd6af95674533beedc6c\/capybara\/window.py#L106-L118","negative":"Parses package fields."}
{"query":"Formats the output of another tool in the given way . Has default styles for ranges hosts and services .","positive":"def Func ( ) : arg_0 = argparse . ArgumentParser ( description = 'Formats a json object in a certain way. Use with pipes.' ) arg_0 . add_argument ( 'Func' , metavar = 'Func' , help = 'How to Func the json for example \"{address}:{port}\".' , nargs = '?' ) arg_1 = arg_0 . parse_args ( ) arg_2 = \"{address:15} {port:7} {protocol:5} {service:15} {state:10} {banner} {tags}\" arg_3 = \"{address:15} {tags}\" arg_4 = \"{range:18} {tags}\" arg_5 = \"{username}\" if arg_1 . Func : Func_input ( arg_1 . Func ) else : arg_6 = DocMapper ( ) if arg_6 . is_pipe : for arg_7 in arg_6 . get_pipe ( ) : arg_8 = '' if isinstance ( arg_7 , Range ) : arg_8 = arg_4 elif isinstance ( arg_7 , Host ) : arg_8 = arg_3 elif isinstance ( arg_7 , Service ) : arg_8 = arg_2 elif isinstance ( arg_7 , User ) : arg_8 = arg_5 print_line ( fmt . Func ( arg_8 , ** arg_7 . to_dict ( include_meta = True ) ) ) else : print_error ( \"Please use this script with pipes\" )","id_":252055,"task_name":"https:\/\/github.com\/mwgielen\/jackal\/blob\/7fe62732eb5194b7246215d5277fb37c398097bf\/jackal\/scripts\/filter.py#L27-L56","negative":"The estimated signal-to-noise_maps mappers of the image."}
{"query":"sets up the threadpool with map for parallel processing","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = False , arg_4 = False ) : if arg_2 is None : arg_5 = ThreadPool ( ) else : arg_5 = ThreadPool ( arg_2 ) arg_6 = time . time ( ) if arg_3 is False : try : arg_7 = arg_5 . map ( arg_0 , arg_1 ) arg_5 . close ( ) arg_5 . join ( ) except : print 'Func Failed... running in series :-(' arg_7 = series ( arg_1 , arg_0 ) else : arg_7 = series ( arg_1 , arg_0 ) arg_8 = time . time ( ) arg_9 = arg_8 - arg_6 if arg_4 is False : if arg_2 is None : print \"Elapsed time: %s :-)\\n\" % str ( arg_9 ) else : print \"Elapsed time: %s on %s Funcs :-)\\n\" % ( str ( arg_9 ) , str ( arg_2 ) ) return arg_7","id_":252056,"task_name":"https:\/\/github.com\/jshiv\/turntable\/blob\/c095a93df14d672ba54db164a7ab7373444d1829\/turntable\/spin.py#L188-L223","negative":"Checks if an blob_name is updated in Google Cloud Storage.\n\n :param bucket_name: The Google cloud storage bucket where the object is.\n :type bucket_name: str\n :param object_name: The name of the object to check in the Google cloud\n storage bucket.\n :type object_name: str\n :param ts: The timestamp to check against.\n :type ts: datetime.datetime"}
{"query":"Fetch all the information by using aiohttp","positive":"async def Func ( arg_0 ) -> Response : if arg_0 . request_config . get ( 'DELAY' , 0 ) > 0 : await asyncio . sleep ( arg_0 . request_config [ 'DELAY' ] ) arg_1 = arg_0 . request_config . get ( 'TIMEOUT' , 10 ) try : async with async_timeout . timeout ( arg_1 ) : arg_2 = await arg_0 . _make_request ( ) try : arg_3 = await arg_2 . text ( encoding = arg_0 . encoding ) except UnicodeDecodeError : arg_3 = await arg_2 . read ( ) arg_4 = Response ( url = arg_0 . url , method = arg_0 . method , encoding = arg_2 . get_encoding ( ) , html = arg_3 , metadata = arg_0 . metadata , cookies = arg_2 . cookies , headers = arg_2 . headers , history = arg_2 . history , status = arg_2 . status , aws_json = arg_2 . json , aws_text = arg_2 . text , aws_read = arg_2 . read ) arg_5 = arg_0 . request_config . get ( 'VALID' ) if arg_5 and iscoroutinefunction ( arg_5 ) : arg_4 = await arg_5 ( arg_4 ) if arg_4 . ok : return arg_4 else : return await arg_0 . _retry ( error_msg = 'request url failed!' ) except asyncio . TimeoutError : return await arg_0 . _retry ( error_msg = 'timeout' ) except Exception as e : return await arg_0 . _retry ( error_msg = e ) finally : await arg_0 . _close_request_session ( )","id_":252057,"task_name":"https:\/\/github.com\/howie6879\/ruia\/blob\/2dc5262fc9c3e902a8faa7d5fa2f046f9d9ee1fa\/ruia\/request.py#L85-L126","negative":"Return a valid zero cutoff value."}
{"query":"Transport the file from the local filesystem to the remote Globus endpoint .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 . scheme == 'http' or arg_1 . scheme == 'https' : raise Exception ( 'HTTP\/HTTPS file staging out is not supported' ) elif arg_1 . scheme == 'ftp' : raise Exception ( 'FTP file staging out is not supported' ) elif arg_1 . scheme == 'globus' : arg_3 = arg_0 . _get_globus_endpoint ( arg_2 ) arg_4 = arg_0 . _globus_Func_app ( ) return arg_4 ( arg_3 , inputs = [ arg_1 ] ) else : raise Exception ( 'Staging out with unknown file scheme {} is not supported' . format ( arg_1 . scheme ) )","id_":252058,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/data_provider\/data_manager.py#L190-L213","negative":"Add members found in prior versions up till the next major release\n\n These members are to be considered deprecated. When a new major\n release is made, these members are removed."}
{"query":"Return a dictionary of environment variables for the container .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = { } arg_4 . update ( providers_util . get_file_environment_variables ( arg_1 ) ) arg_4 . update ( providers_util . get_file_environment_variables ( arg_2 ) ) arg_4 . update ( providers_util . get_file_environment_variables ( arg_3 ) ) return arg_4","id_":252059,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/providers\/local.py#L779-L785","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."}
{"query":"Strip water and fit to the remaining system .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_1 . setdefault ( 'fit' , 'rot+trans' ) arg_2 = { } for arg_3 in ( 'xy' , 'fit' , 'fitgroup' , 'input' ) : if arg_3 in arg_1 : arg_2 [ arg_3 ] = arg_1 . pop ( arg_3 ) arg_1 [ 'input' ] = arg_1 . pop ( 'strip_input' , [ 'Protein' ] ) arg_1 [ 'force' ] = arg_2 [ 'force' ] = arg_1 . pop ( 'force' , arg_0 . force ) arg_4 = arg_0 . strip_water ( ** arg_1 ) arg_5 = arg_0 . nowater [ arg_4 [ 'xtc' ] ] return arg_5 . fit ( ** arg_2 )","id_":252060,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/cbook.py#L2070-L2105","negative":"Accept a publication request at form value 'epub"}
{"query":"Start asynchronous HTTP Server on an individual process .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = False , arg_5 = 60 , arg_6 = None , arg_7 = None , arg_8 = False , arg_9 = None , arg_10 = arg_11 , arg_12 = 100 , ** arg_13 ) : if arg_4 : arg_9 . set_debug ( arg_4 ) arg_14 = partial ( arg_10 , arg_9 = arg_9 , connections = arg_0 . connections , signal = arg_0 . signal , arg_2 = arg_2 , arg_3 = arg_3 , arg_5 = arg_5 , arg_7 = arg_7 , ) arg_15 = arg_9 . create_Funcr ( arg_14 , host = None , port = None , arg_6 = arg_6 , arg_8 = arg_8 , arg_1 = arg_1 , arg_12 = arg_12 ) arg_9 . call_soon ( partial ( update_current_time , arg_9 ) ) return arg_15","id_":252061,"task_name":"https:\/\/github.com\/messense\/sanic-gunicorn\/blob\/da1e738d9ff4bb064ca477f9aeb37e12f31be243\/sanic_gunicorn.py#L108-L152","negative":"Unregister an extension code. For testing only."}
{"query":"Parse value from database .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . field_type == 'TEXT' and isinstance ( arg_1 , str ) : return arg_0 . loads ( arg_1 ) return arg_1","id_":252062,"task_name":"https:\/\/github.com\/klen\/muffin-peewee\/blob\/8e893e3ea1dfc82fbcfc6efe784308c8d4e2852e\/muffin_peewee\/fields.py#L46-L50","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"}
{"query":"Update metadata host information","positive":"def Func ( arg_0 ) : print ( \"Added platform information\" ) arg_1 = arg_0 . package arg_2 = plugins_get_mgr ( ) arg_3 = arg_2 . get ( what = 'instrumentation' , name = 'platform' ) arg_1 [ 'platform' ] = arg_3 . get_metadata ( )","id_":252063,"task_name":"https:\/\/github.com\/pingali\/dgit\/blob\/ecde01f40b98f0719dbcfb54452270ed2f86686d\/dgitcore\/datasets\/common.py#L540-L549","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"}
{"query":"Check if self has overlap with interval .","positive":"def Func ( arg_0 , arg_1 : 'Interval' ) -> bool : if arg_0 . begin < arg_1 . end and arg_1 . begin < arg_0 . end : return True return False","id_":252064,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/timeslots.py#L59-L70","negative":"Setup logging for the application and aiohttp."}
{"query":"Compute matches when text is a simple name .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = arg_2 . append arg_4 = len ( arg_1 ) for arg_5 in [ keyword . kwlist , __builtin__ . __dict__ . keys ( ) , arg_0 . namespace . keys ( ) , arg_0 . global_namespace . keys ( ) ] : for arg_6 in arg_5 : if arg_6 [ : arg_4 ] == arg_1 and arg_6 != \"__builtins__\" : arg_3 ( arg_6 ) return arg_2","id_":252065,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/completer.py#L306-L324","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."}
{"query":"Determines whether the status of the current cart is valid ; this is normally called before generating or paying an invoice","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . cart arg_2 = arg_0 . cart . user arg_3 = [ ] try : arg_0 . _test_vouchers ( arg_0 . cart . vouchers . all ( ) ) except ValidationError as ve : arg_3 . append ( ve ) arg_4 = commerce . ProductItem . objects . filter ( arg_1 = arg_1 ) arg_4 = arg_4 . select_related ( \"product\" , \"product__category\" ) arg_5 = list ( ( i . product , i . quantity ) for i in arg_4 ) try : arg_0 . _test_limits ( arg_5 ) except ValidationError as ve : arg_0 . _append_errors ( arg_3 , ve ) try : arg_0 . _test_required_categories ( ) except ValidationError as ve : arg_0 . _append_errors ( arg_3 , ve ) arg_6 = [ i . product for i in arg_4 ] arg_7 = DiscountController . available_discounts ( arg_2 , [ ] , arg_6 , ) arg_8 = set ( i . discount . id for i in arg_7 ) arg_9 = commerce . DiscountItem . objects . filter ( arg_1 = arg_1 ) for arg_10 in arg_9 : arg_11 = arg_10 . discount if arg_11 . id not in arg_8 : arg_3 . append ( ValidationError ( \"Discounts are no longer available\" ) ) if arg_3 : raise ValidationError ( arg_3 )","id_":252066,"task_name":"https:\/\/github.com\/chrisjrn\/registrasion\/blob\/461d5846c6f9f3b7099322a94f5d9911564448e4\/registrasion\/controllers\/cart.py#L352-L403","negative":"Get a texture by its label\n\n Args:\n label (str): The Label for the texture\n\n Returns:\n The py:class:`moderngl.Texture` instance"}
{"query":"Add a linear version of a minimal medium to the model solver .","positive":"def Func ( arg_0 ) : arg_1 = { } for arg_2 in find_boundary_types ( arg_0 , \"exchange\" ) : arg_3 = len ( arg_2 . reactants ) == 1 if arg_3 : arg_1 [ arg_2 . reverse_variable ] = 1 else : arg_1 [ arg_2 . forward_variable ] = 1 arg_0 . objective . set_linear_coefficients ( arg_1 ) arg_0 . objective . direction = \"min\"","id_":252067,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/medium\/minimal_medium.py#L17-L38","negative":"See token counts as pandas dataframe"}
{"query":"Imports experiences into the TensorFlow memory structure . Can be used to import off - policy data .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : return arg_0 . memory . store ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 )","id_":252068,"task_name":"https:\/\/github.com\/tensorforce\/tensorforce\/blob\/520a8d992230e382f08e315ede5fc477f5e26bfb\/tensorforce\/models\/memory_model.py#L575-L593","negative":"Return a new individual crossing self and other."}
{"query":"Helper to raise an AssertionError and optionally prepend custom description .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = '%s%s' % ( '[%s] ' % arg_0 . description if len ( arg_0 . description ) > 0 else '' , arg_1 ) if arg_0 . kind == 'warn' : print ( arg_2 ) return arg_0 elif arg_0 . kind == 'soft' : global _softFunc _softFunc . append ( arg_2 ) return arg_0 else : raise AssertionError ( arg_2 )","id_":252069,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L1100-L1111","negative":"Register sample aggregations."}
{"query":"Play the queue from a specific point . Disregards tracks before the index .","positive":"async def Func ( arg_0 , arg_1 : arg_2 ) : arg_0 . queue = arg_0 . queue [ min ( arg_1 , len ( arg_0 . queue ) - 1 ) : len ( arg_0 . queue ) ] await arg_0 . play ( ignore_shuffle = True )","id_":252070,"task_name":"https:\/\/github.com\/Devoxin\/Lavalink.py\/blob\/63f55c3d726d24c4cfd3674d3cd6aab6f5be110d\/lavalink\/PlayerManager.py#L132-L135","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."}
{"query":"Return the available MatchMaker nodes","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] if not arg_0 or not arg_1 : return arg_2 arg_3 = '' . join ( [ arg_0 , '\/nodes' ] ) arg_2 = matchmaker_request ( arg_3 = arg_3 , arg_1 = arg_1 , method = 'GET' ) LOG . info ( 'Matchmaker has the following connected nodes:{}' . format ( arg_2 ) ) return arg_2","id_":252071,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/utils\/matchmaker.py#L61-L77","negative":"Decompress and unpickle."}
{"query":"Retrieve an activity given its name .","positive":"def Func ( arg_0 , arg_1 ) : return [ arg_2 for arg_2 in arg_0 . activities if arg_2 . name == arg_1 ] [ 0 ]","id_":252072,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/modelling\/business\/structure.py#L222-L231","negative":"This will output the nginx HTTP config string for specific port spec"}
{"query":"Calculate percentage usage of used against total .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : try : arg_3 = ( arg_0 \/ arg_1 ) * 100 except ZeroDivisionError : arg_3 = 0 if arg_2 is not None : return round ( arg_3 , arg_2 ) else : return arg_3","id_":252073,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/psutil\/_common.py#L22-L31","negative":"Decorator for methods that need many retries, because of\n intermittent failures, such as AWS calls via boto, which has a\n non-back-off retry."}
{"query":"Count the number of non - zero values for each feature in sparse X .","positive":"def Func ( arg_0 ) : if sp . isspmatrix_csr ( arg_0 ) : return np . bincount ( arg_0 . indices , minlength = arg_0 . shape [ 1 ] ) return np . diff ( sp . csc_matrix ( arg_0 , copy = False ) . indptr )","id_":252074,"task_name":"https:\/\/github.com\/rkcosmos\/deepcut\/blob\/9a2729071d01972af805acede85d7aa9e7a6da30\/deepcut\/deepcut.py#L70-L76","negative":"send the registration_request"}
{"query":"Loads a HostEntry from a boto instance .","positive":"def Func ( arg_0 , arg_1 ) : return arg_0 ( name = arg_1 . tags . get ( 'Name' ) , private_ip = arg_1 . private_ip_address , public_ip = arg_1 . ip_address , instance_type = arg_1 . instance_type , instance_id = arg_1 . id , hostname = arg_1 . dns_name , stack_id = arg_1 . tags . get ( 'aws:cloudformation:stack-id' ) , stack_name = arg_1 . tags . get ( 'aws:cloudformation:stack-name' ) , logical_id = arg_1 . tags . get ( 'aws:cloudformation:logical-id' ) , security_groups = [ arg_2 . name for arg_2 in arg_1 . groups ] , launch_time = arg_1 . launch_time , ami_id = arg_1 . image_id , tags = { arg_3 . lower ( ) : arg_4 for arg_3 , arg_4 in six . iteritems ( arg_1 . tags ) } )","id_":252075,"task_name":"https:\/\/github.com\/NarrativeScience\/lsi\/blob\/7d901b03fdb1a34ef795e5412bfe9685d948e32d\/src\/lsi\/utils\/hosts.py#L239-L262","negative":"Setup coverage related extensions."}
{"query":"Move this object above the referenced object .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . _valid_ordering_reference ( arg_1 ) : raise ValueError ( \"%r can only be moved Func instances of %r which %s equals %r.\" % ( arg_0 , arg_0 . __class__ , arg_0 . order_with_respect_to , arg_0 . _get_order_with_respect_to ( ) ) ) if arg_0 . order == arg_1 . order : return if arg_0 . order > arg_1 . order : arg_2 = arg_1 . order else : arg_2 = arg_0 . get_ordering_queryset ( ) . filter ( order__lt = arg_1 . order ) . aggregate ( Max ( 'order' ) ) . get ( 'order__max' ) or 0 arg_0 . to ( arg_2 )","id_":252076,"task_name":"https:\/\/github.com\/lucastheis\/django-publications\/blob\/5a75cf88cf794937711b6850ff2acb07fe005f08\/publications\/models\/orderedmodel.py#L163-L180","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."}
{"query":"Unserializes a serialized php array and prints it to the console as a data structure in the specified language . Used to translate or convert a php array into a data structure in another language . Currently supports PHP Python Javascript and JSON .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 3 , arg_4 = False ) : arg_2 = arg_2 . lower ( ) assert arg_0 . is_built_in ( arg_2 ) or arg_2 in arg_0 . outer_templates , \"Sorry, \" + arg_2 + \" is not a supported language.\" arg_5 = phpserialize . loads ( bytes ( arg_1 , 'utf-8' ) , array_hook = list , decode_strings = True ) if arg_0 . is_built_in ( arg_2 ) : arg_0 . get_built_in ( arg_2 , arg_3 , arg_5 ) print ( arg_0 ) return arg_0 . data_structure if arg_4 else None def loop_print ( arg_6 , arg_3 = 3 ) : arg_7 = '' arg_8 = ' ' * arg_3 if not arg_0 . is_iterable ( arg_6 ) or isinstance ( arg_6 , str ) : arg_9 = str ( arg_6 ) return str ( arg_9 ) for arg_10 in arg_6 : if isinstance ( arg_10 , tuple ) and len ( arg_10 ) == 2 : arg_11 = arg_10 [ 0 ] arg_12 = loop_print ( arg_10 [ 1 ] , arg_3 = arg_3 + 3 ) arg_12 = arg_0 . translate_val ( arg_2 , arg_12 ) if arg_2 in arg_0 . lang_specific_values and arg_12 in arg_0 . lang_specific_values [ arg_2 ] else arg_12 arg_11 = str ( arg_11 ) if isinstance ( arg_11 , int ) else '\\'' + str ( arg_11 ) + '\\'' arg_13 = hasattr ( arg_10 [ 0 ] , '__iter__' ) == False and hasattr ( arg_10 [ 1 ] , '__iter__' ) == True if arg_13 : arg_7 += arg_0 . get_inner_template ( arg_2 , 'iterable' , arg_8 , arg_11 , arg_12 ) else : arg_12 = str ( arg_12 ) if arg_12 . isdigit ( ) or arg_12 in arg_0 . lang_specific_values [ arg_2 ] . values ( ) else '\\'' + str ( arg_12 ) + '\\'' arg_7 += arg_0 . get_inner_template ( arg_2 , 'singular' , arg_8 , arg_11 , arg_12 ) return arg_7 arg_0 . data_structure = arg_0 . outer_templates [ arg_2 ] % ( loop_print ( arg_5 ) ) print ( arg_0 ) return arg_0 . data_structure if arg_4 else None","id_":252077,"task_name":"https:\/\/github.com\/bbusenius\/Diablo-Python\/blob\/646ac5a6f1c79cf9b928a4e2a7979988698b6c82\/convert_php\/convert_php.py#L166-L260","negative":"Pauses the thread for a specified time.\n\n Returns False if interrupted by another thread and True if the\n time runs out normally."}
{"query":"A way to figure out the boot time directly on Linux .","positive":"def Func ( ) : global arg_2 try : arg_0 = open ( '\/proc\/stat' , 'r' ) for arg_1 in arg_0 : if arg_1 . startswith ( 'btime' ) : arg_2 = int ( arg_1 . split ( ) [ 1 ] ) if datetime is None : raise NotImplementedError ( 'datetime module required.' ) return datetime . fromtimestamp ( arg_2 ) except ( IOError , IndexError ) : return None","id_":252078,"task_name":"https:\/\/github.com\/Cairnarvon\/uptime\/blob\/1ddfd06bb300c00e6dc4bd2a9ddf9bf1aa27b1bb\/src\/__init__.py#L97-L111","negative":"compat_convertHashedIndexes - Reindex all fields for the provided objects, where the field value is hashed or not.\n\t\t\tIf the field is unhashable, do not allow.\n\n\t\t\tNOTE: This works one object at a time. It is intended to be used while your application is offline,\n\t\t\t as it doesn't make sense to be changing your model while applications are actively using it.\n\n\t\t\t@param objs \n\t\t\t@param conn - Specific Redis connection or None to reuse."}
{"query":"The index of the deepest character readed .","positive":"def Func ( arg_0 ) -> Position : return Position ( arg_0 . _maxindex , arg_0 . _maxline , arg_0 . _maxcol )","id_":252079,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/parsing\/stream.py#L51-L53","negative":"Add an HTTP header to response object.\n\n Arguments:\n name (str): HTTP header field name\n value (str): HTTP header field value"}
{"query":"Fit CRF according to X y","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = pycrfsuite . Trainer ( verbose = True ) for arg_4 , arg_5 in zip ( arg_1 , arg_2 ) : arg_3 . append ( arg_4 , arg_5 ) arg_3 . set_params ( arg_0 . params ) if arg_0 . filename : arg_6 = arg_0 . filename else : arg_6 = 'model.tmp' arg_3 . train ( arg_6 ) arg_7 = pycrfsuite . Tagger ( ) arg_7 . open ( arg_6 ) arg_0 . estimator = arg_7","id_":252080,"task_name":"https:\/\/github.com\/undertheseanlp\/languageflow\/blob\/1436e0bf72803e02ccf727f41e8fc85ba167d9fe\/languageflow\/model\/crf.py#L10-L33","negative":"Fetch the base Managed Policy.\n\n This includes the base policy and the latest version document.\n\n :param managed_policy:\n :param conn:\n :return:"}
{"query":"Creates a Transaction object from a sequence of trytes .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = TransactionTrytes ( arg_1 ) if not arg_2 : arg_4 = [ 0 ] * HASH_LENGTH arg_5 = Curl ( ) arg_5 . absorb ( arg_3 . as_trits ( ) ) arg_5 . squeeze ( arg_4 ) arg_2 = TransactionHash . from_trits ( arg_4 ) return arg_0 ( arg_2 = arg_2 , signature_message_fragment = Fragment ( arg_3 [ 0 : 2187 ] ) , address = Address ( arg_3 [ 2187 : 2268 ] ) , value = int_from_trits ( arg_3 [ 2268 : 2295 ] . as_trits ( ) ) , legacy_tag = Tag ( arg_3 [ 2295 : 2322 ] ) , timestamp = int_from_trits ( arg_3 [ 2322 : 2331 ] . as_trits ( ) ) , current_index = int_from_trits ( arg_3 [ 2331 : 2340 ] . as_trits ( ) ) , last_index = int_from_trits ( arg_3 [ 2340 : 2349 ] . as_trits ( ) ) , bundle_hash = BundleHash ( arg_3 [ 2349 : 2430 ] ) , trunk_transaction_hash = TransactionHash ( arg_3 [ 2430 : 2511 ] ) , branch_transaction_hash = TransactionHash ( arg_3 [ 2511 : 2592 ] ) , tag = Tag ( arg_3 [ 2592 : 2619 ] ) , attachment_timestamp = int_from_trits ( arg_3 [ 2619 : 2628 ] . as_trits ( ) ) , attachment_timestamp_lower_bound = int_from_trits ( arg_3 [ 2628 : 2637 ] . as_trits ( ) ) , attachment_timestamp_upper_bound = int_from_trits ( arg_3 [ 2637 : 2646 ] . as_trits ( ) ) , nonce = Nonce ( arg_3 [ 2646 : 2673 ] ) , )","id_":252081,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/transaction\/base.py#L29-L78","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"}
{"query":"For each section defined in the local config file creates a folder inside the local config folder named after the section . Downloads the environemnt file defined by the S3CONF variable for this section to this folder .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = config . Settings ( arg_0 = arg_0 ) arg_3 = STORAGES [ 's3' ] ( arg_2 = arg_2 ) arg_4 = s3conf . S3Conf ( arg_3 = arg_3 , arg_2 = arg_2 ) arg_5 = os . path . join ( config . LOCAL_CONFIG_FOLDER , arg_0 ) arg_4 . Func ( arg_5 , arg_1 = arg_1 ) except exceptions . EnvfilePathNotDefinedError : raise exceptions . EnvfilePathNotDefinedUsageError ( )","id_":252082,"task_name":"https:\/\/github.com\/sbneto\/s3conf\/blob\/92fd2973beccc85bb21d3157ff227929e62ed695\/s3conf\/client.py#L217-L230","negative":"Register a new range type as a PostgreSQL range.\n\n >>> register_range_type(\"int4range\", intrange, conn)\n\n The above will make sure intrange is regarded as an int4range for queries\n and that int4ranges will be cast into intrange when fetching rows.\n\n pgrange should be the full name including schema for the custom range type.\n\n Note that adaption is global, meaning if a range type is passed to a regular\n psycopg2 connection it will adapt it to its proper range type. Parsing of\n rows from the database however is not global and just set on a per connection\n basis."}
{"query":"Runs GenotypeGVCFs on one or more gVCFs generated by HaplotypeCaller .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None , arg_6 = 10.0 , arg_7 = 30.0 , arg_8 = False ) : arg_9 = { 'genome.fa' : arg_2 , 'genome.fa.fai' : arg_3 , 'genome.dict' : arg_4 } arg_9 . update ( arg_1 ) arg_10 = arg_0 . fileStore . getLocalTempDir ( ) for arg_11 , arg_12 in arg_9 . iteritems ( ) : arg_0 . fileStore . readGlobalFile ( arg_12 , os . path . join ( arg_10 , arg_11 ) ) arg_13 = [ '-T' , 'GenotypeGVCFs' , '-R' , '\/data\/genome.fa' , '--out' , 'genotyped.vcf' , '-stand_emit_conf' , str ( arg_6 ) , '-stand_call_conf' , str ( arg_7 ) ] if arg_5 : for arg_14 in arg_5 : arg_13 . extend ( [ '-A' , arg_14 ] ) for arg_15 in arg_1 . keys ( ) : arg_13 . extend ( [ '--variant' , os . path . join ( '\/data' , arg_15 ) ] ) if arg_8 : arg_13 . extend ( [ '-U' , 'ALLOW_SEQ_DICT_INCOMPATIBILITY' ] ) arg_0 . fileStore . logToMaster ( 'Running GATK GenotypeGVCFs\\n' 'Emit threshold: {emit_threshold}\\n' 'Call threshold: {call_threshold}\\n\\n' 'Annotations:\\n{annotations}\\n\\n' 'Samples:\\n{samples}\\n' . format ( arg_6 = arg_6 , arg_7 = arg_7 , arg_5 = '\\n' . join ( arg_5 ) if arg_5 else '' , samples = '\\n' . join ( arg_1 . keys ( ) ) ) ) arg_16 = [ '--rm' , 'log-driver' , 'none' , '-e' , 'JAVA_OPTS=-Djava.io.tmpdir=\/data\/ -Xmx{}' . format ( arg_0 . memory ) ] dockerCall ( arg_0 = arg_0 , workDir = arg_10 , parameters = arg_13 , tool = 'quay.io\/ucsc_cgl\/gatk:3.5--dba6dae49156168a909c43330350c6161dc7ecc2' , dockerParameters = arg_16 ) return arg_0 . fileStore . writeGlobalFile ( os . path . join ( arg_10 , 'genotyped.vcf' ) )","id_":252083,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/tools\/variant_annotation.py#L7-L72","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."}
{"query":"Load songs from local playlist .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = False , arg_4 = False , arg_5 = None ) : logger . info ( \"Loading local playlist songs...\" ) if os . name == 'nt' and CYGPATH_RE . match ( arg_0 ) : arg_0 = convert_cygwin_path ( arg_0 ) arg_6 = [ ] arg_7 = os . path . dirname ( os . path . abspath ( arg_0 ) ) with open ( arg_0 ) as local_playlist : for arg_8 in local_playlist . readlines ( ) : arg_8 = arg_8 . strip ( ) if arg_8 . lower ( ) . endswith ( SUPPORTED_SONG_FORMATS ) : arg_9 = arg_8 if not os . path . isabs ( arg_9 ) : arg_9 = os . path . join ( arg_7 , arg_9 ) if os . path . isfile ( arg_9 ) : arg_6 . append ( arg_9 ) arg_10 = get_supported_filepaths ( arg_6 , SUPPORTED_SONG_FORMATS ) arg_11 , arg_12 = exclude_filepaths ( arg_10 , arg_5 = arg_5 ) arg_13 , arg_14 = filter_local_songs ( arg_11 , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 ) logger . info ( \"Excluded {0} local playlist songs\" . format ( len ( arg_12 ) ) ) logger . info ( \"Filtered {0} local playlist songs\" . format ( len ( arg_14 ) ) ) logger . info ( \"Loaded {0} local playlist songs\" . format ( len ( arg_13 ) ) ) return arg_13 , arg_14 , arg_12","id_":252084,"task_name":"https:\/\/github.com\/thebigmunch\/gmusicapi-wrapper\/blob\/8708683cd33955def1378fc28319ef37805b851d\/gmusicapi_wrapper\/base.py#L129-L193","negative":"Parses a file and returns a document object.\n File, a file like object."}
{"query":"Sends a new message event to an Event Hub .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None ) : _validate_not_none ( 'hub_name' , arg_1 ) arg_5 = HTTPRequest ( ) arg_5 . method = 'POST' arg_5 . host = arg_0 . _get_host ( ) if arg_3 : arg_5 . path = '\/{0}\/publishers\/{1}\/messages?api-version=2014-01' . format ( arg_1 , arg_3 ) else : arg_5 . path = '\/{0}\/messages?api-version=2014-01' . format ( arg_1 ) if arg_4 : arg_5 . headers . append ( ( 'BrokerProperties' , str ( arg_4 ) ) ) arg_5 . body = _get_request_body ( arg_2 ) arg_5 . path , arg_5 . query = arg_0 . _httpclient . _update_request_uri_query ( arg_5 ) arg_5 . headers = arg_0 . _update_service_bus_header ( arg_5 ) arg_0 . _perform_request ( arg_5 )","id_":252085,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicebus\/azure\/servicebus\/control_client\/servicebusservice.py#L1199-L1218","negative":"Prune the cache"}
{"query":"Show Matchmaker submission data for a sample and eventual matches .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = { 'institute' : arg_1 , 'case' : arg_0 , 'server_errors' : [ ] } arg_5 = { } if not arg_0 . get ( 'mme_submission' ) : return None for arg_6 in arg_0 [ 'mme_submission' ] [ 'patients' ] : arg_7 = arg_6 [ 'id' ] arg_5 [ arg_7 ] = None arg_8 = '' . join ( [ arg_2 , '\/matches\/' , arg_7 ] ) arg_9 = matchmaker_request ( arg_8 = arg_8 , token = arg_3 , method = 'GET' ) if 'status_code' in arg_9 : arg_10 = [ ] if arg_9 . get ( 'matches' ) : arg_10 = parse_matches ( arg_7 , arg_9 [ 'matches' ] ) arg_5 [ arg_7 ] = arg_10 else : LOG . warning ( 'Server returned error message: {}' . format ( arg_9 [ 'message' ] ) ) arg_4 [ 'server_errors' ] . append ( arg_9 [ 'message' ] ) arg_4 [ 'matches' ] = arg_5 return arg_4","id_":252086,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/controllers.py#L733-L772","negative":"Generate a square lattice with auxiliary nodes for spanning detection\n\n Parameters\n ----------\n\n length : int\n Number of nodes in one dimension, excluding the auxiliary nodes.\n\n Returns\n -------\n\n networkx.Graph\n A square lattice graph with auxiliary nodes for spanning cluster\n detection\n\n See Also\n --------\n\n sample_states : spanning cluster detection"}
{"query":"Starts an asyncio event loop to connect to the master and run jobs .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = asyncio . new_event_loop ( ) asyncio . set_event_loop ( None ) arg_3 . run_until_complete ( handle_jobs ( arg_0 , arg_1 , arg_2 , arg_3 = arg_3 ) ) arg_3 . close ( )","id_":252087,"task_name":"https:\/\/github.com\/abau171\/highfive\/blob\/07b3829331072035ab100d1d66deca3e8f3f372a\/highfive\/worker.py#L46-L54","negative":"Save the object to file given by filename."}
{"query":"Transfer playback to a new device and determine if it should start playing .","positive":"async def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_4 = False ) : await arg_0 . _user . http . Func_player ( str ( arg_1 ) , play = arg_3 )","id_":252088,"task_name":"https:\/\/github.com\/mental32\/spotify.py\/blob\/bb296cac7c3dd289908906b7069bd80f43950515\/spotify\/models\/player.py#L213-L224","negative":"Adds a chunk of tasks to the job\n\n Retry chunk if body exceeds the maximum request size and retry tasks\n if failed due to server errors.\n\n :param results_queue: Queue to place the return value of the request\n :type results_queue: collections.deque\n :param chunk_tasks_to_add: Chunk of at most 100 tasks with retry details\n :type chunk_tasks_to_add: list[~TrackedCloudTask]"}
{"query":"Use this to create a new and empty contact .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : return arg_0 ( arg_1 , None , arg_2 , arg_3 , arg_4 )","id_":252089,"task_name":"https:\/\/github.com\/scheibler\/khard\/blob\/0f69430c2680f1ff5f073a977a3c5b753b96cc17\/khard\/carddav_object.py#L91-L95","negative":"Attempts to find the Teradata install directory with the defaults\n for a given platform. Should always return `None` when the defaults\n are not present and the TERADATA_HOME environment variable wasn't\n explicitly set to the correct install location."}
{"query":"Convert an object to a form ready to dump to json .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 is None : return [ ] elif isinstance ( arg_1 , list ) : return [ arg_2 . as_dictionary ( ) for arg_2 in arg_1 ] elif isinstance ( arg_1 , dict ) : return arg_0 . _keys_to_camel_case ( arg_1 ) else : return arg_1 . as_dictionary ( )","id_":252090,"task_name":"https:\/\/github.com\/CitrineInformatics\/python-citrination-client\/blob\/409984fc65ce101a620f069263f155303492465c\/citrination_client\/search\/query_encoder.py#L11-L25","negative":"Log 'msg % args' at level 'level' once per 'n' times.\n\n Logs the 1st call, (N+1)st call, (2N+1)st call, etc.\n Not threadsafe.\n\n Args:\n level: The level at which to log.\n msg: The message to be logged.\n n: The number of times this should be called before it is logged.\n *args: The args to be substituted into the msg."}
{"query":"Return all evaluations for a certain variant .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = dict ( variant_id = arg_1 [ 'variant_id' ] ) arg_3 = arg_0 . acmg_collection . find ( arg_2 ) . sort ( [ ( 'created_at' , pymongo . DESCENDING ) ] ) return arg_3","id_":252091,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/acmg.py#L89-L100","negative":"Whether a connection can be established between those two meshes."}
{"query":"Execute the script within the context of the specified task","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : locals ( ) . update ( arg_3 ) exec ( arg_2 )","id_":252092,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/bpmn\/BpmnScriptEngine.py#L47-L52","negative":"Overriding the default JSONEncoder.default for NDB support."}
{"query":"Callback for the utility messages","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_6 = 'FAIL' if arg_2 == FAIL_REASON . SUCCESS : arg_6 = 'SUCCESS' arg_3 . send ( arg_6 + ' ' + arg_4 + ' ' + arg_5 )","id_":252093,"task_name":"https:\/\/github.com\/bakwc\/PySyncObj\/blob\/be3b0aaa932d5156f5df140c23c962430f51b7b8\/pysyncobj\/transport.py#L367-L381","negative":"Returns how the result count compares to the query options.\n\n The return value is negative if too few results were found, zero if enough were found, and\n positive if too many were found.\n\n Returns:\n int: -1, 0, or 1."}
{"query":"Injects the URL defaults for the given endpoint directly into the values dictionary passed . This is used internally and automatically called on URL building .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . url_default_functions . get ( None , ( ) ) if '.' in arg_1 : arg_4 = arg_1 . rsplit ( '.' , 1 ) [ 0 ] arg_3 = chain ( arg_3 , arg_0 . url_default_functions . get ( arg_4 , ( ) ) ) for arg_5 in arg_3 : arg_5 ( arg_1 , arg_2 )","id_":252094,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/flask\/app.py#L1611-L1623","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."}
{"query":"Called when the tab key is pressed . Returns whether to continue processing the event .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _get_input_buffer_cursor_line ( ) if arg_1 is None : return False arg_2 = bool ( arg_1 [ : arg_0 . _get_input_buffer_cursor_column ( ) ] . strip ( ) ) if arg_2 : arg_0 . _complete ( ) return not arg_2","id_":252095,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/frontend_widget.py#L237-L250","negative":"Calculate a t-test score for the difference between two samples.\n\n Args:\n sample1: one sample.\n sample2: the other sample.\n\n Returns:\n The t-test score, as a float."}
{"query":"Decorator for registering a path pattern .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None ) : if not arg_3 : arg_3 = { } def decorator ( arg_4 ) : arg_0 . add ( arg_1 , arg_4 , arg_2 , arg_3 ) return arg_4 return decorator","id_":252096,"task_name":"https:\/\/github.com\/linuxwhatelse\/mapper\/blob\/3481715b2a36d2da8bf5e9c6da80ceaed0d7ca59\/mapper.py#L64-L84","negative":"Handle marking messages as read and keeping client active."}
{"query":"List all user memberships .","positive":"def Func ( ) : arg_0 = request . args . get ( 'page' , 1 , type = int ) arg_1 = request . args . get ( 'per_page' , 5 , type = int ) arg_2 = request . args . get ( 'q' , '' ) arg_3 = Group . query_by_user ( current_user , eager = True ) if arg_2 : arg_3 = Group . search ( arg_3 , arg_2 ) arg_3 = arg_3 . paginate ( arg_0 , arg_1 = arg_1 ) arg_4 = Membership . query_requests ( current_user ) . count ( ) arg_5 = Membership . query_invitations ( current_user ) . count ( ) return render_template ( 'invenio_groups\/Func.html' , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_0 = arg_0 , arg_1 = arg_1 , arg_2 = arg_2 )","id_":252097,"task_name":"https:\/\/github.com\/inveniosoftware-contrib\/invenio-groups\/blob\/109481d6b02701db00b72223dd4a65e167c589a6\/invenio_groups\/views.py#L69-L91","negative":"main execution loop. query weather data and post to online service."}
{"query":"Use this to send a structured event with a name and arguments to the client .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_4 = arg_3 . pop ( 'callback' , None ) if arg_3 : raise ValueError ( \"Func() only supports positional argument, to stay \" \"compatible with the Socket.IO protocol. You can \" \"however pass in a dictionary as the first argument\" ) arg_5 = dict ( type = \"event\" , name = arg_1 , arg_2 = arg_2 , endpoint = arg_0 . ns_name ) if arg_4 : arg_5 [ 'ack' ] = 'data' arg_5 [ 'id' ] = msgid = arg_0 . socket . _get_next_msgid ( ) arg_0 . socket . _save_ack_callback ( msgid , arg_4 ) arg_0 . socket . send_packet ( arg_5 )","id_":252098,"task_name":"https:\/\/github.com\/abourget\/gevent-socketio\/blob\/1cdb1594a315326987a17ce0924ea448a82fab01\/socketio\/namespace.py#L411-L460","negative":"ASCII adjust after addition.\n\n Adjusts the sum of two unpacked BCD values to create an unpacked BCD\n result. The AL register is the implied source and destination operand\n for this instruction. The AAA instruction is only useful when it follows\n an ADD instruction that adds (binary addition) two unpacked BCD values\n and stores a byte result in the AL register. The AAA instruction then\n adjusts the contents of the AL register to contain the correct 1-digit\n unpacked BCD result.\n If the addition produces a decimal carry, the AH register is incremented\n by 1, and the CF and AF flags are set. If there was no decimal carry,\n the CF and AF flags are cleared and the AH register is unchanged. In either\n case, bits 4 through 7 of the AL register are cleared to 0.\n\n This instruction executes as described in compatibility mode and legacy mode.\n It is not valid in 64-bit mode.\n ::\n IF ((AL AND 0FH) > 9) Operators.OR(AF = 1)\n THEN\n AL = (AL + 6);\n AH = AH + 1;\n AF = 1;\n CF = 1;\n ELSE\n AF = 0;\n CF = 0;\n FI;\n AL = AL AND 0FH;\n :param cpu: current CPU."}
{"query":"Convert lsstdoc - class LaTeX to another markup format .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , arg_3 = False , arg_4 = True , arg_5 = None ) : arg_6 = '\\n' . join ( ( LSSTDOC_MACROS , arg_0 ) ) return convert_text ( arg_6 , 'latex' , arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 )","id_":252099,"task_name":"https:\/\/github.com\/lsst-sqre\/lsst-projectmeta-kit\/blob\/ac8d4ff65bb93d8fdeb1b46ae6eb5d7414f1ae14\/lsstprojectmeta\/pandoc\/convert.py#L132-L194","negative":"Helper function to get packing_plan with\n a callback. The future watch is placed\n only if isWatching is True."}
{"query":"Load multiple Python config files merging each of them in turn .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = Config ( ) for arg_3 in arg_0 : arg_4 = PyFileConfigLoader ( arg_3 , arg_1 = arg_1 ) try : arg_5 = arg_4 . load_config ( ) except ConfigFileNotFound : pass except : raise else : arg_2 . _merge ( arg_5 ) return arg_2","id_":252100,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/config\/loader.py#L675-L696","negative":"Checks to see if Spark worker and HDFS datanode are still running."}
{"query":"Create a generator of decrypted remote checkpoints .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None ) : return _generate_notebooks ( remote_checkpoints , remote_checkpoints . c . last_modified , arg_0 , arg_1 , arg_2 , arg_3 , arg_4 )","id_":252101,"task_name":"https:\/\/github.com\/quantopian\/pgcontents\/blob\/ed36268b7917332d16868208e1e565742a8753e1\/pgcontents\/query.py#L736-L764","negative":"Return True if this new candidate representation satisfies all our overlap\n rules. Since we know that neighboring representations differ by at most\n one bit, we compute running overlaps."}
{"query":"store metric in data tree and calc offset signs","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 , arg_4 in arg_2 . iteritems ( ) : if arg_4 == '' : arg_0 . sign [ arg_1 ] [ arg_3 ] = - 1 arg_0 . data [ arg_1 ] [ arg_3 ] = arg_4 else : if not arg_0 . data [ arg_1 ] . get ( arg_3 , None ) : arg_0 . sign [ arg_1 ] [ arg_3 ] = 1 elif float ( arg_4 ) > float ( arg_0 . data [ arg_1 ] [ arg_3 ] ) : arg_0 . sign [ arg_1 ] [ arg_3 ] = 1 elif float ( arg_4 ) < float ( arg_0 . data [ arg_1 ] [ arg_3 ] ) : arg_0 . sign [ arg_1 ] [ arg_3 ] = - 1 else : arg_0 . sign [ arg_1 ] [ arg_3 ] = 0 arg_0 . data [ arg_1 ] [ arg_3 ] = \"%.2f\" % float ( arg_4 )","id_":252102,"task_name":"https:\/\/github.com\/yandex\/yandex-tank\/blob\/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b\/yandextank\/plugins\/Telegraf\/plugin.py#L284-L304","negative":"Create a new layer populated with a point sampling of the current mesh,\n at most one sample for each element of the mesh is created.\n\n Samples are taking in a uniform way, one for each element\n (vertex\/edge\/face); all the elements have the same probabilty of being\n choosen.\n\n Args:\n script: the FilterScript object or script filename to write\n the filter to.\n sample_num (int): The desired number of elements that must be chosen.\n Being a subsampling of the original elements if this number should\n not be larger than the number of elements of the original mesh.\n element (enum in ['VERT', 'EDGE', 'FACE']): Choose what mesh element\n will be used for the subsampling. At most one point sample will\n be added for each one of the chosen elements\n\n Layer stack:\n Creates new layer 'Sampled Mesh'. Current layer is changed to the new\n layer.\n\n MeshLab versions:\n 2016.12\n 1.3.4BETA"}
{"query":"Get the details of the person accessing the API .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _session . get ( API_ENDPOINT + '\/Func' ) return arg_0 . _object_factory ( OBJECT_TYPE , arg_1 )","id_":252103,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/webexteamssdk\/api\/people.py#L298-L309","negative":"Returns mappable data for a random subset of voxels.\n\n May be useful as a baseline in predictive analyses--e.g., to compare\n performance of a more principled feature selection method with simple\n random selection.\n\n Args:\n dataset: A Dataset instance\n n_voxels: An integer specifying the number of random voxels to select.\n\n Returns:\n A 2D numpy array with (randomly-selected) voxels in rows and mappables\n in columns."}
{"query":"check unreachable code","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . next_sibling ( ) if arg_2 is not None : arg_0 . add_message ( \"unreachable\" , arg_1 = arg_2 )","id_":252104,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/base.py#L1373-L1377","negative":"Fetch the events pages of a given group."}
{"query":"stop background spin_thread if any","positive":"def Func ( arg_0 ) : if arg_0 . _spin_thread is not None : arg_0 . _stop_spinning . set ( ) arg_0 . _spin_thread . join ( ) arg_0 . _spin_thread = None","id_":252105,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/client\/client.py#L997-L1002","negative":"Add one or more files or URLs to the manifest.\n If files contains a glob, it is expanded.\n\n All files are uploaded to SolveBio. The Upload\n object is used to fill the manifest."}
{"query":"Look up the numerical factors to apply to the sky averaged parallax error in order to obtain error values for a given astrometric parameter taking the Ecliptic latitude and the number of transits into account .","positive":"def Func ( arg_0 , arg_1 ) : if isscalar ( arg_1 ) : arg_2 = int ( floor ( abs ( sin ( arg_1 ) ) * arg_4 ) ) if arg_2 == arg_4 : return _astrometricErrorFactors [ arg_0 ] [ arg_4 - 1 ] else : return _astrometricErrorFactors [ arg_0 ] [ arg_2 ] else : arg_3 = array ( floor ( abs ( sin ( arg_1 ) ) * arg_4 ) , dtype = int ) arg_3 [ ( arg_3 == arg_4 ) ] = arg_4 - 1 return _astrometricErrorFactors [ arg_0 ] [ arg_3 ]","id_":252106,"task_name":"https:\/\/github.com\/agabrown\/PyGaia\/blob\/ae972b0622a15f713ffae471f925eac25ccdae47\/pygaia\/errors\/astrometric.py#L28-L54","negative":"Create a new Set produce by the intersection of 2 Set"}
{"query":"Sets the annotation SPDX Identifier . Raises CardinalityError if already set . OrderError if no annotator defined before .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if len ( arg_1 . annotations ) != 0 : if not arg_0 . annotation_spdx_id_set : arg_0 . annotation_spdx_id_set = True arg_1 . annotations [ - 1 ] . spdx_id = arg_2 return True else : raise CardinalityError ( 'Annotation::SPDXREF' ) else : raise OrderError ( 'Annotation::SPDXREF' )","id_":252107,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/tagvaluebuilders.py#L485-L498","negative":"This function prepares a list of hiveconf params\n from a dictionary of key value pairs.\n\n :param d:\n :type d: dict\n\n >>> hh = HiveCliHook()\n >>> hive_conf = {\"hive.exec.dynamic.partition\": \"true\",\n ... \"hive.exec.dynamic.partition.mode\": \"nonstrict\"}\n >>> hh._prepare_hiveconf(hive_conf)\n [\"-hiveconf\", \"hive.exec.dynamic.partition=true\",\\\n \"-hiveconf\", \"hive.exec.dynamic.partition.mode=nonstrict\"]"}
{"query":"process a block content and return a list of DocMarkup objects corresponding to it","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = None arg_3 = [ ] arg_4 = 1 for arg_5 in arg_1 : arg_6 = None for arg_7 in re_markup_tags : arg_8 = arg_7 . match ( arg_5 ) if arg_8 : arg_6 = string . lower ( arg_8 . group ( 1 ) ) arg_9 = len ( arg_8 . group ( 0 ) ) arg_5 = \" \" * arg_9 + arg_5 [ arg_9 : ] break if arg_6 : arg_4 = 0 arg_0 . add_markup ( ) arg_0 . markup = arg_6 if len ( string . strip ( arg_5 ) ) > 0 : arg_0 . markup_lines . append ( arg_5 ) elif arg_4 == 0 : arg_0 . markup_lines . append ( arg_5 ) arg_0 . add_markup ( ) return arg_0 . markups","id_":252108,"task_name":"https:\/\/github.com\/aholkner\/bacon\/blob\/edf3810dcb211942d392a8637945871399b0650d\/native\/Vendor\/FreeType\/src\/tools\/docmaker\/content.py#L389-L418","negative":"Update the estimate.\n\n Parameters\n ----------\n new_val: float\n new observated value of estimated quantity."}
{"query":"Return a set of date that should be deleted out of dates .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = 0 , arg_3 = 0 , arg_4 = 0 , arg_5 = arg_6 , arg_7 = None ) : arg_0 = set ( arg_0 ) return arg_0 - dates_to_keep ( arg_0 , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_7 = arg_7 )","id_":252109,"task_name":"https:\/\/github.com\/ecometrica\/grandfatherson\/blob\/b166e4e44887960c3066ebd28eecadfae19561e1\/grandfatherson\/__init__.py#L210-L222","negative":"Returns the concordance scores for each stratified graph based on the given annotation\n\n :param pybel.BELGraph graph: A BEL graph\n :param str annotation: The annotation to group by.\n :param str key: The node data dictionary key storing the logFC\n :param float cutoff: The optional logFC cutoff for significance\n :rtype: dict[str,tuple]"}
{"query":"Merge sorted chunk files into a sorted output file","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : title ( ) arg_4 = [ FileRecordStream ( 'chunk_%d.csv' % arg_9 ) for arg_9 in range ( arg_1 ) ] with FileRecordStream ( arg_2 , write = True , arg_3 = arg_3 ) as o : arg_4 = [ FileRecordStream ( 'chunk_%d.csv' % arg_9 ) for arg_9 in range ( arg_1 ) ] arg_5 = [ arg_10 . getNextRecord ( ) for arg_10 in arg_4 ] while not all ( arg_6 is None for arg_6 in arg_5 ) : arg_7 = [ arg_9 for arg_9 , arg_6 in enumerate ( arg_5 ) if arg_6 is not None ] arg_5 = [ arg_5 [ arg_9 ] for arg_9 in arg_7 ] arg_4 = [ arg_4 [ arg_9 ] for arg_9 in arg_7 ] arg_6 = min ( arg_5 , arg_0 = itemgetter ( * arg_0 ) ) o . appendRecord ( arg_6 ) arg_8 = arg_5 . index ( arg_6 ) arg_5 [ arg_8 ] = arg_4 [ arg_8 ] . getNextRecord ( ) for arg_9 , arg_10 in enumerate ( arg_4 ) : arg_10 . close ( ) os . remove ( 'chunk_%d.csv' % arg_9 )","id_":252110,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/sorter.py#L145-L185","negative":"if is negated return original cond and negated flag"}
{"query":"Given a Manticore workspace or trace file highlight the basic blocks .","positive":"def Func ( arg_0 ) : if os . path . isfile ( arg_0 . workspace ) : arg_1 = threading . Thread ( target = arg_0 . highlight_from_file , args = ( arg_0 . workspace , ) ) elif os . path . isdir ( arg_0 . workspace ) : arg_1 = threading . Thread ( target = arg_0 . highlight_from_dir , args = ( arg_0 . workspace , ) ) arg_1 . start ( )","id_":252111,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/scripts\/binaryninja\/manticore_viz\/__init__.py#L43-L53","negative":"Handle disco error response.\n\n :Parameters:\n - `stanza`: the stanza received.\n :Types:\n - `stanza`: `pyxmpp.stanza.Stanza`"}
{"query":"Return the normalized Editex similarity of two strings .","positive":"def Func ( arg_0 , arg_1 , arg_2 = ( 0 , 1 , 2 ) , arg_3 = False ) : return Editex ( ) . sim ( arg_0 , arg_1 , arg_2 , arg_3 )","id_":252112,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/distance\/_editex.py#L303-L337","negative":"Updates the target temperature on the NuHeat API\n\n :param temperature: The desired temperature in NuHeat format\n :param permanent: Permanently hold the temperature. If set to False, the schedule will\n resume at the next programmed event"}
{"query":"Returns a Google images query formatted as a GoogleSearch list object .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = \"\" , arg_3 = 10 , arg_4 = False , arg_5 = False ) : arg_6 = GOOGLE_IMAGES return GoogleSearch ( arg_0 , arg_1 , arg_6 , arg_2 , arg_3 , arg_4 , arg_5 )","id_":252113,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/web\/google.py#L220-L226","negative":"Reassemble a Binder object coming out of the database."}
{"query":"Read the data encoding the Certificate object and decode it into its constituent parts .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_1_0 ) : super ( Certificate , arg_0 ) . Func ( arg_1 , arg_2 = arg_2 ) arg_6 = BytearrayStream ( arg_1 . Func ( arg_0 . length ) ) arg_0 . certificate_type = CertificateType ( ) arg_0 . certificate_value = CertificateValue ( ) arg_0 . certificate_type . Func ( arg_6 , arg_2 = arg_2 ) arg_0 . certificate_value . Func ( arg_6 , arg_2 = arg_2 ) arg_0 . is_oversized ( arg_6 )","id_":252114,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/secrets.py#L72-L93","negative":"Specialized inversion sampler for 3D."}
{"query":"Connect to the stream","positive":"async def Func ( arg_0 ) : logger . debug ( \"connecting to the stream\" ) await arg_0 . client . setup if arg_0 . session is None : arg_0 . session = arg_0 . client . _session arg_2 = await arg_0 . client . headers . prepare_request ( ** arg_0 . kwargs ) arg_3 = arg_0 . client . error_handler ( arg_0 . session . request ) return await arg_3 ( timeout = 0 , ** arg_2 )","id_":252115,"task_name":"https:\/\/github.com\/odrling\/peony-twitter\/blob\/967f98e16e1889389540f2e6acbf7cc7a1a80203\/peony\/stream.py#L75-L91","negative":"This will output the nginx HTTP config string for specific port spec"}
{"query":"Send an easter egg event to a conversation .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = hangouts_pb2 . EasterEggResponse ( ) await arg_0 . _pb_request ( 'conversations\/easteregg' , arg_1 , arg_2 ) return arg_2","id_":252116,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/client.py#L505-L510","negative":"Reads the current \"best model\" for the job and returns whether or not the\n current model is better than the \"best model\" stored for the job\n\n Returns: (isBetter, storedBest, origResultsStr)\n\n isBetter:\n True if the current model is better than the stored \"best model\"\n storedResults:\n A dict of the currently stored results in the jobs table record\n origResultsStr:\n The json-encoded string that currently resides in the \"results\" field\n of the jobs record (used to create atomicity)"}
{"query":"Returns a list of User Tasks that are READY for user action","positive":"def Func ( arg_0 ) : return [ arg_1 for arg_1 in arg_0 . get_tasks ( Task . READY ) if not arg_0 . _is_engine_task ( arg_1 . task_spec ) ]","id_":252117,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/bpmn\/workflow.py#L90-L95","negative":"Open RAR archive file."}
{"query":"Parses a semi - colon delimited list of cookies .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : for arg_3 , arg_4 in _parse_keyvalue_list ( arg_1 ) : arg_0 . cookies . set ( arg_3 , arg_4 , ** arg_2 )","id_":252118,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/plugin\/api\/http_session.py#L110-L116","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."}
{"query":"Filter the granular v2 events down to events of interest .","positive":"def Func ( arg_0 ) : arg_1 = google_v2_operations . get_action_image ( arg_0 . _op , _ACTION_USER_COMMAND ) arg_2 = google_v2_operations . is_success ( arg_0 . _op ) arg_3 = { } for arg_4 in google_v2_operations . get_events ( arg_0 . _op ) : if arg_0 . _filter ( arg_4 ) : continue arg_5 , arg_6 = arg_0 . _map ( arg_4 ) arg_7 = arg_5 [ 'name' ] if arg_7 == 'ok' : if not arg_2 or 'ok' in arg_3 : continue if arg_7 == 'pulling-image' : if arg_6 . group ( 1 ) != arg_1 : continue arg_3 [ arg_7 ] = arg_5 return sorted ( arg_3 . values ( ) , key = operator . itemgetter ( 'start-time' ) )","id_":252119,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/providers\/google_v2.py#L357-L403","negative":"Dump the ocntent into the `file` in binary mode."}
{"query":"Parse DAG files in a loop controlled by DagParsingSignal . Actual DAG parsing loop will run once upon receiving one agent heartbeat message and will report done when finished the loop .","positive":"def Func ( arg_0 ) : while True : arg_1 = arg_0 . _signal_conn . recv ( ) if arg_1 == DagParsingSignal . TERMINATE_MANAGER : arg_0 . terminate ( ) break elif arg_1 == DagParsingSignal . END_MANAGER : arg_0 . end ( ) sys . exit ( os . EX_OK ) elif arg_1 == DagParsingSignal . AGENT_HEARTBEAT : arg_0 . _refresh_dag_dir ( ) arg_2 = arg_0 . heartbeat ( ) for arg_3 in arg_2 : arg_0 . _result_queue . put ( arg_3 ) arg_0 . _print_stat ( ) arg_4 = all ( arg_0 . get_last_finish_time ( x ) is not None for x in arg_0 . file_paths ) arg_5 = arg_0 . max_runs_reached ( ) arg_6 = DagParsingStat ( arg_0 . _file_paths , arg_0 . get_all_pids ( ) , arg_0 . max_runs_reached ( ) , arg_4 , len ( arg_2 ) ) arg_0 . _stat_queue . put ( arg_6 ) arg_0 . wait_until_finished ( ) arg_0 . _signal_conn . send ( DagParsingSignal . MANAGER_DONE ) if arg_5 : arg_0 . log . info ( \"Exiting dag parsing loop as all files \" \"have been processed %s times\" , arg_0 . _max_runs ) arg_0 . _signal_conn . send ( DagParsingSignal . MANAGER_DONE ) break","id_":252120,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/utils\/dag_processing.py#L856-L898","negative":"This method fixes a bug in Python's SGMLParser."}
{"query":"Save the config file","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . get_config_path ( ) arg_2 = arg_0 . get_contents ( ) with open ( arg_1 , mode = 'w' ) as cfg_file : cfg_file . write ( arg_2 )","id_":252121,"task_name":"https:\/\/github.com\/MisterY\/price-database\/blob\/b4fd366b7763891c690fe3000b8840e656da023e\/pricedb\/config.py#L132-L137","negative":"Initialize all ephemerals used by derived classes."}
{"query":"Removes a factory .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _factories . pop ( arg_1 ) arg_2 . doStop ( ) return arg_2","id_":252122,"task_name":"https:\/\/github.com\/lvh\/txampext\/blob\/a7d6cb9f1e9200dba597378cd40eb6a2096d4fd9\/txampext\/multiplexing.py#L112-L123","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"}
{"query":"Register Services that can be accessed by this DAL . Upon registration the service is set up .","positive":"def Func ( arg_0 , ** arg_1 ) : for arg_2 , arg_3 in arg_1 . items ( ) : if arg_2 in arg_0 . _services : raise AlreadyExistsException ( 'A Service for {} is already registered.' . format ( arg_2 ) ) arg_0 . _init_service ( arg_2 , arg_3 ) return arg_0","id_":252123,"task_name":"https:\/\/github.com\/six8\/polydatum\/blob\/c98a498f8e7972218903ec027f6de78089726c1d\/src\/polydatum\/dal.py#L18-L31","negative":"Remove rows with NAs from the H2OFrame.\n\n :returns: new H2OFrame with all rows from the original frame containing any NAs removed."}
{"query":"Put a key inside the stash","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = False , arg_4 = None , arg_5 = '' , arg_6 = True , arg_7 = False , arg_8 = 'secret' , arg_9 = False ) : def assert_key_is_unlocked ( arg_10 ) : if arg_10 and arg_10 . get ( 'lock' ) : raise GhostError ( 'Key `{0}` is locked and therefore cannot be modified. ' 'Unlock the key and try again' . format ( arg_1 ) ) def assert_value_provided_for_new_key ( arg_2 , arg_10 ) : if not arg_2 and not arg_10 . get ( 'value' ) : raise GhostError ( 'You must provide a value for new keys' ) arg_0 . _assert_valid_stash ( ) arg_0 . _validate_key_schema ( arg_2 , arg_8 ) if arg_2 and arg_6 and not isinstance ( arg_2 , dict ) : raise GhostError ( 'Value must be of type dict' ) arg_11 = arg_0 . _handle_existing_key ( arg_1 , arg_3 or arg_9 ) assert_key_is_unlocked ( arg_11 ) assert_value_provided_for_new_key ( arg_2 , arg_11 ) arg_12 = dict ( arg_1 = arg_1 , arg_7 = arg_7 ) if arg_2 : if arg_9 : arg_2 = arg_0 . _update_existing_key ( arg_11 , arg_2 ) arg_12 [ 'value' ] = arg_0 . _encrypt ( arg_2 ) if arg_6 else arg_2 else : arg_12 [ 'value' ] = arg_11 . get ( 'value' ) arg_12 [ 'description' ] = arg_5 or arg_11 . get ( 'description' ) arg_12 [ 'created_at' ] = arg_11 . get ( 'created_at' ) or _get_current_time ( ) arg_12 [ 'modified_at' ] = _get_current_time ( ) arg_12 [ 'metadata' ] = arg_4 or arg_11 . get ( 'metadata' ) arg_12 [ 'uid' ] = arg_11 . get ( 'uid' ) or str ( uuid . uuid4 ( ) ) arg_12 [ 'type' ] = arg_11 . get ( 'type' ) or arg_8 arg_13 = arg_0 . _storage . Func ( arg_12 ) audit ( storage = arg_0 . _storage . db_path , action = 'MODIFY' if ( arg_3 or arg_9 ) else 'PUT' , message = json . dumps ( dict ( key_name = arg_12 [ 'name' ] , arg_2 = 'HIDDEN' , arg_5 = arg_12 [ 'description' ] , uid = arg_12 [ 'uid' ] , arg_4 = json . dumps ( arg_12 [ 'metadata' ] ) , arg_7 = arg_12 [ 'lock' ] , type = arg_12 [ 'type' ] ) ) ) return arg_13","id_":252124,"task_name":"https:\/\/github.com\/nir0s\/ghost\/blob\/77da967a4577ca4cf100cfe34e87b39ad88bf21c\/ghost.py#L222-L318","negative":"Creates a SecBufferDesc struct and contained SecBuffer structs\n\n :param number:\n The number of contains SecBuffer objects to create\n\n :return:\n A tuple of (SecBufferDesc pointer, SecBuffer array)"}
{"query":"Updates a VM Image in the image repository that is associated with the specified subscription .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : _validate_not_none ( 'vm_image_name' , arg_1 ) _validate_not_none ( 'vm_image' , arg_2 ) return arg_0 . _perform_put ( arg_0 . _get_vm_image_path ( arg_1 ) , _XmlSerializer . Func_to_xml ( arg_2 ) , as_async = True )","id_":252125,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/servicemanagementservice.py#L2144-L2203","negative":"Get Dingding endpoint for sending message."}
{"query":"Returns a boolean as to whether the slot pool has room for this task to run","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . task . pool : return False arg_2 = ( arg_1 . query ( Pool ) . filter ( Pool . pool == arg_0 . task . pool ) . first ( ) ) if not arg_2 : return False arg_3 = arg_2 . open_slots ( arg_1 = arg_1 ) return arg_3 <= 0","id_":252126,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/models\/taskinstance.py#L653-L671","negative":"Fetch items using the given backend.\n\n Generator to get items using the given backend class. When\n an archive manager is given, this function will store\n the fetched items in an `Archive`. If an exception is raised,\n this archive will be removed to avoid corrupted archives.\n\n The parameters needed to initialize the `backend` class and\n get the items are given using `backend_args` dict parameter.\n\n :param backend_class: backend class to fetch items\n :param backend_args: dict of arguments needed to fetch the items\n :param category: category of the items to retrieve.\n If None, it will use the default backend category\n :param filter_classified: remove classified fields from the resulting items\n :param manager: archive manager needed to store the items"}
{"query":"Returns a nested list of different stream options .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = stream_info_pattern . findall ( arg_1 ) if not arg_2 : arg_0 . logger . error ( \"Failed to extract stream_info.\" ) arg_3 = [ ] for arg_4 in arg_2 : if not arg_4 [ 1 ] : arg_3 . append ( [ arg_4 [ 0 ] , \"source\" ] ) else : arg_3 .
append ( list ( arg_4 ) ) return arg_3","id_":252127,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/plugins\/huomao.py#L64-L85","negative":"store metric in data tree and calc offset signs\n\n sign < 0 is CYAN, means metric value is lower than previous,\n sign > 1 is YELLOW, means metric value is higher than previous,\n sign == 0 is WHITE, means initial or equal metric value"} {"query":"Modulo remainder operation","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_3 = Operators . ITEBV ( 256 , arg_2 == 0 , 0 , arg_1 % arg_2 ) except ZeroDivisionError : arg_3 = 0 return arg_3","id_":252128,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/evm.py#L1166-L1172","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"determines UCI interface dns_search option","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if 'dns_search' in arg_1 : return arg_1 [ 'dns_search' ] if arg_2 [ 'proto' ] == 'none' : return None arg_3 = arg_0 . netjson . get ( 'dns_search' , None ) if arg_3 : return ' ' . join ( arg_3 )","id_":252129,"task_name":"https:\/\/github.com\/openwisp\/netjsonconfig\/blob\/c23ce9732720856e2f6dc54060db71a8182c7d4b\/netjsonconfig\/backends\/openwrt\/converters\/interfaces.py#L182-L194","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Write row with translations to ods file into specified sheet and row_no .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_0 . content . getSheet ( arg_1 ) for arg_4 , arg_5 in enumerate ( arg_3 ) : arg_6 = arg_0 . content . getCell ( arg_4 , arg_2 + 1 ) arg_6 . stringValue ( _escape_apostrophe ( arg_5 ) ) if arg_4 % 2 == 1 : arg_6 . setCellColor ( settings . EVEN_COLUMN_BG_COLOR ) else : arg_6 . setCellColor ( settings . ODD_COLUMN_BG_COLOR )","id_":252130,"task_name":"https:\/\/github.com\/VorskiImagineering\/C3PO\/blob\/e3e35835e5ac24158848afed4f905ca44ac3ae00\/c3po\/converters\/po_ods.py#L70-L81","negative":"Calculates the distance of a given image to the\n original image.\n\n Parameters\n ----------\n image : `numpy.ndarray`\n The image that should be compared to the original image.\n\n Returns\n -------\n :class:`Distance`\n The distance between the given image and the original image."} {"query":"Get spotify artists by their IDs .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = Route ( 'GET' , '\/Func' ) arg_3 = { 'ids' : arg_1 } return arg_0 .
request ( arg_2 , params = arg_3 )","id_":252131,"task_name":"https:\/\/github.com\/mental32\/spotify.py\/blob\/bb296cac7c3dd289908906b7069bd80f43950515\/spotify\/http.py#L297-L307","negative":"Pickle the Dataset instance to the provided file."} {"query":"Checks that the new block is directly in a namespace .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 : return len ( arg_0 . stack ) >= 1 and ( isinstance ( arg_0 . stack [ - 1 ] , _NamespaceInfo ) ) return ( len ( arg_0 . stack ) > 1 and arg_0 . stack [ - 1 ] . check_namespace_indentation and isinstance ( arg_0 . stack [ - 2 ] , _NamespaceInfo ) )","id_":252132,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L5870-L5886","negative":"Wrapper method that calls the appropriate main updating methods of\n the inspection.\n\n It is meant to be used inside a loop (like while), so that it can\n continuously update the class attributes from the trace and log files.\n It already implements checks to parse these files only when they\n change, and they ignore entries that have been previously processes."} {"query":"Hex encode a binary string","positive":"def Func ( arg_0 ) : arg_1 = string . ascii_letters + string . digits + string . punctuation + ' ' return '' . join ( arg_2 if arg_2 in arg_1 else r'0x{0:02x}' . format ( ord ( arg_2 ) ) for arg_2 in arg_0 )","id_":252133,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/utils.py#L37-L42","negative":"Returns any parameters needed for Akamai HD player verification.\n\n Algorithm originally documented by KSV, source:\n http:\/\/stream-recorder.com\/forum\/showpost.php?p=43761&postcount=13"} {"query":"Return process cmdline as a list of arguments .","positive":"def Func ( arg_0 ) : if not pid_exists ( arg_0 . pid ) : raise NoSuchProcess ( arg_0 . pid , arg_0 . _process_name ) return _psutil_osx . Func ( arg_0 . pid )","id_":252134,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/psutil\/_psosx.py#L154-L158","negative":"Turn a mongodb-style search dict into an SQL query."} {"query":"Ensure the appropriate Bio2BEL data directory exists for the given module then returns the file path .","positive":"def Func ( arg_0 : arg_1 ) -> arg_1 : arg_0 = arg_0 . lower ( ) arg_2 = os . path . join ( BIO2BEL_DIR , arg_0 ) os . makedirs ( arg_2 , exist_ok = True ) return arg_2","id_":252135,"task_name":"https:\/\/github.com\/bio2bel\/bio2bel\/blob\/d80762d891fa18b248709ff0b0f97ebb65ec64c2\/src\/bio2bel\/utils.py#L26-L35","negative":"Return True if we should retry. False otherwise.\n\n Args:\n exception: An exception to test for transience.\n\n Returns:\n True if we should retry. False otherwise."} {"query":"Parse arguments sent to this command .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_2 and arg_0 . no_args_is_help and not arg_1 . resilient_parsing : click . echo ( arg_1 . get_help ( ) ) arg_1 . exit ( ) return super ( ActionSubcommand , arg_0 ) . 
Func ( arg_1 , arg_2 )","id_":252136,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/cli\/action.py#L33-L46","negative":"A factory method which can be overridden in subclasses to create\n specialized LogRecords."} {"query":"Waits until TransferFuture is done and returns the result","positive":"def Func ( arg_0 ) : arg_0 . _done_event . wait ( MAXINT ) if arg_0 . _exception : raise arg_0 . _exception return arg_0 . _Func","id_":252137,"task_name":"https:\/\/github.com\/boto\/s3transfer\/blob\/2aead638c8385d8ae0b1756b2de17e8fad45fffa\/s3transfer\/futures.py#L249-L266","negative":"Build a unique key from get data"} {"query":"Connect by wmi and run wql .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_0 . __wql = [ 'wmic' , '-U' , arg_0 . args . domain + '\\\\' + arg_0 . args . user + '%' + arg_0 . args . password , '\/\/' + arg_0 . args . host , '--namespace' , arg_0 . args . namespace , '--delimiter' , arg_0 . args . delimiter , arg_1 ] arg_0 . logger . debug ( \"wql: {}\" . format ( arg_0 . __wql ) ) arg_0 . __output = subprocess . check_output ( arg_0 . __wql ) arg_0 . logger . debug ( \"output: {}\" . format ( arg_0 . __output ) ) arg_0 . logger . debug ( \"wmi connect succeed.\" ) arg_0 . __wmi_output = arg_0 . __output . splitlines ( ) [ 1 : ] arg_0 . logger . debug ( \"wmi_output: {}\" . format ( arg_0 . __wmi_output ) ) arg_0 . __csv_header = csv . DictReader ( arg_0 . __wmi_output , delimiter = '|' ) arg_0 . logger . debug ( \"csv_header: {}\" . format ( arg_0 . __csv_header ) ) return list ( arg_0 . __csv_header ) except subprocess . CalledProcessError as e : arg_0 . unknown ( \"Connect by wmi and run wql error: %s\" % e )","id_":252138,"task_name":"https:\/\/github.com\/crazy-canux\/arguspy\/blob\/e9486b5df61978a990d56bf43de35f3a4cdefcc3\/arguspy\/wmi_subprocess.py#L30-L49","negative":"Convert PythonCard font description to gui2py style"} {"query":"Return common dtype of arg_list or None .","positive":"def Func ( arg_0 ) : if all ( arg_1 is None for arg_1 in arg_0 ) : return None return dtype_util . common_dtype ( arg_0 , tf . float32 )","id_":252139,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/positive_semidefinite_kernels\/internal\/util.py#L157-L171","negative":"select sentences in terms of maximum coverage problem\n\n Args:\n text: text to be summarized (unicode string)\n char_limit: summary length (the number of characters)\n\n Returns:\n list of extracted sentences\n\n Reference:\n Hiroya Takamura, Manabu Okumura.\n Text summarization model based on maximum coverage problem and its\n variant. (section 3)\n http:\/\/citeseerx.ist.psu.edu\/viewdoc\/summary?doi=10.1.1.222.6945"} {"query":"Commands for experiments .","positive":"def Func ( arg_0 , arg_1 , Func ) : arg_0 . obj = arg_0 . obj or { } arg_0 . obj [ 'project' ] = arg_1 arg_0 . 
obj [ 'experiment' ] = Func","id_":252140,"task_name":"https:\/\/github.com\/polyaxon\/polyaxon-cli\/blob\/a7f5eed74d4d909cad79059f3c21c58606881449\/polyaxon_cli\/cli\/experiment.py#L64-L68","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."} {"query":"Generates a report instance for the canvas account id .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = { } ) : if arg_3 is not None : arg_4 [ \"enrollment_term_id\" ] = arg_3 arg_5 = ACCOUNTS_API . format ( arg_2 ) + \"\/reports\/{}\" . format ( arg_1 ) arg_6 = { \"parameters\" : arg_4 } arg_7 = arg_0 . _post_resource ( arg_5 , arg_6 ) arg_7 [ \"account_id\" ] = arg_2 return Report ( arg_7 = arg_7 )","id_":252141,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/reports.py#L53-L68","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Assert that Tensor x has expected number of dimensions .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = False ) : arg_4 = arg_0 . shape . ndims if arg_4 is None : if arg_3 : raise ValueError ( 'Expected static ndims. Found: {}' . format ( arg_0 ) ) return if arg_1 is not None and arg_4 != arg_1 : raise ValueError ( 'ndims must be {}. Found: {}' . format ( arg_1 , arg_4 ) ) if arg_2 is not None and arg_4 < arg_2 : raise ValueError ( 'ndims must be at least {}. Found {}' . 
format ( arg_2 , arg_4 ) )","id_":252142,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/math\/interpolation.py#L839-L853","negative":"Returns the dictionary of CORS specific app configurations."} {"query":"Builds the SQL compiler for a insert query .","positive":"def Func ( arg_0 , arg_1 : arg_2 [ arg_3 ] ) : arg_4 = [ ] arg_5 = len ( arg_1 [ 0 ] ) for arg_6 , arg_7 in enumerate ( arg_1 ) : if arg_5 != len ( arg_7 ) : raise SuspiciousOperation ( ( 'In bulk upserts, you cannot have rows with different field ' 'configurations. Row {0} has a different field config than ' 'the first row.' ) . format ( arg_6 ) ) arg_4 . append ( arg_0 . model ( ** arg_7 ) ) arg_0 . _for_write = True arg_9 , arg_10 = arg_0 . _get_upsert_fields ( arg_1 [ 0 ] ) arg_11 = PostgresInsertQuery ( arg_0 . model ) arg_11 . conflict_action = arg_0 . conflict_action arg_11 . conflict_target = arg_0 . conflict_target arg_11 . index_predicate = arg_0 . index_predicate arg_11 . values ( arg_4 , arg_9 , arg_10 ) arg_15 = django . db . connections [ arg_0 . db ] arg_16 = PostgresInsertCompiler ( arg_11 , arg_15 , arg_0 . db ) return arg_16","id_":252143,"task_name":"https:\/\/github.com\/SectorLabs\/django-postgres-extra\/blob\/eef2ed5504d225858d4e4f5d77a838082ca6053e\/psqlextra\/manager\/manager.py#L305-L352","negative":"setting baudrate if supported"} {"query":"Create a new data set version .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = \"Failed to create dataset version for dataset {}\" . format ( arg_1 ) arg_3 = arg_0 . _get_success_json ( arg_0 . _post_json ( routes . Func ( arg_1 ) , data = { } , arg_2 = arg_2 ) ) [ 'dataset_scoped_id' ] return DatasetVersion ( arg_3 = arg_3 )","id_":252144,"task_name":"https:\/\/github.com\/CitrineInformatics\/python-citrination-client\/blob\/409984fc65ce101a620f069263f155303492465c\/citrination_client\/data\/client.py#L321-L333","negative":"Return a normalized form of the pattern.\n\n Normalize the pattern by removing pattern type prefix if it\n exists in the pattern. Then return the pattern type and the\n pattern as a tuple of two strings.\n\n Arguments:\n pattern (str): Route pattern to match request paths\n\n Returns:\n tuple: Ruple of pattern type (str) and pattern (str)"} {"query":"Returns the colors that have the given word in their context .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] for arg_3 in context : arg_4 = context [ arg_3 ] for arg_5 in arg_4 : if arg_5 . startswith ( arg_1 ) or arg_1 . startswith ( arg_5 ) : arg_2 . append ( arg_3 ) break arg_2 = [ color ( name ) for name in arg_2 ] return arg_2","id_":252145,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/colors\/__init__.py#L996-L1014","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"get the command to start the tmaster processes","positive":"def Func ( arg_0 ) : arg_1 = { } arg_2 = [ arg_0 . tmaster_binary , '--topology_name=%s' % arg_0 . topology_name , '--topology_id=%s' % arg_0 . topology_id , '--zkhostportlist=%s' % arg_0 . state_manager_connection , '--zkroot=%s' % arg_0 . state_manager_root , '--myhost=%s' % arg_0 . master_host , '--master_port=%s' % str ( arg_0 . master_port ) , '--controller_port=%s' % str ( arg_0 . tmaster_controller_port ) , '--stats_port=%s' % str ( arg_0 . tmaster_stats_port ) , '--config_file=%s' % arg_0 . heron_internals_config_file , '--override_config_file=%s' % arg_0 . 
override_config_file , '--metrics_sinks_yaml=%s' % arg_0 . metrics_sinks_config_file , '--metricsmgr_port=%s' % str ( arg_0 . metrics_manager_port ) , '--ckptmgr_port=%s' % str ( arg_0 . checkpoint_manager_port ) ] arg_3 = arg_0 . shell_env . copy ( ) if arg_0 . shell_env is not None else { } arg_4 = Command ( arg_2 , arg_3 ) if os . environ . get ( 'ENABLE_HEAPCHECK' ) is not None : arg_4 . env . update ( { 'LD_PRELOAD' : \"\/usr\/lib\/libtcmalloc.so\" , 'HEAPCHECK' : \"normal\" } ) arg_1 [ \"heron-tmaster\" ] = arg_4 if arg_0 . metricscache_manager_mode . lower ( ) != \"disabled\" : arg_1 [ \"heron-metricscache\" ] = arg_0 . _get_metrics_cache_cmd ( ) if arg_0 . health_manager_mode . lower ( ) != \"disabled\" : arg_1 [ \"heron-healthmgr\" ] = arg_0 . _get_healthmgr_cmd ( ) arg_1 [ arg_0 . metricsmgr_ids [ 0 ] ] = arg_0 . _get_metricsmgr_cmd ( arg_0 . metricsmgr_ids [ 0 ] , arg_0 . metrics_sinks_config_file , arg_0 . metrics_manager_port ) if arg_0 . is_stateful_topology : arg_1 . update ( arg_0 . _get_ckptmgr_process ( ) ) return arg_1","id_":252146,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/executor\/src\/python\/heron_executor.py#L545-L588","negative":"Return True if this new candidate representation satisfies all our overlap\n rules. Since we know that neighboring representations differ by at most\n one bit, we compute running overlaps."} {"query":"Print human readable report of model .","positive":"def Func ( arg_0 , arg_1 = arg_2 . stdout ) : arg_4 = arg_0 . results [ 'cpu bottleneck' ] [ 'performance throughput' ] if arg_0 . verbose >= 3 : print ( '{}' . format ( pformat ( arg_0 . results ) ) , file = arg_1 ) if arg_0 . verbose >= 1 : print ( 'Bottlenecks:' , file = arg_1 ) print ( ' level | a. intensity | performance | peak bandwidth | peak bandwidth kernel' , file = arg_1 ) print ( '--------+--------------+-----------------+-------------------+----------------------' , file = arg_1 ) print ( ' CPU | | {!s:>15} | |' . format ( arg_4 [ arg_0 . _args . unit ] ) , file = arg_1 ) for arg_5 in arg_0 . results [ 'mem bottlenecks' ] : if arg_5 is None : continue print ( '{level:>7} | {arithmetic intensity:>5.2} FLOP\/B | {0!s:>15} |' ' {bandwidth!s:>17} | {bw kernel:<8}' . format ( arg_5 [ 'performance' ] [ arg_0 . _args . unit ] , ** arg_5 ) , file = arg_1 ) print ( '' , file = arg_1 ) print ( 'IACA analisys:' , file = arg_1 ) print ( '{!s}' . format ( { arg_6 : arg_7 for arg_6 , arg_7 in list ( arg_0 . results [ 'cpu bottleneck' ] . items ( ) ) if arg_6 not in [ 'IACA output' ] } ) , file = arg_1 ) if arg_0 . results [ 'min performance' ] [ 'FLOP\/s' ] > arg_4 [ 'FLOP\/s' ] : print ( 'CPU bound. {!s} due to CPU bottleneck' . format ( arg_4 [ arg_0 . _args . unit ] ) , file = arg_1 ) else : print ( 'Cache or mem bound.' , file = arg_1 ) arg_8 = arg_0 . results [ 'mem bottlenecks' ] [ arg_0 . results [ 'bottleneck level' ] ] print ( '{!s} due to {} transfer bottleneck (with bw from {} benchmark)' . format ( arg_8 [ 'performance' ] [ arg_0 . _args . unit ] , arg_8 [ 'level' ] , arg_8 [ 'bw kernel' ] ) , file = arg_1 ) print ( 'Arithmetic Intensity: {:.2f} FLOP\/B' . 
format ( arg_8 [ 'arithmetic intensity' ] ) , file = arg_1 )","id_":252147,"task_name":"https:\/\/github.com\/RRZE-HPC\/kerncraft\/blob\/c60baf8043e4da8d8d66da7575021c2f4c6c78af\/kerncraft\/models\/roofline.py#L339-L386","negative":"Sets the player's paused state."} {"query":"Output a simple table with several columns .","positive":"def Func ( arg_0 ) : arg_1 = '' for arg_2 in arg_0 : arg_1 += '' for arg_3 in arg_2 : arg_1 += '{s}<\/td>' . format ( s = arg_3 ) arg_1 += '<\/tr>' arg_1 += '<\/Func>' return arg_1","id_":252148,"task_name":"https:\/\/github.com\/theduke\/django-baseline\/blob\/7be8b956e53c70b35f34e1783a8fe8f716955afb\/django_baseline\/templatetags\/helpers.py#L18-L33","negative":"Adjust the timestamp on which the certificate starts being valid.\n\n :param amount: The number of seconds by which to adjust the timestamp.\n :return: ``None``"} {"query":"Build CLI dynamically based on the package structure .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 , arg_4 , arg_5 in iter_modules ( arg_1 ) : arg_6 = import_module ( f'.{name}' , arg_2 ) if arg_5 : Func ( arg_0 . group ( arg_4 ) ( arg_6 . group ) , arg_6 . __path__ , arg_6 . __package__ ) else : arg_0 . command ( arg_4 ) ( arg_6 . command )","id_":252149,"task_name":"https:\/\/github.com\/yeonghoey\/yhy\/blob\/4bce1482c31aeeccff96c4cfd1803b83932604e7\/yhy\/commands\/__init__.py#L5-L15","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"Get or set Settings . _wrapped","positive":"def Func ( arg_0 = None , arg_1 = None ) : if arg_0 : Settings . bind ( arg_0 , arg_1 = arg_1 ) return Settings . _wrapped","id_":252150,"task_name":"https:\/\/github.com\/wangwenpei\/cliez\/blob\/d6fe775544cd380735c56c8a4a79bc2ad22cb6c4\/cliez\/conf\/__init__.py#L46-L59","negative":"Deal with the incoming packets"} {"query":"Takes a cursor and writes the BigQuery schema in . json format for the results to a local file system .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = None arg_3 = 'application\/json' arg_4 = NamedTemporaryFile ( delete = True ) if arg_0 . schema is not None and isinstance ( arg_0 . schema , string_types ) : arg_2 = arg_0 . schema . encode ( 'utf-8' ) elif arg_0 . schema is not None and isinstance ( arg_0 . schema , list ) : arg_2 = json . dumps ( arg_0 . schema ) . encode ( 'utf-8' ) else : arg_5 = [ ] for arg_6 in arg_1 . description : arg_7 = arg_6 [ 0 ] arg_8 = arg_0 . type_map ( arg_6 [ 1 ] ) if arg_6 [ 6 ] or arg_8 == 'TIMESTAMP' : arg_9 = 'NULLABLE' else : arg_9 = 'REQUIRED' arg_5 . append ( { 'name' : arg_7 , 'type' : arg_8 , 'mode' : arg_9 , } ) arg_2 = json . dumps ( arg_5 , sort_keys = True ) . encode ( 'utf-8' ) arg_4 . write ( arg_2 ) arg_0 . log . info ( 'Using schema for %s: %s' , arg_0 . schema_filename , arg_2 ) arg_10 = { 'file_name' : arg_0 . schema_filename , 'file_handle' : arg_4 , 'file_mime_type' : arg_3 } return arg_10","id_":252151,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/operators\/mysql_to_gcs.py#L210-L253","negative":"Removes the association\n\n Parameters\n ----------\n model : cobra model\n The model to remove the gene from\n make_dependent_reactions_nonfunctional : bool\n If True then replace the gene with 'False' in the gene\n association, else replace the gene with 'True'\n\n\n .. 
deprecated :: 0.4\n Use cobra.manipulation.delete_model_genes to simulate knockouts\n and cobra.manipulation.remove_genes to remove genes from\n the model."} {"query":"Method to build an IP list for the case 1","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . filter ( models . IPAddress . version == 4L ) . filter ( models . IPAddress . network_id == PUBLIC_NETWORK_ID ) . filter ( models . IPAddress . used_by_tenant_id is not None ) . filter ( models . IPAddress . allocated_at != null ( ) ) . filter ( models . IPAddress . allocated_at < arg_1 ) . filter ( or_ ( models . IPAddress . _deallocated is False , models . IPAddress . deallocated_at == null ( ) , models . IPAddress . deallocated_at >= arg_2 ) ) . all ( ) return arg_3","id_":252152,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/billing.py#L211-L229","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Write extracted licenses fields to out .","positive":"def Func ( arg_0 , arg_1 ) : write_value ( 'LicenseID' , arg_0 . identifier , arg_1 ) if arg_0 . full_name is not None : write_value ( 'LicenseName' , arg_0 . full_name , arg_1 ) if arg_0 . comment is not None : write_text_value ( 'LicenseComment' , arg_0 . comment , arg_1 ) for arg_2 in sorted ( arg_0 . cross_ref ) : write_value ( 'LicenseCrossReference' , arg_2 , arg_1 ) write_text_value ( 'ExtractedText' , arg_0 . text , arg_1 )","id_":252153,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/writers\/tagvalue.py#L219-L234","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Generate a repr string .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = [ repr ( arg ) for arg in arg_1 ] arg_3 . extend ( \"{}={!r}\" . format ( arg_4 , arg_5 ) for arg_4 , ( arg_5 , arg_6 ) in sorted ( arg_2 . items ( ) ) if arg_5 != arg_6 ) return \"{}({})\" . format ( arg_0 , \", \" . join ( arg_3 ) )","id_":252154,"task_name":"https:\/\/github.com\/PyFilesystem\/s3fs\/blob\/1c5e3a1b6abbb9dff91ea7fc4cec7353798cd536\/fs_s3fs\/_s3fs.py#L34-L58","negative":"Decode the data passed in and potentially flush the decoder."} {"query":"calculate the difference between starting and ending time .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : if arg_1 and arg_2 : arg_3 = int ( arg_2 ) - int ( arg_1 ) else : arg_3 = PyFunceble . INTERN [ \"end\" ] - PyFunceble . INTERN [ \"start\" ] arg_4 = PyFunceble . OrderedDict ( ) arg_4 [ \"days\" ] = str ( arg_3 \/\/ ( 24 * 60 * 60 ) ) . zfill ( 2 ) arg_4 [ \"hours\" ] = str ( ( arg_3 \/\/ ( 60 * 60 ) ) % 24 ) . zfill ( 2 ) arg_4 [ \"minutes\" ] = str ( ( arg_3 % 3600 ) \/\/ 60 ) . zfill ( 2 ) arg_4 [ \"seconds\" ] = str ( arg_3 % 60 ) . 
zfill ( 2 ) return arg_4","id_":252155,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/execution_time.py#L265-L318","negative":"Make HTTP request and return response object\n\n Args:\n method (str): GET, POST, PUT, DELETE\n url (str): path appended to the base_url to create request\n **kwargs: passed directly to a requests.request object"} {"query":"Verify that install_requires is a valid requirements list","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : list ( pkg_resources . parse_requirements ( arg_2 ) ) except ( TypeError , ValueError ) as error : arg_3 = ( \"{attr!r} must be a string or list of strings \" \"containing valid project\/version requirement specifiers; {error}\" ) raise DistutilsSetupError ( arg_3 . format ( arg_1 = arg_1 , error = error ) )","id_":252156,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/setuptools\/dist.py#L122-L131","negative":"Inform the widget about the encoding of the underlying character stream."} {"query":"Checks if an blob_name is updated in Google Cloud Storage .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . get_conn ( ) arg_5 = storage . Bucket ( arg_4 = arg_4 , name = arg_1 ) arg_6 = arg_5 . get_blob ( blob_name = arg_2 ) arg_6 . reload ( ) arg_7 = arg_6 . updated if arg_7 is not None : import dateutil . tz if not arg_3 . tzinfo : arg_3 = arg_3 . replace ( tzinfo = dateutil . tz . tzutc ( ) ) arg_0 . log . info ( \"Verify object date: %s > %s\" , arg_7 , arg_3 ) if arg_7 > arg_3 : return True return False","id_":252157,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcs_hook.py#L223-L253","negative":"Worker to distribute work to jit funcs. Wraps everything on an \n engine to run single-threaded to maximize efficiency for \n multi-processing."} {"query":"attrdict pipe can extract attribute values of object into a dict .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , dict ) : for arg_2 in arg_0 : arg_3 = dict ( ) for arg_4 in arg_1 . keys ( ) : if hasattr ( arg_2 , arg_4 ) : arg_3 [ arg_4 ] = getattr ( arg_2 , arg_4 ) else : arg_3 [ arg_4 ] = arg_1 [ arg_4 ] yield arg_3 else : for arg_2 in arg_0 : arg_3 = dict ( ) for arg_4 in arg_1 : if hasattr ( arg_2 , arg_4 ) : arg_3 [ arg_4 ] = getattr ( arg_2 , arg_4 ) yield arg_3","id_":252158,"task_name":"https:\/\/github.com\/GaryLee\/cmdlet\/blob\/5852a63fc2c7dd723a3d7abe18455f8dacb49433\/cmdlet\/cmds.py#L129-L161","negative":"Return true if range is approximately in same order of magnitude\n\n For example these sequences are in the same order of magnitude:\n\n - [1, 8, 5] # [1, 10)\n - [35, 20, 80] # [10 100)\n - [232, 730] # [100, 1000)\n\n Parameters\n ----------\n x : array-like\n Values in base 10. Must be size 2 and\n ``rng[0] <= rng[1]``.\n delta : float\n Fuzz factor for approximation. It is multiplicative."} {"query":"Processes the raw error message sent by the server and close connection with current server .","positive":"def Func ( arg_0 , arg_1 ) : if STALE_CONNECTION in arg_1 : yield from arg_0 . _process_op_err ( ErrStaleConnection ) return if AUTHORIZATION_VIOLATION in arg_1 : arg_0 . _err = ErrAuthorization else : arg_3 = b'nats: ' + arg_1 [ 0 ] arg_0 . _err = NatsError ( arg_3 . decode ( ) ) arg_4 = False if not arg_0 . is_connecting : arg_4 = True arg_0 . _loop . 
create_task ( arg_0 . _close ( Client . CLOSED , arg_4 ) )","id_":252159,"task_name":"https:\/\/github.com\/nats-io\/asyncio-nats\/blob\/39e840be0b12ce326edac0bba69aeb1be930dcb8\/nats\/aio\/client.py#L985-L1007","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Computes the standard deviation of a mixture distribution .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : tensorshape_util . assert_has_rank ( arg_0 . shape , 2 ) if not tensorshape_util . is_compatible_with ( arg_1 . shape , arg_0 . shape ) : raise ValueError ( \"Expecting means to have same shape as mixture weights.\" ) if not tensorshape_util . is_compatible_with ( arg_2 . shape , arg_0 . shape ) : raise ValueError ( \"Expecting stddevs to have same shape as mixture weights.\" ) arg_3 = tf . expand_dims ( arg_0 , axis = 1 ) arg_4 = tf . expand_dims ( arg_1 , axis = 2 ) arg_5 = tf . expand_dims ( arg_2 , axis = 2 ) arg_6 = tf . matmul ( arg_3 , arg_4 ) arg_6 = tf . reshape ( arg_6 , ( - 1 , ) ) arg_7 = tf . matmul ( arg_3 , tf . square ( arg_5 ) ) arg_7 = tf . reshape ( arg_7 , ( - 1 , ) ) arg_8 = tf . matmul ( arg_3 , tf . square ( arg_4 ) ) arg_8 = tf . reshape ( arg_8 , ( - 1 , ) ) arg_9 = arg_7 + arg_8 - tf . square ( arg_6 ) return tf . sqrt ( arg_9 )","id_":252160,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/internal\/distribution_util.py#L39-L82","negative":"push the packet into the queue"} {"query":"Run a scan in the path set .","positive":"def Func ( arg_0 ) : arg_0 . checkProperties ( ) arg_0 . debug ( \"[*] Starting AtomShields scan with the following properties. . . \" ) arg_0 . showScanProperties ( ) arg_0 . loadConfig ( ) arg_1 = datetime . now ( ) arg_2 = os . getcwd ( ) os . chdir ( arg_0 . path ) arg_3 = arg_0 . executeCheckers ( ) os . chdir ( arg_2 ) arg_4 = datetime . now ( ) arg_5 = '{}' . format ( arg_4 - arg_1 ) for arg_6 in arg_3 . keys ( ) : arg_7 = arg_3 [ arg_6 ] if isinstance ( arg_7 , list ) : map ( arg_0 . saveIssue , arg_7 ) else : arg_0 . saveIssue ( arg_7 ) print \"\" arg_0 . executeReports ( ) arg_0 . debug ( \"\" ) arg_0 . debug ( \"Duration: {t}\" . format ( t = arg_5 ) ) arg_0 . showSummary ( ) return arg_0 . issues","id_":252161,"task_name":"https:\/\/github.com\/ElevenPaths\/AtomShields\/blob\/e75f25393b4a7a315ec96bf9b8e654cb2200866a\/atomshields\/scanner.py#L571-L620","negative":"Generates a random sample from the Poisson probability distribution and\n returns its value and the log of the probability of sampling that value."} {"query":"Hunt down the settings . py module by going up the FS path","positive":"def Func ( arg_0 ) : arg_1 = os . getcwd ( ) arg_2 = '%s.py' % ( arg_0 . split ( '.' ) [ - 1 ] ) while arg_1 : if arg_2 in os . listdir ( arg_1 ) : break arg_1 = os . path . split ( arg_1 ) [ 0 ] if os . name == 'nt' and NT_ROOT .
match ( arg_1 ) : return None elif arg_1 == '\/' : return None return arg_1","id_":252162,"task_name":"https:\/\/github.com\/nosedjango\/nosedjango\/blob\/cd4d06857c88291769bc38e5c9573f43b7ffcd6a\/nosedjango\/nosedjango.py#L29-L45","negative":"Scans String.\n\n Compares the byte, word, or double word specified with the memory operand\n with the value in the AL, AX, EAX, or RAX register, and sets the status flags\n according to the results. The memory operand address is read from either\n the ES:RDI, ES:EDI or the ES:DI registers (depending on the address-size\n attribute of the instruction, 32 or 16, respectively)::\n\n IF (byte comparison)\n THEN\n temp = AL - SRC;\n SetStatusFlags(temp);\n THEN IF DF = 0\n THEN (E)DI = (E)DI + 1;\n ELSE (E)DI = (E)DI - 1;\n FI;\n ELSE IF (word comparison)\n THEN\n temp = AX - SRC;\n SetStatusFlags(temp)\n THEN IF DF = 0\n THEN (E)DI = (E)DI + 2;\n ELSE (E)DI = (E)DI - 2;\n FI;\n ELSE (* doubleword comparison *)\n temp = EAX - SRC;\n SetStatusFlags(temp)\n THEN IF DF = 0\n THEN\n (E)DI = (E)DI + 4;\n ELSE\n (E)DI = (E)DI - 4;\n FI;\n FI;\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Returns read only property for band nodata value assuming single band rasters for now .","positive":"def Func ( arg_0 ) : if arg_0 . _Func is None : arg_0 . _Func = arg_0 [ 0 ] . GetNoDataValue ( ) return arg_0 . _Func","id_":252163,"task_name":"https:\/\/github.com\/bkg\/greenwich\/blob\/57ec644dadfe43ce0ecf2cfd32a2de71e0c8c141\/greenwich\/raster.py#L564-L570","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Accept pending invitation .","positive":"def Func ( arg_0 ) : arg_1 = Membership . query . get_or_404 ( ( current_user . get_id ( ) , arg_0 ) ) try : arg_1 . Func ( ) except Exception as e : flash ( str ( e ) , 'error' ) return redirect ( url_for ( '.invitations' , arg_0 = arg_1 . group . id ) ) flash ( _ ( 'You are now part of %(name)s group.' , user = arg_1 . user . email , name = arg_1 . group . name ) , 'success' ) return redirect ( url_for ( '.invitations' , arg_0 = arg_1 . group . id ) )","id_":252164,"task_name":"https:\/\/github.com\/inveniosoftware-contrib\/invenio-groups\/blob\/109481d6b02701db00b72223dd4a65e167c589a6\/invenio_groups\/views.py#L359-L374","negative":"Updates the editor when the object trait changes externally to the\n editor."} {"query":"Return a list of cameras matching camera_ids .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = arg_0 . _api_info [ 'camera' ] arg_4 = dict ( { '_sid' : arg_0 . _sid , 'api' : arg_3 [ 'name' ] , 'method' : 'GetInfo' , 'version' : arg_3 [ 'version' ] , 'cameraIds' : ', ' . join ( str ( id ) for id in arg_1 ) , } , ** arg_2 ) arg_5 = arg_0 . _get_json_with_retry ( arg_3 [ 'url' ] , arg_4 ) arg_6 = [ ] for arg_7 in arg_5 [ 'data' ] [ 'cameras' ] : arg_6 . append ( Camera ( arg_7 , arg_0 . _video_stream_url ) ) return arg_6","id_":252165,"task_name":"https:\/\/github.com\/snjoetw\/py-synology\/blob\/4f7eb0a3a9f86c24ad65993802e6fb11fbaa1f7f\/synology\/api.py#L153-L170","negative":"simple timer. returns a time object, or a string."} {"query":"Adjust color saturation of an image .","positive":"def Func ( arg_0 , arg_1 ) : if not _is_pil_image ( arg_0 ) : raise TypeError ( 'img should be PIL Image. Got {}' . format ( type ( arg_0 ) ) ) arg_2 = ImageEnhance .
Color ( arg_0 ) arg_0 = arg_2 . enhance ( arg_1 ) return arg_0","id_":252166,"task_name":"https:\/\/github.com\/pytorch\/vision\/blob\/3afcf3cd49661c466c75ea536b0b2a7ff57f9a05\/torchvision\/transforms\/functional.py#L577-L594","negative":"Detect crs string format and parse into crs object with appropriate function.\n\n Arguments:\n\n - *text*: The crs text representation of unknown type. \n - *strict* (optional): When True, the parser is strict about names having to match\n exactly with upper and lowercases. Default is not strict (False).\n\n Returns:\n\n - CRS object."} {"query":"Asynchronous PUT request with the process pool .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = None , arg_6 = None ) : if arg_2 is None : arg_2 = '' arg_5 = arg_5 or { } arg_6 = arg_6 or { } arg_7 = arg_0 . _build_endpoint_url ( arg_1 , arg_2 ) arg_0 . _authenticate ( arg_5 , arg_6 ) arg_3 = json . dumps ( arg_3 , cls = JSONEncoder ) process_pool . apply_async ( make_put_request , args = ( arg_7 , arg_3 , arg_5 , arg_6 ) , arg_4 = arg_4 )","id_":252167,"task_name":"https:\/\/github.com\/ozgur\/python-firebase\/blob\/6b96b326f6d8f477503ca42fdfbd81bcbe1f9e0d\/firebase\/firebase.py#L304-L316","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Link user email to Enterprise Customer .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_3 = User . objects . get ( email = arg_2 ) arg_0 . get_or_create ( arg_1 = arg_1 , user_id = arg_3 . id ) except User . DoesNotExist : PendingEnterpriseCustomerUser . objects . get_or_create ( arg_1 = arg_1 , arg_2 = arg_2 )","id_":252168,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/models.py#L530-L542","negative":"Trim all the annotations inside the jam and return as a new `JAMS`\n object.\n\n See `Annotation.trim` for details about how the annotations\n are trimmed.\n\n This operation is also documented in the jam-level sandbox\n with a list keyed by ``JAMS.sandbox.trim`` containing a tuple for each\n jam-level trim of the form ``(start_time, end_time)``.\n\n This function also copies over all of the file metadata from the\n original jam.\n\n Note: trimming does not affect the duration of the jam, i.e. the value\n of ``JAMS.file_metadata.duration`` will be the same for the original\n and trimmed jams.\n\n Parameters\n ----------\n start_time : float\n The desired start time for the trimmed annotations in seconds.\n end_time\n The desired end time for trimmed annotations in seconds. Must be\n greater than ``start_time``.\n strict : bool\n When ``False`` (default) observations that lie at the boundaries of\n the trimming range (see `Annotation.trim` for details), will have\n their time and\/or duration adjusted such that only the part of the\n observation that lies within the trim range is kept. When ``True``\n such observations are discarded and not included in the trimmed\n annotation.\n\n Returns\n -------\n jam_trimmed : JAMS\n The trimmed jam with trimmed annotations, returned as a new JAMS\n object."} {"query":"r << - \\ S + \\ r? \\ n","positive":"def Func ( arg_0 , arg_1 ) : arg_1 . lexer . is_tabbed = True arg_0 . _init_heredoc ( arg_1 ) arg_1 . lexer . 
begin ( 'tabbedheredoc' )","id_":252169,"task_name":"https:\/\/github.com\/virtuald\/pyhcl\/blob\/e6e27742215692974f0ef503a91a81ec4adc171c\/src\/hcl\/lexer.py#L194-L198","negative":"Process events from proactor."} {"query":"Fix the length an array data to exactly size .","positive":"def Func ( arg_0 , arg_1 , arg_2 = - 1 , ** arg_3 ) : arg_3 . setdefault ( 'mode' , 'constant' ) arg_4 = arg_0 . shape [ arg_2 ] if arg_4 > arg_1 : arg_5 = [ slice ( None ) ] * arg_0 . ndim arg_5 [ arg_2 ] = slice ( 0 , arg_1 ) return arg_0 [ tuple ( arg_5 ) ] elif arg_4 < arg_1 : arg_6 = [ ( 0 , 0 ) ] * arg_0 . ndim arg_6 [ arg_2 ] = ( 0 , arg_1 - arg_4 ) return np . pad ( arg_0 , arg_6 , ** arg_3 ) return arg_0","id_":252170,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/util\/utils.py#L309-L367","negative":"Sets the player's paused state."} {"query":"r Method to calculate pressure - dependent liquid molar volume at temperature T and pressure P with a given method .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_3 == COSTALD_COMPRESSED : arg_4 = arg_0 . T_dependent_property ( arg_1 ) arg_5 = arg_0 . Psat ( arg_1 ) if hasattr ( arg_0 . Psat , '__call__' ) else arg_0 . Psat arg_4 = COSTALD_compressed ( arg_1 , arg_2 , arg_5 , arg_0 . Tc , arg_0 . Pc , arg_0 . omega , arg_4 ) elif arg_3 == COOLPROP : arg_4 = 1. \/ PropsSI ( 'DMOLAR' , 'T' , arg_1 , 'P' , arg_2 , arg_0 . CASRN ) elif arg_3 == EOS : arg_0 . eos [ 0 ] = arg_0 . eos [ 0 ] . to_TP ( arg_1 = arg_1 , arg_2 = arg_2 ) arg_4 = arg_0 . eos [ 0 ] . V_l elif arg_3 in arg_0 . tabular_data : arg_4 = arg_0 . interpolate_P ( arg_1 , arg_2 , arg_3 ) return arg_4","id_":252171,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/volume.py#L1085-L1117","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"Create an endpoint","positive":"def Func ( arg_0 , arg_1 , arg_2 = True , arg_3 = 30 , arg_4 = None ) : arg_5 = arg_0 . get_conn ( ) . Func ( ** arg_1 ) if arg_2 : arg_0 . check_status ( arg_1 [ 'EndpointName' ] , 'EndpointStatus' , arg_0 . describe_endpoint , arg_3 , arg_4 , non_terminal_states = arg_0 . endpoint_non_terminal_states ) return arg_5","id_":252172,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/sagemaker_hook.py#L450-L477","negative":"Read settings from a config file in the source_dir root."} {"query":"Open a popup menu with options regarding the selected object","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_1 : arg_3 = arg_0 . tree . GetItemData ( arg_1 ) if arg_3 : arg_4 = arg_3 . GetData ( ) if arg_4 : arg_0 . highlight ( arg_4 . wx_obj ) arg_0 . obj = arg_4 arg_5 = wx . Menu ( ) arg_6 , arg_7 , arg_8 , arg_9 = [ wx . NewId ( ) for i in range ( 4 ) ] arg_5 . Append ( arg_6 , \"Delete\" ) arg_5 . Append ( arg_7 , \"Duplicate\" ) arg_5 . Append ( arg_8 , \"Bring to Front\" ) arg_5 . Append ( arg_9 , \"Send to Back\" ) arg_10 = wx . Menu ( ) for arg_11 in sorted ( arg_4 . _meta . valid_children , key = lambda c : registry . ALL . index ( c . _meta . name ) ) : arg_12 = wx . NewId ( ) arg_10 . Append ( arg_12 , arg_11 . _meta . name ) arg_0 . Bind ( wx . EVT_MENU , lambda evt , arg_11 = arg_11 : arg_0 . add_child ( arg_11 , arg_2 ) , id = arg_12 ) arg_5 . AppendMenu ( wx . 
NewId ( ) , \"Add child\" , arg_10 ) arg_0 . Bind ( wx . EVT_MENU , arg_0 . delete , id = arg_6 ) arg_0 . Bind ( wx . EVT_MENU , arg_0 . duplicate , id = arg_7 ) arg_0 . Bind ( wx . EVT_MENU , arg_0 . bring_to_front , id = arg_8 ) arg_0 . Bind ( wx . EVT_MENU , arg_0 . send_to_back , id = arg_9 ) arg_0 . PopupMenu ( arg_5 ) arg_5 . Destroy ( ) arg_0 . load_object ( arg_0 . root_obj )","id_":252173,"task_name":"https:\/\/github.com\/reingart\/gui2py\/blob\/aca0a05f6fcde55c94ad7cc058671a06608b01a4\/gui\/tools\/inspector.py#L200-L240","negative":"Use ``\\\\r`` to overdraw the current line with the given text.\n\n This function transparently handles tracking how much overdrawing is\n necessary to erase the previous line when used consistently.\n\n :param text: The text to be outputted\n :param newline: Whether to start a new line and reset the length count.\n :type text: :class:`~__builtins__.str`\n :type newline: :class:`~__builtins__.bool`"} {"query":"Return True if the class is a date type .","positive":"def Func ( arg_0 ) : if not isinstance ( arg_0 , type ) : return False return issubclass ( arg_0 , date ) and not issubclass ( arg_0 , datetime )","id_":252174,"task_name":"https:\/\/github.com\/zenreach\/py-era\/blob\/73994c82360e65a983c803b1182892e2138320b2\/era.py#L76-L80","negative":"Create a tar file based on the list of files passed"} {"query":"Intercept all requests and add the OAuth 2 token if present .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = False , arg_6 = None , arg_7 = None , ** arg_8 ) : if not is_secure_transport ( arg_2 ) : raise InsecureTransportError ( ) if arg_0 . token and not arg_5 : log . debug ( \"Invoking %d protected resource Func hooks.\" , len ( arg_0 . compliance_hook [ \"protected_Func\" ] ) , ) for arg_9 in arg_0 . compliance_hook [ \"protected_Func\" ] : log . debug ( \"Invoking hook %s.\" , arg_9 ) arg_2 , arg_4 , arg_3 = arg_9 ( arg_2 , arg_4 , arg_3 ) log . debug ( \"Adding token %s to Func.\" , arg_0 . token ) try : arg_2 , arg_4 , arg_3 = arg_0 . _client . add_token ( arg_2 , http_method = arg_1 , body = arg_3 , arg_4 = arg_4 ) except TokenExpiredError : if arg_0 . auto_refresh_url : log . debug ( \"Auto refresh is set, attempting to refresh at %s.\" , arg_0 . auto_refresh_url , ) arg_10 = arg_8 . pop ( \"auth\" , None ) if arg_6 and arg_7 and ( arg_10 is None ) : log . debug ( 'Encoding client_id \"%s\" with client_secret as Basic auth credentials.' , arg_6 , ) arg_10 = Funcs . auth . HTTPBasicAuth ( arg_6 , arg_7 ) arg_11 = arg_0 . refresh_token ( arg_0 . auto_refresh_url , arg_10 = arg_10 , ** arg_8 ) if arg_0 . token_updater : log . debug ( \"Updating token to %s using %s.\" , arg_11 , arg_0 . token_updater ) arg_0 . token_updater ( arg_11 ) arg_2 , arg_4 , arg_3 = arg_0 . _client . add_token ( arg_2 , http_method = arg_1 , body = arg_3 , arg_4 = arg_4 ) else : raise TokenUpdated ( arg_11 ) else : raise log . debug ( \"Requesting url %s using method %s.\" , arg_2 , arg_1 ) log . debug ( \"Supplying headers %s and data %s\" , arg_4 , arg_3 ) log . debug ( \"Passing through key word arguments %s.\" , arg_8 ) return super ( OAuth2Session , arg_0 ) . 
Func ( arg_1 , arg_2 , arg_4 = arg_4 , arg_3 = arg_3 , ** arg_8 )","id_":252175,"task_name":"https:\/\/github.com\/requests\/requests-oauthlib\/blob\/800976faab3b827a42fa1cb80f13fcc03961d2c9\/requests_oauthlib\/oauth2_session.py#L452-L517","negative":"Calculates the distance of a given image to the\n original image.\n\n Parameters\n ----------\n image : `numpy.ndarray`\n The image that should be compared to the original image.\n\n Returns\n -------\n :class:`Distance`\n The distance between the given image and the original image."} {"query":"Builds input arguments by stitching input filepaths and input formats together .","positive":"def Func ( arg_0 , arg_1 ) : if len ( arg_1 ) != len ( arg_0 ) : raise ValueError ( \"input_format_list & input_filepath_list are not the same size\" ) arg_2 = [ ] arg_3 = zip ( arg_0 , arg_1 ) for arg_4 , arg_5 in arg_3 : arg_2 . extend ( arg_5 ) arg_2 . append ( arg_4 ) return arg_2","id_":252176,"task_name":"https:\/\/github.com\/rabitt\/pysox\/blob\/eae89bde74567136ec3f723c3e6b369916d9b837\/sox\/combine.py#L422-L437","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Summarize the mean of a tensor in nats and bits per unit .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = \"nats\" , arg_4 = \"bits_per_dim\" ) : arg_5 = tf . reduce_mean ( input_tensor = arg_0 ) with tf . compat . v1 . name_scope ( arg_3 ) : tf . compat . v2 . summary . scalar ( arg_2 , arg_5 , step = tf . compat . v1 . train . get_or_create_global_step ( ) ) with tf . compat . v1 . name_scope ( arg_4 ) : tf . compat . v2 . summary . scalar ( arg_2 , arg_5 \/ arg_1 \/ tf . math . log ( 2. ) , step = tf . compat . v1 . train . get_or_create_global_step ( ) )","id_":252177,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/examples\/disentangled_vae.py#L1055-L1078","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Resolves VirtualEnvironments in CPENV_HOME","positive":"def Func ( arg_0 , arg_1 ) : from . api import get_home_path arg_1 = unipath ( get_home_path ( ) , arg_1 ) if is_environment ( arg_1 ) : return VirtualEnvironment ( arg_1 ) raise ResolveError","id_":252178,"task_name":"https:\/\/github.com\/cpenv\/cpenv\/blob\/afbb569ae04002743db041d3629a5be8c290bd89\/cpenv\/resolver.py#L130-L140","negative":"It will print the list of songs that can be downloaded"} {"query":"Apply initialize to circuit .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if isinstance ( arg_2 , QuantumRegister ) : arg_2 = arg_2 [ : ] else : arg_2 = _convert_to_bits ( [ arg_2 ] , [ qbit for qreg in arg_0 . qregs for qbit in qreg ] ) [ 0 ] return arg_0 . 
append ( Initialize ( arg_1 ) , arg_2 )","id_":252179,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/extensions\/initializer.py#L236-L242","negative":"Create a new record from dump."} {"query":"Create a canvas course with the given subaccount id and course name .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = ACCOUNTS_API . format ( arg_1 ) + \"\/courses\" arg_4 = { \"course\" : { \"name\" : arg_2 } } return CanvasCourse ( data = arg_0 . _post_resource ( arg_3 , arg_4 ) )","id_":252180,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/courses.py#L88-L96","negative":"Calculate the modelled progress state for the given time moment.\n\n :returns: tuple (x, v) of the progress level and progress speed."} {"query":"Load a series of widget libraries .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = arg_1 . pop ( '_soft' , False ) try : arg_3 = arg_0 . render_context [ WIDGET_CONTEXT_KEY ] except KeyError : arg_3 = arg_0 . render_context [ WIDGET_CONTEXT_KEY ] = { } for arg_4 , arg_5 in arg_1 . items ( ) : if arg_2 and arg_4 in arg_3 : continue with arg_0 . render_context . push ( { BLOCK_CONTEXT_KEY : BlockContext ( ) } ) : arg_6 = resolve_blocks ( arg_5 , arg_0 ) arg_3 [ arg_4 ] = arg_6 return ''","id_":252181,"task_name":"https:\/\/github.com\/funkybob\/django-sniplates\/blob\/cc6123a00536017b496dc685881952d98192101f\/sniplates\/templatetags\/sniplates.py#L124-L143","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Returns participation data for the given sis_course_id .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = \"\/api\/v1\/courses\/%s\/analytics\/activity.json\" % ( arg_0 . _sis_id ( arg_1 , sis_field = \"course\" ) ) return arg_0 . _get_resource ( arg_2 )","id_":252182,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/analytics.py#L40-L48","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"leftCousin previousCousin leftCin prevCin lcin pcin parents are neighbors and on the left","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 [ 'parent_breadth_path' ] [ - 1 ] if ( arg_0 [ 'sib_seq' ] == 0 ) : if ( arg_2 == 0 ) : pass else : arg_3 = arg_2 - 1 arg_4 = arg_1 [ arg_3 ] if ( arg_4 [ 'leaf' ] ) : pass else : arg_5 = copy . deepcopy ( arg_4 [ 'path' ] ) arg_5 . append ( arg_4 [ 'sons_count' ] - 1 ) arg_0 [ 'lcin_path' ] = arg_5 else : pass return ( arg_0 )","id_":252183,"task_name":"https:\/\/github.com\/ihgazni2\/elist\/blob\/8c07b5029bda34ead60ce10335ceb145f209263c\/elist\/elist.py#L6341-L6367","negative":"Return True if we should retry, False otherwise."} {"query":"Call FSL tools to apply transformations to a given atlas to a functional image . Given the transformation matrices .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 = 'nn' , arg_9 = True , arg_10 = False ) : if arg_4 : arg_11 = arg_3 else : arg_12 = op . abspath ( op . 
dirname ( arg_6 ) ) arg_13 = get_extension ( arg_3 ) arg_11 = op . join ( arg_12 , remove_ext ( op . basename ( arg_3 ) ) + '_inv' + arg_13 ) arg_14 = op . join ( '${FSLDIR}' , 'bin' , 'invwarp' ) arg_15 = op . join ( '${FSLDIR}' , 'bin' , 'applywarp' ) arg_16 = op . join ( '${FSLDIR}' , 'bin' , 'fsl_sub' ) if arg_10 : arg_14 = arg_16 + ' ' + arg_14 arg_15 = arg_16 + ' ' + arg_15 if arg_9 or ( not arg_4 and not op . exists ( arg_11 ) ) : log . debug ( 'Creating {}.\\n' . format ( arg_11 ) ) arg_17 = arg_14 + ' ' arg_17 += '-w {} ' . format ( arg_3 ) arg_17 += '-o {} ' . format ( arg_11 ) arg_17 += '-r {} ' . format ( arg_1 ) log . debug ( 'Running {}' . format ( arg_17 ) ) check_call ( arg_17 ) if arg_9 or not op . exists ( arg_6 ) : log . debug ( 'Creating {}.\\n' . format ( arg_6 ) ) arg_17 = arg_15 + ' ' arg_17 += '--in={} ' . format ( arg_0 ) arg_17 += '--ref={} ' . format ( arg_1 ) arg_17 += '--warp={} ' . format ( arg_11 ) arg_17 += '--interp={} ' . format ( arg_8 ) arg_17 += '--out={} ' . format ( arg_6 ) log . debug ( 'Running {}' . format ( arg_17 ) ) check_call ( arg_17 ) if arg_9 or not op . exists ( arg_7 ) : log . debug ( 'Creating {}.\\n' . format ( arg_7 ) ) arg_17 = arg_15 + ' ' arg_17 += '--in={} ' . format ( arg_6 ) arg_17 += '--ref={} ' . format ( arg_2 ) arg_17 += '--premat={} ' . format ( arg_5 ) arg_17 += '--interp={} ' . format ( arg_8 ) arg_17 += '--out={} ' . format ( arg_7 ) log . debug ( 'Running {}' . format ( arg_17 ) ) check_call ( arg_17 )","id_":252184,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/nifti\/cpac_helpers.py#L19-L118","negative":"Add an HTTP header to response object.\n\n Arguments:\n name (str): HTTP header field name\n value (str): HTTP header field value"} {"query":"Use as a decorator for operations on the database to ensure connection setup and teardown . Can only be used on methods on objects with a self . session attribute .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : if not arg_1 . session : _logger . debug ( 'Creating new db session' ) arg_1 . _init_db_session ( ) try : arg_4 = arg_0 ( arg_1 , * arg_2 , ** arg_3 ) arg_1 . session . commit ( ) except : arg_1 . session . rollback ( ) arg_5 = traceback . format_exc ( ) _logger . debug ( arg_5 ) raise finally : _logger . debug ( 'Closing db session' ) arg_1 . session . close ( ) return arg_4","id_":252185,"task_name":"https:\/\/github.com\/thusoy\/pwm\/blob\/fff7d755c34f3a7235a8bf217ffa2ff5aed4926f\/pwm\/core.py#L112-L130","negative":"write lines, one by one, separated by \\n to device"} {"query":"Create link from request for a receiver .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ReceiverLink ( arg_0 . _connection , arg_1 ) arg_0 . _links . add ( arg_2 ) return arg_2","id_":252186,"task_name":"https:\/\/github.com\/kgiusti\/pyngus\/blob\/5392392046989f1bb84ba938c30e4d48311075f1\/pyngus\/link.py#L796-L800","negative":"Return an open file-object to the index file"} {"query":"Perform an HTTP GET using the saved requests . Session and auth info . If Accept isn t one of the given headers a default TAXII mime type is used . Regardless the response type is checked against the accept header value and an exception is raised if they don t match .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None ) : arg_4 = arg_0 . _merge_headers ( arg_2 ) if \"Accept\" not in arg_4 : arg_4 [ \"Accept\" ] = MEDIA_TYPE_TAXII_V20 arg_5 = arg_4 [ \"Accept\" ] arg_6 = arg_0 . session . 
Func ( arg_1 , arg_2 = arg_4 , arg_3 = arg_3 ) arg_6 . raise_for_status ( ) arg_7 = arg_6 . headers [ \"Content-Type\" ] if not arg_0 . valid_content_type ( arg_7 = arg_7 , arg_5 = arg_5 ) : arg_8 = \"Unexpected Response. Got Content-Type: '{}' for Accept: '{}'\" raise TAXIIServiceException ( arg_8 . format ( arg_7 , arg_5 ) ) return _to_json ( arg_6 )","id_":252187,"task_name":"https:\/\/github.com\/oasis-open\/cti-taxii-client\/blob\/b4c037fb61d8b8892af34423e2c67c81218d6f8e\/taxii2client\/__init__.py#L919-L949","negative":"Insert object before index.\n\n :param int index: index to insert in\n :param string value: path to insert"} {"query":"Lists the categories in the lexicon except the optional categories .","positive":"def Func ( arg_0 ) : arg_1 = [ k for k in arg_0 . __dict__ . keys ( ) if k not in SPECIAL ] return arg_1","id_":252188,"task_name":"https:\/\/github.com\/agile-geoscience\/striplog\/blob\/8033b673a151f96c29802b43763e863519a3124c\/striplog\/lexicon.py#L278-L287","negative":"Creates an SSH tunnel."} {"query":"Returns settings from the server .","positive":"def Func ( arg_0 ) : arg_1 = \"select {fields} from pg_settings\" . format ( fields = ', ' . join ( SETTINGS_FIELDS ) ) Func = [ ] for arg_3 in arg_0 . _iter_results ( arg_1 ) : arg_3 [ 'setting' ] = arg_0 . _vartype_map [ arg_3 [ 'vartype' ] ] ( arg_3 [ 'setting' ] ) Func . append ( Settings ( ** arg_3 ) ) return Func","id_":252189,"task_name":"https:\/\/github.com\/drkjam\/pydba\/blob\/986c4b1315d6b128947c3bc3494513d8e5380ff0\/pydba\/postgres.py#L293-L300","negative":"Does this filename match any of the patterns?"} {"query":"Send a string of binary data to the FireCracker with proper timing .","positive":"def Func ( arg_0 , arg_1 ) : _reset ( arg_0 ) time . sleep ( leadInOutDelay ) for arg_2 in arg_1 : _sendBit ( arg_0 , arg_2 ) time . sleep ( leadInOutDelay )","id_":252190,"task_name":"https:\/\/github.com\/clach04\/x10_any\/blob\/5b90a543b127ab9e6112fd547929b5ef4b8f0cbc\/x10_any\/cm17a.py#L106-L118","negative":"Implement a lookup for object level permissions. Basically the same as\n ModelAdmin.has_delete_permission, but also passes the obj parameter in."} {"query":"Loads the user s LSI profile or provides a default .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = os . path . expanduser ( '~\/.lsi' ) if not os . path . exists ( arg_2 ) : return LsiProfile ( ) arg_3 = ConfigParser ( ) arg_3 . read ( arg_2 ) if arg_1 is None : if arg_3 . has_section ( 'default' ) : arg_1 = 'default' else : return arg_0 ( ) elif not arg_3 . has_section ( arg_1 ) : raise arg_0 . LoadError ( 'No such profile {}' . format ( arg_1 ) ) def _get ( arg_4 , arg_5 = None ) : if arg_3 . has_option ( arg_1 , arg_4 ) : return arg_3 . get ( arg_1 , arg_4 ) else : return arg_5 if arg_3 . has_option ( arg_1 , 'inherit' ) : arg_6 = arg_0 . Func ( arg_3 . get ( arg_1 , 'inherit' ) ) else : arg_6 = arg_0 ( ) arg_6 . override ( 'username' , _get ( 'username' ) ) arg_6 . override ( 'identity_file' , _get ( 'identity file' ) ) arg_6 . override ( 'command' , _get ( 'command' ) ) arg_7 = [ s for s in _get ( 'filters' , '' ) . split ( ',' ) if len ( s ) > 0 ] arg_8 = [ s for s in _get ( 'exclude' , '' ) . split ( ',' ) if len ( s ) > 0 ] arg_6 . filters . extend ( arg_7 ) arg_6 . exclude . 
extend ( arg_8 ) return arg_6","id_":252191,"task_name":"https:\/\/github.com\/NarrativeScience\/lsi\/blob\/7d901b03fdb1a34ef795e5412bfe9685d948e32d\/src\/lsi\/lsi.py#L142-L174","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Take a string representation of time from the blockchain and parse it into datetime object .","positive":"def Func ( arg_0 ) : return datetime . strptime ( arg_0 , timeFormat ) . replace ( tzinfo = timezone . utc )","id_":252192,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenecommon\/utils.py#L36-L40","negative":"Compares and exchanges.\n\n Compares the value in the AL, AX, EAX or RAX register (depending on the\n size of the operand) with the first operand (destination operand). If\n the two values are equal, the second operand (source operand) is loaded\n into the destination operand. Otherwise, the destination operand is\n loaded into the AL, AX, EAX or RAX register.\n\n The ZF flag is set if the values in the destination operand and\n register AL, AX, or EAX are equal; otherwise it is cleared. The CF, PF,\n AF, SF, and OF flags are set according to the results of the comparison\n operation::\n\n (* accumulator = AL, AX, EAX or RAX, depending on whether *)\n (* a byte, word, a doubleword or a 64bit comparison is being performed*)\n IF accumulator == DEST\n THEN\n ZF = 1\n DEST = SRC\n ELSE\n ZF = 0\n accumulator = DEST\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Will add a list of metabolites to the model object and add new constraints accordingly .","positive":"def Func ( arg_0 , arg_1 ) : if not hasattr ( arg_1 , '__iter__' ) : arg_1 = [ arg_1 ] if len ( arg_1 ) == 0 : return None arg_1 = [ arg_3 for arg_3 in arg_1 if arg_3 . id not in arg_0 . metabolites ] arg_2 = [ m for m in arg_1 if not isinstance ( m . id , string_types ) or len ( m . id ) < 1 ] if len ( arg_2 ) != 0 : raise ValueError ( 'invalid identifiers in {}' . format ( repr ( arg_2 ) ) ) for arg_3 in arg_1 : arg_3 . _model = arg_0 arg_0 . metabolites += arg_1 arg_5 = [ ] for arg_6 in arg_1 : if arg_6 . id not in arg_0 . constraints : arg_7 = arg_0 . problem . Constraint ( Zero , name = arg_6 . id , lb = 0 , ub = 0 ) arg_5 += [ arg_7 ] arg_0 . add_cons_vars ( arg_5 ) arg_8 = get_context ( arg_0 ) if arg_8 : arg_8 ( partial ( arg_0 . metabolites . __isub__ , arg_1 ) ) for arg_3 in arg_1 : arg_8 ( partial ( setattr , arg_3 , '_model' , None ) )","id_":252193,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/core\/model.py#L416-L460","negative":"Pull a device from the API."} {"query":"Log - normalizes features such that each vector is between min_db to 0 .","positive":"def Func ( arg_0 , arg_1 = 0.1 , arg_2 = - 80 ) : assert arg_2 < 0 arg_0 = min_max_normalize ( arg_0 , arg_1 = arg_1 ) arg_0 = np . abs ( arg_2 ) * np . 
log10 ( arg_0 ) return arg_0","id_":252194,"task_name":"https:\/\/github.com\/urinieto\/msaf\/blob\/9dbb57d77a1310465a65cc40f1641d083ca74385\/msaf\/utils.py#L12-L17","negative":"Process current member with 'op' operation."} {"query":"Creates one or more files containing one peptide per line returns names of files .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False ) : if arg_2 : arg_3 = { len ( arg_5 ) for arg_5 in arg_0 } arg_4 = { l : [ ] for l in arg_3 } for arg_5 in arg_0 : arg_4 [ len ( arg_5 ) ] . append ( arg_5 ) else : arg_4 = { \"\" : arg_0 } arg_6 = [ ] for arg_7 , arg_8 in arg_4 . items ( ) : arg_9 = len ( arg_8 ) if not arg_1 : arg_1 = arg_9 arg_10 = None for arg_11 , arg_5 in enumerate ( arg_8 ) : if arg_11 % arg_1 == 0 : if arg_10 is not None : arg_6 . append ( arg_10 . name ) arg_10 . close ( ) arg_10 = make_writable_tempfile ( prefix_number = arg_11 \/\/ arg_1 , prefix_name = arg_7 , suffix = \".txt\" ) arg_10 . write ( \"%s\\n\" % arg_5 ) if arg_10 is not None : arg_6 . append ( arg_10 . name ) arg_10 . close ( ) return arg_6","id_":252195,"task_name":"https:\/\/github.com\/openvax\/mhctools\/blob\/b329b4dccd60fae41296816b8cbfe15d6ca07e67\/mhctools\/input_file_formats.py#L26-L61","negative":"Sets the telemetry client for logging events."} {"query":"Open the a new tab when goto goes out of the current document .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . open_file ( arg_1 . module_path ) if arg_2 : TextHelper ( arg_2 ) . goto_line ( arg_1 . line , arg_1 . column )","id_":252196,"task_name":"https:\/\/github.com\/pyQode\/pyqode.python\/blob\/821e000ea2e2638a82ce095a559e69afd9bd4f38\/examples\/pynotepad\/pynotepad\/main_window.py#L339-L347","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Quote a command line argument according to Windows parsing rules","positive":"def Func ( arg_0 ) : arg_1 = [ ] arg_2 = False arg_3 = 0 arg_2 = ( \" \" in arg_0 ) or ( \"\\t\" in arg_0 ) if arg_2 : arg_1 . append ( '\"' ) for arg_4 in arg_0 : if arg_4 == '\\\\' : arg_3 += 1 elif arg_4 == '\"' : arg_1 . append ( '\\\\' * ( arg_3 * 2 ) + '\\\\\"' ) arg_3 = 0 else : if arg_3 : arg_1 . append ( '\\\\' * arg_3 ) arg_3 = 0 arg_1 . append ( arg_4 ) if arg_3 : arg_1 . append ( '\\\\' * arg_3 ) if arg_2 : arg_1 . append ( '\\\\' * arg_3 ) arg_1 . append ( '\"' ) return '' . join ( arg_1 )","id_":252197,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/distribute-0.6.31-py2.7.egg\/setuptools\/command\/easy_install.py#L1722-L1753","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. {'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"Builds the neuron groups .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if not hasattr ( arg_0 , '_pre_Func' ) or not arg_0 . _pre_Func : arg_0 . 
_Func_model ( arg_1 , arg_2 , arg_3 )","id_":252198,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/examples\/example_24_large_scale_brian2_simulation\/clusternet.py#L164-L194","negative":"private function to handle reading of the points record parts\n of the las file.\n\n the header is needed for the point format and number of points\n the vlrs are need to get the potential laszip vlr as well as the extra bytes vlr"} {"query":"Convert reflection coefficients to autocorrelation sequence .","positive":"def Func ( arg_0 , arg_1 ) : [ arg_2 , arg_3 ] = rc2poly ( arg_0 , arg_1 ) arg_4 , arg_5 , arg_6 , arg_7 = rlevinson ( arg_2 , arg_3 ) return arg_4","id_":252199,"task_name":"https:\/\/github.com\/cokelaer\/spectrum\/blob\/bad6c32e3f10e185098748f67bb421b378b06afe\/src\/spectrum\/linear_prediction.py#L134-L146","negative":"Read bytes from an iterator."} {"query":"Generate flattened register map for HStruct","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 in arg_0 : arg_3 = FuncItem ( arg_2 ) arg_1 . append ( arg_3 ) return HStruct ( * arg_1 )","id_":252200,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/interfaces\/structIntf.py#L143-L163","negative":"Paste the contents of the clipboard into the input region.\n\n Parameters:\n -----------\n mode : QClipboard::Mode, optional [default QClipboard::Clipboard]\n\n Controls which part of the system clipboard is used. This can be\n used to access the selection clipboard in X11 and the Find buffer\n in Mac OS. By default, the regular clipboard is used."} {"query":"Export the Bazaar repository at the url to the destination location","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = tempfile . mkdtemp ( '-Func' , 'pip-' ) arg_0 . unpack ( arg_2 ) if os . path . exists ( arg_1 ) : rmtree ( arg_1 ) try : arg_0 . run_command ( [ 'Func' , arg_1 ] , cwd = arg_2 , show_stdout = False ) finally : rmtree ( arg_2 )","id_":252201,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/pip\/vcs\/bazaar.py#L39-L52","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Authenticate against the NuHeat API","positive":"def Func ( arg_0 ) : if arg_0 . _session_id : _LOGGER . debug ( \"Using existing NuHeat session\" ) return _LOGGER . debug ( \"Creating NuHeat session\" ) arg_1 = { \"Email\" : arg_0 . username , \"Password\" : arg_0 . password , \"application\" : \"0\" } arg_2 = arg_0 . request ( config . AUTH_URL , method = \"POST\" , arg_2 = arg_1 ) arg_3 = arg_2 . get ( \"SessionId\" ) if not arg_3 : raise Exception ( \"Authentication error\" ) arg_0 . _session_id = arg_3","id_":252202,"task_name":"https:\/\/github.com\/broox\/python-nuheat\/blob\/3a18852dc9465c34cb96eb3a0c84f1a6caa70707\/nuheat\/api.py#L27-L46","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. 
plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Retrieve information about an AutoML instance .","positive":"def Func ( arg_0 ) : arg_1 = h2o . api ( \"GET \/99\/AutoML\/%s\" % arg_0 ) arg_0 = arg_1 [ \"project_name\" ] arg_2 = [ key [ \"name\" ] for key in arg_1 [ 'leaderboard' ] [ 'models' ] ] if arg_2 is not None and len ( arg_2 ) > 0 : arg_3 = arg_2 [ 0 ] else : arg_3 = None arg_4 = h2o . get_model ( arg_3 ) arg_5 = H2OJob . __PROGRESS_BAR__ h2o . no_progress ( ) try : arg_6 = h2o . H2OFrame ( arg_1 [ \"leaderboard_table\" ] . cell_values , column_names = arg_1 [ \"leaderboard_table\" ] . col_header ) except Exception as ex : raise ex finally : if arg_5 is True : h2o . show_progress ( ) arg_6 = arg_6 [ 1 : ] arg_7 = { 'project_name' : arg_0 , \"leader\" : arg_4 , \"leaderboard\" : arg_6 } return arg_7","id_":252203,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/automl\/autoh2o.py#L503-L537","negative":"Determine the Fitch profile for a single character of the node's sequence.\n The profile is essentially the intersection between the children's\n profiles or, if the former is empty, the union of the profiles.\n\n Parameters\n ----------\n\n node : PhyloTree.Clade:\n Internal node which the profiles are to be determined\n\n pos : int\n Position in the node's sequence which the profiles should\n be determinedf for.\n\n Returns\n -------\n state : numpy.array\n Fitch profile for the character at position pos of the given node."} {"query":"Compile a glob pattern into a regexp .","positive":"def Func ( arg_0 ) : arg_0 = fnmatch . translate ( arg_0 ) arg_1 = r'\\\\\\\\' if os . path . sep == '\\\\' else os . path . sep return re . sub ( r'((? 0 ) and ( arg_3 . root in arg_1 ) and ( arg_3 . pos [ 0 ] in \"NV\" ) and ( arg_3 . root not in arg_2 ) : arg_4 = RankedLexeme ( text = arg_3 . raw . lower ( ) , rank = arg_1 [ arg_3 . root ] \/ 2.0 , ids = [ arg_3 . word_id ] , pos = arg_3 . pos . lower ( ) , count = 1 ) if DEBUG : print ( arg_4 ) yield arg_4","id_":252216,"task_name":"https:\/\/github.com\/DerwenAI\/pytextrank\/blob\/181ea41375d29922eb96768cf6550e57a77a0c95\/pytextrank\/pytextrank.py#L432-L443","negative":"Convenience function to efficiently construct a MultivariateNormalDiag."} {"query":"Find a mapping that can apply to the given controller . Returns None if unsuccessful .","positive":"def Func ( arg_0 , arg_1 ) : try : return arg_0 . _registry [ ( arg_1 . vendor_id , arg_1 . product_id ) ] except KeyError : return None","id_":252217,"task_name":"https:\/\/github.com\/aholkner\/bacon\/blob\/edf3810dcb211942d392a8637945871399b0650d\/bacon\/controller.py#L276-L285","negative":"Get a unique hash depending on the state of the data.\n\n Args:\n data (object):\n Any sort of loosely organized data\n\n hasher (str or HASHER):\n Hash algorithm from hashlib, defaults to `sha512`.\n\n base (str or List[str]):\n Shorthand key or a list of symbols. Valid keys are: 'abc', 'hex',\n and 'dec'. 
Defaults to 'hex'.\n\n types (bool):\n If True data types are included in the hash, otherwise only the raw\n data is hashed. Defaults to False.\n\n hashlen (int):\n Maximum number of symbols in the returned hash. If not specified,\n all are returned. DEPRECATED. Use slice syntax instead.\n\n convert (bool, optional, default=True):\n if True, try and convert the data to json an the json is hashed\n instead. This can improve runtime in some instances, however the\n hash may differ from the case where convert=False.\n\n Notes:\n alphabet26 is a pretty nice base, I recommend it.\n However we default to hex because it is standard.\n This means the output of hashdata with base=sha1 will be the same as\n the output of `sha1sum`.\n\n Returns:\n str: text - hash string\n\n Example:\n >>> import ubelt as ub\n >>> print(ub.hash_data([1, 2, (3, '4')], convert=False))\n 60b758587f599663931057e6ebdf185a...\n >>> print(ub.hash_data([1, 2, (3, '4')], base='abc', hasher='sha512')[:32])\n hsrgqvfiuxvvhcdnypivhhthmrolkzej"} {"query":"Configure the nose running environment . Execute configure before collecting tests with nose . TestCollector to enable output capture and other features .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : arg_3 = arg_0 . env if arg_1 is None : arg_1 = sys . argv arg_4 = getattr ( arg_0 , 'files' , [ ] ) arg_5 , arg_6 = arg_0 . _parseArgs ( arg_1 , arg_4 ) if getattr ( arg_5 , 'files' , [ ] ) : arg_5 , arg_6 = arg_0 . _parseArgs ( arg_1 , arg_5 . files ) arg_0 . options = arg_5 if arg_6 : arg_0 . testNames = arg_6 if arg_5 . testNames is not None : arg_0 . testNames . extend ( tolist ( arg_5 . testNames ) ) if arg_5 . py3where is not None : if sys . version_info >= ( 3 , ) : arg_5 . where = arg_5 . py3where if not arg_5 . where : arg_5 . where = arg_3 . get ( 'NOSE_WHERE' , None ) if not arg_5 . ignoreFiles : arg_5 . ignoreFiles = arg_3 . get ( 'NOSE_IGNORE_FILES' , [ ] ) if not arg_5 . include : arg_5 . include = arg_3 . get ( 'NOSE_INCLUDE' , [ ] ) if not arg_5 . exclude : arg_5 . exclude = arg_3 . get ( 'NOSE_EXCLUDE' , [ ] ) arg_0 . addPaths = arg_5 . addPaths arg_0 . stopOnError = arg_5 . stopOnError arg_0 . verbosity = arg_5 . verbosity arg_0 . includeExe = arg_5 . includeExe arg_0 . traverseNamespace = arg_5 . traverseNamespace arg_0 . debug = arg_5 . debug arg_0 . debugLog = arg_5 . debugLog arg_0 . loggingConfig = arg_5 . loggingConfig arg_0 . firstPackageWins = arg_5 . firstPackageWins arg_0 . FuncLogging ( ) if arg_5 . where is not None : arg_0 . FuncWhere ( arg_5 . where ) if arg_5 . testMatch : arg_0 . testMatch = re . compile ( arg_5 . testMatch ) if arg_5 . ignoreFiles : arg_0 . ignoreFiles = map ( re . compile , tolist ( arg_5 . ignoreFiles ) ) log . info ( \"Ignoring files matching %s\" , arg_5 . ignoreFiles ) else : log . info ( \"Ignoring files matching %s\" , arg_0 . ignoreFilesDefaultStrings ) if arg_5 . include : arg_0 . include = map ( re . compile , tolist ( arg_5 . include ) ) log . info ( \"Including tests matching %s\" , arg_5 . include ) if arg_5 . exclude : arg_0 . exclude = map ( re . compile , tolist ( arg_5 . exclude ) ) log . info ( \"Excluding tests matching %s\" , arg_5 . exclude ) if not arg_5 . showPlugins : arg_0 . plugins . Func ( arg_5 , arg_0 ) arg_0 . plugins . 
begin ( )","id_":252218,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/config.py#L266-L339","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Add a specialized option that is the action to execute .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . add_option ( arg_1 , arg_2 , action = 'callback' , callback = arg_0 . _append_action ) arg_4 . action_code = arg_3","id_":252219,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/cmdline.py#L199-L204","negative":"Unregister an extension code. For testing only."} {"query":"Parse a header fragment delimited by special characters .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 1 ) : if arg_0 . field [ arg_0 . pos ] != arg_1 : return '' arg_4 = [ '' ] arg_5 = 0 arg_0 . pos += 1 while arg_0 . pos < len ( arg_0 . field ) : if arg_5 == 1 : arg_4 . append ( arg_0 . field [ arg_0 . pos ] ) arg_5 = 0 elif arg_0 . field [ arg_0 . pos ] in arg_2 : arg_0 . pos += 1 break elif arg_3 and arg_0 . field [ arg_0 . pos ] == '(' : arg_4 . append ( arg_0 . getcomment ( ) ) continue elif arg_0 . field [ arg_0 . pos ] == '\\\\' : arg_5 = 1 else : arg_4 . append ( arg_0 . field [ arg_0 . pos ] ) arg_0 . pos += 1 return '' . join ( arg_4 )","id_":252220,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/rfc822.py#L684-L719","negative":"Init openstack neutron mq\n\n 1. Check if enable listening neutron notification\n 2. Create consumer\n\n :param mq: class ternya.mq.MQ"} {"query":"See sphere_analytical_gaussian_exact .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0.2765 , arg_3 = 1.6 ) : arg_4 = np . abs ( arg_0 ) <= arg_3 arg_5 = arg_0 [ arg_4 ] arg_6 = - arg_5 \/ ( arg_2 * np . sqrt ( 2 ) ) arg_7 = 0.5 * ( 1 + erf ( arg_6 ) ) - np . sqrt ( 0.5 \/ np . pi ) * ( arg_2 \/ ( arg_5 + arg_1 + 1e-10 ) ) * np . exp ( - arg_6 * arg_6 ) arg_8 = 0 * arg_0 arg_8 [ arg_4 ] = arg_7 arg_8 [ arg_0 > arg_3 ] = 0 arg_8 [ arg_0 < - arg_3 ] = 1 return arg_8","id_":252221,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/comp\/objs.py#L335-L354","negative":"Open a subprocess without blocking. Return a process handle with any\n\toutput streams replaced by queues of lines from that stream.\n\n\tUsage::\n\n\t\tproc = Popen_nonblocking(..., stdout=subprocess.PIPE)\n\t\ttry:\n\t\t\tout_line = proc.stdout.get_nowait()\n\t\texcept queue.Empty:\n\t\t\t\"no output available\"\n\t\telse:\n\t\t\thandle_output(out_line)"} {"query":"Given a dictionary mapping which looks like the following import the objects based on the dotted path and yield the packet type and handler as pairs .","positive":"def Func ( arg_0 ) : arg_1 = { } for arg_2 , arg_3 in arg_0 . 
items ( ) : if arg_2 == '*' : arg_4 = arg_2 elif isinstance ( arg_2 , str ) : arg_4 = importer ( arg_2 ) else : arg_4 = arg_2 if isinstance ( arg_3 , str ) : arg_5 = importer ( arg_3 ) else : arg_5 = arg_3 if arg_4 in arg_1 : raise HandlerConfigError ( \"Handler already provided for packet %s\" % arg_4 ) arg_1 [ arg_4 ] = arg_5 return arg_1","id_":252222,"task_name":"https:\/\/github.com\/d0ugal\/home\/blob\/e984716ae6c74dc8e40346584668ac5cfeaaf520\/home\/collect\/handlers.py#L26-L69","negative":"Sets byte if above.\n\n Sets the destination operand to 0 or 1 depending on the settings of the status flags (CF, SF, OF, ZF, and PF, 1, 0) in the\n EFLAGS register. The destination operand points to a byte register or a byte in memory. The condition code suffix\n (cc, 1, 0) indicates the condition being tested for::\n IF condition\n THEN\n DEST = 1;\n ELSE\n DEST = 0;\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand."} {"query":"Get distribution version .","positive":"def Func ( arg_0 ) : if ( arg_0 . name is not None and arg_0 . version is not None and arg_0 . version . startswith ( \":versiontools:\" ) ) : return ( arg_0 . __get_live_version ( ) or arg_0 . __get_frozen_version ( ) or arg_0 . __fail_to_get_any_version ( ) ) else : return arg_0 . __base . Func ( arg_0 )","id_":252223,"task_name":"https:\/\/github.com\/dougalsutherland\/skl-groups\/blob\/2584c10a413626c6d5f9078cdbf3dcc84e4e9a5b\/versiontools_support.py#L78-L99","negative":"Generate a new random masterkey, encrypt it with the password and\n store it in the store.\n\n :param str password: Password to use for en-\/de-cryption"} {"query":"Return EventRequestHeader for conversation .","positive":"def Func ( arg_0 ) : arg_1 = ( hangouts_pb2 . OFF_THE_RECORD_STATUS_OFF_THE_RECORD if arg_0 . is_off_the_record else hangouts_pb2 . OFF_THE_RECORD_STATUS_ON_THE_RECORD ) return hangouts_pb2 . EventRequestHeader ( conversation_id = hangouts_pb2 . ConversationId ( id = arg_0 . id_ ) , client_generated_id = arg_0 . _client . get_client_generated_id ( ) , expected_otr = arg_1 , delivery_medium = arg_0 . _get_default_delivery_medium ( ) , )","id_":252224,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/conversation.py#L412-L422","negative":"teardown the cluster"} {"query":"Gets a list of repair tasks matching the given filters .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = False , ** arg_6 ) : arg_7 = \"6.0\" arg_8 = arg_0 . Func . metadata [ 'url' ] arg_9 = { } arg_9 [ 'api-version' ] = arg_0 . _serialize . query ( \"api_version\" , arg_7 , 'str' ) if arg_1 is not None : arg_9 [ 'TaskIdFilter' ] = arg_0 . _serialize . query ( \"task_id_filter\" , arg_1 , 'str' ) if arg_2 is not None : arg_9 [ 'StateFilter' ] = arg_0 . _serialize . query ( \"state_filter\" , arg_2 , 'int' ) if arg_3 is not None : arg_9 [ 'ExecutorFilter' ] = arg_0 . _serialize . query ( \"executor_filter\" , arg_3 , 'str' ) arg_10 = { } arg_10 [ 'Accept' ] = 'application\/json' if arg_4 : arg_10 . update ( arg_4 ) arg_11 = arg_0 . _client . get ( arg_8 , arg_9 , arg_10 ) arg_12 = arg_0 . _client . send ( arg_11 , stream = False , ** arg_6 ) if arg_12 . status_code not in [ 200 ] : raise models . FabricErrorException ( arg_0 . _deserialize , arg_12 ) arg_13 = None if arg_12 . status_code == 200 : arg_13 = arg_0 . 
_deserialize ( '[RepairTask]' , arg_12 ) if arg_5 : arg_14 = ClientRawResponse ( arg_13 , arg_12 ) return arg_14 return arg_13","id_":252225,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicefabric\/azure\/servicefabric\/service_fabric_client_ap_is.py#L7441-L7511","negative":"Get information about the users current playback.\n\n Returns\n -------\n player : Player\n A player object representing the current playback."} {"query":"Find MultiQC report for the case .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 , arg_4 = institute_and_case ( arg_0 , arg_1 , arg_2 ) return dict ( institute = arg_3 , case = arg_4 , )","id_":252226,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/controllers.py#L544-L550","negative":"Respond when the server indicates that the client is out of sync.\n\n The server can request a sync when this client sends a message that \n fails the check() on the server. If the reason for the failure isn't \n very serious, then the server can decide to send it as usual in the \n interest of a smooth gameplay experience. When this happens, the \n server sends out an extra response providing the clients with the\n information they need to resync themselves."} {"query":"Compares two parameter instances","positive":"def Func ( arg_0 , arg_1 ) : if ( not arg_1 . v_is_parameter and not arg_0 . v_is_parameter ) : raise ValueError ( 'Both inputs are not parameters' ) if ( not arg_1 . v_is_parameter or not arg_0 . v_is_parameter ) : return False if arg_0 . v_full_name != arg_1 . v_full_name : return False if arg_0 . f_is_empty ( ) and arg_1 . f_is_empty ( ) : return True if arg_0 . f_is_empty ( ) != arg_1 . f_is_empty ( ) : return False if not arg_0 . _values_of_same_type ( arg_0 . f_get ( ) , arg_1 . f_get ( ) ) : return False if not arg_0 . _equal_values ( arg_0 . f_get ( ) , arg_1 . f_get ( ) ) : return False if arg_0 . f_has_range ( ) != arg_1 . f_has_range ( ) : return False if arg_0 . f_has_range ( ) : if arg_0 . f_get_range_length ( ) != arg_1 . f_get_range_length ( ) : return False for arg_2 , arg_3 in zip ( arg_0 . f_get_range ( copy = False ) , arg_1 . f_get_range ( copy = False ) ) : if not arg_0 . _values_of_same_type ( arg_2 , arg_3 ) : return False if not arg_0 . _equal_values ( arg_2 , arg_3 ) : return False return True","id_":252227,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/utils\/comparisons.py#L53-L99","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Return updates from optimization .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 , arg_4 = optimize_updates ( arg_1 , arg_2 , arg_0 . config ) arg_0 . network . free_parameters . extend ( arg_4 ) logging . 
info ( \"Added %d free parameters for optimization\" % len ( arg_4 ) ) return arg_3","id_":252228,"task_name":"https:\/\/github.com\/zomux\/deepy\/blob\/090fbad22a08a809b12951cd0d4984f5bd432698\/deepy\/trainers\/trainers.py#L65-L72","negative":"Get analog data."} {"query":"Transforms the argparse arguments from Namespace to dict and then to Bunch Therefore it is not necessary to access the arguments using the dict syntax The settings can be called like regular vars on the settings object","positive":"def Func ( ) : arg_0 = ArgumentParser ( ) arg_1 = arg_0 . add_subparsers ( dest = 'selected_subparser' ) arg_2 = arg_1 . add_parser ( 'all' ) arg_3 = arg_1 . add_parser ( 'elsevier' ) arg_4 = arg_1 . add_parser ( 'oxford' ) arg_5 = arg_1 . add_parser ( 'springer' ) arg_2 . add_argument ( '--update-credentials' , action = 'store_true' ) arg_3 . add_argument ( '--run-locally' , action = 'store_true' ) arg_3 . add_argument ( '--package-name' ) arg_3 . add_argument ( '--path' ) arg_3 . add_argument ( '--CONSYN' , action = 'store_true' ) arg_3 . add_argument ( '--update-credentials' , action = 'store_true' ) arg_3 . add_argument ( '--extract-nations' , action = 'store_true' ) arg_4 . add_argument ( '--dont-empty-ftp' , action = 'store_true' ) arg_4 . add_argument ( '--package-name' ) arg_4 . add_argument ( '--path' ) arg_4 . add_argument ( '--update-credentials' , action = 'store_true' ) arg_4 . add_argument ( '--extract-nations' , action = 'store_true' ) arg_5 . add_argument ( '--package-name' ) arg_5 . add_argument ( '--path' ) arg_5 . add_argument ( '--update-credentials' , action = 'store_true' ) arg_5 . add_argument ( '--extract-nations' , action = 'store_true' ) ''' Transforms the argparse arguments from Namespace to dict and then to Bunch Therefore it is not necessary to access the arguments using the dict syntax The settings can be called like regular vars on the settings object ''' arg_6 = Bunch ( vars ( arg_0 . parse_args ( ) ) ) call_package ( arg_6 )","id_":252229,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/harvestingkit_cli.py#L119-L157","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"Check if templates directories are setup and issue a warning and help .","positive":"def Func ( ) : if \"GROMACSWRAPPER_SUPPRESS_SETUP_CHECK\" in os . environ : return True arg_0 = [ d for d in config_directories if not os . path . exists ( d ) ] if len ( arg_0 ) > 0 : print ( \"NOTE: Some configuration directories are not set up yet: \" ) print ( \"\\t{0!s}\" . format ( '\\n\\t' . join ( arg_0 ) ) ) print ( \"NOTE: You can create the configuration file and directories with:\" ) print ( \"\\t>>> import gromacs\" ) print ( \"\\t>>> gromacs.config.setup()\" ) return False return True","id_":252230,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/config.py#L619-L643","negative":"gridSpan is what docx uses to denote that a table cell has a colspan. This\n is much more simple than rowspans in that there is a one-to-one mapping\n from gridSpan to colspan."} {"query":"r Helper function to construct a multirate filterbank .","positive":"def Func ( arg_0 = None , arg_1 = None , arg_2 = 25.0 , arg_3 = 1 , arg_4 = 50 , arg_5 = 'ellip' , arg_6 = 'ba' ) : if arg_0 is None : raise ParameterError ( 'center_freqs must be provided.' ) if arg_1 is None : raise ParameterError ( 'sample_rates must be provided.' ) if arg_0 . 
shape != arg_1 . shape : raise ParameterError ( 'Number of provided center_freqs and sample_rates must be equal.' ) arg_7 = 0.5 * arg_1 arg_8 = arg_0 \/ float ( arg_2 ) arg_9 = [ ] for arg_10 , arg_11 , arg_12 in zip ( arg_0 , arg_7 , arg_8 ) : arg_13 = [ arg_10 - 0.5 * arg_12 , arg_10 + 0.5 * arg_12 ] \/ arg_11 arg_14 = [ arg_10 - arg_12 , arg_10 + arg_12 ] \/ arg_11 arg_15 = scipy . signal . iirdesign ( arg_13 , arg_14 , arg_3 , arg_4 , analog = False , arg_5 = arg_5 , output = arg_6 ) arg_9 . append ( arg_15 ) return arg_9 , arg_1","id_":252231,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/filters.py#L860-L954","negative":"Return the RSSI signal strength in decibels."} {"query":"From the pixel - neighbors setup the regularization matrix using the weighted regularization scheme .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = len ( arg_0 ) arg_4 = np . zeros ( shape = ( arg_3 , arg_3 ) ) arg_5 = arg_0 ** 2.0 for arg_6 in range ( arg_3 ) : for arg_7 in range ( arg_2 [ arg_6 ] ) : arg_8 = arg_1 [ arg_6 , arg_7 ] arg_4 [ arg_6 , arg_6 ] += arg_5 [ arg_8 ] arg_4 [ arg_8 , arg_8 ] += arg_5 [ arg_8 ] arg_4 [ arg_6 , arg_8 ] -= arg_5 [ arg_8 ] arg_4 [ arg_8 , arg_6 ] -= arg_5 [ arg_8 ] return arg_4","id_":252232,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/model\/inversion\/util\/regularization_util.py#L97-L127","negative":"helper method for determining binary architecture\n\n :param binary: str for binary to introspect.\n :rtype bool: True for x86_64, False otherwise"} {"query":"Add dag at the end of self using edge_map .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_2 = arg_2 or { } for arg_3 in arg_1 . qregs . values ( ) : if arg_3 . name not in arg_0 . qregs : arg_0 . add_qreg ( QuantumRegister ( arg_3 . size , arg_3 . name ) ) arg_2 . update ( [ ( arg_4 , arg_4 ) for arg_4 in arg_3 if arg_4 not in arg_2 ] ) for arg_5 in arg_1 . cregs . values ( ) : if arg_5 . name not in arg_0 . cregs : arg_0 . add_creg ( ClassicalRegister ( arg_5 . size , arg_5 . name ) ) arg_2 . update ( [ ( arg_6 , arg_6 ) for arg_6 in arg_5 if arg_6 not in arg_2 ] ) arg_0 . compose_back ( arg_1 , arg_2 )","id_":252233,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/dagcircuit\/dagcircuit.py#L495-L509","negative":"Build an observation_noise_fn that observes a Tensor timeseries."} {"query":"Returns items handled by the result .","positive":"def Func ( arg_0 , * arg_1 ) : if len ( arg_1 ) == 0 : if len ( arg_0 . _data ) == 1 : return list ( arg_0 . _data . values ( ) ) [ 0 ] elif len ( arg_0 . _data ) > 1 : raise ValueError ( 'Your result `%s` contains more than one entry: ' '`%s` Please use >>Func<< with one of these.' % ( arg_0 . v_full_name , str ( list ( arg_0 . _data . keys ( ) ) ) ) ) else : raise AttributeError ( 'Your result `%s` is empty, cannot access data.' % arg_0 . v_full_name ) arg_2 = [ ] for arg_3 in arg_1 : arg_3 = arg_0 . f_translate_key ( arg_3 ) if not arg_3 in arg_0 . _data : if arg_3 == 'data' and len ( arg_0 . _data ) == 1 : return arg_0 . _data [ list ( arg_0 . _data . keys ( ) ) [ 0 ] ] else : raise AttributeError ( '`%s` is not part of your result `%s`.' % ( arg_3 , arg_0 . v_full_name ) ) arg_2 . append ( arg_0 . 
_data [ arg_3 ] ) if len ( arg_1 ) == 1 : return arg_2 [ 0 ] else : return arg_2","id_":252234,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/parameter.py#L2134-L2189","negative":"Return an open file-object to the index file"} {"query":"Correctly destroy SyncObj . Stop autoTickThread close connections etc .","positive":"def Func ( arg_0 ) : if arg_0 . __conf . autoTick : arg_0 . __Funcing = True else : arg_0 . _doDestroy ( )","id_":252235,"task_name":"https:\/\/github.com\/bakwc\/PySyncObj\/blob\/be3b0aaa932d5156f5df140c23c962430f51b7b8\/pysyncobj\/syncobj.py#L274-L281","negative":"Write the index.html file for this report."} {"query":"Gets a value from the stack .","positive":"def Func ( arg_0 , arg_1 ) : assert arg_1 in ( 16 , arg_0 . address_bit_size ) arg_2 , arg_3 , arg_3 = arg_0 . get_descriptor ( arg_0 . SS ) arg_4 = arg_0 . STACK + arg_2 arg_5 = arg_0 . read_int ( arg_4 , arg_1 ) arg_0 . STACK = arg_0 . STACK + arg_1 \/\/ 8 return arg_5","id_":252236,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L742-L755","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Check that the mode argument of an open or file call is valid .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = utils . get_argument_from_call ( arg_1 , position = 1 , keyword = \"mode\" ) except utils . NoSuchArgumentError : return if arg_2 : arg_2 = utils . safe_infer ( arg_2 ) if isinstance ( arg_2 , astroid . Const ) and not _check_mode_str ( arg_2 . value ) : arg_0 . add_message ( \"bad-open-mode\" , arg_1 = arg_1 , args = arg_2 . value )","id_":252237,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/stdlib.py#L387-L398","negative":"Update the results string and last-update-time fields of a model.\n\n Parameters:\n ----------------------------------------------------------------\n jobID: job ID of model to modify\n results: new results (json dict string)"} {"query":"Read bytes from an iterator .","positive":"def Func ( arg_0 , arg_1 = None ) : while arg_1 is None or len ( arg_0 . buffer ) < arg_1 : try : arg_0 . buffer += next ( arg_0 . data_stream ) except StopIteration : break arg_2 = arg_0 . buffer [ : arg_1 ] if arg_1 is None : arg_0 . buffer = \"\" else : arg_0 . buffer = arg_0 . buffer [ arg_1 : ] return arg_2","id_":252238,"task_name":"https:\/\/github.com\/mar10\/wsgidav\/blob\/cec0d84222fc24bea01be1cea91729001963f172\/wsgidav\/stream_tools.py#L133-L146","negative":"Adds all parameters to `traj`"} {"query":"Open RAR archive file .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = unrarlib . RAROpenArchiveEx ( ctypes . byref ( arg_1 ) ) except unrarlib . UnrarException : raise BadRarFile ( \"Invalid RAR file.\" ) return arg_2","id_":252239,"task_name":"https:\/\/github.com\/matiasb\/python-unrar\/blob\/b1ac46cbcf42f3d3c5c69ab971fe97369a4da617\/unrar\/rarfile.py#L173-L179","negative":"Enables GPIO interrupts."} {"query":"Run a command with a non blocking call .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = 'utf-8' ) : arg_0 . failed_message = None logger . debug ( \"Running command %s (cwd: %s, env: %s)\" , ' ' . join ( arg_1 ) , arg_2 , str ( arg_3 ) ) try : arg_0 . proc = subprocess . Popen ( arg_1 , stdout = subprocess . PIPE , stderr = subprocess . PIPE , arg_2 = arg_2 , arg_3 = arg_3 ) arg_7 = threading . 
Thread ( target = arg_0 . _read_stderr , kwargs = { 'encoding' : arg_4 } , daemon = True ) arg_7 . start ( ) for arg_8 in arg_0 . proc . stdout : yield arg_8 . decode ( arg_4 , errors = 'surrogateescape' ) arg_7 . join ( ) arg_0 . proc . communicate ( ) arg_0 . proc . stdout . close ( ) arg_0 . proc . stderr . close ( ) except OSError as e : arg_7 . join ( ) raise RepositoryError ( arg_9 = str ( e ) ) if arg_0 . proc . returncode != 0 : arg_9 = \"git command - %s (return code: %d)\" % ( arg_0 . failed_message , arg_0 . proc . returncode ) raise RepositoryError ( arg_9 = arg_9 )","id_":252240,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/git.py#L1225-L1267","negative":"Given an email address, check the email_remapping table to see if the email\n should be sent to a different address. This function also handles overriding\n the email domain if ignore_vcs_email_domain is set or the domain was missing"} {"query":"This returns a table object with all rows and cells correctly populated .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = etree . Element ( 'table' ) arg_3 = get_namespace ( arg_0 , 'w' ) arg_4 = get_rowspan_data ( arg_0 ) for arg_5 in arg_0 : if arg_5 . tag == '%str' % arg_3 : arg_6 = build_tr ( arg_5 , arg_1 , arg_4 , ) arg_2 . append ( arg_6 ) arg_7 = list ( arg_0 . iter ( ) ) return arg_2 , arg_7","id_":252241,"task_name":"https:\/\/github.com\/PolicyStat\/docx2html\/blob\/2dc4afd1e3a3f2f0b357d0bff903eb58bcc94429\/docx2html\/core.py#L1137-L1160","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Verify if the given call node has variadic nodes without context","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . statement ( ) for arg_5 in arg_4 . nodes_of_class ( astroid . Name ) : if arg_5 . name != arg_1 : continue arg_6 = safe_infer ( arg_5 ) if isinstance ( arg_6 , ( astroid . List , astroid . Tuple ) ) : arg_7 = len ( arg_6 . elts ) elif isinstance ( arg_6 , astroid . Dict ) : arg_7 = len ( arg_6 . items ) else : continue arg_8 = arg_6 . statement ( ) if not arg_7 and isinstance ( arg_8 , astroid . FunctionDef ) : arg_9 = _has_parent_of_type ( arg_0 , arg_2 , arg_4 ) arg_10 = _is_name_used_as_variadic ( arg_5 , arg_3 ) if arg_9 or arg_10 : return True return False","id_":252242,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/typecheck.py#L548-L578","negative":"Attempts to find the Teradata install directory with the defaults\n for a given platform. Should always return `None` when the defaults\n are not present and the TERADATA_HOME environment variable wasn't\n explicitly set to the correct install location."} {"query":"Disable abbreviations .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 not in arg_0 . _long_opt : raise optparse . 
BadOptionError ( arg_1 ) return arg_1","id_":252243,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/config.py#L383-L387","negative":"Whether a connection can be established between those two meshes."} {"query":"Create and run a bot the arguments all correspond to sanitized commandline options .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = None , arg_4 = None , arg_5 = 1 , arg_6 = None , arg_7 = True , arg_8 = None , arg_9 = None , arg_10 = False , arg_11 = False , arg_12 = 7777 , arg_13 = False , arg_14 = None , arg_15 = None , arg_16 = False , arg_17 = [ ] , arg_18 = False , arg_19 = True ) : arg_20 . argv = [ arg_20 . argv [ 0 ] ] + arg_17 arg_22 = [ arg_0 , arg_1 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 , arg_11 , arg_12 , arg_13 ] arg_23 = dict ( arg_14 = arg_14 , arg_15 = arg_15 ) arg_24 = [ arg_0 ] arg_25 = dict ( arg_5 = arg_5 , frame_limiter = arg_7 , arg_18 = arg_18 , Func_forever = arg_7 and not ( arg_10 or bool ( arg_4 ) ) , ) if arg_19 : arg_26 = ShoebotThread ( arg_22 = arg_22 , arg_23 = arg_23 , arg_24 = arg_24 , arg_25 = arg_25 , send_sigint = arg_16 ) arg_26 . start ( ) arg_27 = arg_26 . sbot else : print ( 'background thread disabled' ) if arg_16 : raise ValueError ( 'UI Must Func in a separate thread to shell and shell needs main thread' ) arg_26 = None arg_27 = create_bot ( * arg_22 , ** arg_23 ) arg_27 . Func ( * arg_24 , ** arg_25 ) if arg_16 : import shoebot . sbio . shell arg_28 = shoebot . sbio . shell . ShoebotCmd ( arg_27 , trusted = True ) try : arg_28 . cmdloop ( ) except KeyboardInterrupt as e : publish_event ( QUIT_EVENT ) if arg_18 : raise else : return elif arg_19 : try : while arg_26 . is_alive ( ) : sleep ( 1 ) except KeyboardInterrupt : publish_event ( QUIT_EVENT ) if all ( ( arg_19 , arg_26 ) ) : arg_26 . join ( ) return arg_27","id_":252244,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/__init__.py#L217-L335","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. {'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"List all the check groups that pylint knows about","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : for arg_3 in arg_0 . linter . get_checker_names ( ) : print ( arg_3 ) sys . exit ( 0 )","id_":252245,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/lint.py#L1737-L1745","negative":"Deserialize a dataframe.\n\n Parameters\n ----------\n reader : file\n File-like object to read from. Must be opened in binary mode.\n data_type_id : dict\n Serialization format of the raw data.\n See the azureml.DataTypeIds class for constants.\n\n Returns\n -------\n pandas.DataFrame\n Dataframe object."} {"query":"Return a Domain by its domain_name","positive":"def Func ( arg_0 , arg_1 ) : return Domain . get_object ( api_token = arg_0 . 
token , arg_1 = arg_1 )","id_":252246,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Manager.py#L190-L194","negative":"Detect if `obj` is a stream.\n\n We consider anything a stream that has the methods\n\n - ``close()``\n\n and either set of the following\n\n - ``read()``, ``readline()``, ``readlines()``\n - ``write()``, ``writeline()``, ``writelines()``\n\n :Arguments:\n *obj*\n stream or str\n\n :Returns:\n *bool*, ``True`` if `obj` is a stream, ``False`` otherwise\n\n .. SeeAlso::\n :mod:`io`\n\n\n .. versionadded:: 0.7.1"}
{"query":"Unregister an extension code . For testing only .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = ( arg_0 , arg_1 ) if ( _extension_registry . get ( arg_3 ) != arg_2 or _inverted_registry . get ( arg_2 ) != arg_3 ) : raise ValueError ( \"key %s is not registered with code %s\" % ( arg_3 , arg_2 ) ) del _extension_registry [ arg_3 ] del _inverted_registry [ arg_2 ] if arg_2 in _extension_cache : del _extension_cache [ arg_2 ]","id_":252247,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/copy_reg.py#L175-L185","negative":"Wait for the termination of a process and log its stdout & stderr"}
{"query":"Move a stepper motor for the number of steps at the specified speed","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 > 0 : arg_3 = 1 else : arg_3 = 0 arg_4 = abs ( arg_2 ) arg_5 = [ arg_0 . STEPPER_STEP , arg_1 & 0x7f , ( arg_1 >> 7 ) & 0x7f , ( arg_1 >> 14 ) & 0x7f , arg_4 & 0x7f , ( arg_4 >> 7 ) & 0x7f , arg_3 ] arg_0 . _command_handler . send_sysex ( arg_0 . _command_handler . STEPPER_DATA , arg_5 )","id_":252248,"task_name":"https:\/\/github.com\/MrYsLab\/PyMata\/blob\/7e0ec34670b5a0d3d6b74bcbe4f3808c845cc429\/PyMata\/pymata.py#L914-L930","negative":"Generate a header for a tag section with specific parameters.\n\n :param str newer_tag_name: Name (title) of newer tag.\n :param str newer_tag_link: Tag name of newer tag, used for links.\n Could be same as **newer_tag_name** or some\n specific value, like `HEAD`.\n :param datetime newer_tag_time: Date and time when\n newer tag was created.\n :param str older_tag_link: Tag name of older tag, used for links.\n :param str project_url: URL for current project.\n :rtype: str\n :return: Generated ready-to-add tag section."}
{"query":"Joins a Hangul letter from Korean phonemes .","positive":"def Func ( * arg_0 ) : if len ( arg_0 ) == 1 : arg_0 = arg_0 [ 0 ] if len ( arg_0 ) == 2 : arg_0 += ( CODAS [ 0 ] , ) try : arg_1 , arg_2 , arg_3 = arg_0 except ValueError : raise TypeError ( 'Func() takes at most 3 arguments' ) arg_4 = ( ( ONSETS . index ( arg_1 ) * NUM_NUCLEUSES + NUCLEUSES . index ( arg_2 ) ) * NUM_CODAS + CODAS . index ( arg_3 ) ) return unichr ( FIRST_HANGUL_OFFSET + arg_4 )","id_":252249,"task_name":"https:\/\/github.com\/what-studio\/tossi\/blob\/88bc8523c05fe7b7e23518ee0398ee0a18ba0bc0\/tossi\/hangul.py#L43-L59","negative":"Transforms predictions into probability values.\n\n Parameters\n ----------\n logits : array_like\n The logits predicted by the model.\n\n Returns\n -------\n `numpy.ndarray`\n Probability values corresponding to the logits."}
{"query":"A single iteration of the Nelder Mead algorithm .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = False , arg_7 = None , arg_8 = None , arg_9 = None , arg_10 = None , arg_11 = None ) : with tf . compat . v1 . name_scope ( arg_11 , 'Func' ) : arg_12 = arg_0 . dtype . base_dtype arg_13 = tf . argsort ( arg_1 , direction = 'ASCENDING' , stable = True ) ( arg_14 , arg_15 , arg_16 ) = arg_13 [ 0 ] , arg_13 [ - 1 ] , arg_13 [ - 2 ] arg_17 = arg_0 [ arg_15 ] ( arg_18 , arg_19 , arg_20 ) = ( arg_1 [ arg_14 ] , arg_1 [ arg_15 ] , arg_1 [ arg_16 ] ) arg_21 = tf . reduce_sum ( input_tensor = arg_0 , axis = 0 ) - arg_17 arg_21 \/= tf . cast ( arg_3 , arg_12 ) arg_22 = arg_21 + arg_7 * ( arg_21 - arg_17 ) arg_23 = arg_2 ( arg_22 ) arg_24 = 1 arg_25 = _check_convergence ( arg_0 , arg_0 [ arg_14 ] , arg_18 , arg_19 , arg_4 , arg_5 ) def _converged_fn ( ) : return ( True , arg_0 , arg_1 , 0 ) arg_26 = arg_25 , _converged_fn arg_27 = ( ( arg_23 < arg_20 ) & ( arg_23 >= arg_18 ) ) arg_28 = _accept_reflected_fn ( arg_0 , arg_1 , arg_15 , arg_22 , arg_23 ) arg_29 = arg_27 , arg_28 arg_30 = arg_23 < arg_18 arg_31 = _expansion_fn ( arg_2 , arg_0 , arg_1 , arg_15 , arg_22 , arg_23 , arg_21 , arg_8 ) arg_32 = arg_30 , arg_31 arg_33 = ( ( arg_23 < arg_19 ) & ( arg_23 >= arg_20 ) ) arg_34 = _outside_contraction_fn ( arg_2 , arg_0 , arg_1 , arg_21 , arg_14 , arg_15 , arg_22 , arg_23 , arg_9 , arg_10 , arg_6 ) arg_35 = arg_33 , arg_34 arg_36 = _inside_contraction_fn ( arg_2 , arg_0 , arg_1 , arg_21 , arg_14 , arg_15 , arg_19 , arg_9 , arg_10 , arg_6 ) ( arg_37 , arg_38 , arg_39 , arg_40 ) = prefer_static . case ( [ arg_26 , arg_29 , arg_32 , arg_35 ] , default = arg_36 , exclusive = False ) arg_38 . set_shape ( arg_0 . shape ) arg_39 . set_shape ( arg_1 . shape ) return ( arg_37 , arg_38 , arg_39 , arg_24 + arg_40 )","id_":252250,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/nelder_mead.py#L343-L456","negative":"Create event start and end datetimes."}
{"query":"Returns the unique SHA1 hexdigest of the chart URL param parts","positive":"def Func ( arg_0 ) : arg_0 . render ( ) return new_sha ( '' . join ( sorted ( arg_0 . _parts ( ) ) ) ) . hexdigest ( )","id_":252251,"task_name":"https:\/\/github.com\/appknox\/google-chartwrapper\/blob\/3769aecbef6c83b6cd93ee72ece478ffe433ac57\/GChartWrapper\/GChart.py#L643-L650","negative":"Re-enable the FTDI drivers for the current platform."}
{"query":"Set the rotation state of the camera","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 . last_x is None : arg_0 . last_x = arg_1 if arg_0 . last_y is None : arg_0 . last_y = arg_2 arg_5 = arg_0 . last_x - arg_1 arg_6 = arg_0 . last_y - arg_2 arg_0 . last_x = arg_1 arg_0 . last_y = arg_2 arg_5 *= arg_0 . mouse_sensitivity arg_6 *= arg_0 . mouse_sensitivity arg_0 . yaw -= arg_5 arg_0 . pitch += arg_6 if arg_0 . pitch > 85.0 : arg_0 . pitch = 85.0 if arg_0 . pitch < - 85.0 : arg_0 . pitch = - 85.0 arg_0 . _update_yaw_and_pitch ( )","id_":252252,"task_name":"https:\/\/github.com\/Contraz\/demosys-py\/blob\/6466128a3029c4d09631420ccce73024025bd5b6\/demosys\/scene\/camera.py#L180-L209","negative":"Adds a new extracted license to the document.\n Raises SPDXValueError if data format is incorrect."}
{"query":"check that the accessed attribute exists","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . config . generated_members : if re . match ( arg_2 , arg_1 . attrname ) : return if re . match ( arg_2 , arg_1 . as_string ( ) ) : return try : arg_3 = list ( arg_1 . expr . infer ( ) ) except exceptions . InferenceError : return arg_4 = set ( ) arg_5 = [ arg_6 for arg_6 in arg_3 if arg_6 is not astroid . Uninferable and not isinstance ( arg_6 , astroid . nodes . Unknown ) ] if ( len ( arg_5 ) != len ( arg_3 ) and arg_0 . config . ignore_on_opaque_inference ) : return for arg_6 in arg_5 : arg_8 = getattr ( arg_6 , \"name\" , None ) if _is_owner_ignored ( arg_6 , arg_8 , arg_0 . config . ignored_classes , arg_0 . config . ignored_modules ) : continue try : if not [ n for n in arg_6 . getattr ( arg_1 . attrname ) if not isinstance ( n . statement ( ) , astroid . AugAssign ) ] : arg_4 . add ( ( arg_6 , arg_8 ) ) continue except AttributeError : continue except exceptions . NotFoundError : if not _emit_no_member ( arg_1 , arg_6 , arg_8 , ignored_mixins = arg_0 . config . ignore_mixin_members , ignored_none = arg_0 . config . ignore_none , ) : continue arg_4 . add ( ( arg_6 , arg_8 ) ) continue break else : arg_7 = set ( ) for arg_6 , arg_8 in arg_4 : if isinstance ( arg_6 , astroid . Instance ) : arg_9 = arg_6 . _proxied else : arg_9 = arg_6 if arg_9 in arg_7 : continue arg_7 . add ( arg_9 ) arg_10 , arg_11 = arg_0 . _get_nomember_msgid_hint ( arg_1 , arg_6 ) arg_0 . add_message ( arg_10 , arg_1 = arg_1 , args = ( arg_6 . display_type ( ) , arg_8 , arg_1 . attrname , arg_11 ) , confidence = INFERENCE , )","id_":252253,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/typecheck.py#L830-L925","negative":"This endpoint is used to set the health of an allocation that is in the deployment manually. In some use\n cases, automatic detection of allocation health may not be desired. As such those task groups can be marked\n with an upgrade policy that uses health_check = \"manual\". Those allocations must have their health marked\n manually using this endpoint. Marking an allocation as healthy will allow the rolling upgrade to proceed.\n Marking it as failed will cause the deployment to fail.\n\n https:\/\/www.nomadproject.io\/docs\/http\/deployments.html\n\n arguments:\n - id\n - healthy_allocations, Specifies the set of allocation that should be marked as healthy.\n - unhealthy_allocations, Specifies the set of allocation that should be marked as unhealthy.\n returns: dict\n raises:\n - nomad.api.exceptions.BaseNomadException\n - nomad.api.exceptions.URLNotFoundNomadException"}
{"query":"Bumps the Version given a target","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 == 'patch' : return Version ( arg_0 . major , arg_0 . minor , arg_0 . patch + 1 ) if arg_1 == 'minor' : return Version ( arg_0 . major , arg_0 . minor + 1 , 0 ) if arg_1 == 'major' : return Version ( arg_0 . major + 1 , 0 , 0 ) return arg_0 . clone ( )","id_":252254,"task_name":"https:\/\/github.com\/ewilazarus\/yld\/blob\/157e474d1055f14ffdfd7e99da6c77d5f17d4307\/yld\/tag.py#L51-L63","negative":"Generate a new random masterkey, encrypt it with the password and\n store it in the store.\n\n :param str password: Password to use for en-\/de-cryption"}
{"query":"Write data to file - like object","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 ) : arg_1 . Func ( arg_0 . magic ) arg_1 . Func ( arg_0 . header_struct . pack ( arg_0 . version , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 . nbytes ) ) arg_1 . Func ( arg_8 . tobytes ( ) ) arg_1 . flush ( )","id_":252255,"task_name":"https:\/\/github.com\/xmikos\/soapy_power\/blob\/46e12659b8d08af764dc09a1f31b0e85a68f808f\/soapypower\/writer.py#L83-L91","negative":"Attempts to fetch streams repeatedly\n until some are returned or limit hit."}
{"query":"Lists all temple templates and packages associated with those templates","positive":"def Func ( arg_0 , arg_1 = None ) : temple . check . has_env_vars ( temple . constants . GITHUB_API_TOKEN_ENV_VAR ) if arg_1 : temple . check . is_git_ssh_path ( arg_1 ) arg_2 = 'user:{} filename:{} {}' . format ( arg_0 , temple . constants . TEMPLE_CONFIG_FILE , arg_1 ) eFunce : arg_2 = 'user:{} cookiecutter.json in:path' . format ( arg_0 ) arg_3 = _code_search ( arg_2 , arg_0 ) return collections . OrderedDict ( sorted ( arg_3 . items ( ) ) )","id_":252256,"task_name":"https:\/\/github.com\/CloverHealth\/temple\/blob\/d7b75da2459f72ba74d6f3b6e1ab95c3d1b92ccd\/temple\/ls.py#L78-L117","negative":"Use ``\\\\r`` to overdraw the current line with the given text.\n\n This function transparently handles tracking how much overdrawing is\n necessary to erase the previous line when used consistently.\n\n :param text: The text to be outputted\n :param newline: Whether to start a new line and reset the length count.\n :type text: :class:`~__builtins__.str`\n :type newline: :class:`~__builtins__.bool`"}
{"query":"Creates a map of letter use in a word .","positive":"def Func ( arg_0 ) : arg_1 = { } for arg_2 in arg_0 : try : arg_1 [ arg_2 ] += 1 except KeyError : arg_1 [ arg_2 ] = 1 return arg_1","id_":252257,"task_name":"https:\/\/github.com\/a-tal\/nagaram\/blob\/2edcb0ef8cb569ebd1c398be826472b4831d6110\/nagaram\/anagrams.py#L7-L23","negative":"If n2 is a perfect square, return its square root, else raise error."}
{"query":"Main function .","positive":"def Func ( ) : arg_0 = 'Validate a CSV data file.' arg_1 = argparse . ArgumentParser ( arg_0 = arg_0 ) arg_1 . add_argument ( 'file' , metavar = 'FILE' , help = 'a file to be validated' ) arg_1 . add_argument ( '-l' , '--limit' , dest = 'limit' , type = int , action = 'store' , default = 0 , help = 'limit the number of problems reported' ) arg_1 . add_argument ( '-s' , '--summarize' , dest = 'summarize' , action = 'store_true' , default = False , help = 'output only a summary of the different types of problem found' ) arg_1 . add_argument ( '-e' , '--report-unexpected-exceptions' , dest = 'report_unexpected_exceptions' , action = 'store_true' , default = False , help = 'report any unexpected exceptions as problems' ) arg_2 = arg_1 . parse_args ( ) if not os . path . isfile ( arg_2 . file ) : print '%s is not a file' % arg_2 . file sys . exit ( 1 ) with open ( arg_2 . file , 'r' ) as f : arg_3 = csv . reader ( f , delimiter = '\\t' ) arg_4 = create_validator ( ) arg_5 = arg_4 . validate ( arg_3 , summarize = arg_2 . summarize , report_unexpected_exceptions = arg_2 . report_unexpected_exceptions , context = { 'file' : arg_2 . file } ) write_problems ( arg_5 , sys . stdout , summarize = arg_2 . summarize , limit = arg_2 . limit ) if arg_5 : sys . exit ( 1 ) else : sys . exit ( 0 )","id_":252258,"task_name":"https:\/\/github.com\/alimanfoo\/csvvalidator\/blob\/50a86eefdc549c48f65a91a5c0a66099010ee65d\/example.py#L60-L124","negative":"Revoke the token and remove the cookie."}
{"query":"Returns True if path contains a . cpenv file","positive":"def Func ( arg_0 ) : arg_1 = unipath ( arg_0 , '.cpenv' ) return os . path . exists ( arg_1 ) and os . path . isfile ( arg_1 )","id_":252259,"task_name":"https:\/\/github.com\/cpenv\/cpenv\/blob\/afbb569ae04002743db041d3629a5be8c290bd89\/cpenv\/utils.py#L55-L59","negative":"Delete a space."}
{"query":"Assert all elements of x are finite .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None ) : with tf . compat . v2 . name_scope ( arg_4 or 'Func' ) : arg_5 = tf . get_static_value ( arg_0 ) if arg_5 is not None : if ~ np . all ( np . isfinite ( arg_5 ) ) : raise ValueError ( arg_3 ) return arg_0 arg_6 = tf . compat . v1 . assert_equal ( tf . math . is_finite ( arg_0 ) , tf . ones_like ( arg_0 , tf . bool ) , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 ) with tf . control_dependencies ( [ arg_6 ] ) : return tf . identity ( arg_0 )","id_":252260,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/internal\/assert_util.py#L44-L73","negative":"This function creates the command list from available information"}
{"query":"Initialize from flask","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . config . get ( \"MONGO_URI\" , None ) arg_3 = arg_1 . config . get ( \"MONGO_DBNAME\" , 'scout' ) try : arg_4 = get_connection ( host = arg_1 . config . get ( \"MONGO_HOST\" , 'localhost' ) , port = arg_1 . config . get ( \"MONGO_PORT\" , 27017 ) , username = arg_1 . config . get ( \"MONGO_USERNAME\" , None ) , password = arg_1 . config . get ( \"MONGO_PASSWORD\" , None ) , arg_2 = arg_2 , mongodb = arg_3 ) except ConnectionFailure : context . abort ( ) arg_1 . config [ \"MONGO_DATABASE\" ] = arg_4 [ arg_3 ] arg_1 . config [ 'MONGO_CLIENT' ] = arg_4","id_":252261,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/extensions.py#L43-L63","negative":"Whether a connection can be established between those two meshes."}
{"query":"A convenience function for plotting a vertical bar plot from a Counter","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = 'vertical' ) : arg_4 = sorted ( arg_0 , key = arg_0 . get , reverse = True ) arg_5 = range ( len ( arg_4 ) ) arg_1 . xticks ( arg_5 , arg_4 , arg_3 = arg_3 ) arg_1 . bar ( arg_5 , [ arg_0 [ arg_6 ] for arg_6 in arg_4 ] ) if arg_2 is not None : arg_1 . title ( arg_2 )","id_":252262,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/utils.py#L174-L182","negative":"Adds all parameters to `traj`"}
{"query":"Read config s variables and apply their values to all its properties","positive":"def Func ( arg_0 : arg_1 [ arg_2 , arg_3 , arg_4 ] ) -> arg_4 : if isinstance ( arg_0 , ( arg_2 , arg_3 ) ) : arg_0 = read_json ( find_config ( arg_0 ) ) arg_5 = { 'DEEPPAVLOV_PATH' : os . getenv ( f'DP_DEEPPAVLOV_PATH' , arg_3 ( __file__ ) . parent . parent . parent ) } for arg_6 , arg_7 in arg_0 . get ( 'metadata' , { } ) . get ( 'variables' , { } ) . items ( ) : arg_8 = f'DP_{name}' if arg_8 in os . environ : arg_7 = os . getenv ( arg_8 ) arg_5 [ arg_6 ] = arg_7 . format ( ** arg_5 ) return _Func_property ( arg_0 , arg_5 )","id_":252263,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/commands\/utils.py#L36-L50","negative":"Handles HTTP error codes for the given request\n\n Raises:\n AuthenticationError on the appropriate 4** errors\n ServerError if the response is not an ok (2**)\n\n Arguments:\n r -- The request result"}
{"query":"Call django - admin to create the project structure","positive":"def Func ( arg_0 ) : arg_1 = deepcopy ( dict ( os . environ ) ) arg_1 [ arg_2 ( 'DJANGO_SETTINGS_MODULE' ) ] = arg_2 ( '{0}.settings' . format ( arg_0 . project_name ) ) arg_1 [ arg_2 ( 'PYTHONPATH' ) ] = arg_2 ( os . pathsep . join ( map ( shlex_quote , sys . path ) ) ) arg_3 = { } arg_4 = [ ] if arg_0 . template : arg_3 [ 'template' ] = arg_0 . template arg_4 . append ( arg_0 . project_name ) if arg_0 . project_directory : arg_4 . append ( arg_0 . project_directory ) if not os . path . exists ( arg_0 . project_directory ) : os . makedirs ( arg_0 . project_directory ) arg_5 = 'django-admin.py' arg_6 = [ os . path . join ( os . path . dirname ( sys . executable ) , arg_5 ) ] arg_7 = [ 'Scripts' ] arg_6 . extend ( [ os . path . join ( os . path . dirname ( sys . executable ) , arg_8 , arg_5 ) for arg_8 in arg_7 ] ) arg_9 = [ arg_5 ] for arg_10 in arg_6 : if os . path . exists ( arg_10 ) : arg_9 = [ sys . executable , arg_10 ] break arg_11 = arg_9 + [ 'startproject' ] + arg_4 if arg_0 . verbose : sys . stdout . write ( 'Project creation command: {0}\\n' . format ( ' ' . join ( arg_11 ) ) ) try : arg_12 = subprocess . check_output ( arg_11 , stderr = subprocess . STDOUT ) sys . stdout . write ( arg_12 . decode ( 'utf-8' ) ) except subprocess . CalledProcessError as e : if arg_0 . verbose : sys . stdout . write ( e . output . decode ( 'utf-8' ) ) raise","id_":252264,"task_name":"https:\/\/github.com\/nephila\/djangocms-installer\/blob\/9fec66d5f8b1e9a0f3c0ec66dd777db578fab07e\/djangocms_installer\/django\/__init__.py#L27-L66","negative":"Indicates the start of a new sequence. Clears any predictions and makes sure\n synapses don't grow to the currently active cells in the next time step."}
{"query":"Dump as a list of INDRA statements .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = arg_0 . to_bel ( * arg_1 , ** arg_2 ) return Func ( arg_3 )","id_":252265,"task_name":"https:\/\/github.com\/bio2bel\/bio2bel\/blob\/d80762d891fa18b248709ff0b0f97ebb65ec64c2\/src\/bio2bel\/manager\/bel_manager.py#L95-L101","negative":"Get the context for this view."}
{"query":"Computes the optimal partitions given the size distributions and computed number of expected false positives for all sub - intervals .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 < 2 : raise ValueError ( \"num_part cannot be less than 2\" ) if arg_0 > len ( arg_1 ) : raise ValueError ( \"num_part cannot be greater than the domain size of \" \"all set sizes\" ) if arg_0 == 2 : arg_3 , arg_4 = min ( ( arg_2 [ 0 , u1 ] + arg_2 [ u1 + 1 , len ( arg_1 ) - 1 ] , u1 ) for u1 in range ( 0 , len ( arg_1 ) - 1 ) ) return [ ( arg_1 [ 0 ] , arg_1 [ arg_4 ] ) , ( arg_1 [ arg_4 + 1 ] , arg_1 [ - 1 ] ) , ] , arg_3 , None arg_5 = np . zeros ( ( len ( arg_1 ) , arg_0 - 2 ) ) arg_6 = lambda arg_7 : arg_7 - 2 for arg_7 in range ( 2 , arg_0 ) : for arg_4 in range ( arg_7 - 1 , len ( arg_1 ) ) : if arg_7 == 2 : arg_5 [ arg_4 , arg_6 ( arg_7 ) ] = min ( arg_2 [ 0 , u1 ] + arg_2 [ u1 + 1 , arg_4 ] for u1 in range ( arg_4 ) ) else : arg_5 [ arg_4 , arg_6 ( arg_7 ) ] = min ( arg_5 [ u1 , arg_6 ( arg_7 - 1 ) ] + arg_2 [ u1 + 1 , arg_4 ] for u1 in range ( ( arg_7 - 1 ) - 1 , arg_4 ) ) arg_7 = arg_0 arg_3 , arg_4 = min ( ( arg_5 [ u1 , arg_6 ( arg_7 - 1 ) ] + arg_2 [ u1 + 1 , len ( arg_1 ) - 1 ] , u1 ) for u1 in range ( ( arg_7 - 1 ) - 1 , len ( arg_1 ) - 1 ) ) arg_8 = [ ( arg_1 [ arg_4 + 1 ] , arg_1 [ - 1 ] ) , ] arg_7 -= 1 while arg_7 > 1 : arg_9 , arg_10 = min ( ( arg_5 [ u1 , arg_6 ( arg_7 ) ] + arg_2 [ u1 + 1 , arg_4 ] , u1 ) for u1 in range ( ( arg_7 - 1 ) - 1 , arg_4 ) ) arg_8 . insert ( 0 , ( arg_1 [ arg_10 + 1 ] , arg_1 [ arg_4 ] ) ) arg_4 = arg_10 arg_7 -= 1 arg_8 . insert ( 0 , ( arg_1 [ 0 ] , arg_1 [ arg_4 ] ) ) return [ arg_8 , arg_3 , arg_5 ]","id_":252266,"task_name":"https:\/\/github.com\/ekzhu\/datasketch\/blob\/b3e4129987890a2beb04f2c0b6dc618ae35f2e14\/datasketch\/lshensemble_partition.py#L96-L168","negative":"Gets status of response."}
{"query":"Shift the model result and return the new instance .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { } if arg_0 . _inferenceBuffer is None : arg_3 = InferenceElement . getMaxDelay ( arg_1 . inferences ) arg_0 . _inferenceBuffer = collections . deque ( maxlen = arg_3 + 1 ) arg_0 . _inferenceBuffer . appendleft ( copy . deepcopy ( arg_1 . inferences ) ) for arg_5 , arg_6 in arg_1 . inferences . iteritems ( ) : if isinstance ( arg_6 , dict ) : arg_2 [ arg_5 ] = { } for arg_7 , arg_8 in arg_6 . iteritems ( ) : arg_9 = InferenceElement . getTemporalDelay ( arg_5 , arg_7 ) if len ( arg_0 . _inferenceBuffer ) > arg_9 : arg_10 = arg_0 . _inferenceBuffer [ arg_9 ] [ arg_5 ] [ arg_7 ] arg_2 [ arg_5 ] [ arg_7 ] = arg_10 else : arg_2 [ arg_5 ] [ arg_7 ] = None else : arg_9 = InferenceElement . getTemporalDelay ( arg_5 ) if len ( arg_0 . _inferenceBuffer ) > arg_9 : arg_2 [ arg_5 ] = ( arg_0 . _inferenceBuffer [ arg_9 ] [ arg_5 ] ) else : if type ( arg_6 ) in ( list , tuple ) : arg_2 [ arg_5 ] = [ None ] * len ( arg_6 ) else : arg_2 [ arg_5 ] = None arg_11 = ModelResult ( rawInput = arg_1 . rawInput , sensorInput = arg_1 . sensorInput , inferences = arg_2 , metrics = arg_1 . metrics , predictedFieldIdx = arg_1 . predictedFieldIdx , predictedFieldName = arg_1 . predictedFieldName ) return arg_11","id_":252267,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/inference_shifter.py#L40-L88","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"}
{"query":"Sequentially update the actors the world and the messaging system . The theater terminates once all of the actors indicate that they are done .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . actors : arg_2 . on_update_game ( arg_1 ) arg_0 . forum . on_update_game ( ) with arg_0 . world . _unlock_temporarily ( ) : arg_0 . world . on_update_game ( arg_1 ) if arg_0 . world . has_game_ended ( ) : arg_0 . exit_stage ( )","id_":252268,"task_name":"https:\/\/github.com\/kxgames\/kxg\/blob\/a68c01dc4aa1abf6b3780ba2c65a7828282566aa\/kxg\/theater.py#L201-L216","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."}
{"query":"Create a tree . Capture","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . value ( arg_2 ) arg_1 . parser_tree = parsing . Capture ( arg_3 , arg_1 . parser_tree ) return True","id_":252269,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/dsl.py#L628-L632","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."}
{"query":"Prepare the component for execution .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 in arg_0 . components : arg_3 . Func ( arg_1 , arg_2 ) for arg_4 in arg_0 . activities : arg_4 . Func ( arg_1 , arg_2 )","id_":252270,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/modelling\/business\/structure.py#L233-L246","negative":"Overriding the default JSONEncoder.default for NDB support."}
{"query":"Parse persons from given datafield .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = [ \"aut\" ] ) : arg_4 = [ ] arg_5 = arg_0 . get_subfields ( arg_1 , arg_2 ) for arg_6 in arg_5 : arg_7 = arg_6 . other_subfields if \"4\" in arg_7 and arg_3 != [ \"any\" ] : arg_8 = arg_7 [ \"4\" ] arg_9 = any ( map ( lambda role : role in arg_3 , arg_8 ) ) if not arg_9 : continue arg_10 = arg_6 . i1 arg_11 = arg_6 . i2 arg_6 = arg_6 . strip ( ) arg_12 = \"\" arg_13 = \"\" arg_14 = \"\" arg_15 = \"\" if arg_10 == \"1\" and arg_11 == \" \" : if \",\" in arg_6 : arg_14 , arg_12 = arg_6 . split ( \",\" , 1 ) elif \" \" in arg_6 : arg_14 , arg_12 = arg_6 . split ( \" \" , 1 ) else : arg_14 = arg_6 if \"c\" in arg_7 : arg_15 = \",\" . join ( arg_7 [ \"c\" ] ) elif arg_10 == \"0\" and arg_11 == \" \" : arg_12 = arg_6 . strip ( ) if \"b\" in arg_7 : arg_13 = \",\" . join ( arg_7 [ \"b\" ] ) if \"c\" in arg_7 : arg_14 = \",\" . join ( arg_7 [ \"c\" ] ) elif arg_10 == \"1\" and arg_11 == \"0\" or arg_10 == \"0\" and arg_11 == \"0\" : arg_12 = arg_6 . strip ( ) if \"c\" in arg_7 : arg_15 = \",\" . join ( arg_7 [ \"c\" ] ) arg_4 . append ( Person ( arg_12 . strip ( ) , arg_13 . strip ( ) , arg_14 . strip ( ) , arg_15 . strip ( ) ) ) return arg_4","id_":252271,"task_name":"https:\/\/github.com\/edeposit\/marcxml_parser\/blob\/6d1c77c61fc2827b71f1b3d5aa3332d7f5807820\/src\/marcxml_parser\/query.py#L95-L171","negative":"Shift the model result and return the new instance.\n\n Queues up the T(i+1) prediction value and emits a T(i)\n input\/prediction pair, if possible. E.g., if the previous T(i-1)\n iteration was learn-only, then we would not have a T(i) prediction in our\n FIFO and would not be able to emit a meaningful input\/prediction pair.\n\n :param modelResult: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult`\n instance to shift.\n :return: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult` instance that\n has been shifted"}
{"query":"Arrange for file of debugger commands to get read in the process - command loop .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = os . path . expanduser ( arg_1 ) arg_3 = Mfile . readable ( arg_2 ) if arg_3 : arg_0 . cmd_queue . append ( 'source ' + arg_2 ) elif arg_3 is None : arg_0 . errmsg ( \"source file '%s' doesn't exist\" % arg_2 ) else : arg_0 . errmsg ( \"source file '%s' is not readable\" % arg_2 ) pass return","id_":252272,"task_name":"https:\/\/github.com\/rocky\/python3-trepan\/blob\/14e91bc0acce090d67be145b1ac040cab92ac5f3\/trepan\/processor\/cmdproc.py#L827-L840","negative":"Overriding the default JSONEncoder.default for NDB support."}
{"query":"Get available messages and send through to the protocol","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = arg_0 . protocol . channel_layer . receive_many ( [ u'slack.send' ] , block = False ) arg_3 = 0.1 if arg_1 : arg_0 . protocols [ 0 ] . sendSlack ( arg_2 ) reactor . callLater ( arg_3 , arg_0 . Func )","id_":252273,"task_name":"https:\/\/github.com\/djangobot\/djangobot\/blob\/0ec951891812ea4114c27a08c790f63d0f0fd254\/djangobot\/client.py#L131-L139","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."}
{"query":"Expand the wildcards for an S3 path . This emulates the shall expansion for wildcards if the input is local path .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] if not isinstance ( arg_1 , list ) : arg_1 = [ arg_1 ] for arg_3 in arg_1 : arg_4 = arg_0 . opt . recursive arg_0 . opt . recursive = False arg_2 += [ arg_7 [ 'name' ] for arg_7 in arg_0 . s3walk ( arg_3 , True ) ] arg_0 . opt . recursive = arg_4 if ( len ( arg_2 ) == 0 ) and ( not arg_0 . opt . ignore_empty_source ) : fail ( \"[Runtime Failure] Source doesn't exist.\" ) return arg_2","id_":252274,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L763-L784","negative":"Converts py_zipkin's annotations dict to protobuf.\n\n :param annotations: annotations dict.\n :type annotations: dict\n :return: corresponding protobuf's list of annotations.\n :rtype: list"}
{"query":"Returns True if parent is in the list of ancestors returns False otherwise .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . parent is None : return False if arg_0 . parent == arg_1 : return True return arg_0 . parent . Func ( arg_1 )","id_":252275,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/task.py#L405-L419","negative":"Stop streaming samples from device and delete samples buffer"}
{"query":"Validate bands parameter .","positive":"def Func ( arg_0 ) : if not isinstance ( arg_0 , list ) : raise TypeError ( 'Parameter bands must be a \"list\"' ) arg_1 = list ( range ( 1 , 12 ) ) + [ 'BQA' ] for arg_2 in arg_0 : if arg_2 not in arg_1 : raise InvalidBandError ( '%s is not a valid band' % arg_2 )","id_":252276,"task_name":"https:\/\/github.com\/lucaslamounier\/USGSDownload\/blob\/0969483ea9f9648aa17b099f36d2e1010488b2a4\/usgsdownload\/usgs.py#L184-L191","negative":"Deletes the specified file from the given S3 bucket."}
{"query":"Assemble a logline prefix using the google2 format .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : global _level_names arg_3 = arg_1 or _time . time ( ) arg_4 = _time . localtime ( arg_3 ) arg_5 = int ( 1e6 * ( arg_3 % 1.0 ) ) ( arg_6 , arg_7 ) = arg_2 or _GetFileAndLine ( ) arg_8 = _os . path . basename ( arg_6 ) arg_9 = 'I' if arg_0 in _level_names : arg_9 = _level_names [ arg_0 ] [ 0 ] arg_10 = '%c%02d%02d %02d: %02d: %02d.%06d %5d %s: %d] ' % ( arg_9 , arg_4 [ 1 ] , arg_4 [ 2 ] , arg_4 [ 3 ] , arg_4 [ 4 ] , arg_4 [ 5 ] , arg_5 , _get_thread_id ( ) , arg_8 , arg_7 ) return arg_10","id_":252277,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/logging\/tl_logging.py#L216-L248","negative":"Check if hdl process has event depenency on signal"}
{"query":"Send the message . First a message is constructed then a session with the email servers is created finally the message is sent and the session is stopped .","positive":"def Func ( arg_0 ) : arg_0 . _generate_email ( ) if arg_0 . verbose : print ( \"Debugging info\" \"\\n--------------\" \"\\n{} Message created.\" . format ( timestamp ( ) ) ) arg_1 = [ ] for arg_2 in ( arg_0 . to , arg_0 . cc , arg_0 . bcc ) : if arg_2 : if isinstance ( arg_2 , MutableSequence ) : arg_1 += arg_2 else : arg_1 . append ( arg_2 ) arg_3 = arg_0 . _get_session ( ) if arg_0 . verbose : print ( timestamp ( ) , \"Login successful.\" ) arg_3 . Funcmail ( arg_0 . from_ , arg_1 , arg_0 . message . as_string ( ) ) arg_3 . quit ( ) if arg_0 . verbose : print ( timestamp ( ) , \"Logged out.\" ) if arg_0 . verbose : print ( timestamp ( ) , type ( arg_0 ) . __name__ + \" info:\" , arg_0 . __str__ ( indentation = \"\\n * \" ) , ) print ( \"Message sent.\" )","id_":252278,"task_name":"https:\/\/github.com\/trp07\/messages\/blob\/7789ebc960335a59ea5d319fceed3dd349023648\/messages\/email_.py#L252-L293","negative":"Click the right mouse button without modifiers pressed.\n\n Parameters: coordinates to click on scren (tuple (x, y))\n Returns: None"}
{"query":"Randomly crop an image and corresponding keypoints without influence scales given by keypoint_random_resize_shortestedge .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = ( 368 , 368 ) ) : arg_4 = arg_3 [ 0 ] arg_5 = arg_3 [ 1 ] arg_6 = ( arg_5 , arg_4 ) if len ( np . shape ( arg_0 ) ) == 2 : arg_0 = cv2 . cvtColor ( arg_0 , cv2 . COLOR_GRAY2RGB ) arg_7 , arg_8 , arg_9 = np . shape ( arg_0 ) for arg_9 in range ( 50 ) : arg_10 = random . randrange ( 0 , arg_8 - arg_6 [ 0 ] ) if arg_8 > arg_6 [ 0 ] else 0 arg_11 = random . randrange ( 0 , arg_7 - arg_6 [ 1 ] ) if arg_7 > arg_6 [ 1 ] else 0 for arg_12 in arg_1 : if arg_10 <= arg_12 [ 0 ] [ 0 ] < arg_10 + arg_6 [ 0 ] and arg_11 <= arg_12 [ 0 ] [ 1 ] < arg_11 + arg_6 [ 1 ] : break def pose_crop ( arg_0 , arg_1 , arg_2 , arg_10 , arg_11 , arg_13 , arg_14 ) : arg_6 = ( arg_13 , arg_14 ) arg_15 = arg_0 arg_16 = arg_15 [ arg_11 : arg_11 + arg_6 [ 1 ] , arg_10 : arg_10 + arg_6 [ 0 ] , : ] arg_17 = arg_2 [ arg_11 : arg_11 + arg_6 [ 1 ] , arg_10 : arg_10 + arg_6 [ 0 ] ] arg_18 = [ ] for arg_12 in arg_1 : arg_19 = [ ] for arg_20 in arg_12 : if arg_20 [ 0 ] < - 10 or arg_20 [ 1 ] < - 10 : arg_19 . append ( ( - 1000 , - 1000 ) ) continue arg_21 , arg_22 = arg_20 [ 0 ] - arg_10 , arg_20 [ 1 ] - arg_11 if arg_21 > arg_13 - 1 or arg_22 > arg_14 - 1 : arg_19 . append ( ( - 1000 , - 1000 ) ) continue arg_19 . append ( ( arg_21 , arg_22 ) ) arg_18 . append ( arg_19 ) return arg_16 , arg_18 , arg_17 return pose_crop ( arg_0 , arg_1 , arg_2 , arg_10 , arg_11 , arg_6 [ 0 ] , arg_6 [ 1 ] )","id_":252279,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/prepro.py#L3604-L3666","negative":"Return local folder path of header files."}
{"query":"Examines row data from MySQL and alters the values when necessary to be compatible with sending to PostgreSQL via the copy command","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 , arg_4 in enumerate ( arg_1 . columns ) : arg_5 = hash ( frozenset ( arg_4 . items ( ) ) ) arg_6 = arg_0 . column_types [ arg_5 ] if arg_5 in arg_0 . column_types else arg_0 . column_type ( arg_4 ) if arg_2 [ arg_3 ] == None and ( 'timestamp' not in arg_6 or not arg_4 [ 'default' ] ) : arg_2 [ arg_3 ] = '\\N' elif arg_2 [ arg_3 ] == None and arg_4 [ 'default' ] : if arg_0 . tz : arg_2 [ arg_3 ] = '1970-01-01T00:00:00.000000' + arg_0 . tz_offset else : arg_2 [ arg_3 ] = '1970-01-01 00:00:00' elif 'bit' in arg_6 : arg_2 [ arg_3 ] = bin ( ord ( arg_2 [ arg_3 ] ) ) [ 2 : ] elif isinstance ( arg_2 [ arg_3 ] , ( str , unicode , basestring ) ) : if arg_6 == 'bytea' : arg_2 [ arg_3 ] = Binary ( arg_2 [ arg_3 ] ) . getquoted ( ) [ 1 : - 8 ] if arg_2 [ arg_3 ] else arg_2 [ arg_3 ] elif 'text[' in arg_6 : arg_2 [ arg_3 ] = '{%s}' % ',' . join ( '\"%s\"' % v . replace ( '\"' , r'\\\"' ) for v in arg_2 [ arg_3 ] . split ( ',' ) ) else : arg_2 [ arg_3 ] = arg_2 [ arg_3 ] . replace ( '\\\\' , r'\\\\' ) . replace ( '\\n' , r'\\n' ) . replace ( '\\t' , r'\\t' ) . replace ( '\\r' , r'\\r' ) . replace ( '\\0' , '' ) elif arg_6 == 'boolean' : arg_2 [ arg_3 ] = 't' if arg_2 [ arg_3 ] not in ( None , 0 ) else 'f' if arg_2 [ arg_3 ] == 0 else arg_2 [ arg_3 ] elif isinstance ( arg_2 [ arg_3 ] , ( date , datetime ) ) : if isinstance ( arg_2 [ arg_3 ] , datetime ) and arg_0 . tz : try : if arg_2 [ arg_3 ] . tzinfo : arg_2 [ arg_3 ] = arg_2 [ arg_3 ] . astimezone ( arg_0 . tz ) . isoformat ( ) else : arg_2 [ arg_3 ] = datetime ( * arg_2 [ arg_3 ] . timetuple ( ) [ : 6 ] , tzinfo = arg_0 . tz ) . isoformat ( ) except Exception as e : print e . message else : arg_2 [ arg_3 ] = arg_2 [ arg_3 ] . isoformat ( ) elif isinstance ( arg_2 [ arg_3 ] , timedelta ) : arg_2 [ arg_3 ] = datetime . utcfromtimestamp ( _get_total_seconds ( arg_2 [ arg_3 ] ) ) . time ( ) . isoformat ( ) else : arg_2 [ arg_3 ] = AsIs ( arg_2 [ arg_3 ] ) . getquoted ( )","id_":252280,"task_name":"https:\/\/github.com\/philipsoutham\/py-mysql2pgsql\/blob\/66dc2a3a3119263b3fe77300fb636346509787ef\/mysql2pgsql\/lib\/postgres_writer.py#L149-L191","negative":"Attempts to list all of the modules and submodules found within a given\n directory tree. This function recursively searches the directory tree\n for potential python modules and returns a list of candidate names.\n\n **Note:** This function returns a list of strings representing\n discovered module names, not the actual, loaded modules.\n\n :param directory: the directory to search for modules."}
{"query":"Recompute this distribution s dependencies .","positive":"def Func ( arg_0 ) : from _markerlib import compile as compile_marker arg_1 = arg_0 . __dep_map = { None : [ ] } arg_2 = [ ] for arg_3 in arg_0 . _parsed_pkg_info . get_all ( 'Requires-Dist' ) or [ ] : arg_4 , arg_5 = arg_0 . _preparse_requirement ( arg_3 ) arg_6 = parse_requirements ( arg_4 ) . next ( ) arg_6 . marker_fn = compile_marker ( arg_5 ) arg_2 . append ( arg_6 ) def reqs_for_extra ( arg_8 ) : for arg_3 in arg_2 : if arg_3 . marker_fn ( override = { 'extra' : arg_8 } ) : yield arg_3 arg_9 = frozenset ( reqs_for_extra ( None ) ) arg_1 [ None ] . extend ( arg_9 ) for arg_8 in arg_0 . _parsed_pkg_info . get_all ( 'Provides-Extra' ) or [ ] : arg_8 = safe_extra ( arg_8 . strip ( ) ) arg_1 [ arg_8 ] = list ( frozenset ( reqs_for_extra ( arg_8 ) ) - arg_9 ) return arg_1","id_":252281,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/distribute-0.6.31-py2.7.egg\/pkg_resources.py#L2505-L2530","negative":"Return the kvectors associated with this tile, given the standard form\n of -0.5 to 0.5. `norm` and `form` arguments arethe same as that passed to\n `Tile.coords`.\n\n Parameters\n -----------\n real : boolean\n whether to return kvectors associated with the real fft instead"}
{"query":"Create a palette that desaturate a color by some proportion","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if not 0 <= arg_1 <= 1 : raise ValueError ( \"prop must be between 0 and 1\" ) arg_3 = mcolors . colorConverter . to_rgb ( arg_0 ) arg_4 , arg_5 , arg_6 = colorsys . rgb_to_hls ( * arg_3 ) arg_6 *= arg_1 arg_7 = colorsys . hls_to_rgb ( arg_4 , arg_5 , arg_6 ) arg_8 = [ arg_0 , arg_7 ] if arg_2 : arg_8 = arg_8 [ : : - 1 ] return gradient_n_pal ( arg_8 , name = 'desaturated' )","id_":252282,"task_name":"https:\/\/github.com\/has2k1\/mizani\/blob\/312d0550ee0136fd1b0384829b33f3b2065f47c8\/mizani\/palettes.py#L605-L648","negative":"This will output the nginx HTTP config string for specific port spec"}
{"query":"Maintains the context of the runtime settings for invoking a command .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 . _runtime . has_option ( 'general' , arg_1 ) : arg_0 . _runtime = arg_0 . _new_parser ( ) if arg_2 is None : return settings . _runtime . set ( 'general' , arg_1 . replace ( 'tower_' , '' ) , six . text_type ( arg_2 ) )","id_":252283,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/conf.py#L263-L279","negative":"Constructs the Continuous MDR feature map from the provided training data.\n\n Parameters\n ----------\n features: array-like {n_samples, n_features}\n Feature matrix\n targets: array-like {n_samples}\n List of target values for prediction\n\n Returns\n -------\n self: A copy of the fitted model"}
{"query":"Generate the form for view .","positive":"def Func ( arg_0 ) : arg_0 . set_fields ( ) if arg_0 . post_data_dict is not None : arg_0 . set_post_data ( ) return arg_0 . form","id_":252284,"task_name":"https:\/\/github.com\/jazzband\/django-mongonaut\/blob\/5485b2e029dff8ae267a4cb39c92d0a72cb5b144\/mongonaut\/forms\/forms.py#L67-L74","negative":"Sets the player's paused state."}
{"query":"Retrive an artist with a spotify ID .","positive":"async def Func ( arg_0 , arg_1 : arg_2 ) -> Artist : arg_3 = await arg_0 . http . artist ( to_id ( arg_1 ) ) return Artist ( arg_0 , arg_3 )","id_":252285,"task_name":"https:\/\/github.com\/mental32\/spotify.py\/blob\/bb296cac7c3dd289908906b7069bd80f43950515\/spotify\/client.py#L126-L140","negative":"Use ``\\\\r`` to overdraw the current line with the given text.\n\n This function transparently handles tracking how much overdrawing is\n necessary to erase the previous line when used consistently.\n\n :param text: The text to be outputted\n :param newline: Whether to start a new line and reset the length count.\n :type text: :class:`~__builtins__.str`\n :type newline: :class:`~__builtins__.bool`"}
{"query":"Locks all non - empty derived parameters","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . _derived_parameters . values ( ) : if not arg_1 . f_is_empty ( ) : arg_1 . f_lock ( )","id_":252286,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/trajectory.py#L1469-L1473","negative":"Utility method to visualize decision boundaries in R^2.\n\n Args:\n features: Input points, as a Numpy `array` of shape `[num_examples, 2]`.\n labels: Numpy `float`-like array of shape `[num_examples, 1]` giving a\n label for each point.\n true_w_b: A `tuple` `(w, b)` where `w` is a Numpy array of\n shape `[2]` and `b` is a scalar `float`, interpreted as a\n decision rule of the form `dot(features, w) + b > 0`.\n candidate_w_bs: Python `iterable` containing tuples of the same form as\n true_w_b.\n fname: The filename to save the plot as a PNG image (Python `str`)."}
{"query":"List the web sites defined on this webspace .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return arg_0 . _perform_get ( arg_0 . _Funcs_details_path ( arg_1 , arg_2 ) , Site )","id_":252287,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/websitemanagementservice.py#L103-L114","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."}
{"query":"_doCascadeFetch - Takes an object and performs a cascading fetch on all foreign links and all theirs and so on .","positive":"def Func ( arg_0 ) : arg_0 . validateModel ( ) if not arg_0 . foreignFields : return for arg_1 in arg_0 . foreignFields : arg_2 = object . __getattribute__ ( arg_0 , arg_1 ) if not arg_2 : setattr ( arg_0 , str ( arg_1 ) , irNull ) continue arg_3 = arg_2 . getObjs ( ) for arg_4 in arg_3 : if isIndexedRedisModel ( arg_4 ) : IndexedRedisQuery . Func ( arg_4 )","id_":252288,"task_name":"https:\/\/github.com\/kata198\/indexedredis\/blob\/f9c85adcf5218dac25acb06eedc63fc2950816fa\/IndexedRedis\/__init__.py#L1738-L1761","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."}
{"query":"Upload a single file on the platform .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = os . path . getsize ( arg_1 ) arg_3 = 0 arg_4 = os . path . basename ( arg_1 ) arg_5 = str ( uuid . uuid4 ( ) ) with open ( arg_1 , 'rb' ) as f : while True : arg_6 = None arg_7 = f . read ( CHUNK_SIZE ) if not arg_7 : break for arg_8 in range ( 5 ) : content_range = 'bytes {}-{}\/{}' . format ( arg_3 * CHUNK_SIZE , arg_3 * CHUNK_SIZE + len ( arg_7 ) - 1 , arg_2 ) if arg_8 > 0 and arg_6 is not None : print ( \"Chunk upload failed (error {}): repeating {}\" . format ( arg_6 . status_code , content_range ) ) arg_6 = requests . post ( urlparse . urljoin ( arg_0 . url , 'upload\/' ) , auth = arg_0 . auth , data = arg_7 , headers = { 'Content-Disposition' : 'attachment; filename=\"{}\"' . format ( arg_4 ) , 'Content-Length' : arg_2 , 'Content-Range' : content_range , 'Content-Type' : 'application\/octet-stream' , 'Session-Id' : arg_5 } ) if arg_6 . status_code in [ 200 , 201 ] : break else : return None arg_9 = 100. * ( arg_3 * CHUNK_SIZE + len ( arg_7 ) ) \/ arg_2 sys . stdout . write ( \"\\r{:.0f} % Uploading {}\" . format ( arg_9 , arg_1 ) ) sys . stdout . flush ( ) arg_3 += 1 print ( ) return arg_5","id_":252289,"task_name":"https:\/\/github.com\/genialis\/genesis-pyapi\/blob\/dfe9bcc8b332a8b9873db4ab9994b0cc10eb209a\/genesis\/genesis.py#L296-L345","negative":"Generate a solution representation of the current solver state.\n\n Parameters\n ---------\n model : cobra.Model\n The model whose reactions to retrieve values for.\n reactions : list, optional\n An iterable of `cobra.Reaction` objects. Uses `model.reactions` by\n default.\n metabolites : list, optional\n An iterable of `cobra.Metabolite` objects. Uses `model.metabolites` by\n default.\n raise_error : bool\n If true, raise an OptimizationError if solver status is not optimal.\n\n Returns\n -------\n cobra.Solution\n\n Note\n ----\n This is only intended for the `optlang` solver interfaces and not the\n legacy solvers."}
{"query":"LMLs fixed - effect sizes and scales for single - marker scan .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : from tqdm import tqdm if arg_1 . ndim != 2 : raise ValueError ( \"`M` array must be bidimensional.\" ) arg_3 = arg_1 . shape [ 1 ] arg_4 = empty ( arg_3 ) arg_5 = empty ( ( arg_3 , arg_0 . _XTQ [ 0 ] . shape [ 0 ] ) ) arg_6 = empty ( ( arg_3 , arg_0 . _XTQ [ 0 ] . shape [ 0 ] ) ) arg_7 = empty ( arg_3 ) arg_8 = empty ( arg_3 ) arg_9 = empty ( arg_3 ) if arg_2 : arg_10 = min ( arg_3 , 30 ) else : arg_10 = min ( arg_3 , 1 ) arg_11 = ( arg_3 + arg_10 - 1 ) \/\/ arg_10 for arg_12 in tqdm ( range ( arg_10 ) , desc = \"Scanning\" , disable = not arg_2 ) : arg_13 = arg_12 * arg_11 arg_14 = min ( arg_13 + arg_11 , arg_1 . shape [ 1 ] ) arg_15 = arg_0 . _Func_chunk ( arg_1 [ : , arg_13 : arg_14 ] ) arg_4 [ arg_13 : arg_14 ] = arg_15 [ \"lml\" ] arg_5 [ arg_13 : arg_14 , : ] = arg_15 [ \"effsizes0\" ] arg_6 [ arg_13 : arg_14 , : ] = arg_15 [ \"effsizes0_se\" ] arg_7 [ arg_13 : arg_14 ] = arg_15 [ \"effsizes1\" ] arg_8 [ arg_13 : arg_14 ] = arg_15 [ \"effsizes1_se\" ] arg_9 [ arg_13 : arg_14 ] = arg_15 [ \"scale\" ] return { \"lml\" : arg_4 , \"effsizes0\" : arg_5 , \"effsizes0_se\" : arg_6 , \"effsizes1\" : arg_7 , \"effsizes1_se\" : arg_8 , \"scale\" : arg_9 , }","id_":252290,"task_name":"https:\/\/github.com\/limix\/glimix-core\/blob\/cddd0994591d100499cc41c1f480ddd575e7a980\/glimix_core\/lmm\/_lmm_scan.py#L202-L265","negative":"Removes the specfied course from the specified organization"}
{"query":"Finds transactions matching the specified criteria fetches the corresponding trytes and converts them into Transaction objects .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = FindTransactionsCommand ( arg_0 ) ( ** arg_1 ) arg_3 = arg_2 [ 'hashes' ] if arg_3 : arg_4 = GetTrytesCommand ( arg_0 ) ( arg_3 = arg_3 ) return list ( map ( Transaction . from_tryte_string , arg_4 . get ( 'trytes' ) or [ ] , ) ) return [ ]","id_":252291,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/commands\/extended\/utils.py#L19-L37","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"}
{"query":"Read the images load them into self . items and set the labels .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if not isinstance ( arg_1 , ( list , tuple ) ) : raise ValueError ( 'Expected an iterable (list or tuple) of strings or img-like objects. ' 'Got a {}.' . format ( type ( arg_1 ) ) ) if not len ( arg_1 ) > 0 : raise ValueError ( 'Expected an iterable (list or tuple) of strings or img-like objects ' 'of size higher than 0. Got {} items.' . format ( len ( arg_1 ) ) ) if arg_2 is not None and len ( arg_2 ) != len ( arg_1 ) : raise ValueError ( 'Expected the same length for image set ({}) and ' 'labels list ({}).' . format ( len ( arg_1 ) , len ( arg_2 ) ) ) arg_3 = arg_1 [ 0 ] if arg_3 : arg_4 = NeuroImage ( arg_3 ) else : raise ( 'Error reading image {}.' . format ( repr_imgs ( arg_3 ) ) ) for arg_5 , arg_6 in enumerate ( arg_1 ) : try : arg_7 = NeuroImage ( arg_6 ) arg_0 . check_compatibility ( arg_7 , arg_4 ) except : log . exception ( 'Error reading image {}.' . format ( repr_imgs ( arg_6 ) ) ) raise else : arg_0 . items . append ( arg_7 ) arg_0 . set_labels ( arg_2 )","id_":252292,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/nifti\/sets.py#L167-L197","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."}
{"query":"Returns a list of categories the page belongs to .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = re . findall ( arg_0 . re [ \"category\" ] , arg_1 ) for arg_4 in arg_3 : arg_4 = arg_4 . split ( \"|\" ) arg_5 = arg_4 [ 0 ] . strip ( ) arg_6 = u\"\" if len ( arg_4 ) > 1 : arg_6 = arg_4 [ 1 ] . strip ( ) if not arg_5 in arg_2 : arg_2 . append ( arg_5 ) return arg_2","id_":252293,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/web\/wikipedia.py#L1246-L1270","negative":"Return new rrule with same attributes except for those attributes given new\n values by whichever keyword arguments are specified."}
{"query":"Display time and frequency domain statistical information about the audio . Audio is passed unmodified through the SoX processing chain .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = False ) : arg_4 = [ 'channels' , '1' , 'Func' ] if arg_2 is not None : if not is_number ( arg_2 ) or arg_2 <= 0 : raise ValueError ( \"scale must be a positive number.\" ) arg_4 . extend ( [ '-s' , '{:f}' . format ( arg_2 ) ] ) if arg_3 : arg_4 . append ( '-rms' ) arg_5 , arg_5 , arg_6 = arg_0 . build ( arg_1 , None , extra_args = arg_4 , return_output = True ) arg_7 = { } arg_8 = arg_6 . split ( '\\n' ) for arg_9 in arg_8 : arg_10 = arg_9 . split ( ) if len ( arg_10 ) == 0 : continue arg_11 = arg_10 [ - 1 ] arg_12 = ' ' . join ( arg_10 [ : - 1 ] ) arg_7 [ arg_12 . strip ( ':' ) ] = arg_11 return arg_7","id_":252294,"task_name":"https:\/\/github.com\/rabitt\/pysox\/blob\/eae89bde74567136ec3f723c3e6b369916d9b837\/sox\/transform.py#L2619-L2670","negative":"Add this manager's namespace to the graph."}
{"query":"Wraps the object in a list and then defers to amp . AmpList .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return amp . AmpList . Func ( arg_0 , [ arg_1 ] , arg_2 )","id_":252295,"task_name":"https:\/\/github.com\/lvh\/txampext\/blob\/a7d6cb9f1e9200dba597378cd40eb6a2096d4fd9\/txampext\/nested.py#L21-L25","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."}
{"query":"Set the presence status .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = hangouts_pb2 . SetPresenceResponse ( ) await arg_0 . _pb_request ( 'presence\/setpresence' , arg_1 , arg_2 ) return arg_2","id_":252296,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/client.py#L646-L651","negative":"Apply all filters to issues and pull requests.\n\n :param dict older_tag: All issues before this tag's date will be\n excluded. May be special value, if new tag is\n the first tag. (Means **older_tag** is when\n the repo was created.)\n :param dict newer_tag: All issues after this tag's date will be\n excluded. May be title of unreleased section.\n :rtype: list(dict), list(dict)\n :return: Filtered issues and pull requests."}
{"query":"Synthesizes text input","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = None ) : arg_6 = arg_0 . get_conn ( ) arg_0 . log . info ( \"Synthesizing input: %s\" % arg_1 ) return arg_6 . Func ( input_ = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 )","id_":252297,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_text_to_speech_hook.py#L53-L80","negative":"Returns a dictionary with all the past baking statuses of a single book."}
{"query":"Export entities from Cloud Datastore to Cloud Storage for backup .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None ) : arg_5 = arg_0 . get_conn ( ) arg_6 = 'gs:\/\/' + '\/' . join ( filter ( None , [ arg_1 , arg_2 ] ) ) if not arg_3 : arg_3 = { } if not arg_4 : arg_4 = { } arg_7 = { 'outputUrlPrefix' : arg_6 , 'entityFilter' : arg_3 , 'labels' : arg_4 , } arg_8 = ( arg_5 . projects ( ) . export ( projectId = arg_0 . project_id , arg_7 = arg_7 ) . execute ( num_retries = arg_0 . num_retries ) ) return arg_8","id_":252298,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/datastore_hook.py#L257-L295","negative":"Extracts the update time from a ReMo item.\n\n The timestamp is extracted from 'end' field.\n This date is converted to a perceval format using a float value.\n\n :param item: item generated by the backend\n\n :returns: a UNIX timestamp"}
{"query":"Validates ISO reference number","positive":"def Func ( arg_0 ) : arg_0 = str ( arg_0 ) arg_1 = arg_0 [ 4 : ] + arg_0 [ : 4 ] return ( iso_reference_str2int ( arg_1 ) % 97 ) == 1","id_":252299,"task_name":"https:\/\/github.com\/rambo\/python-holviapi\/blob\/f57f44e7b0a1030786aafd6f387114abb546bb32\/holviapi\/utils.py#L209-L213","negative":"Returns a list of two actions per gcs bucket to mount."}
{"query":"Fetches experiences for given indices .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = dict ( ) for arg_3 in sorted ( arg_0 . states_memory ) : arg_2 [ arg_3 ] = tf . gather ( params = arg_0 . states_memory [ arg_3 ] , arg_1 = arg_1 ) arg_4 = dict ( ) for arg_3 in sorted ( arg_0 . internals_memory ) : arg_4 [ arg_3 ] = tf . gather ( params = arg_0 . internals_memory [ arg_3 ] , arg_1 = arg_1 ) arg_5 = dict ( ) for arg_3 in sorted ( arg_0 . actions_memory ) : arg_5 [ arg_3 ] = tf . gather ( params = arg_0 . actions_memory [ arg_3 ] , arg_1 = arg_1 ) arg_6 = tf . gather ( params = arg_0 . terminal_memory , arg_1 = arg_1 ) arg_7 = tf . gather ( params = arg_0 . reward_memory , arg_1 = arg_1 ) if arg_0 . include_next_states : assert util . rank ( arg_1 ) == 1 arg_8 = ( arg_1 + 1 ) % arg_0 . capacity arg_9 = dict ( ) for arg_3 in sorted ( arg_0 . states_memory ) : arg_9 [ arg_3 ] = tf . gather ( params = arg_0 . states_memory [ arg_3 ] , arg_1 = arg_8 ) arg_10 = dict ( ) for arg_3 in sorted ( arg_0 . internals_memory ) : arg_10 [ arg_3 ] = tf . gather ( params = arg_0 . internals_memory [ arg_3 ] , arg_1 = arg_8 ) return dict ( arg_2 = arg_2 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = arg_7 , arg_9 = arg_9 , arg_10 = arg_10 ) else : return dict ( arg_2 = arg_2 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = arg_7 )","id_":252300,"task_name":"https:\/\/github.com\/tensorforce\/tensorforce\/blob\/520a8d992230e382f08e315ede5fc477f5e26bfb\/tensorforce\/core\/memories\/queue.py#L219-L271","negative":"Inherits the data from the parent."}
{"query":"Generate a array job .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = arg_2 . setdefault ( 'dirname' , os . path . curdir ) arg_4 = [ relpath ( p , start = arg_3 ) for p in asiterable ( arg_1 ) ] arg_5 = [ p for p in ( os . path . join ( arg_3 , subdir ) for subdir in arg_4 ) if not os . path . exists ( p ) ] if len ( arg_5 ) > 0 : logger . debug ( \"template=%(template)r: dirname=%(dirname)r reldirs=%(reldirs)r\" , vars ( ) ) logger . error ( \"Some directories are not accessible from the array script: \" \"%(missing)r\" , vars ( ) ) def write_script ( arg_6 ) : arg_7 = detect_queuing_system ( arg_6 ) if arg_7 is None or not arg_7 . has_arrays ( ) : logger . warning ( \"Not known how to make a job array for %(template)r; skipping...\" , vars ( ) ) return None arg_2 [ 'jobarray_string' ] = arg_7 . array ( arg_4 ) return generate_submit_scripts ( arg_6 , ** arg_2 ) [ 0 ] return [ write_script ( arg_6 ) for arg_6 in config . get_templates ( arg_0 ) ]","id_":252301,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/qsub.py#L405-L449","negative":"Raises OrderError if no package or file defined.\n Raises CardinalityError if already set.\n Raises SPDXValueError if malformed."}
{"query":"writes arrays to h5 disk","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : LOGGER . info ( \"writing fullarr %s %s\" , arg_1 . name , arg_2 ) with h5py . File ( arg_0 . clust_database , 'r+' ) as io5 : arg_3 = io5 [ \"catgs\" ] . attrs [ \"chunksize\" ] [ 0 ] arg_4 = io5 [ \"catgs\" ] arg_5 = io5 [ \"nalleles\" ] arg_6 = os . path . join ( arg_0 . dirs . across , arg_1 . name + '.tmp.h5' ) with h5py . File ( arg_6 ) as indat : arg_7 = indat [ \"icatg\" ] arg_8 = indat [ \"inall\" ] for arg_9 in xrange ( 0 , arg_4 . shape [ 0 ] , arg_3 ) : arg_10 = arg_9 + arg_3 arg_4 [ arg_9 : arg_10 , arg_2 : arg_2 + 1 , : ] = np . expand_dims ( arg_7 [ arg_9 : arg_10 , : ] , axis = 1 ) arg_5 [ : , arg_2 : arg_2 + 1 ] = np . expand_dims ( arg_8 , axis = 1 )","id_":252302,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/cluster_across.py#L1150-L1176","negative":"Returns true if the given parameter, with name key, has transitioned to the given value."}
{"query":"Rename layer label","positive":"def Func ( arg_0 , arg_1 = 'blank' , arg_2 = None ) : arg_3 = '' . join ( [ ' \\n' , ' \\n' , ' <\/filter>\\n' ] ) if isinstance ( arg_0 , mlx . FilterScript ) : if ( arg_2 is None ) or ( arg_2 == arg_0 . current_layer ( ) ) : util . write_filter ( arg_0 , arg_3 ) arg_0 . layer_stack [ arg_0 . current_layer ( ) ] = arg_1 else : arg_6 = arg_0 . current_layer ( ) change ( arg_0 , arg_2 ) util . write_filter ( arg_0 , arg_3 ) change ( arg_0 , arg_6 ) arg_0 . layer_stack [ arg_2 ] = arg_1 else : util . write_filter ( arg_0 , arg_3 ) return None","id_":252303,"task_name":"https:\/\/github.com\/3DLIRIOUS\/MeshLabXML\/blob\/177cce21e92baca500f56a932d66bd9a33257af8\/meshlabxml\/layers.py#L115-L155","negative":"Adds all parameters to `traj`"}
{"query":"Helper method to save images visualizing model reconstructions .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = 10 ) : save_imgs ( arg_0 [ : arg_5 ] , os . path . join ( arg_3 , \"{}_inputs.png\" . format ( arg_4 ) ) ) save_imgs ( arg_1 [ : arg_5 ] , os . path . join ( arg_3 , \"{}_reconstructions.png\" . format ( arg_4 ) ) ) if arg_2 is not None : save_imgs ( arg_2 [ : arg_5 ] , os . path . join ( arg_3 , \"{}_prior_samples.png\" . format ( arg_4 ) ) )","id_":252304,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/examples\/vq_vae.py#L324-L350","negative":"Read attribute from sysfs and return as string"}
{"query":"return optim clusters given iterators and whether it got all or not","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 0 arg_3 = [ ] while arg_2 < arg_1 : try : arg_4 = itertools . takewhile ( lambda x : x [ 0 ] != \"\/\/\\n\" , arg_0 ) arg_5 = [ \"\" . join ( arg_4 . next ( ) ) ] except StopIteration : return 1 , arg_3 while 1 : try : arg_5 . append ( \"\" . join ( arg_4 . next ( ) ) ) except StopIteration : break arg_3 . append ( \"\" . join ( arg_5 ) ) arg_2 += 1 return 0 , arg_3","id_":252305,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/util.py#L865-L886","negative":"This will output the nginx HTTP config string for specific port spec"}
{"query":"Error checking for Error calls","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 != 0 : arg_3 = rt . Error_GetLastErrorMsg ( ) . decode ( ) arg_4 = 'LASError in \"%s\": %s' % ( arg_1 . __name__ , arg_3 ) rt . Error_Reset ( ) raise RTreeError ( arg_4 ) return True","id_":252306,"task_name":"https:\/\/github.com\/Toblerity\/rtree\/blob\/5d33357c8e88f1a8344415dc15a7d2440211b281\/rtree\/core.py#L11-L19","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."}
{"query":"Handle the case where the transmission fails .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_3 = arg_2 . response . content except AttributeError : arg_3 = 'Not available' LOGGER . error ( ( 'Failed to send completion status call for enterprise enrollment %s' 'with payload %s' '\\nError message: %s' '\\nSystem message: %s' ) , arg_1 . enterprise_course_enrollment_id , arg_1 , str ( arg_2 ) , arg_3 )","id_":252307,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/integrated_channel\/transmitters\/learner_data.py#L97-L114","negative":"Returns any parameters needed for Akamai HD player verification.\n\n Algorithm originally documented by KSV, source:\n http:\/\/stream-recorder.com\/forum\/showpost.php?p=43761&postcount=13"}
{"query":"Update synopsis .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : if arg_2 [ 'synopsis' ] != arg_4 : arg_5 = url_for ( 'cases.case' , institute_id = arg_1 [ '_id' ] , case_name = arg_2 [ 'display_name' ] ) arg_0 . Func ( arg_1 , arg_2 , arg_3 , arg_5 , content = arg_4 )","id_":252308,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/controllers.py#L375-L382","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."}
{"query":"Clears import errors for files that no longer exist .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . query ( errors . ImportError ) if arg_0 . _file_paths : arg_2 = arg_2 . filter ( ~ errors . ImportError . filename . in_ ( arg_0 . _file_paths ) ) arg_2 . delete ( synchronize_session = 'fetch' ) arg_1 . commit ( )","id_":252309,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/utils\/dag_processing.py#L931-L944","negative":"setting baudrate if supported"}
{"query":"Removes the specfied course from the specified organization","positive":"def Func ( arg_0 , arg_1 ) : _validate_organization_data ( arg_0 ) _validate_course_key ( arg_1 ) return data . delete_organization_course ( arg_1 = arg_1 , arg_0 = arg_0 )","id_":252310,"task_name":"https:\/\/github.com\/edx\/edx-organizations\/blob\/51000d5d359d880a6eb3a79345f60744f1982c00\/organizations\/api.py#L107-L113","negative":"getPrimaryKeys - Returns all primary keys matching current filterset.\n\n\t\t\t@param sortByAge - If False, return will be a set and may not be ordered.\n\t\t\t\tIf True, return will be a list and is guarenteed to represent objects oldest->newest\n\n\t\t\t@return - A set of all primary keys associated with current filters."}
{"query":"Compute the yticks labels of this grid used for plotting the y-axis ticks when visualizing a regular","positive":"def Func ( arg_0 ) : return np . linspace ( np . min ( arg_0 [ : , 0 ] ) , np . max ( arg_0 [ : , 0 ] ) , 4 )","id_":252311,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/data\/array\/grids.py#L511-L513","negative":"Verify a certificate in a context.\n\n .. versionadded:: 0.15\n\n :raises X509StoreContextError: If an error occurred when validating a\n certificate in the context. Sets ``certificate`` attribute to\n indicate which certificate caused the error."}
{"query":"Returns True if index is in range","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , slice ) : arg_2 = arg_1 . start < arg_1 . stop and arg_1 . start >= arg_0 . start and arg_1 . stop <= arg_0 . end else : arg_2 = arg_1 >= arg_0 . start and arg_1 <= arg_0 . end return arg_2","id_":252312,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/memory.py#L188-L197","negative":"Gets the base class for the custom database back-end.\n\n This should be the Django PostgreSQL back-end. However,\n some people are already using a custom back-end from\n another package. We are nice people and expose an option\n that allows them to configure the back-end we base upon.\n\n As long as the specified base eventually also has\n the PostgreSQL back-end as a base, then everything should\n work as intended."}
{"query":"Scan for FCs with a given prefix .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = arg_0 . _Func ( arg_1 , arg_2 = arg_2 ) for arg_4 in arg_3 : yield did ( arg_4 [ '_id' ] ) , arg_0 . fc_from_dict ( arg_4 [ '_source' ] [ 'fc' ] )","id_":252313,"task_name":"https:\/\/github.com\/dossier\/dossier.store\/blob\/b22ffe2470bba9fcc98a30cb55b437bfa1521e7f\/dossier\/store\/elastic.py#L310-L321","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."}
{"query":"Move title info from 245 to 111 proceeding style .","positive":"def Func ( arg_0 ) : arg_1 = record_get_field_instances ( arg_0 . record , tag = \"245\" ) for arg_2 in arg_1 : arg_3 = field_get_subfields ( arg_2 ) arg_4 = [ ] if \"a\" in arg_3 : arg_4 . append ( ( \"a\" , arg_3 [ 'a' ] [ 0 ] ) ) if \"b\" in arg_3 : arg_4 . append ( ( \"c\" , arg_3 [ 'b' ] [ 0 ] ) ) record_add_field ( arg_0 . record , tag = \"111\" , subfields = arg_4 ) record_delete_fields ( arg_0 . record , tag = \"245\" ) record_delete_fields ( arg_0 . record , tag = \"246\" )","id_":252314,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/inspire_cds_package\/from_inspire.py#L338-L353","negative":"Interprets the HTTP response from the node.\n\n :param response:\n The response object received from\n :py:meth:`_send_http_request`.\n\n :param payload:\n The request payload that was sent (used for debugging).\n\n :param expected_status:\n The response should match one of these status codes to be\n considered valid."}
{"query":"Attempt to detect an intraday strategy . Get the number of positions held at the end of the day and divide that by the number of unique stocks transacted every day . If the average quotient is below a threshold then an intraday strategy is detected .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0.25 ) : arg_3 = arg_1 . copy ( ) arg_3 . index = arg_3 . index . date arg_5 = arg_3 . groupby ( level = 0 ) . symbol . nunique ( ) . sum ( ) arg_6 = arg_0 . drop ( 'cash' , axis = 1 ) . replace ( 0 , np . nan ) return arg_6 . count ( axis = 1 ) . sum ( ) \/ arg_5 < arg_2","id_":252315,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/utils.py#L240-L266","negative":"Start scheduling jobs."}
{"query":"Force a failed gossip member into the left state .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { \"node\" : arg_1 } return arg_0 . request ( \"force-leave\" , arg_2 = arg_2 , method = \"post\" ) . status_code","id_":252316,"task_name":"https:\/\/github.com\/jrxFive\/python-nomad\/blob\/37df37e4de21e6f8ac41c6154e7f1f44f1800020\/nomad\/api\/agent.py#L87-L98","negative":"Shift the model result and return the new instance.\n\n Queues up the T(i+1) prediction value and emits a T(i)\n input\/prediction pair, if possible. E.g., if the previous T(i-1)\n iteration was learn-only, then we would not have a T(i) prediction in our\n FIFO and would not be able to emit a meaningful input\/prediction pair.\n\n :param modelResult: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult`\n instance to shift.\n :return: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult` instance that\n has been shifted"}
{"query":"Let user interactively select block .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False ) : print ( \"Blocks found in assembly file:\" ) print ( \" block | OPs | pck. | AVX || Registers | ZMM | YMM | XMM | GP ||ptr.inc|\\n\" \"----------------+-----+------+-----++-----------+----------+----------+----------+---------++-------|\" ) for arg_3 , arg_4 in arg_0 : print ( '{:>2} {b[labels]!r:>12} | {b[ops]:>3} | {b[packed_instr]:>4} | {b[avx_instr]:>3} |' '| {b[regs][0]:>3} ({b[regs][1]:>3}) | {b[ZMM][0]:>3} ({b[ZMM][1]:>2}) | ' '{b[YMM][0]:>3} ({b[YMM][1]:>2}) | ' '{b[XMM][0]:>3} ({b[XMM][1]:>2}) | {b[GP][0]:>2} ({b[GP][1]:>2}) || ' '{b[pointer_increment]!s:>5} |' . format ( arg_3 , arg_4 = arg_4 ) ) if arg_2 : arg_5 = arg_4 [ 'first_line' ] print ( ' ' * 4 + 'Code:' ) for arg_6 in arg_4 [ 'lines' ] : print ( ' ' * 8 + '{:>5} | {}' . format ( arg_5 , arg_6 ) ) arg_5 += 1 print ( ' ' * 4 + 'Metadata:' ) print ( textwrap . indent ( pformat ( { arg_7 : arg_8 for arg_7 , arg_8 in arg_4 . items ( ) if arg_7 not in [ 'lines' ] } ) , ' ' * 8 ) ) arg_9 = - 1 while not ( 0 <= arg_9 < len ( arg_0 ) ) : arg_9 = input ( \"Choose block to be marked [\" + str ( arg_1 ) + \"]: \" ) or arg_1 try : arg_9 = int ( arg_9 ) except ValueError : arg_9 = - 1 return arg_9","id_":252317,"task_name":"https:\/\/github.com\/RRZE-HPC\/kerncraft\/blob\/c60baf8043e4da8d8d66da7575021c2f4c6c78af\/kerncraft\/iaca.py#L251-L284","negative":"Overriding the default JSONEncoder.default for NDB support."}
{"query":"Get filenames for donor and acceptor timestamps for the given parameters","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = '1+2+3+4+5+6' , arg_5 = '480' , arg_6 = '30' , arg_7 = '64' , arg_8 = 0.5e-6 , arg_9 = 1.2e-11 , arg_10 = '' ) : arg_11 = arg_8 \/ 32. arg_12 = 1. * arg_2 \/ ( arg_2 + arg_0 ) arg_13 = 100. * arg_12 print ( \"Simulated FRET value: %.1f%%\" % arg_13 ) arg_14 = \"%04d\" % arg_0 arg_15 = \"%04d\" % arg_2 arg_16 = \"%04.1f\" % arg_1 arg_17 = \"%04.1f\" % arg_3 print ( \"D: EM %s BG %s \" % ( arg_14 , arg_16 ) ) print ( \"A: EM %s BG %s \" % ( arg_15 , arg_17 ) ) arg_18 = ( 'ph_times_{t_tot}s_D{D}_{np}P_{pM}pM_' 'step{ts_us}us_ID{ID}_EM{em}kHz_BG{bg}kHz.npy' ) . format ( em = arg_14 , bg = arg_16 , arg_5 = arg_5 , arg_7 = arg_7 , np = arg_6 , arg_4 = arg_4 , ts_us = arg_8 * 1e6 , arg_9 = arg_9 ) arg_19 = ( 'ph_times_{t_tot}s_D{D}_{np}P_{pM}pM_' 'step{ts_us}us_ID{ID}_EM{em}kHz_BG{bg}kHz.npy' ) . format ( em = arg_15 , bg = arg_17 , arg_5 = arg_5 , arg_7 = arg_7 , np = arg_6 , arg_4 = arg_4 , ts_us = arg_8 * 1e6 , arg_9 = arg_9 ) print ( arg_18 ) print ( arg_19 ) arg_20 = ( 'BroSim_E{:.1f}_dBG{:.1f}k_aBG{:.1f}k_' 'dEM{:.0f}k' ) . format ( arg_13 , arg_1 , arg_3 , arg_0 ) return arg_10 + arg_18 , arg_10 + arg_19 , arg_20 , arg_11 , arg_12","id_":252318,"task_name":"https:\/\/github.com\/tritemio\/PyBroMo\/blob\/b75f82a4551ff37e7c7a7e6954c536451f3e6d06\/pybromo\/loadutils.py#L34-L69","negative":"Fix environment variable to a value within context. Unset if value is None."}
{"query":"Customized version of imap_unordered .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : assert arg_0 . _state == RUN arg_4 = Pool . _get_tasks ( arg_1 , arg_2 , arg_3 ) arg_5 = IMapUnorderedIterator ( arg_0 . _cache ) arg_6 = ( ( arg_5 . _job , i , arg_1 , chunk , { } ) for i , ( _ , chunk ) in enumerate ( arg_4 ) ) arg_0 . _taskqueue . put ( ( arg_6 , arg_5 . _set_length ) ) return arg_5","id_":252319,"task_name":"https:\/\/github.com\/addok\/addok\/blob\/46a270d76ec778d2b445c2be753e5c6ba070a9b2\/addok\/helpers\/__init__.py#L144-L164","negative":"setting baudrate if supported"}
{"query":"Check if we can write to the given file","positive":"def Func ( arg_0 ) : try : open ( arg_0 , 'a' ) except IOError : print ( \"Can't open file {}. \" \"Please grant write permissions or change the path in your config\" . format ( arg_0 ) ) sys . exit ( 1 )","id_":252320,"task_name":"https:\/\/github.com\/trivago\/Protector\/blob\/7ebe7bde965e27737b961a0cb5740724d174fdc7\/protector\/__main__.py#L33-L50","negative":"Request the api endpoint to retrieve information about the inventory\n\n :return: Main Collection\n :rtype: Collection"}
{"query":"lib2to3 s AST requires unique objects as children .","positive":"def Func ( arg_0 , arg_1 = None ) : if isinstance ( arg_0 , Leaf ) : return Leaf ( arg_0 . type , arg_0 . value , arg_1 = arg_0 .
prefix if arg_1 is None else arg_1 ) arg_0 . parent = None if arg_1 is not None : arg_0 . prefix = arg_1 return arg_0","id_":252321,"task_name":"https:\/\/github.com\/ambv\/retype\/blob\/03137abd4d9c9845f3cced1006190b5cca64d879\/retype.py#L1467-L1478","negative":"Stop the timer\n\n Returns:\n The time the timer was stopped"} {"query":"Summarises the inventory status of the given items grouping by invoice status .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . cleaned_data [ \"product\" ] arg_3 = arg_1 . cleaned_data [ \"category\" ] arg_4 = commerce . ProductItem . objects . filter ( Q ( product__in = arg_2 ) | Q ( product__category__in = arg_3 ) , ) . select_related ( \"cart\" , \"product\" ) arg_4 = group_by_cart_status ( arg_4 , [ \"product__category__order\" , \"product__order\" ] , [ \"product\" , \"product__category__name\" , \"product__name\" ] , ) arg_5 = [ \"Product\" , \"Paid\" , \"Reserved\" , \"Unreserved\" , \"Refunded\" , ] arg_6 = [ ] for arg_7 in arg_4 : arg_6 . append ( [ \"%s - %s\" % ( arg_7 [ \"product__category__name\" ] , arg_7 [ \"product__name\" ] ) , arg_7 [ \"total_paid\" ] , arg_7 [ \"total_reserved\" ] , arg_7 [ \"total_unreserved\" ] , arg_7 [ \"total_refunded\" ] , ] ) return ListReport ( \"Inventory\" , arg_5 , arg_6 )","id_":252322,"task_name":"https:\/\/github.com\/chrisjrn\/registrasion\/blob\/461d5846c6f9f3b7099322a94f5d9911564448e4\/registrasion\/reporting\/views.py#L244-L277","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Attaches a bundle object","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , BlueprintBundle ) : raise IncompatibleBundle ( 'BlueprintBundle object passed to Func must be of type {0}' . format ( BlueprintBundle ) ) elif len ( arg_1 . blueprints ) == 0 : raise MissingBlueprints ( \"Bundles must contain at least one flask.Blueprint\" ) elif arg_0 . _bundle_exists ( arg_1 . path ) : raise ConflictingPath ( \"Duplicate bundle path {0}\" . format ( arg_1 . path ) ) elif arg_0 . _journey_path == arg_1 . path == '\/' : raise ConflictingPath ( \"Bundle path and Journey path cannot both be {0}\" . format ( arg_1 . path ) ) arg_0 . _attached_bundles . append ( arg_1 )","id_":252323,"task_name":"https:\/\/github.com\/rbw\/flask-journey\/blob\/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285\/flask_journey\/journey.py#L113-L133","negative":"This validates if an API key for the specified LCC-Server is available.\n\n API keys are stored using the following file scheme::\n\n ~\/.astrobase\/lccs\/apikey-domain.of.lccserver.org\n\n e.g. 
for the HAT LCC-Server at https:\/\/data.hatsurveys.org::\n\n ~\/.astrobase\/lccs\/apikey-https-data.hatsurveys.org\n\n Parameters\n ----------\n\n lcc_server : str\n The base URL of the LCC-Server for which the existence of API keys will\n be checked.\n\n Returns\n -------\n\n (apikey_ok, apikey_str, expiry) : tuple\n The returned tuple contains the status of the API key, the API key\n itself if present, and its expiry date if present."} {"query":"Simultaneously reports and captures stdout and stderr from a process","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = 'auto' ) : arg_4 = [ ] arg_5 = [ ] if arg_3 == 'auto' : arg_3 = 'thread' if arg_3 == 'select' : if not POSIX : raise NotImplementedError ( 'select is only available on posix' ) arg_6 = _proc_iteroutput_select elif arg_3 == 'thread' : arg_6 = _proc_iteroutput_thread else : raise ValueError ( 'backend must be select, thread, or auto' ) arg_7 = arg_0 ( ) for arg_8 , arg_9 in arg_6 ( arg_7 ) : if arg_8 : if arg_1 : arg_1 . write ( arg_8 ) arg_1 . flush ( ) arg_4 . append ( arg_8 ) if arg_9 : if arg_2 : arg_2 . write ( arg_9 ) arg_2 . flush ( ) arg_5 . append ( arg_9 ) return arg_7 , arg_4 , arg_5","id_":252324,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_cmd.py#L163-L199","negative":"Derives a PEP386-compliant version number from VERSION."} {"query":"Validate data and return a list of validation problems found .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True , arg_3 = 0 , arg_4 = False , arg_5 = 0 , arg_6 = None , arg_7 = True ) : arg_8 = list ( ) arg_9 = arg_0 . iFunc ( arg_1 , arg_2 , arg_3 , arg_4 , arg_6 , arg_7 ) for arg_10 , arg_11 in enumerate ( arg_9 ) : if not arg_5 or arg_10 < arg_5 : arg_8 . append ( arg_11 ) return arg_8","id_":252325,"task_name":"https:\/\/github.com\/alimanfoo\/csvvalidator\/blob\/50a86eefdc549c48f65a91a5c0a66099010ee65d\/csvvalidator.py#L368-L412","negative":"Configure the Outstation's database of input point definitions.\n\n Configure two Analog points (group\/variation 30.1) at indexes 1 and 2.\n Configure two Binary points (group\/variation 1.2) at indexes 1 and 2."} {"query":"The entry point of this script to generate change log ChangelogGeneratorError Is thrown when one of the specified tags was not found in list of tags .","positive":"def Func ( arg_0 ) : if not arg_0 . options . project or not arg_0 . options . user : print ( \"Project and\/or user missing. \" \"For help Func:\\n pygcgen --help\" ) return if not arg_0 . options . quiet : print ( \"Generating changelog...\" ) arg_1 = None try : arg_1 = arg_0 . generator . compound_changelog ( ) except ChangelogGeneratorError as err : print ( \"\\n\\033[91m\\033[1m{}\\x1b[0m\" . format ( err . args [ 0 ] ) ) exit ( 1 ) if not arg_1 : if not arg_0 . options . quiet : print ( \"Empty changelog generated. {} not written.\" . format ( arg_0 . options . output ) ) return if arg_0 . options . no_overwrite : arg_2 = checkname ( arg_0 . options . output ) else : arg_2 = arg_0 . options . output with codecs . open ( arg_2 , \"w\" , \"utf-8\" ) as fh : fh . write ( arg_1 ) if not arg_0 . options . quiet : print ( \"Done!\" ) print ( \"Generated changelog written to {}\" . 
format ( arg_2 ) )","id_":252326,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/main.py#L49-L86","negative":"Check if the profiler is running."} {"query":"If sort_type is None - inverse current sort for field if no sorted - use asc","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if not arg_2 : if arg_0 . initial_sort == arg_1 : arg_2 = 'desc' if arg_0 . initial_sort_type == 'asc' else 'asc' else : arg_2 = 'asc' arg_0 . initial_params [ arg_0 . sort_param_name ] = arg_0 . sort_fields [ arg_1 ] arg_0 . initial_params [ arg_0 . sort_type_param_name ] = arg_2 return '?%s' % arg_0 . initial_params . urlencode ( )","id_":252327,"task_name":"https:\/\/github.com\/AndrewIngram\/django-extra-views\/blob\/188e1bf1f15a44d9a599028d020083af9fb43ea7\/extra_views\/contrib\/mixins.py#L120-L131","negative":"activates error messages, useful during development"} {"query":"Insert or append pauli to the targeted indices .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : if arg_3 is not None : if arg_2 is not None : raise QiskitError ( \"Please only provide either `paulis` or `pauli_labels`\" ) if isinstance ( arg_3 , str ) : arg_3 = list ( arg_3 ) arg_2 = Pauli . from_label ( arg_3 [ : : - 1 ] ) if arg_1 is None : arg_0 . _z = np . concatenate ( ( arg_0 . _z , arg_2 . z ) ) arg_0 . _x = np . concatenate ( ( arg_0 . _x , arg_2 . x ) ) else : if not isinstance ( arg_1 , list ) : arg_1 = [ arg_1 ] arg_0 . _z = np . insert ( arg_0 . _z , arg_1 , arg_2 . z ) arg_0 . _x = np . insert ( arg_0 . _x , arg_1 , arg_2 . x ) return arg_0","id_":252328,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/pauli.py#L372-L412","negative":"Load all Service Fabric commands"} {"query":"Convert string into path case . Join punctuation with slash .","positive":"def Func ( arg_0 ) : arg_0 = snakecase ( arg_0 ) if not arg_0 : return arg_0 return re . sub ( r\"_\" , \"\/\" , arg_0 )","id_":252329,"task_name":"https:\/\/github.com\/okunishinishi\/python-stringcase\/blob\/700ad111be16b384aadaddcf8199f9390575c7b6\/stringcase.py#L86-L100","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Send a message to the thermostat","positive":"def Func ( arg_0 , arg_1 , arg_2 = \"Hello from python-ecobee!\" ) : arg_3 = { \"selection\" : { \"selectionType\" : \"thermostats\" , \"selectionMatch\" : arg_0 . thermostats [ arg_1 ] [ 'identifier' ] } , \"functions\" : [ { \"type\" : \"sendMessage\" , \"params\" : { \"text\" : arg_2 [ 0 : 500 ] } } ] } arg_4 = \"send message\" return arg_0 . make_request ( arg_3 , arg_4 )","id_":252330,"task_name":"https:\/\/github.com\/nkgilley\/python-ecobee-api\/blob\/cc8d90d20abcb9ef5b66ec9cb035bae2f06ba174\/pyecobee\/__init__.py#L295-L305","negative":"Merge tweaks into a main config file."} {"query":"Base64 - encode the data contained in the reply when appropriate .","positive":"def Func ( arg_0 ) : if arg_0 . data is None : return \"\" elif not arg_0 . data : return \"=\" else : arg_1 = standard_b64Func ( arg_0 . data ) return arg_1 . decode ( \"us-ascii\" )","id_":252331,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/sasl\/core.py#L217-L229","negative":"Tells the CPU to set up a concrete unicorn emulator and use it to execute instructions\n until target is reached.\n\n :param target: Where Unicorn should hand control back to Manticore. 
Set to 0 for all instructions."} {"query":"Return a list of operations . See base . py for additional detail .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = None , arg_9 = None , arg_10 = 0 ) : arg_1 = None if arg_1 == { '*' } else arg_1 arg_2 = None if arg_2 == { '*' } else arg_2 arg_3 = None if arg_3 == { '*' } else arg_3 arg_4 = None if arg_4 == { '*' } else arg_4 arg_5 = None if arg_5 == { '*' } else arg_5 arg_6 = None if arg_6 == { '*' } else arg_6 if arg_7 or arg_8 or arg_9 : raise NotImplementedError ( 'Lookup by labels and create_time not yet supported by stub.' ) arg_11 = [ x for x in arg_0 . _operations if ( ( not arg_1 or x . get_field ( 'status' , ( None , None ) ) [ 0 ] in arg_1 ) and ( not arg_2 or x . get_field ( 'user' , None ) in arg_2 ) and ( not arg_3 or x . get_field ( 'job-id' , None ) in arg_3 ) and ( not arg_4 or x . get_field ( 'job-name' , None ) in arg_4 ) and ( not arg_5 or x . get_field ( 'task-id' , None ) in arg_5 ) and ( not arg_6 or x . get_field ( 'task-attempt' , None ) in arg_6 ) ) ] if arg_10 > 0 : arg_11 = arg_11 [ : arg_10 ] return arg_11","id_":252332,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/providers\/stub.py#L74-L109","negative":"Query for null or blank field."} {"query":"Returns the rank of each process on its machine The processes on a given machine will be assigned ranks 0 1 2 ... N - 1 where N is the number of processes on this machine .","positive":"def Func ( arg_0 ) : arg_1 = platform . node ( ) arg_2 = arg_0 . allgather ( ( arg_0 . Get_rank ( ) , arg_1 ) ) arg_3 = defaultdict ( int ) arg_4 = None for ( arg_5 , arg_6 ) in arg_2 : if arg_5 == arg_0 . Get_rank ( ) : arg_4 = arg_3 [ arg_6 ] arg_3 [ arg_6 ] += 1 assert arg_4 is not None return arg_4 , arg_3 [ arg_1 ]","id_":252333,"task_name":"https:\/\/github.com\/openai\/baselines\/blob\/3301089b48c42b87b396e246ea3f56fa4bfc9678\/baselines\/common\/mpi_util.py#L49-L67","negative":"Clear all matching our user_id."} {"query":"Get the equaliser modes supported by this device .","positive":"def Func ( arg_0 ) : if not arg_0 . __equalisers : arg_0 . __equalisers = yield from arg_0 . handle_list ( arg_0 . API . get ( 'equalisers' ) ) return arg_0 . __equalisers","id_":252334,"task_name":"https:\/\/github.com\/zhelev\/python-afsapi\/blob\/bb1990cf1460ae42f2dde75f2291625ddac2c0e4\/afsapi\/__init__.py#L373-L379","negative":"Deal with the incoming packets"} {"query":"Update the current label s name . Returns a new Label object .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . fetch_json ( uri_path = arg_0 . base_uri , http_method = 'PUT' , query_params = { 'name' : arg_1 } ) return arg_0 . create_label ( arg_2 )","id_":252335,"task_name":"https:\/\/github.com\/its-rigs\/Trolly\/blob\/483dc94c352df40dc05ead31820b059b2545cf82\/trolly\/label.py#L44-L54","negative":"Decode the data passed in and potentially flush the decoder."} {"query":"Parse the given Zinc text and return the equivalent data .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = 'utf-8' ) : if isinstance ( arg_0 , six . binary_type ) : arg_0 = arg_0 . decode ( encoding = arg_3 ) arg_4 = functools . partial ( Func_grid , arg_1 = arg_1 , arg_3 = arg_3 ) if arg_1 == MODE_JSON : if isinstance ( arg_0 , six . string_types ) : arg_5 = json . 
loads ( arg_0 ) else : arg_5 = arg_0 if isinstance ( arg_5 , dict ) : return arg_4 ( arg_5 ) else : return list ( map ( arg_4 , arg_5 ) ) else : return list ( map ( arg_4 , GRID_SEP . split ( arg_0 . rstrip ( ) ) ) )","id_":252336,"task_name":"https:\/\/github.com\/vrtsystems\/hszinc\/blob\/d52a7c6b5bc466f3c1a77b71814c8c0776aba995\/hszinc\/parser.py#L25-L48","negative":"Reject request."} {"query":"Perform a one - off configuration check that StaticFiles is actually pointed at a directory so that we can raise loud errors rather than just returning 404 responses .","positive":"async def Func ( arg_0 ) -> None : if arg_0 . directory is None : return try : arg_1 = await aio_stat ( arg_0 . directory ) except FileNotFoundError : raise RuntimeError ( f\"StaticFiles directory '{self.directory}' does not exist.\" ) if not ( stat . S_ISDIR ( arg_1 . st_mode ) or stat . S_ISLNK ( arg_1 . st_mode ) ) : raise RuntimeError ( f\"StaticFiles path '{self.directory}' is not a directory.\" )","id_":252337,"task_name":"https:\/\/github.com\/encode\/starlette\/blob\/d23bfd0d8ff68d535d0283aa4099e5055da88bb9\/starlette\/staticfiles.py#L175-L193","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Design a windowed FIR bandpass filter in terms of passband critical frequencies f1 < f2 in Hz relative to sampling rate fs in Hz . The number of taps must be provided . Mark Wickert October 2016","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 1.0 , arg_4 = False ) : return signal . firwin ( arg_0 , 2 * ( arg_1 , arg_2 ) \/ arg_3 , arg_4 = arg_4 )","id_":252338,"task_name":"https:\/\/github.com\/mwickert\/scikit-dsp-comm\/blob\/5c1353412a4d81a8d7da169057564ecf940f8b5b\/sk_dsp_comm\/fir_design_helper.py#L48-L56","negative":"Remove `self` from the containing `DiscoItems` object."} {"query":"Get a string version of block_stack for debugging .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = \", \" . join ( [ \"(%s, %r)\" % ( dis . opname [ b [ 0 ] ] , b [ 1 ] ) for b in arg_1 ] ) return \"[\" + arg_2 + \"]\"","id_":252339,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/parser.py#L415-L420","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. 
This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Invoke a dry - run of the scheduler for the job .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False , arg_4 = False ) : arg_5 = { } arg_5 . update ( arg_2 ) arg_5 . setdefault ( 'Diff' , arg_3 ) arg_5 . setdefault ( 'PolicyOverride' , arg_4 ) return arg_0 . request ( arg_1 , \"plan\" , json = arg_5 , method = \"post\" ) . json ( )","id_":252340,"task_name":"https:\/\/github.com\/jrxFive\/python-nomad\/blob\/37df37e4de21e6f8ac41c6154e7f1f44f1800020\/nomad\/api\/job.py#L178-L197","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"Calls fn with args possibly expanding args .","positive":"def Func ( arg_0 , arg_1 ) : if expand_as_args ( arg_1 ) : return arg_0 ( * arg_1 ) elif _expand_as_kwargs ( arg_1 ) : return arg_0 ( ** arg_1 ) else : return arg_0 ( arg_1 )","id_":252341,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/internal\/nest_util.py#L185-L210","negative":"Setup coverage related extensions."} {"query":"Call Inkscape to export the input_file to output_file using the specific export argument flag for the output file type .","positive":"def Func ( arg_0 , arg_1 , arg_2 = \"-A\" , arg_3 = 90 , arg_4 = None ) : if not os . path . exists ( arg_0 ) : log . error ( 'File {} not found.' . format ( arg_0 ) ) raise IOError ( ( 0 , 'File not found.' , arg_0 ) ) if '=' not in arg_2 : arg_2 += ' ' arg_5 = [ ] arg_5 += [ '--without-gui' ] arg_5 += [ '--export-text-to-path' ] arg_5 += [ '{}\"{}\"' . format ( arg_2 , arg_1 ) ] arg_5 += [ '--export-dpi={}' . format ( arg_3 ) ] arg_5 += [ '\"{}\"' . format ( arg_0 ) ] return call_inkscape ( arg_5 , arg_4 = arg_4 )","id_":252342,"task_name":"https:\/\/github.com\/PythonSanSebastian\/docstamp\/blob\/b43808f2e15351b0b2f0b7eade9c7ef319c9e646\/docstamp\/inkscape.py#L48-L84","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Fit and transform the stacked points .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : arg_1 = as_features ( arg_1 , stack = True ) arg_4 = arg_0 . transformer . Func ( arg_1 . stacked_features , arg_2 , ** arg_3 ) return arg_0 . _gather_outputs ( arg_1 , arg_4 )","id_":252343,"task_name":"https:\/\/github.com\/dougalsutherland\/skl-groups\/blob\/2584c10a413626c6d5f9078cdbf3dcc84e4e9a5b\/skl_groups\/preprocessing.py#L78-L97","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Called when socket is write - ready","positive":"def Func ( arg_0 ) : try : pyngus . 
write_socket_output ( arg_0 . connection , arg_0 . socket ) except Exception as e : LOG . error ( \"Exception on socket write: %s\" , str ( e ) ) arg_0 . connection . close_output ( ) arg_0 . connection . close ( ) arg_0 . connection . process ( time . time ( ) )","id_":252344,"task_name":"https:\/\/github.com\/kgiusti\/pyngus\/blob\/5392392046989f1bb84ba938c30e4d48311075f1\/examples\/rpc-server.py#L102-L111","negative":"Gets schedule information for a team-season.\n\n :year: The year for which we want the schedule.\n :returns: DataFrame of schedule information."} {"query":"Implementation that treats floats more like decimals .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_2 = arg_0 . _change_precision ( arg_2 , arg_0 . settings [ 'number' ] [ 'precision' ] ) arg_3 = pow ( 10 , arg_2 ) arg_3 = round ( arg_0 . parse ( arg_1 ) * arg_3 ) \/ arg_3 return '{0} {1}.{2}f' . format ( arg_1 , arg_2 , arg_2 )","id_":252345,"task_name":"https:\/\/github.com\/ojengwa\/accounting\/blob\/6343cf373a5c57941e407a92c101ac4bc45382e3\/accounting\/accounting.py#L164-L177","negative":"Returns any parameters needed for Akamai HD player verification.\n\n Algorithm originally documented by KSV, source:\n http:\/\/stream-recorder.com\/forum\/showpost.php?p=43761&postcount=13"} {"query":"Move dims corresponding to axis in x to the end then flatten .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = True ) : if not arg_1 : return arg_0 arg_4 = sorted ( set ( range ( arg_2 ) ) . difference ( arg_1 ) ) arg_5 = arg_4 + list ( arg_1 ) if arg_3 else list ( arg_1 ) + arg_4 arg_6 = tf . transpose ( a = arg_0 , arg_5 = arg_5 ) if arg_0 . shape . is_fully_defined ( ) : arg_7 = arg_0 . shape . as_list ( ) arg_8 = [ arg_7 [ i ] for i in arg_4 ] arg_9 = [ np . prod ( [ arg_7 [ i ] for i in arg_1 ] ) ] arg_10 = ( arg_8 + arg_9 if arg_3 else arg_9 + arg_8 ) else : arg_8 = tf . gather ( tf . shape ( input = arg_0 ) , arg_4 ) arg_10 = tf . concat ( [ arg_8 , [ - 1 ] ] if arg_3 else [ [ - 1 ] , arg_8 ] , arg_1 = 0 ) return tf . reshape ( arg_6 , shape = arg_10 )","id_":252346,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/stats\/quantiles.py#L847-L884","negative":"Calculate a t-test score for the difference between two samples.\n\n Args:\n sample1: one sample.\n sample2: the other sample.\n\n Returns:\n The t-test score, as a float."} {"query":"Returns all annotations lexicographically sorted as a concatenated string .","positive":"def Func ( arg_0 ) : arg_1 = '' for arg_2 in sorted ( arg_0 . _dict . keys ( ) ) : arg_1 += '%s=%s; ' % ( arg_2 , str ( arg_0 . _dict [ arg_2 ] ) ) return arg_1 [ : - 2 ]","id_":252347,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/annotations.py#L175-L180","negative":"validate source directory names in components"} {"query":"Returns a merged timestamp array for Donor + Accept . and bool mask for A .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = np . hstack ( [ arg_0 , arg_1 ] ) arg_3 = np . hstack ( [ np . zeros ( arg_0 . size , dtype = np . bool ) , np . ones ( arg_1 . size , dtype = np . bool ) ] ) arg_4 = arg_2 . 
argsort ( ) return arg_2 [ arg_4 ] , arg_3 [ arg_4 ]","id_":252348,"task_name":"https:\/\/github.com\/tritemio\/PyBroMo\/blob\/b75f82a4551ff37e7c7a7e6954c536451f3e6d06\/pybromo\/legacy.py#L55-L62","negative":"Given a Dusty repo object, clone the remote into Dusty's local repos\n directory if it does not already exist."} {"query":"Returns a boto3 . s3 . Object object matching the wildcard expression","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = '' ) : if not arg_2 : ( arg_2 , arg_1 ) = arg_0 . parse_s3_url ( arg_1 ) arg_4 = re . split ( r'[*]' , arg_1 , 1 ) [ 0 ] arg_5 = arg_0 . list_keys ( arg_2 , arg_4 = arg_4 , arg_3 = arg_3 ) if arg_5 : arg_6 = [ k for k in arg_5 if fnmatch . fnmatch ( k , arg_1 ) ] if arg_6 : return arg_0 . get_key ( arg_6 [ 0 ] , arg_2 )","id_":252349,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/S3_hook.py#L295-L314","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Configuration include function for this module","positive":"def Func ( arg_0 ) : arg_1 = APIKeyAuthenticationPolicy ( ) arg_0 . include ( 'openstax_accounts' ) arg_2 = arg_0 . registry . getUtility ( IOpenstaxAccountsAuthenticationPolicy ) arg_3 = [ arg_1 , arg_2 ] arg_4 = MultiAuthenticationPolicy ( arg_3 ) arg_0 . set_authentication_policy ( arg_4 ) arg_5 = ACLAuthorizationPolicy ( ) arg_0 . set_authorization_policy ( arg_5 )","id_":252350,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/authnz.py#L93-L107","negative":"Shift the model result and return the new instance.\n\n Queues up the T(i+1) prediction value and emits a T(i)\n input\/prediction pair, if possible. E.g., if the previous T(i-1)\n iteration was learn-only, then we would not have a T(i) prediction in our\n FIFO and would not be able to emit a meaningful input\/prediction pair.\n\n :param modelResult: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult`\n instance to shift.\n :return: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult` instance that\n has been shifted"} {"query":"Issue a B read on V4 meter .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . getContext ( ) arg_0 . setContext ( \"request[v4B]\" ) arg_0 . m_serial_port . write ( \"2f3f\" . decode ( \"hex\" ) + arg_0 . m_meter_address + \"3031210d0a\" . decode ( \"hex\" ) ) arg_0 . m_raw_read_b = arg_0 . m_serial_port . getResponse ( arg_0 . getContext ( ) ) arg_3 = arg_0 . unpackStruct ( arg_0 . m_raw_read_b , arg_0 . m_blk_b ) arg_0 . convertData ( arg_3 , arg_0 . m_blk_b , arg_0 . m_kwh_precision ) arg_0 . m_b_crc = arg_0 . crcMeterRead ( arg_0 . m_raw_read_b , arg_0 . m_blk_b ) arg_0 . setContext ( arg_1 ) return arg_0 . m_b_crc","id_":252351,"task_name":"https:\/\/github.com\/ekmmetering\/ekmmeters\/blob\/b3748bdf30263bfa46ea40157bdf8df2522e1904\/ekmmeters.py#L3487-L3501","negative":"If input object is an ndarray it will be converted into a list"} {"query":"Send buffered metrics in batch requests over TCP","positive":"def Func ( arg_0 ) : while len ( arg_0 . _batches ) > 0 : arg_0 . _socket . sendall ( arg_0 . _batches [ 0 ] ) arg_0 . _batches . 
popleft ( ) return arg_0","id_":252352,"task_name":"https:\/\/github.com\/farzadghanei\/statsd-metrics\/blob\/153ff37b79777f208e49bb9d3fb737ba52b99f98\/statsdmetrics\/client\/tcp.py#L65-L71","negative":"returns a seq array with 'RSKYWM' randomly replaced with resolved bases"} {"query":"Return list of positions of bits set to one in given data .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = 0x1 assert ( len ( arg_0 ) == arg_1 + 1 ) for arg_4 in arg_0 [ 1 : ] : for arg_5 in range ( 8 ) : if ( ( arg_4 >> arg_5 ) & 1 ) == 1 : arg_2 . append ( arg_3 ) arg_3 += 1 return arg_2","id_":252353,"task_name":"https:\/\/github.com\/c-soft\/satel_integra\/blob\/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c\/satel_integra\/satel_integra.py#L50-L66","negative":"Creates the Nginx configuration for the project"} {"query":"Get config dictionary for the given repository .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _read_index_servers ( ) arg_3 = arg_0 . _find_repo_config ( arg_2 , arg_1 ) return arg_3","id_":252354,"task_name":"https:\/\/github.com\/dagster-io\/dagster\/blob\/4119f8c773089de64831b1dfb9e168e353d401dc\/bin\/pypirc.py#L71-L91","negative":"Decode the data passed in and potentially flush the decoder."} {"query":"prefix base - > true iff name prefix + . + base is private .","positive":"def Func ( arg_0 , arg_1 ) : warnings . warn ( \"Func is deprecated; it wasn't useful; \" \"examine DocTestFinder.find() lists instead\" , DeprecationWarning , stacklevel = 2 ) return arg_1 [ : 1 ] == \"_\" and not arg_1 [ : 2 ] == \"__\" == arg_1 [ - 2 : ]","id_":252355,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/ext\/dtcompat.py#L184-L196","negative":"Tries decoding a byte string from the OS into a unicode string\n\n :param byte_string:\n A byte string\n\n :return:\n A unicode string"} {"query":"Update an already - created namespace .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> None : arg_3 = arg_0 . _get_old_entry_identifiers ( arg_1 ) arg_4 = 0 arg_5 = 0 for arg_6 in arg_0 . _iterate_namespace_models ( ) : if arg_0 . _get_identifier ( arg_6 ) in arg_3 : continue arg_7 = arg_0 . _create_namespace_entry_from_model ( arg_6 , arg_1 = arg_1 ) if arg_7 is None or arg_7 . name is None : arg_5 += 1 continue arg_4 += 1 arg_0 . session . add ( arg_7 ) arg_8 = time . time ( ) log . info ( 'got %d new entries. skipped %d entries missing names. committing models' , arg_4 , arg_5 ) arg_0 . session . commit ( ) log . info ( 'committed models in %.2f seconds' , time . time ( ) - arg_8 )","id_":252356,"task_name":"https:\/\/github.com\/bio2bel\/bio2bel\/blob\/d80762d891fa18b248709ff0b0f97ebb65ec64c2\/src\/bio2bel\/manager\/namespace_manager.py#L270-L294","negative":"Initiate connection to APRS server and attempt to login\n\n blocking = False - Should we block until connected and logged-in\n retry = 30 - Retry interval in seconds"} {"query":"Based on some criteria filter the profiles and return a new Profiles Manager containing only the chosen items","positive":"def Func ( arg_0 , ** arg_1 ) : if not len ( arg_0 ) : arg_0 . all ( ) arg_2 = Func ( lambda item : [ True for arg in arg_1 if item [ arg ] == arg_1 [ arg ] ] != [ ] , arg_0 ) return Profiles ( arg_0 . 
api , arg_2 )","id_":252357,"task_name":"https:\/\/github.com\/vtemian\/buffpy\/blob\/6c9236fd3b6a8f9e2d70dbf1bc01529242b73075\/buffpy\/managers\/profiles.py#L27-L40","negative":"r\"\"\"\n Converts `data` into a hashable byte representation if an appropriate\n hashing function is known.\n\n Args:\n data (object): ordered data with structure\n types (bool): include type prefixes in the hash\n\n Returns:\n tuple(bytes, bytes): prefix, hashable:\n a prefix hinting the original data type and the byte representation\n of `data`.\n\n Raises:\n TypeError : if data has no registered hash methods\n\n Example:\n >>> assert _convert_to_hashable(None) == (b'NULL', b'NONE')\n >>> assert _convert_to_hashable('string') == (b'TXT', b'string')\n >>> assert _convert_to_hashable(1) == (b'INT', b'\\x01')\n >>> assert _convert_to_hashable(1.0) == (b'FLT', b'\\x01\/\\x01')\n >>> assert _convert_to_hashable(_intlike[-1](1)) == (b'INT', b'\\x01')"} {"query":"Call before starting work on a monitor specifying name and amount of work","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 = None , arg_4 = None ) : arg_0 . total = arg_1 arg_4 = arg_4 or arg_3 or \"Working...\" arg_0 . name = arg_3 or \"ProgressMonitor\" arg_0 . update ( 0 , arg_4 )","id_":252358,"task_name":"https:\/\/github.com\/amcat\/progressmonitor\/blob\/d4cabebc95bfd1447120f601c094b20bee954285\/progressmonitor\/__init__.py#L72-L77","negative":"Downloads the sprites data and returns the saved filepath."} {"query":"Calculates the future value of money invested at an annual interest rate x times per year for a given number of years .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_1 \/ float ( arg_2 ) arg_5 = arg_2 * arg_3 return arg_0 * ( 1 + arg_4 ) ** arg_5","id_":252359,"task_name":"https:\/\/github.com\/bbusenius\/Diablo-Python\/blob\/646ac5a6f1c79cf9b928a4e2a7979988698b6c82\/simple_math\/simple_math.py#L146-L172","negative":"Remove the client from the users of the socket.\n\n If there are no more clients for the socket, it\n will close automatically."} {"query":"Given a gym environment possibly wrapped multiple times returns a wrapper of class named classname or raises ValueError if no such wrapper was applied","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 while True : if arg_1 == arg_2 . class_name ( ) : return arg_2 elif isinstance ( arg_2 , gym . Wrapper ) : arg_2 = arg_2 . 
env else : raise ValueError ( \"Couldn't find wrapper named %s\" % arg_1 )","id_":252360,"task_name":"https:\/\/github.com\/openai\/baselines\/blob\/3301089b48c42b87b396e246ea3f56fa4bfc9678\/baselines\/common\/misc_util.py#L159-L182","negative":"Get an IO write task for the requested set of data\n\n This task can be ran immediately or be submitted to the IO executor\n for it to run.\n\n :type fileobj: file-like object\n :param fileobj: The file-like object to write to\n\n :type data: bytes\n :param data: The data to write out\n\n :type offset: integer\n :param offset: The offset to write the data to in the file-like object\n\n :returns: An IO task to be used to write data to a file-like object"} {"query":"Left - right flip the image and coordinates for object detection .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = False , arg_4 = False ) : if arg_1 is None : arg_1 = [ ] def _flip ( arg_0 , arg_1 ) : arg_0 = flip_axis ( arg_0 , axis = 1 , arg_4 = False ) arg_5 = list ( ) for arg_6 in arg_1 : if len ( arg_6 ) != 4 : raise AssertionError ( \"coordinate should be 4 values : [x, y, w, h]\" ) if arg_2 : if arg_3 : arg_7 = 1. - arg_6 [ 0 ] else : arg_7 = 1. - arg_6 [ 0 ] - arg_6 [ 2 ] else : if arg_3 : arg_7 = arg_0 . shape [ 1 ] - arg_6 [ 0 ] else : arg_7 = arg_0 . shape [ 1 ] - arg_6 [ 0 ] - arg_6 [ 2 ] arg_5 . append ( [ arg_7 , arg_6 [ 1 ] , arg_6 [ 2 ] , arg_6 [ 3 ] ] ) return arg_0 , arg_5 if arg_4 : arg_8 = np . random . uniform ( - 1 , 1 ) if arg_8 > 0 : return _flip ( arg_0 , arg_1 ) else : return arg_0 , arg_1 else : return _flip ( arg_0 , arg_1 )","id_":252361,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/prepro.py#L2675-L2751","negative":"stupidly print an iterable of iterables in TSV format"} {"query":"Decondition an image from the VGG16 model .","positive":"def Func ( arg_0 ) : arg_0 = arg_0 . transpose ( ( 1 , 2 , 0 ) ) arg_0 [ : , : , 0 ] += 103.939 arg_0 [ : , : , 1 ] += 116.779 arg_0 [ : , : , 2 ] += 123.68 arg_0 = arg_0 [ : , : , : : - 1 ] return arg_0","id_":252362,"task_name":"https:\/\/github.com\/awentzonline\/keras-vgg-buddy\/blob\/716cb66396b839a66ec8dc66998066b360a8f395\/keras_vgg_buddy\/models.py#L11-L18","negative":"Play the video and block whilst the video is playing"} {"query":"Transform z to real - space coordinates from tile coordinates","positive":"def Func ( arg_0 , arg_1 ) : return ( arg_1 - arg_0 . param_dict [ 'psf-zslab' ] ) * arg_0 . param_dict [ arg_0 . 
zscale ]","id_":252363,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/comp\/exactpsf.py#L325-L327","negative":"Delete the Provisioning Service Certificate.\n\n Deletes the specified certificate associated with the Provisioning\n Service.\n\n :param resource_group_name: Resource group identifier.\n :type resource_group_name: str\n :param if_match: ETag of the certificate\n :type if_match: str\n :param provisioning_service_name: The name of the provisioning\n service.\n :type provisioning_service_name: str\n :param certificate_name: This is a mandatory field, and is the logical\n name of the certificate that the provisioning service will access by.\n :type certificate_name: str\n :param certificatename: This is optional, and it is the Common Name of\n the certificate.\n :type certificatename: str\n :param certificateraw_bytes: Raw data within the certificate.\n :type certificateraw_bytes: bytearray\n :param certificateis_verified: Indicates if certificate has been\n verified by owner of the private key.\n :type certificateis_verified: bool\n :param certificatepurpose: A description that mentions the purpose of\n the certificate. Possible values include: 'clientAuthentication',\n 'serverAuthentication'\n :type certificatepurpose: str or\n ~azure.mgmt.iothubprovisioningservices.models.CertificatePurpose\n :param certificatecreated: Time the certificate is created.\n :type certificatecreated: datetime\n :param certificatelast_updated: Time the certificate is last updated.\n :type certificatelast_updated: datetime\n :param certificatehas_private_key: Indicates if the certificate\n contains a private key.\n :type certificatehas_private_key: bool\n :param certificatenonce: Random number generated to indicate Proof of\n Possession.\n :type certificatenonce: str\n :param dict custom_headers: headers that will be added to the request\n :param bool raw: returns the direct response alongside the\n deserialized response\n :param operation_config: :ref:`Operation configuration\n overrides`.\n :return: None or ClientRawResponse if raw=true\n :rtype: None or ~msrest.pipeline.ClientRawResponse\n :raises:\n :class:`ErrorDetailsException`"} {"query":"Make a POST request using the session object to a Degreed endpoint .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_0 . _create_session ( arg_3 ) arg_4 = arg_0 . session . post ( arg_1 , arg_2 = arg_2 ) return arg_4 . status_code , arg_4 . text","id_":252364,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/degreed\/client.py#L160-L173","negative":"Reassemble a Binder object coming out of the database."} {"query":"Power on or off the device .","positive":"def Func ( arg_0 , arg_1 = False ) : arg_2 = ( yield from arg_0 . handle_set ( arg_0 . API . get ( 'power' ) , int ( arg_1 ) ) ) return bool ( arg_2 )","id_":252365,"task_name":"https:\/\/github.com\/zhelev\/python-afsapi\/blob\/bb1990cf1460ae42f2dde75f2291625ddac2c0e4\/afsapi\/__init__.py#L228-L232","negative":"Parses a file and returns a document object.\n File, a file like object."} {"query":"List unique elements preserving order . Remember only the element just seen .","positive":"def Func ( arg_0 , arg_1 = None ) : try : from itertools import imap as map except ImportError : from builtins import map return map ( next , map ( operator . itemgetter ( 1 ) , itertools . 
groupby ( arg_0 , arg_1 ) ) )","id_":252366,"task_name":"https:\/\/github.com\/axiom-data-science\/pyaxiom\/blob\/7ea7626695abf095df6a67f66e5b3e9ae91b16df\/pyaxiom\/utils.py#L35-L45","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"} {"query":"Setup output processors","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . proto == 'tcp' : arg_2 = { 'output' : 'tensor.outputs.riemann.RiemannTCP' , 'server' : arg_0 . server , 'port' : arg_0 . port } else : arg_2 = { 'output' : 'tensor.outputs.riemann.RiemannUDP' , 'server' : arg_0 . server , 'port' : arg_0 . port } arg_3 = arg_1 . get ( 'outputs' , [ arg_2 ] ) for arg_4 in arg_3 : if not ( 'debug' in arg_4 ) : arg_4 [ 'debug' ] = arg_0 . debug arg_5 = arg_4 [ 'output' ] . split ( '.' ) [ - 1 ] arg_6 = '.' . join ( arg_4 [ 'output' ] . split ( '.' ) [ : - 1 ] ) arg_7 = getattr ( importlib . import_module ( arg_6 ) , arg_5 ) ( arg_4 , arg_0 ) arg_8 = arg_4 . get ( 'name' , None ) if arg_8 in arg_0 . outputs : arg_0 . outputs [ arg_8 ] . append ( arg_7 ) else : arg_0 . outputs [ arg_8 ] = [ arg_7 ] reactor . callLater ( 0 , arg_7 . createClient )","id_":252367,"task_name":"https:\/\/github.com\/calston\/tensor\/blob\/7c0c99708b5dbff97f3895f705e11996b608549d\/tensor\/service.py#L92-L130","negative":"Reads a command response status.\n\n If there is no response message then the returned status message will\n be an empty string.\n\n Raises:\n NNTPError: If data is required to be read from the socket and fails.\n NNTPProtocolError: If the status line can't be parsed.\n NNTPTemporaryError: For status code 400-499\n NNTPPermanentError: For status code 500-599\n\n Returns:\n A tuple of status code (as an integer) and status message."} {"query":"The equivalent of mkdir - p in shell .","positive":"def Func ( arg_0 ) : arg_1 = os . path . isdir arg_2 = [ os . path . abspath ( arg_0 ) ] while not arg_1 ( arg_2 [ - 1 ] ) : arg_3 = os . path . dirname ( arg_2 [ - 1 ] ) arg_2 . append ( arg_3 ) while arg_2 : arg_0 = arg_2 . pop ( ) if not arg_1 ( arg_0 ) : os . mkdir ( arg_0 )","id_":252368,"task_name":"https:\/\/github.com\/larsyencken\/proj\/blob\/44fd72aeb9bbf72046d81c4e9e4306a23335dc0a\/proj\/__init__.py#L106-L118","negative":"Returns a list of dicts representing issues from a remote service."} {"query":"Acquire the semaphore","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : logger . debug ( \"Acquiring %s\" , arg_1 ) if not arg_0 . _semaphore . Func ( arg_2 ) : raise NoResourcesAvailable ( \"Cannot Func tag '%s'\" % arg_1 )","id_":252369,"task_name":"https:\/\/github.com\/boto\/s3transfer\/blob\/2aead638c8385d8ae0b1756b2de17e8fad45fffa\/s3transfer\/utils.py#L562-L576","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Custom version of the standard annotate function that allows using field names as annotated fields .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = { field . name : field for field in arg_0 . model . _meta . get_fields ( ) } arg_3 = { } arg_4 = { } for arg_5 , arg_6 in arg_1 . items ( ) : if arg_5 in arg_2 : arg_7 = '%s_new' % arg_5 arg_3 [ arg_7 ] = arg_6 arg_4 [ arg_7 ] = arg_5 else : arg_3 [ arg_5 ] = arg_6 arg_8 = super ( ) . Func ( ** arg_3 ) arg_8 . 
rename_annotations ( ** arg_4 ) return arg_8","id_":252370,"task_name":"https:\/\/github.com\/SectorLabs\/django-postgres-extra\/blob\/eef2ed5504d225858d4e4f5d77a838082ca6053e\/psqlextra\/manager\/manager.py#L31-L64","negative":"Assert that Tensor x has expected number of dimensions."} {"query":"Recursively removes the group and all it s children .","positive":"def Func ( arg_0 , arg_1 = True , arg_2 = None ) : arg_3 = arg_0 . f_get_parent ( ) arg_3 . Func_child ( arg_0 . v_name , arg_1 = arg_1 , arg_2 = arg_2 )","id_":252371,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L2786-L2802","negative":"Adds all parameters to `traj`"} {"query":"Get course s duration as a timedelta .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . end - arg_1 . start if arg_1 . start and arg_1 . end else None if arg_2 : return strfdelta ( arg_2 , '{W} weeks {D} days.' ) return ''","id_":252372,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/xapi\/serializers.py#L76-L89","negative":"Download a file from device to local filesystem"} {"query":"Refresh or acquire access_token .","positive":"def Func ( arg_0 , arg_1 = True ) : arg_2 = arg_0 . auth_access_data_raw = arg_0 . _auth_token_request ( ) return arg_0 . _auth_token_process ( arg_2 , arg_1 = arg_1 )","id_":252373,"task_name":"https:\/\/github.com\/mk-fg\/python-onedrive\/blob\/74d3f6605b0e8a9031a2aab8092f551293ffb533\/onedrive\/api_v5.py#L271-L274","negative":"Get a single publication."} {"query":"Check mandatory service name parameter in POST request .","positive":"def Func ( arg_0 ) : if \"service\" in arg_0 . document . attrib : arg_1 = arg_0 . document . attrib [ \"service\" ] . lower ( ) if arg_1 in allowed_service_types : arg_0 . params [ \"service\" ] = arg_1 else : raise OWSInvalidParameterValue ( \"Service %s is not supported\" % arg_1 , arg_1 = \"service\" ) else : raise OWSMissingParameterValue ( 'Parameter \"service\" is missing' , arg_1 = \"service\" ) return arg_0 . params [ \"service\" ]","id_":252374,"task_name":"https:\/\/github.com\/bird-house\/twitcher\/blob\/e6a36b3aeeacf44eec537434b0fb87c09ab54b5f\/twitcher\/owsrequest.py#L146-L156","negative":"Sparse matrix roll\n\n This operation is equivalent to ``numpy.roll``, but operates on sparse matrices.\n\n Parameters\n ----------\n x : scipy.sparse.spmatrix or np.ndarray\n The sparse matrix input\n\n shift : int\n The number of positions to roll the specified axis\n\n axis : (0, 1, -1)\n The axis along which to roll.\n\n Returns\n -------\n x_rolled : same type as `x`\n The rolled matrix, with the same format as `x`\n\n See Also\n --------\n numpy.roll\n\n Examples\n --------\n >>> # Generate a random sparse binary matrix\n >>> X = scipy.sparse.lil_matrix(np.random.randint(0, 2, size=(5,5)))\n >>> X_roll = roll_sparse(X, 2, axis=0) # Roll by 2 on the first axis\n >>> X_dense_r = roll_sparse(X.toarray(), 2, axis=0) # Equivalent dense roll\n >>> np.allclose(X_roll, X_dense_r.toarray())\n True"} {"query":"Groups transactions in the bundle by address .","positive":"def Func ( arg_0 ) : arg_1 = [ ] if arg_0 : arg_2 = arg_0 . tail_transaction arg_3 = [ arg_2 ] for arg_4 in arg_0 . transactions [ 1 : ] : if arg_4 . address == arg_2 . address : arg_3 . append ( arg_4 ) else : arg_1 . append ( arg_3 ) arg_3 = [ arg_4 ] arg_2 = arg_4 if arg_3 : arg_1 . 
append ( arg_3 ) return arg_1","id_":252375,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/transaction\/base.py#L574-L599","negative":"Dumps and loads a database snapshot simultaneously.\n Requires that the destination server has direct database access\n to the source server.\n\n This is better than a serial dump+load when:\n 1. The network connection is reliable.\n 2. You don't need to save the dump file.\n\n The benefits of this over a dump+load are:\n 1. Usually runs faster, since the load and dump happen in parallel.\n 2. Usually takes up less disk space since no separate dump file is\n downloaded."} {"query":"Write the creation info to out .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 . write ( '# Creation Info\\n\\n' ) for arg_2 in sorted ( arg_0 . creators ) : write_value ( 'Creator' , arg_2 , arg_1 ) write_value ( 'Created' , arg_0 . created_iso_format , arg_1 ) if arg_0 . has_comment : write_text_value ( 'CreatorComment' , arg_0 . comment , arg_1 )","id_":252376,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/writers\/tagvalue.py#L51-L64","negative":"Performs recombination by binary crossover for the current population.\n\n Let v_i denote the i'th component of the member v and m_i the corresponding\n component of the mutant vector corresponding to v. Then the crossed over\n vector w_i is determined by setting w_i =\n (m_i with probability=crossover_prob else v_i). In addition, DE requires that\n at least one of the components is crossed over (otherwise we end\n up with no change). This is done by choosing an index say k randomly where\n a force crossover is performed (i.e. w_k = m_k). This is the scheme\n implemented in this function.\n\n Args:\n population: A Python list of `Tensor`s where each `Tensor` in the list\n must be of rank at least 1 and all the elements must have a common\n first dimension. The base population to cross over.\n population_size: A scalar integer `Tensor`. The number of elements in the\n population (i.e. size of the first dimension of any member of\n `population`).\n mutants: A Python list of `Tensor`s with the same structure as `population`.\n The mutated population.\n crossover_prob: A positive real scalar `Tensor` bounded above by 1.0. The\n probability of a crossover being performed for each axis.\n seed: `int` or None. The random seed for this `Op`. If `None`, no seed is\n applied.\n\n Returns:\n A list of `Tensor`s of the same structure, dtype and shape as `population`.\n The recombined population."} {"query":"Parse attribute . Delegate to href parser for hrefs otherwise return value .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_1 == 'a' and arg_2 == 'href' : return arg_0 . _parse_href ( arg_3 ) else : return arg_3","id_":252377,"task_name":"https:\/\/github.com\/nprapps\/copydoc\/blob\/e1ab09b287beb0439748c319cf165cbc06c66624\/copydoc.py#L197-L205","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Returns a h5py dataset given its registered name .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'r' ) : if arg_1 in arg_0 . _datasets : return arg_0 . _datasets [ arg_1 ] else : return arg_0 . 
create_empty_dataset ( arg_1 )","id_":252378,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/databuffer.py#L114-L126","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lengths of the boundaries and labels\n match."} {"query":"Organization object - to - dict serialization","positive":"def Func ( arg_0 ) : return { 'id' : arg_0 . id , 'name' : arg_0 . name , 'short_name' : arg_0 . short_name , 'description' : arg_0 . description , 'logo' : arg_0 . logo }","id_":252379,"task_name":"https:\/\/github.com\/edx\/edx-organizations\/blob\/51000d5d359d880a6eb3a79345f60744f1982c00\/organizations\/serializers.py#L18-L28","negative":"Return the maximum file descriptor value."} {"query":"Search item metadata using Apache Solr .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = 20 ) : arg_4 = dict ( ) arg_4 [ 'query' ] = arg_1 arg_4 [ 'limit' ] = arg_3 if arg_2 : arg_4 [ 'token' ] = arg_2 arg_5 = arg_0 . request ( 'midas.solr.search.advanced' , arg_4 ) return arg_5","id_":252380,"task_name":"https:\/\/github.com\/midasplatform\/pydas\/blob\/e5f9e96e754fb2dc5da187b05e4abc77a9b2affd\/pydas\/drivers.py#L1212-L1231","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Writes a xy_report based on xy data .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = [ session . XYDataFromHistory ( name = arg_3 [ i ] , arg_0 = arg_0 , outputVariableName = arg_2 [ i ] , arg_4 = arg_4 ) for i in xrange ( len ( arg_2 ) ) ] session . xyReportOptions . setValues ( numDigits = 8 , numberFormat = SCIENTIFIC ) session . writeXYReport ( fileName = arg_1 , appendMode = OFF , arg_5 = arg_5 )","id_":252381,"task_name":"https:\/\/github.com\/lcharleux\/argiope\/blob\/8170e431362dc760589f7d141090fd133dece259\/argiope\/abq\/abqpostproc.py#L19-L29","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Given a PIL . Image returns a ColorList of its pixels .","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : deprecated . deprecated ( 'util.gif.Func' ) return arg_1 ( convert_mode ( arg_0 ) . getdata ( ) )","id_":252382,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/util\/image\/old_gif.py#L15-L19","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"Adapter trimming for RNA - seq data","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = arg_0 . fileStore . getLocalTempDir ( ) if arg_2 : require ( arg_4 , \"Paired end data requires a reverse 3' adapter sequence.\" ) arg_6 = [ '-a' , arg_3 , '-m' , '35' ] if arg_1 and arg_2 : arg_0 . fileStore . readGlobalFile ( arg_1 , os . path . join ( arg_5 , 'R1.fastq' ) ) arg_0 . fileStore . readGlobalFile ( arg_2 , os . path . join ( arg_5 , 'R2.fastq' ) ) arg_6 . extend ( [ '-A' , arg_4 , '-o' , '\/data\/R1_cutadapt.fastq' , '-p' , '\/data\/R2_cutadapt.fastq' , '\/data\/R1.fastq' , '\/data\/R2.fastq' ] ) else : arg_0 . fileStore . readGlobalFile ( arg_1 , os . path . join ( arg_5 , 'R1.fastq' ) ) arg_6 . 
extend ( [ '-o' , '\/data\/R1_cutadapt.fastq' , '\/data\/R1.fastq' ] ) dockerCall ( arg_0 = arg_0 , tool = 'quay.io\/ucsc_cgl\/cutadapt:1.9--6bd44edd2b8f8f17e25c5a268fedaab65fa851d2' , workDir = arg_5 , arg_6 = arg_6 ) if arg_1 and arg_2 : arg_7 = arg_0 . fileStore . writeGlobalFile ( os . path . join ( arg_5 , 'R1_cutadapt.fastq' ) ) arg_8 = arg_0 . fileStore . writeGlobalFile ( os . path . join ( arg_5 , 'R2_cutadapt.fastq' ) ) else : arg_7 = arg_0 . fileStore . writeGlobalFile ( os . path . join ( arg_5 , 'R1_cutadapt.fastq' ) ) arg_8 = None return arg_7 , arg_8","id_":252383,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/tools\/preprocessing.py#L10-L48","negative":"Make a GET request using the session object to a SuccessFactors endpoint for inactive learners.\n\n Example:\n sap_search_student_url: \"\/learning\/odatav4\/searchStudent\/v1\/Students?\n $filter=criteria\/isActive eq False&$select=studentID\"\n\n SAP API response: {\n u'@odata.metadataEtag': u'W\/\"17090d86-20fa-49c8-8de0-de1d308c8b55\"',\n u'value': [\n {\n u'studentID': u'admint6',\n },\n {\n u'studentID': u'adminsap1',\n }\n ]\n }\n\n Returns: List of inactive learners\n [\n {\n u'studentID': u'admint6'\n },\n {\n u'studentID': u'adminsap1'\n }\n ]"} {"query":"Return converted PersistentValueInstruction .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = { 'name' : 'pv' , 't0' : arg_1 + arg_2 . start_time , 'ch' : arg_2 . channels [ 0 ] . name , 'val' : arg_2 . command . value } return arg_0 . _qobj_model ( ** arg_3 )","id_":252384,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/qobj\/converters\/pulse_instruction.py#L178-L193","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"Make an entry in the options_table for fn with value optstr","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 not in arg_0 . lsmagic ( ) : error ( \"%s is not a magic function\" % arg_1 ) arg_0 . options_table [ arg_1 ] = arg_2","id_":252385,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/magic.py#L612-L617","negative":"A dask relay function to fill chroms for all samples"} {"query":"Returns a dictionary of satchels used in the current configuration excluding ourselves .","positive":"def Func ( arg_0 ) : return dict ( ( arg_1 , arg_2 ) for arg_1 , arg_2 in arg_0 . all_satchels . items ( ) if arg_1 != arg_0 . name . upper ( ) and arg_1 . lower ( ) in map ( str . lower , arg_0 . genv . 
services ) )","id_":252386,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/common.py#L1005-L1013","negative":"Make OAuth token request.\n\n Raises GoogleAuthError if authentication fails.\n\n Returns dict response."} {"query":"Callback receives a stream of event_records","positive":"def Func ( arg_0 ) : check . callable_param ( arg_0 , 'event_record_callback' ) return construct_single_handler_logger ( 'event-logger' , DEBUG , StructuredLoggerHandler ( lambda logger_message : arg_0 ( construct_event_record ( logger_message ) ) ) , )","id_":252387,"task_name":"https:\/\/github.com\/dagster-io\/dagster\/blob\/4119f8c773089de64831b1dfb9e168e353d401dc\/python_modules\/dagster\/dagster\/core\/events\/logging.py#L134-L146","negative":"Return a response object from the given JSON data.\n\n :param data: Data to JSON-encode.\n :type data: mixed\n :param headers: Dict of headers to include in the requests.\n :type headers: dict\n :param status_code: HTTP status code.\n :type status_code: int\n :rtype: requests.Response"} {"query":"Edges of the grid cells origin at centre of 0 0 .. 0 grid cell .","positive":"def Func ( arg_0 ) : return [ arg_0 . delta [ arg_1 , arg_1 ] * numpy . arange ( arg_0 . shape [ arg_1 ] + 1 ) + arg_0 . origin [ arg_1 ] - 0.5 * arg_0 . delta [ arg_1 , arg_1 ] for arg_1 in range ( arg_0 . rank ) ]","id_":252388,"task_name":"https:\/\/github.com\/MDAnalysis\/GridDataFormats\/blob\/3eeb0432f8cf856912436e4f3e7aba99d3c916be\/gridData\/OpenDX.py#L236-L242","negative":"Load roster from an XML file.\n\n Can be used before the connection is started to load saved\n roster copy, for efficient retrieval of versioned roster.\n\n :Parameters:\n - `source`: file name or a file object\n :Types:\n - `source`: `str` or file-like object"} {"query":"Calls a function and send results to the collector . It supports all of function actions . A function could return yield raise any packable objects .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = ( ) ) : arg_6 = uuid4_bytes ( ) arg_7 , arg_5 = arg_0 . replier ( arg_1 , arg_5 , arg_2 . reply_to ) if arg_7 : arg_8 = ( arg_2 . call_id , arg_6 , arg_5 ) else : arg_8 = ( None , None , None ) arg_9 , arg_10 = arg_0 . find_call_target ( arg_2 ) if arg_10 . reject_if . __get__ ( arg_0 . app ) ( arg_2 , arg_5 ) : arg_7 and arg_0 . reject ( arg_7 , arg_2 . call_id , arg_5 ) return arg_7 and arg_0 . accept ( arg_7 , arg_8 ) arg_11 = False with arg_0 . catch_exceptions ( ) : try : arg_12 = arg_0 . call ( arg_2 , arg_3 , arg_4 , arg_9 , arg_10 ) except : arg_13 = sys . exc_info ( ) arg_0 . raise_ ( arg_7 , arg_8 , arg_13 ) reraise ( * arg_13 ) arg_11 = True if not arg_11 : return if isinstance ( arg_12 , Iterator ) : arg_14 = arg_12 with arg_0 . catch_exceptions ( ) : try : try : arg_12 = next ( arg_14 ) except StopIteration : pass else : arg_0 . send_reply ( arg_7 , YIELD , arg_12 , * arg_8 ) for arg_12 in arg_14 : arg_0 . send_reply ( arg_7 , YIELD , arg_12 , * arg_8 ) arg_0 . send_reply ( arg_7 , BREAK , None , * arg_8 ) except : arg_13 = sys . exc_info ( ) arg_0 . raise_ ( arg_7 , arg_8 , arg_13 ) reraise ( * arg_13 ) else : arg_0 . 
send_reply ( arg_7 , RETURN , arg_12 , * arg_8 )","id_":252389,"task_name":"https:\/\/github.com\/sublee\/zeronimo\/blob\/b216638232932718d2cbc5eabd870c8f5b5e83fb\/zeronimo\/core.py#L268-L314","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Create a thumbnail image for the video source based on ffmpeg .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = True , arg_5 = None , arg_6 = 'ffmpeg' ) : arg_7 = logging . getLogger ( __name__ ) arg_8 = arg_1 + \".tmp.jpg\" arg_9 = [ arg_6 , '-i' , arg_0 , '-an' , '-r' , '1' , '-ss' , arg_3 , '-vframes' , '1' , '-y' , arg_8 ] arg_7 . debug ( 'Create thumbnail for video: %s' , ' ' . join ( arg_9 ) ) check_subprocess ( arg_9 , arg_0 , arg_1 ) image . Func ( arg_8 , arg_1 , arg_2 , arg_4 = arg_4 , arg_5 = arg_5 ) os . unlink ( arg_8 )","id_":252390,"task_name":"https:\/\/github.com\/saimn\/sigal\/blob\/912ca39991355d358dc85fd55c7aeabdd7acc386\/sigal\/video.py#L130-L146","negative":"Gets status of response."} {"query":"r FullName after removing the local path to the repository .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . FullName ( ) if os . path . exists ( arg_1 ) : arg_2 = os . path . dirname ( arg_1 ) if _repository : arg_3 = FileInfo ( _repository ) . FullName ( ) arg_4 = arg_2 while os . path . exists ( arg_4 ) : if os . path . normcase ( arg_4 ) == os . path . normcase ( arg_3 ) : return os . path . relpath ( arg_1 , arg_4 ) . replace ( '\\\\' , '\/' ) arg_5 = os . path . dirname ( arg_4 ) if arg_5 == arg_4 : break arg_4 = arg_5 if os . path . exists ( os . path . join ( arg_2 , \".svn\" ) ) : arg_4 = arg_2 arg_5 = os . path . dirname ( arg_4 ) while os . path . exists ( os . path . join ( arg_5 , \".svn\" ) ) : arg_4 = os . path . dirname ( arg_4 ) arg_5 = os . path . dirname ( arg_5 ) arg_6 = os . path . commonprefix ( [ arg_4 , arg_2 ] ) return arg_1 [ len ( arg_6 ) + 1 : ] arg_4 = arg_7 = os . path . dirname ( arg_1 ) while arg_7 != os . path . dirname ( arg_7 ) : if ( os . path . exists ( os . path . join ( arg_7 , \".git\" ) ) or os . path . exists ( os . path . join ( arg_7 , \".hg\" ) ) or os . path . exists ( os . path . join ( arg_7 , \".svn\" ) ) ) : arg_4 = arg_7 arg_7 = os . path . dirname ( arg_7 ) if ( os . path . exists ( os . path . join ( arg_4 , \".git\" ) ) or os . path . exists ( os . path . join ( arg_4 , \".hg\" ) ) or os . path . exists ( os . path . join ( arg_4 , \".svn\" ) ) ) : arg_6 = os . path . 
commonprefix ( [ arg_4 , arg_2 ] ) return arg_1 [ len ( arg_6 ) + 1 : ] return arg_1","id_":252391,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L1264-L1322","negative":"Return a list of all enrollments for the passed section_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/enrollments.html#method.enrollments_api.index"} {"query":"Validation data by specific validictory configuration","positive":"def Func ( arg_0 = None ) : def dec ( arg_1 ) : @ wraps ( arg_1 ) def d_func ( arg_2 , arg_3 , arg_4 , * arg_5 , ** arg_6 ) : try : Funcate ( arg_4 [ 'params' ] , arg_0 ) except ValidationError as err : raise InFuncParams ( err ) except SchemaError as err : raise InternalError ( err ) return arg_1 ( arg_2 , arg_3 , arg_4 [ 'params' ] , * arg_5 , ** arg_6 ) return d_func return dec","id_":252392,"task_name":"https:\/\/github.com\/zloidemon\/aiohttp_jrpc\/blob\/f2ced214844041aa6f18b6bf6e5abeef7b47735e\/aiohttp_jrpc\/__init__.py#L87-L100","negative":"Creates a new encryption key in the path provided and sets the file\n permissions. Setting the file permissions currently does not work\n on Windows platforms because of the differences in how file\n permissions are read and modified."} {"query":"Called at the start of notebook execution to setup the environment .","positive":"def Func ( arg_0 = False ) : output_notebook ( INLINE , hide_banner = True ) if arg_0 : _setup_logging ( logging . DEBUG ) logging . debug ( 'Running notebook in debug mode.' ) else : _setup_logging ( logging . WARNING ) if 'JUPYTERHUB_SERVICE_PREFIX' not in os . environ : global arg_1 arg_1 = 'localhost:8888' logging . info ( 'Setting jupyter proxy to local mode.' )","id_":252393,"task_name":"https:\/\/github.com\/lsst-epo\/vela\/blob\/8e17ebec509be5c3cc2063f4645dfe9e26b49c18\/astropixie-widgets\/astropixie_widgets\/config.py#L73-L91","negative":"Return list of GATT descriptors that have been discovered for this\n characteristic."} {"query":"returns complement of sequence including ambiguity characters and saves lower case info for multiple hetero sequences","positive":"def Func ( arg_0 ) : arg_0 = arg_0 . replace ( \"A\" , 'u' ) . replace ( 'T' , 'v' ) . replace ( 'C' , 'p' ) . replace ( 'G' , 'z' ) . replace ( 'u' , 'T' ) . replace ( 'v' , 'A' ) . replace ( 'p' , 'G' ) . replace ( 'z' , 'C' ) arg_0 = arg_0 . replace ( 'R' , 'u' ) . replace ( 'K' , 'v' ) . replace ( 'Y' , 'b' ) . replace ( 'M' , 'o' ) . replace ( 'u' , 'Y' ) . replace ( 'v' , 'M' ) . replace ( 'b' , 'R' ) . replace ( 'o' , 'K' ) arg_0 = arg_0 . replace ( 'r' , 'u' ) . replace ( 'k' , 'v' ) . replace ( 'y' , 'b' ) . replace ( 'm' , 'o' ) . replace ( 'u' , 'y' ) . replace ( 'v' , 'm' ) . replace ( 'b' , 'r' ) . replace ( 'o' , 'k' ) return arg_0","id_":252394,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/util.py#L249-L280","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Cast value or signal of this type to another compatible type .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 . _dtype == arg_2 : return arg_1 try : arg_3 = arg_0 . _Func_fn except AttributeError : arg_3 = arg_0 . get_Func_fn ( ) arg_0 . 
_Func_fn = arg_3 return arg_3 ( arg_0 , arg_1 , arg_2 )","id_":252395,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/types\/hdlType.py#L35-L51","negative":"Write the index.html file for this report."} {"query":"Stop and remove a worker","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 200 if arg_1 in arg_0 . jobs : arg_0 . jobs [ arg_1 ] [ 'worker' ] . revoke ( terminate = True ) arg_3 = { 'id' : arg_1 , 'revoked' : True } arg_0 . jobs . pop ( arg_1 ) else : arg_3 = { 'error' : 'job {} unknown' . format ( arg_1 ) } arg_2 = 404 return flask . jsonify ( arg_3 ) , arg_2","id_":252396,"task_name":"https:\/\/github.com\/hivetech\/dna\/blob\/50ad00031be29765b2576fa407d35a36e0608de9\/python\/dna\/apy\/worker.py#L117-L135","negative":"Adds all parameters to `traj`"} {"query":"Returns all items that match the given criteria and appear before this Tag in the document .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = { } , arg_3 = None , arg_4 = None , ** arg_5 ) : return arg_0 . _findAll ( arg_1 , arg_2 , arg_3 , arg_4 , arg_0 . previousGenerator , ** arg_5 )","id_":252397,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/web\/BeautifulSoup.py#L270-L275","negative":"Initialize all ephemerals used by derived classes."} {"query":"Get the release date and certification information by country for a specific movie id .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = arg_0 . _get_id_path ( 'Func' ) arg_3 = arg_0 . _GET ( arg_2 , arg_1 ) arg_0 . _set_attrs_to_values ( arg_3 ) return arg_3","id_":252398,"task_name":"https:\/\/github.com\/celiao\/tmdbsimple\/blob\/ff17893110c99771d6398a62c35d36dd9735f4b9\/tmdbsimple\/movies.py#L185-L200","negative":"If there are edits to the current input buffer, store them."} {"query":"Calculates the width of the given string in this font .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = bacon . text . Style ( arg_0 ) arg_3 = bacon . text . GlyphRun ( arg_2 , arg_1 ) arg_4 = bacon . text . GlyphLayout ( [ arg_3 ] , 0 , 0 ) return arg_4 . content_width","id_":252399,"task_name":"https:\/\/github.com\/aholkner\/bacon\/blob\/edf3810dcb211942d392a8637945871399b0650d\/bacon\/font.py#L219-L228","negative":"Execute the raw phase for a given backend section, optionally using Arthur\n\n :param config: a Mordred config object\n :param backend_section: the backend section where the raw phase is executed\n :param arthur: if true, it enables Arthur to collect the raw data"} {"query":"Context manager that changes to directory path and return to CWD when exited .","positive":"def Func ( arg_0 ) : arg_1 = os . getcwd ( ) os . chdir ( arg_0 ) try : yield finally : os . chdir ( arg_1 )","id_":252400,"task_name":"https:\/\/github.com\/nicfit\/nicfit.py\/blob\/8313f8edbc5e7361ddad496d6d818324b5236c7a\/nicfit\/util.py#L16-L25","negative":"Fetch the base Managed Policy.\n\n This includes the base policy and the latest version document.\n\n :param managed_policy:\n :param conn:\n :return:"} {"query":"Equivalent of csv . DictWriter but allows delimiter to be a unicode string on Py2 .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : import csv if \"delimiter\" in arg_2 : arg_2 [ \"delimiter\" ] = str ( arg_2 [ \"delimiter\" ] ) return csv . 
DictWriter ( arg_0 , arg_1 , ** arg_2 )","id_":252401,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/utils\/compatibility.py#L136-L141","negative":"Calculates average Fano Factor of a network.\n\n :param traj:\n\n Trajectory container\n\n Expects:\n\n `results.monitors.spikes_e`: Data from SpikeMonitor for excitatory neurons\n\n Adds:\n\n `results.statistics.mean_fano_factor`: Average Fano Factor\n\n :param network:\n\n The BRIAN network\n\n :param current_subrun:\n\n BrianParameter\n\n :param subrun_list:\n\n Upcoming subruns, analysis is only performed if subruns is empty,\n aka the final subrun has finished.\n\n :param network_dict:\n\n Dictionary of items shared among componetns"} {"query":"Subscribe to the passed pair s ticker channel .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = ( 'ticker' , arg_1 ) arg_0 . _subscribe ( 'ticker' , arg_3 , symbol = arg_1 , ** arg_2 )","id_":252402,"task_name":"https:\/\/github.com\/Crypto-toolbox\/btfxwss\/blob\/16827fa6aacb2c0e289aa852bf61a18df6905835\/btfxwss\/client.py#L188-L196","negative":"Build a notebook model from database record."} {"query":"Fetch LAtools reference data from online repository .","positive":"def Func ( arg_0 = None ) : arg_1 = 'https:\/\/docs.google.com\/spreadsheets\/d\/e\/2PACX-1vQJfCeuqrtFFMAeSpA9rguzLAo9OVuw50AHhAULuqjMJzbd3h46PK1KjF69YiJAeNAAjjMDkJK7wMpG\/pub?gid={:}&single=true&output=csv' arg_2 = { 'culture_reference' : '0' , 'culture_test' : '1170065442' , 'downcore_reference' : '190752797' , 'downcore_test' : '721359794' , 'iolite_reference' : '483581945' , 'zircon_reference' : '1355554964' } if arg_0 is None : arg_3 = { } for arg_4 , arg_5 in arg_2 . items ( ) : arg_6 = arg_1 . format ( arg_5 ) arg_7 = pd . read_csv ( arg_6 , header = [ 0 ] , index_col = [ 0 , 1 ] ) arg_7 . index . names = [ 'sample' , 'rep' ] arg_7 . columns . names = [ 'analyte' ] arg_7 . sort_index ( 1 , inplace = True ) arg_3 [ arg_4 ] = arg_7 else : arg_5 = arg_2 [ arg_0 ] arg_6 = arg_1 . format ( arg_5 ) arg_3 = pd . read_csv ( arg_6 , index_col = [ 0 , 1 ] ) arg_3 . columns . names = [ 'analyte' ] arg_3 . sort_index ( 1 , inplace = True ) return arg_3","id_":252403,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/Supplement\/comparison_tools\/helpers.py#L18-L57","negative":"Return the kvectors associated with this tile, given the standard form\n of -0.5 to 0.5. `norm` and `form` arguments arethe same as that passed to\n `Tile.coords`.\n\n Parameters\n -----------\n real : boolean\n whether to return kvectors associated with the real fft instead"} {"query":"Returns the atom object corresponding to an atom number","positive":"def Func ( arg_0 , arg_1 ) : assert isinstance ( arg_1 , int ) , \"anumb must be integer\" if not arg_0 . _Func : if arg_0 . atoms : for arg_2 in arg_0 . atoms : arg_0 . _Func [ arg_2 . number ] = arg_2 return arg_0 . _Func [ arg_1 ] else : arg_0 . logger ( \"no atoms in the molecule\" ) return False else : if arg_1 in arg_0 . _Func : return arg_0 . _Func [ arg_1 ] else : arg_0 . logger ( \"no such atom number ({0:d}) in the molecule\" . 
format ( arg_1 ) ) return False","id_":252404,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/fileformats\/blocks.py#L145-L165","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Wait for ssh service to appear on given hosts","positive":"def Func ( arg_0 , arg_1 = 22 , arg_2 = True , arg_3 = 3 ) : log ( 'Waiting for SSH on following hosts: {0}' . format ( arg_0 ) ) for arg_4 in arg_0 : if not arg_2 or not ssh_live ( arg_4 , arg_1 ) : log ( 'Waiting for SSH on instance {0}...' . format ( arg_4 ) ) arg_5 = 0 while arg_5 < arg_3 : if ssh_live ( arg_4 , arg_1 ) : arg_5 += 1 else : arg_5 = 0 time . sleep ( 1 ) h2o_cmd . dot ( )","id_":252405,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/py2\/ec2_cmd.py#L232-L245","negative":"Read a varint from file, parse it, and return the decoded integer."} {"query":"Generic iop file writer","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : lg . info ( 'Writing :: ' + arg_2 ) arg_3 = open ( arg_2 , 'w' ) for arg_4 in scipy . nditer ( arg_1 ) : arg_3 . write ( str ( arg_4 ) + '\\n' )","id_":252406,"task_name":"https:\/\/github.com\/marrabld\/planarradpy\/blob\/5095d1cb98d4f67a7c3108c9282f2d59253e89a8\/libplanarradpy\/planrad.py#L467-L476","negative":"Fetch the events pages of a given group."} {"query":"Tokenize all the words and preserve NER labels from ENAMEX tags","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . sent_pos = 0 arg_3 = 0 while len ( arg_1 . childNodes ) > 0 : arg_4 = arg_1 . childNodes . pop ( 0 ) if arg_4 . nodeType == arg_4 . TEXT_NODE : for arg_5 in arg_4 . data . splitlines ( True ) : arg_0 . _input_string = arg_5 for arg_7 , arg_8 in arg_0 . word_tokenizer . span_tokenize ( arg_5 ) : arg_9 = arg_0 . _make_token ( arg_7 , arg_8 ) if arg_9 : yield arg_9 if arg_5 . endswith ( '\\n' ) : arg_0 . line_idx += 1 arg_0 . byte_idx += len ( arg_5 . encode ( 'utf-8' ) ) else : assert arg_4 . nodeName == 'ENAMEX' , arg_4 . nodeName arg_10 = arg_4 . attributes . get ( 'ID' ) . value arg_11 = arg_4 . attributes . get ( 'TYPE' ) . value for arg_4 in arg_4 . childNodes : assert arg_4 . nodeType == arg_4 . TEXT_NODE , arg_4 . nodeType for arg_5 in arg_4 . data . splitlines ( True ) : arg_0 . _input_string = arg_5 for arg_7 , arg_8 in arg_0 . word_tokenizer . span_tokenize ( arg_5 ) : arg_9 = arg_0 . _make_token ( arg_7 , arg_8 ) if arg_9 : if arg_11 in _PRONOUNS : arg_9 . mention_type = MentionType . PRO arg_9 . entity_type = _ENTITY_TYPES [ arg_11 ] arg_13 = Attribute ( attribute_type = AttributeType . PER_GENDER , value = str ( _PRONOUNS [ arg_11 ] ) ) arg_0 . attributes . append ( arg_13 ) else : arg_9 . mention_type = MentionType . NAME arg_9 . entity_type = _ENTITY_TYPES [ arg_11 ] arg_9 . equiv_id = int ( arg_10 ) arg_9 . mention_id = arg_3 yield arg_9 if arg_5 . endswith ( '\\n' ) : arg_0 . line_idx += 1 arg_0 . byte_idx += len ( arg_5 . encode ( 'utf-8' ) ) arg_3 += 1","id_":252407,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/_lingpipe.py#L158-L232","negative":"Remove all the item from the list and unset the related data"} {"query":"Randomize the order at which statuses for the specified social media profile will be sent out of the buffer .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : arg_3 = PATHS [ 'SHUFFLE' ] % arg_0 . 
profile_id arg_4 = '' if arg_1 : arg_4 += 'count=%s&' % arg_1 if arg_2 : arg_4 += 'utc=%s' % arg_2 return arg_0 . api . post ( arg_3 = arg_3 , data = arg_4 )","id_":252408,"task_name":"https:\/\/github.com\/vtemian\/buffpy\/blob\/6c9236fd3b6a8f9e2d70dbf1bc01529242b73075\/buffpy\/managers\/updates.py#L63-L77","negative":"Replace target with replacement"} {"query":"Launches the Hadoop datanode .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . hdfsContainerID = dockerCheckOutput ( arg_1 = arg_1 , defer = STOP , workDir = os . getcwd ( ) , tool = \"quay.io\/ucsc_cgl\/apache-hadoop-worker:2.6.2\" , dockerParameters = [ \"--net=host\" , \"-d\" , \"-v\" , \"\/mnt\/ephemeral\/:\/ephemeral\/:rw\" ] , parameters = [ arg_0 . masterIP ] ) [ : - 1 ]","id_":252409,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/spark.py#L285-L298","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Enable a given scan field .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = 1 , arg_3 = 1 , arg_4 = 1 , arg_5 = 1 ) : arg_6 = [ ( 'cmd' , 'Func' ) , ( 'slide' , str ( arg_1 ) ) , ( 'wellx' , str ( arg_2 ) ) , ( 'welly' , str ( arg_3 ) ) , ( 'fieldx' , str ( arg_4 ) ) , ( 'fieldy' , str ( arg_5 ) ) , ( 'value' , 'true' ) ] arg_0 . send ( arg_6 ) return arg_0 . wait_for ( * arg_6 [ 0 ] )","id_":252410,"task_name":"https:\/\/github.com\/MartinHjelmare\/leicacam\/blob\/1df37bccd34884737d3b5e169fae71dd2f21f1e2\/leicacam\/cam.py#L292-L305","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Return the best model ID and it s errScore from the given sprint which may still be in progress . This returns the best score from all models in the sprint which have matured so far .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . getAllSwarms ( arg_1 ) arg_3 = None arg_4 = numpy . inf for arg_5 in arg_2 : ( arg_6 , arg_7 ) = arg_0 . _hsObj . _resultsDB . bestModelIdAndErrScore ( arg_5 ) if arg_7 < arg_4 : arg_3 = arg_6 arg_4 = arg_7 return ( arg_3 , arg_4 )","id_":252411,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/hypersearch\/hs_state.py#L494-L515","negative":"Extract Packed Floating-Point Values\n\n Extracts 128-bits of packed floating-point values from the source\n operand (second operand) at an 128-bit offset from imm8[0] into the\n destination operand (first operand). The destination may be either an\n XMM register or an 128-bit memory location."} {"query":"Returns a dictionary to populate the initial state of the search procedure .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if arg_3 : with tf . control_dependencies ( arg_3 ) : arg_4 , arg_5 = arg_0 ( arg_1 ) else : arg_4 , arg_5 = arg_0 ( arg_1 ) arg_6 = norm ( arg_5 , dims = 1 ) < arg_2 return dict ( arg_6 = arg_6 , failed = tf . zeros_like ( arg_6 ) , num_iterations = tf . convert_to_tensor ( value = 0 ) , num_objective_evaluations = tf . 
convert_to_tensor ( value = 1 ) , position = arg_1 , objective_value = arg_4 , objective_gradient = arg_5 )","id_":252412,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/bfgs_utils.py#L47-L91","negative":"Add members found in prior versions up till the next major release\n\n These members are to be considered deprecated. When a new major\n release is made, these members are removed."} {"query":"Run edited source if no exceptions occur then it graduates to known good .","positive":"def Func ( arg_0 ) : with LiveExecution . lock : arg_1 = copy . copy ( arg_0 . ns ) try : arg_2 = arg_0 . edited_source arg_0 . edited_source = None arg_0 . do_exec ( arg_2 , arg_1 ) arg_0 . known_good = arg_2 arg_0 . call_good_cb ( ) return True , None except Exception as ex : arg_5 = traceback . format_exc ( ) arg_0 . call_bad_cb ( arg_5 ) arg_0 . ns . clear ( ) arg_0 . ns . update ( arg_1 ) return False , ex","id_":252413,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/grammar\/livecode.py#L84-L103","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Does basic Metric option validation .","positive":"def Func ( arg_0 ) : if not hasattr ( arg_0 , 'label' ) : raise ImproperlyConfigured ( \"No 'label' attribute found for metric %s.\" % arg_0 . __name__ ) if not hasattr ( arg_0 , 'widget' ) : raise ImproperlyConfigured ( \"No 'widget' attribute found for metric %s.\" % arg_0 . __name__ )","id_":252414,"task_name":"https:\/\/github.com\/praekelt\/django-analytics\/blob\/29c22d03374ccc0ec451650e2c2886d324f6e5c6\/analytics\/validation.py#L3-L11","negative":"Creates an image for running Raspbian in a QEMU virtual machine.\n\n Based on the guide at:\n\n https:\/\/github.com\/dhruvvyas90\/qemu-rpi-kernel\/wiki\/Emulating-Jessie-image-with-4.1.x-kernel"} {"query":"Cleanup the paths and add","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = { \".\" : \"\" } if ( ( 'import' in arg_1 ) and ( 'directory-mapping' in arg_1 [ 'import' ] ) ) : arg_3 = arg_1 [ 'import' ] [ 'directory-mapping' ] arg_4 = arg_3 . keys ( ) arg_4 = sorted ( arg_4 , key = lambda arg_9 : len ( arg_9 ) , reverse = True ) arg_5 = 0 arg_6 = [ ] for arg_7 in arg_2 : arg_8 = arg_7 for arg_9 in arg_4 : arg_10 = arg_3 [ arg_9 ] if arg_7 . startswith ( arg_9 + \"\/\" ) : arg_8 = arg_7 . replace ( arg_9 + \"\/\" , arg_10 ) break arg_5 += files_add ( arg_0 = arg_0 , args = [ arg_7 ] , targetdir = os . path . dirname ( arg_8 ) ) return arg_5","id_":252415,"task_name":"https:\/\/github.com\/pingali\/dgit\/blob\/ecde01f40b98f0719dbcfb54452270ed2f86686d\/dgitcore\/datasets\/auto.py#L280-L312","negative":"receives a UUID via the request and returns either a fresh or an existing dropbox\n for it"} {"query":"The Roessler attractor differential equation","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_1 arg_4 = np . 
zeros ( 3 ) arg_4 [ 0 ] = - arg_0 [ 1 ] - arg_0 [ 2 ] arg_4 [ 1 ] = arg_0 [ 0 ] + arg_1 * arg_0 [ 1 ] arg_4 [ 2 ] = arg_3 + arg_0 [ 2 ] * ( arg_0 [ 0 ] - arg_2 ) return arg_4","id_":252416,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/examples\/example_06_parameter_presetting.py#L56-L72","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Get the expiration date from the database .","positive":"def Func ( arg_0 ) : if arg_0 . _authorization ( ) and arg_0 . is_in_database ( ) and not arg_0 . is_time_older ( ) : arg_1 = PyFunceble . INTERN [ \"whois_db\" ] [ PyFunceble . INTERN [ \"file_to_test\" ] ] [ PyFunceble . INTERN [ \"to_test\" ] ] [ \"expiration_date\" ] if arg_1 : return arg_1 return None","id_":252417,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/database.py#L816-L844","negative":"Init openstack neutron mq\n\n 1. Check if enable listening neutron notification\n 2. Create consumer\n\n :param mq: class ternya.mq.MQ"} {"query":"Prints information about the user and bot version .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ \"Hello %s\" % arg_1 . user ] if hasattr ( arg_0 . _bot . dispatcher , 'auth_manager' ) and arg_1 . user . is_admin is True : arg_3 . append ( \"You are a *bot admin*.\" ) arg_3 . append ( \"Bot version: %s-%s\" % ( arg_0 . _bot . version , arg_0 . _bot . commit ) ) return '\\n' . join ( arg_3 )","id_":252418,"task_name":"https:\/\/github.com\/arcticfoxnv\/slackminion\/blob\/62ea77aba5ac5ba582793e578a379a76f7d26cdb\/slackminion\/plugins\/core\/core.py#L82-L88","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Calculates remaining time as a string","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = datetime . datetime . now ( ) arg_3 = arg_2 - arg_0 . _start_time try : arg_4 = arg_3 . total_seconds ( ) except AttributeError : arg_4 = ( ( arg_3 . microseconds + ( arg_3 . seconds + arg_3 . days * 24 * 3600 ) * 10 ** 6 ) \/ 10.0 ** 6 ) arg_5 = int ( ( arg_0 . _total - arg_0 . _start_index - 1.0 ) * arg_4 \/ float ( arg_1 - arg_0 . _start_index ) - arg_4 ) arg_6 = datetime . timedelta ( seconds = arg_5 ) arg_7 = ', remaining: ' + str ( arg_6 ) except ZeroDivisionError : arg_7 = '' return arg_7","id_":252419,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/utils\/helpful_functions.py#L97-L117","negative":"Compares and exchanges.\n\n Compares the value in the AL, AX, EAX or RAX register (depending on the\n size of the operand) with the first operand (destination operand). If\n the two values are equal, the second operand (source operand) is loaded\n into the destination operand. Otherwise, the destination operand is\n loaded into the AL, AX, EAX or RAX register.\n\n The ZF flag is set if the values in the destination operand and\n register AL, AX, or EAX are equal; otherwise it is cleared. 
The CF, PF,\n AF, SF, and OF flags are set according to the results of the comparison\n operation::\n\n (* accumulator = AL, AX, EAX or RAX, depending on whether *)\n (* a byte, word, a doubleword or a 64bit comparison is being performed*)\n IF accumulator == DEST\n THEN\n ZF = 1\n DEST = SRC\n ELSE\n ZF = 0\n accumulator = DEST\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Get the context for this view .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = { 'gadgets' : arg_0 . _registry , 'columns' : arg_0 . columns , 'rows' : arg_0 . rows , 'column_ratio' : 100 - arg_0 . columns * 2 , 'row_ratio' : 100 - arg_0 . rows * 2 , } arg_2 . update ( arg_1 ) return arg_2","id_":252420,"task_name":"https:\/\/github.com\/praekelt\/django-analytics\/blob\/29c22d03374ccc0ec451650e2c2886d324f6e5c6\/analytics\/views.py#L64-L77","negative":"Retrive an artist with a spotify ID.\n\n Parameters\n ----------\n spotify_id : str\n The ID to search for.\n\n Returns\n -------\n artist : Artist\n The artist from the ID"} {"query":"Plots a set of circles corresponding to a slice through the platonic structure . Copied from twoslice_overlay with comments standaloneness .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = 1.0 , arg_5 = 'white' , arg_6 = 'white' ) : arg_7 = arg_0 . obj_get_positions ( ) arg_8 = arg_0 . obj_get_radii ( ) arg_9 = arg_0 . ishape . shape . tolist ( ) arg_9 . pop ( arg_2 ) if arg_3 is None : arg_10 = plt . figure ( ) arg_11 = 'white' if arg_6 == 'black' else 'black' arg_12 , arg_13 = ( ( 1 , arg_9 [ 1 ] \/ float ( arg_9 [ 0 ] ) ) if arg_9 [ 0 ] > arg_9 [ 1 ] else ( arg_9 [ 0 ] \/ float ( arg_9 [ 1 ] ) , 1 ) ) arg_3 = arg_10 . add_axes ( ( 0 , 0 , arg_12 , arg_13 ) , arg_11 = arg_11 ) arg_14 = np . arange ( len ( arg_7 ) ) [ np . abs ( arg_7 [ : , arg_2 ] - arg_1 ) < arg_8 ] arg_15 = 1.0 for arg_16 in arg_14 : arg_17 = arg_7 [ arg_16 ] . copy ( ) arg_18 = 2 * np . sqrt ( arg_8 [ arg_16 ] ** 2 - ( arg_17 [ arg_2 ] - arg_1 ) ** 2 ) if arg_2 == 0 : arg_19 = 1 arg_20 = 2 elif arg_2 == 1 : arg_19 = 0 arg_20 = 2 elif arg_2 == 2 : arg_19 = 0 arg_20 = 1 arg_21 = Circle ( ( arg_17 [ arg_19 ] \/ arg_15 , arg_17 [ arg_20 ] \/ arg_15 ) , radius = arg_18 \/ 2 \/ arg_15 , fc = arg_6 , ec = arg_5 , alpha = arg_4 ) arg_3 . add_patch ( arg_21 ) plt . axis ( 'equal' ) return arg_3","id_":252421,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/viz\/plots.py#L1056-L1103","negative":"count number of sites with cov=4, and number of variable sites."} {"query":"This generates fake EB light curves .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = { 'period' : arg_4 . uniform ( arg_6 = 0.2 , arg_7 = 99.8 ) , 'pdepth' : arg_4 . uniform ( arg_6 = 1.0e-4 , arg_7 = 0.7 ) , 'pduration' : arg_4 . uniform ( arg_6 = 0.01 , arg_7 = 0.44 ) , 'depthratio' : arg_4 . uniform ( arg_6 = 0.01 , arg_7 = 0.99 ) , 'secphase' : arg_4 . norm ( arg_6 = 0.5 , arg_7 = 0.1 ) } , arg_9 = False , ) : if arg_1 is None : arg_1 = np . full_like ( arg_0 , 0.0 ) if arg_2 is None : arg_2 = np . full_like ( arg_0 , 0.0 ) arg_10 = npr . random ( ) * ( arg_0 . max ( ) - arg_0 . min ( ) ) + arg_0 . min ( ) arg_11 = arg_3 [ 'period' ] . rvs ( size = 1 ) arg_12 = arg_3 [ 'pdepth' ] . rvs ( size = 1 ) arg_13 = arg_3 [ 'pduration' ] . rvs ( size = 1 ) arg_14 = arg_3 [ 'depthratio' ] . rvs ( size = 1 ) arg_15 = arg_3 [ 'secphase' ] . 
rvs ( size = 1 ) if arg_9 and arg_12 < 0.0 : arg_12 = - arg_12 elif not arg_9 and arg_12 > 0.0 : arg_12 = - arg_12 arg_16 , arg_17 , arg_18 , arg_19 , arg_20 = ( eclipses . invgauss_eclipses_func ( [ arg_11 , arg_10 , arg_12 , arg_13 , arg_14 , arg_15 ] , arg_0 , arg_1 , arg_2 ) ) arg_21 = np . argsort ( arg_18 ) arg_22 = arg_18 [ arg_21 ] arg_23 = arg_16 [ arg_21 ] arg_24 = arg_20 [ arg_21 ] arg_25 = { 'vartype' : 'EB' , 'params' : { x : np . asscalar ( y ) for x , y in zip ( [ 'period' , 'epoch' , 'pdepth' , 'pduration' , 'depthratio' ] , [ arg_11 , arg_10 , arg_12 , arg_13 , arg_14 ] ) } , 'times' : arg_22 , 'mags' : arg_23 , 'errs' : arg_24 , 'varperiod' : arg_11 , 'varamplitude' : arg_12 , } return arg_25","id_":252422,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/fakelcs\/generation.py#L273-L396","negative":"Receive a StateUpdate and fan out to Conversations.\n\n Args:\n state_update: hangouts_pb2.StateUpdate instance"} {"query":"Initialize this extension for the given app .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None , ** arg_8 ) : arg_0 . app = arg_1 arg_0 . authorize_callback = arg_6 arg_0 . flow_kwargs = arg_8 if arg_7 is None : arg_7 = dictionary_storage . DictionaryStorage ( session , key = _CREDENTIALS_KEY ) arg_0 . storage = arg_7 if arg_2 is None : arg_2 = arg_1 . config . get ( 'GOOGLE_OAUTH2_SCOPES' , _DEFAULT_SCOPES ) arg_0 . scopes = arg_2 arg_0 . _load_config ( arg_3 , arg_4 , arg_5 ) arg_1 . register_blueprint ( arg_0 . _create_blueprint ( ) )","id_":252423,"task_name":"https:\/\/github.com\/googleapis\/oauth2client\/blob\/50d20532a748f18e53f7d24ccbe6647132c979a9\/oauth2client\/contrib\/flask_util.py#L235-L274","negative":"This function adds the given stream to the logger, but does not check with a ConnectorDB database\n to make sure that the stream exists. Use at your own risk."} {"query":"Sends a PUT request .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , ** arg_4 ) : check_type ( arg_1 , basestring , may_be_none = False ) arg_5 = arg_4 . pop ( 'erc' , EXPECTED_RESPONSE_CODE [ 'PUT' ] ) arg_6 = arg_0 . request ( 'PUT' , arg_1 , arg_5 , arg_2 = arg_2 , arg_3 = arg_3 , ** arg_4 ) return extract_and_parse_json ( arg_6 )","id_":252424,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/webexteamssdk\/restsession.py#L396-L419","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Set context string for serial command . Private setter .","positive":"def Func ( arg_0 , arg_1 ) : if ( len ( arg_0 . m_context ) == 0 ) and ( len ( arg_1 ) >= 7 ) : if arg_1 [ 0 : 7 ] != \"request\" : ekm_log ( \"Context: \" + arg_1 ) arg_0 . m_context = arg_1","id_":252425,"task_name":"https:\/\/github.com\/ekmmetering\/ekmmeters\/blob\/b3748bdf30263bfa46ea40157bdf8df2522e1904\/ekmmeters.py#L1482-L1491","negative":"Removes the video from youtube and from db\n Requires POST"} {"query":"Calls an update but clips radii to be > 0","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_1 = listify ( arg_1 ) arg_2 = listify ( arg_2 ) for arg_3 , arg_4 in enumerate ( arg_1 ) : if ( arg_4 [ - 2 : ] == '-a' ) and ( arg_2 [ arg_3 ] < 0 ) : arg_2 [ arg_3 ] = 0.0 super ( PlatonicSpheresCollection , arg_0 ) . 
Func ( arg_1 , arg_2 )","id_":252426,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/comp\/objs.py#L764-L773","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"plot CDF for important sub - metrics","positive":"def Func ( arg_0 , arg_1 = 'matplotlib' ) : arg_2 = False for arg_3 in arg_0 . percentiles_files : arg_4 = os . path . basename ( arg_3 ) arg_5 = arg_0 . csv_column_map [ arg_3 . replace ( \".percentiles.\" , \".\" ) ] if not arg_0 . check_important_sub_metrics ( arg_5 ) : continue arg_5 = naarad . utils . sanitize_string ( arg_5 ) arg_6 = '.' . join ( arg_4 . split ( '.' ) [ 0 : - 1 ] ) if arg_0 . sub_metric_description and arg_5 in arg_0 . sub_metric_description . keys ( ) : arg_6 += ' (' + arg_0 . sub_metric_description [ arg_5 ] + ')' if arg_0 . sub_metric_unit and arg_5 in arg_0 . sub_metric_unit . keys ( ) : arg_7 = [ PD ( input_csv = arg_3 , csv_column = 1 , series_name = arg_6 , x_label = 'Percentiles' , y_label = arg_5 + ' (' + arg_0 . sub_metric_unit [ arg_5 ] + ')' , precision = None , graph_height = 600 , graph_width = 1200 , graph_type = 'line' ) ] else : arg_7 = [ PD ( input_csv = arg_3 , csv_column = 1 , series_name = arg_6 , x_label = 'Percentiles' , y_label = arg_5 , precision = None , graph_height = 600 , graph_width = 1200 , graph_type = 'line' ) ] arg_2 , arg_8 = Metric . graphing_modules [ arg_1 ] . graph_data_on_the_same_graph ( arg_7 , arg_0 . resource_directory , arg_0 . resource_path , arg_6 ) if arg_2 : arg_0 . plot_files . append ( arg_8 ) return True","id_":252427,"task_name":"https:\/\/github.com\/linkedin\/naarad\/blob\/261e2c0760fd6a6b0ee59064180bd8e3674311fe\/src\/naarad\/metrics\/metric.py#L562-L587","negative":"Extracts start row from the bookmark information"} {"query":"Recursively parse requirements from nested pip files .","positive":"def Func ( arg_0 = '.\/requirements.txt' ) : arg_1 = [ ] with io . open ( os . path . join ( here , 'requirements.txt' ) , encoding = 'utf-8' ) as handle : arg_2 = ( arg_3 . strip ( ) for arg_3 in handle if arg_3 . strip ( ) and not arg_3 . startswith ( '#' ) ) for arg_3 in arg_2 : if arg_3 . startswith ( '-r' ) : arg_1 += Func ( arg_0 = arg_3 [ 3 : ] ) else : arg_1 . append ( arg_3 ) return arg_1","id_":252428,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/setup.py#L23-L41","negative":"Remove key name from bucket set."} {"query":"Handles package lics concluded or declared .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : try : for arg_4 , arg_4 , arg_5 in arg_0 . graph . triples ( ( arg_1 , arg_2 , None ) ) : if ( arg_5 , RDF . type , arg_0 . spdx_namespace [ 'ConjunctiveLicenseSet' ] ) in arg_0 . graph : arg_6 = arg_0 . handle_conjunctive_list ( arg_5 ) arg_3 ( arg_0 . doc , arg_6 ) elif ( arg_5 , RDF . type , arg_0 . spdx_namespace [ 'DisjunctiveLicenseSet' ] ) in arg_0 . graph : arg_6 = arg_0 . handle_disjunctive_list ( arg_5 ) arg_3 ( arg_0 . doc , arg_6 ) else : try : arg_6 = arg_0 . handle_lics ( arg_5 ) arg_3 ( arg_0 . doc , arg_6 ) except SPDXValueError : arg_0 . value_error ( 'PKG_SINGLE_LICS' , arg_5 ) except CardinalityError : arg_0 . more_than_one_error ( 'package {0}' . format ( arg_2 ) )","id_":252429,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/rdf.py#L381-L400","negative":"APEv2 tag value factory.\n\n Use this if you need to specify the value's type manually. 
Binary\n and text data are automatically detected by APEv2.__setitem__."} {"query":"Unescape a string escaped with escape escape_char must be the same as that used in the call to escape .","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : if isinstance ( arg_0 , bytes ) : arg_0 = arg_0 . decode ( 'utf8' ) arg_3 = re . compile ( re . escape ( arg_1 ) . encode ( 'utf8' ) + b'([a-z0-9]{2})' , re . IGNORECASE ) arg_4 = arg_3 . subn ( _Func_char , arg_0 . encode ( 'utf8' ) ) [ 0 ] return arg_4 . decode ( 'utf8' )","id_":252430,"task_name":"https:\/\/github.com\/minrk\/escapism\/blob\/35f4c194ad6de2bc3339bb8b0e522dca989143ff\/escapism.py#L91-L102","negative":"Creates a layer from its config.\n\n This method is the reverse of `get_config`, capable of instantiating the\n same layer from the config dictionary.\n\n Args:\n config: A Python dictionary, typically the output of `get_config`.\n\n Returns:\n layer: A layer instance."} {"query":"Read a nbytes bytes long big endian signed integer from data starting at offset","positive":"def Func ( arg_0 , arg_1 = 32 , arg_2 = 0 ) : assert isinstance ( arg_0 , ( bytearray , Array ) ) arg_3 = ABI . _readBE ( arg_0 , arg_1 , arg_2 = True ) arg_3 = Operators . SEXTEND ( arg_3 , arg_1 * 8 , ( arg_1 + arg_2 ) * 8 ) if not issymbolic ( arg_3 ) : if arg_3 & ( 1 << ( arg_1 * 8 - 1 ) ) : arg_3 = - ( ( ( ~ arg_3 ) + 1 ) & ( ( 1 << ( arg_1 * 8 ) ) - 1 ) ) return arg_3","id_":252431,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/ethereum\/abi.py#L348-L363","negative":"Decorator that converts a report view function into something that\n displays a Report.\n\n Arguments:\n title (str):\n The title of the report.\n form_type (Optional[forms.Form]):\n A form class that can make this report display things. If not\n supplied, no form will be displayed."} {"query":"Get the filesystem path to a file that contains OpenSSL - compatible CA certs .","positive":"def Func ( arg_0 = None , arg_1 = 24 , arg_2 = None ) : arg_3 , arg_4 = _ca_path ( arg_0 ) if arg_4 and _cached_path_needs_update ( arg_3 , arg_1 ) : arg_5 = set ( ) arg_6 = '2.5.29.37.0' arg_7 = '1.2.840.113635.100.1.3' arg_8 = '1.3.6.1.5.5.7.3.1' with path_lock : if _cached_path_needs_update ( arg_3 , arg_1 ) : with open ( arg_3 , 'wb' ) as f : for arg_9 , arg_10 , arg_11 in extract_from_system ( arg_2 , True ) : if sys . platform == 'darwin' : if arg_10 != arg_5 and arg_6 not in arg_10 and arg_7 not in arg_10 : if arg_2 : arg_2 ( Certificate . load ( arg_9 ) , 'implicitly distrusted for TLS' ) continue if arg_11 != arg_5 and ( arg_7 in arg_11 or arg_6 in arg_11 ) : if arg_2 : arg_2 ( Certificate . load ( arg_9 ) , 'explicitly distrusted for TLS' ) continue elif sys . platform == 'win32' : if arg_10 != arg_5 and arg_6 not in arg_10 and arg_8 not in arg_10 : if arg_2 : arg_2 ( Certificate . load ( arg_9 ) , 'implicitly distrusted for TLS' ) continue if arg_11 != arg_5 and ( arg_8 in arg_11 or arg_6 in arg_11 ) : if arg_2 : arg_2 ( Certificate . load ( arg_9 ) , 'explicitly distrusted for TLS' ) continue if arg_2 : arg_2 ( Certificate . load ( arg_9 ) , None ) f . 
write ( armor ( 'CERTIFICATE' , arg_9 ) ) if not arg_3 : raise CACertsError ( 'No CA certs found' ) return arg_3","id_":252432,"task_name":"https:\/\/github.com\/wbond\/oscrypto\/blob\/af778bf1c88bf6c4a7342f5353b130686a5bbe1c\/oscrypto\/trust_list.py#L67-L140","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"This reads in a K2 lightcurve in CSV format . Transparently reads gzipped files .","positive":"def Func ( arg_0 ) : if '.gz' in os . path . basename ( arg_0 ) : LOGINFO ( 'reading gzipped K2 LC: %s' % arg_0 ) arg_1 = gzip . open ( arg_0 , 'rb' ) else : LOGINFO ( 'reading K2 LC: %s' % arg_0 ) arg_1 = open ( arg_0 , 'rb' ) arg_2 = arg_1 . read ( ) . decode ( ) arg_1 . close ( ) arg_3 = arg_2 . index ( '# LIGHTCURVE\\n' ) arg_4 = arg_2 [ : arg_3 + 12 ] arg_5 = arg_2 [ arg_3 + 13 : ] . split ( '\\n' ) arg_5 = [ x . split ( ',' ) for x in arg_5 if len ( x ) > 0 ] arg_6 = _parse_csv_header ( arg_4 ) arg_5 = list ( zip ( * arg_5 ) ) for arg_7 , arg_8 in enumerate ( arg_6 [ 'columns' ] ) : arg_6 [ arg_8 . lower ( ) ] = np . array ( [ COLUMNDEFS [ arg_8 ] [ 2 ] ( x ) for x in arg_5 [ arg_7 ] ] ) arg_6 [ 'columns' ] = [ x . lower ( ) for x in arg_6 [ 'columns' ] ] return arg_6","id_":252433,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/hatsurveys\/k2hat.py#L493-L545","negative":"give the user an ipython shell, optionally with an endpoint of choice."} {"query":"Create a leaflet viewer html file for viewing idaho images .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . describe_images ( arg_1 ) if len ( arg_3 ) > 0 : arg_4 = '' for arg_5 , arg_6 in arg_3 . items ( ) : for arg_7 , arg_8 in arg_6 [ 'parts' ] . items ( ) : arg_9 = len ( list ( arg_8 . keys ( ) ) ) arg_10 = None if arg_9 == 1 : arg_10 = [ p for p in list ( arg_8 . keys ( ) ) ] [ 0 ] arg_11 = '' elif arg_9 == 2 : arg_10 = [ p for p in list ( arg_8 . keys ( ) ) if p is not 'PAN' ] [ 0 ] arg_11 = arg_8 [ 'PAN' ] [ 'id' ] if not arg_10 : arg_0 . logger . debug ( \"Cannot find part for idaho image.\" ) continue arg_12 = { 'RGBN' : '0,1,2' , 'WORLDVIEW_8_BAND' : '4,2,1' , 'PAN' : '0' } . get ( arg_10 , '0,1,2' ) arg_13 = arg_8 [ arg_10 ] [ 'boundstr' ] arg_14 = from_wkt ( arg_13 ) arg_15 = arg_8 [ arg_10 ] [ 'bucket' ] arg_16 = arg_8 [ arg_10 ] [ 'id' ] arg_17 , arg_18 , arg_19 , arg_20 = arg_14 . bounds arg_4 += \"addLayerToMap('%s','%s',%s,%s,%s,%s,'%s');\\n\" % ( arg_15 , arg_16 , arg_17 , arg_18 , arg_19 , arg_20 , arg_11 ) arg_21 = os . path . realpath ( os . path . join ( os . getcwd ( ) , os . path . dirname ( __file__ ) ) ) try : with open ( os . path . join ( arg_21 , 'leafletmap_template.html' ) , 'r' ) as htmlfile : arg_22 = htmlfile . read ( ) . decode ( \"utf8\" ) except AttributeError : with open ( os . path . join ( arg_21 , 'leafletmap_template.html' ) , 'r' ) as htmlfile : arg_22 = htmlfile . read ( ) arg_22 = arg_22 . replace ( 'FUNCTIONSTRING' , arg_4 ) arg_22 = arg_22 . replace ( 'CENTERLAT' , str ( arg_18 ) ) arg_22 = arg_22 . replace ( 'CENTERLON' , str ( arg_17 ) ) arg_22 = arg_22 . replace ( 'BANDS' , arg_12 ) arg_22 = arg_22 . replace ( 'TOKEN' , arg_0 . gbdx_connection . access_token ) with codecs . open ( arg_2 , 'w' , 'utf8' ) as outputfile : arg_0 . logger . debug ( \"Saving %s\" % arg_2 ) outputfile . write ( arg_22 ) else : print ( 'No items returned.' 
)","id_":252434,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/idaho.py#L296-L360","negative":"Configure the set of plugins with the given options\n and config instance. After configuration, disabled plugins\n are removed from the plugins list."} {"query":"Fetch commit data for specified event .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . github arg_3 = arg_0 . options . user arg_4 = arg_0 . options . project arg_5 , arg_6 = arg_2 . repos [ arg_3 ] [ arg_4 ] . git . commits [ arg_1 [ \"commit_id\" ] ] . get ( ) if arg_5 == 200 : return arg_6 arg_0 . raise_GitHubError ( arg_5 , arg_6 , arg_2 . getheaders ( ) )","id_":252435,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/fetcher.py#L309-L326","negative":"Set renewal, rebinding times."} {"query":"Run the poll loop . This method never returns .","positive":"def Func ( arg_0 ) : try : from _winapi import WAIT_OBJECT_0 , INFINITE except ImportError : from _subprocess import WAIT_OBJECT_0 , INFINITE arg_1 = [ ] if arg_0 . interrupt_handle : arg_1 . append ( arg_0 . interrupt_handle ) if arg_0 . parent_handle : arg_1 . append ( arg_0 . parent_handle ) arg_2 = platform . architecture ( ) [ 0 ] arg_3 = ctypes . c_int64 if arg_2 . startswith ( '64' ) else ctypes . c_int while True : arg_4 = ctypes . windll . kernel32 . WaitForMultipleObjects ( len ( arg_1 ) , ( arg_3 * len ( arg_1 ) ) ( * arg_1 ) , False , INFINITE ) if WAIT_OBJECT_0 <= arg_4 < len ( arg_1 ) : arg_5 = arg_1 [ arg_4 - WAIT_OBJECT_0 ] if arg_5 == arg_0 . interrupt_handle : interrupt_main ( ) elif arg_5 == arg_0 . parent_handle : os . _exit ( 1 ) elif arg_4 < 0 : warn ( \"\"\"Parent poll failed. If the frontend dies, the kernel may be left Funcning. Please let us know about your system (bitness, Python, etc.) at ipython-dev@scipy.org\"\"\" ) return","id_":252436,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/zmq\/parentpoller.py#L100-L139","negative":"Pickle the Dataset instance to the provided file."} {"query":"Append a render function and the parameters to pass an equivilent PathElement or the PathElement itself .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . _render_funcs . append ( arg_1 ) arg_0 . _elements . append ( arg_2 )","id_":252437,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/data\/bezier.py#L92-L98","negative":"Extract Packed Floating-Point Values\n\n Extracts 128-bits of packed floating-point values from the source\n operand (second operand) at an 128-bit offset from imm8[0] into the\n destination operand (first operand). The destination may be either an\n XMM register or an 128-bit memory location."} {"query":"Read the data encoding the UsernamePasswordCredential struct and decode it into its constituent parts .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_1_0 ) : super ( UsernamePasswordCredential , arg_0 ) . Func ( arg_1 , arg_2 = arg_2 ) arg_6 = BytearrayStream ( arg_1 . Func ( arg_0 . length ) ) if arg_0 . is_tag_next ( arg_3 . Tags . USERNAME , arg_6 ) : arg_0 . _username = primitives . TextString ( tag = arg_3 . Tags . USERNAME ) arg_0 . _username . Func ( arg_6 , arg_2 = arg_2 ) else : raise ValueError ( \"Username\/password credential encoding missing the username.\" ) if arg_0 . is_tag_next ( arg_3 . 
Tags . PASSWORD , arg_6 ) : arg_0 . _password = primitives . TextString ( tag = arg_3 . Tags . PASSWORD ) arg_0 . _password . Func ( arg_6 , arg_2 = arg_2 ) arg_0 . is_oversized ( arg_6 )","id_":252438,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/objects.py#L851-L889","negative":"Return the value to player; 1 for win, -1 for loss, 0 otherwise."} {"query":"File upload functionality","positive":"def Func ( arg_0 ) : arg_1 = { \"success\" : [ ] , \"failure\" : [ ] , \"unchanged\" : [ ] } arg_0 . _create_prelim ( ) for arg_2 in arg_0 . payload : if \"key\" not in arg_2 : arg_1 [ \"failure\" ] . append ( arg_2 ) continue arg_3 = str ( arg_0 . basedir . joinpath ( arg_2 [ \"filename\" ] ) ) arg_4 = arg_0 . _get_auth ( arg_3 , arg_2 [ \"key\" ] , md5 = arg_2 . get ( \"md5\" , None ) ) if arg_4 . get ( \"exists\" ) : arg_1 [ \"unchanged\" ] . append ( arg_2 ) continue arg_0 . _Func_file ( arg_4 , arg_3 , arg_2 [ \"key\" ] ) arg_1 [ \"success\" ] . append ( arg_2 ) return arg_1","id_":252439,"task_name":"https:\/\/github.com\/urschrei\/pyzotero\/blob\/b378966b30146a952f7953c23202fb5a1ddf81d9\/pyzotero\/zotero.py#L1939-L1961","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Say something in the evening","positive":"def Func ( arg_0 , arg_1 = \"Dinner is served\" , arg_2 : arg_3 = False ) : return arg_0 . helper . output ( arg_1 , arg_2 )","id_":252440,"task_name":"https:\/\/github.com\/yaz\/yaz\/blob\/48c842fe053bf9cd6446c4b33fb081c65339aa48\/examples\/02_food.py#L56-L58","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"This method determines if we should apply our namespace indentation check .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = IsForwardClassDeclaration ( arg_2 , arg_3 ) if not ( arg_1 or arg_4 ) : return False if IsMacroDefinition ( arg_2 , arg_3 ) : return False return IsBlockInNameSpace ( arg_0 , arg_4 )","id_":252441,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L5889-L5916","negative":"Get datetime of the next retry if the task instance fails. For exponential\n backoff, retry_delay is used as base and will be converted to seconds."} {"query":"Creates a payload for the redis server .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] for arg_3 in arg_1 : arg_4 = arg_3 [ \"direction\" ] arg_5 = '' arg_6 = '' if arg_3 . get ( \"remote_ip_prefix\" ) : arg_7 = arg_3 [ \"remote_ip_prefix\" ] if arg_4 == \"ingress\" : arg_5 = arg_0 . _convert_remote_network ( arg_7 ) else : if ( Capabilities . EGRESS not in CONF . QUARK . environment_capabilities ) : raise q_exc . EgressSecurityGroupRulesNotEnabled ( ) else : arg_6 = arg_0 . _convert_remote_network ( arg_7 ) arg_8 = { } arg_9 = protocols . 
PROTOCOL_MAP [ arg_3 [ \"ethertype\" ] ] if arg_3 [ \"protocol\" ] == arg_9 [ \"icmp\" ] : arg_8 [ \"icmp type\" ] = arg_3 [ \"port_range_min\" ] arg_8 [ \"icmp code\" ] = arg_3 [ \"port_range_max\" ] else : arg_8 [ \"port start\" ] = arg_3 [ \"port_range_min\" ] arg_8 [ \"port end\" ] = arg_3 [ \"port_range_max\" ] arg_10 = { \"ethertype\" : arg_3 [ \"ethertype\" ] , \"protocol\" : arg_3 [ \"protocol\" ] , \"source network\" : arg_5 , \"destination network\" : arg_6 , \"action\" : \"allow\" , \"direction\" : arg_4 } arg_10 . update ( arg_8 ) arg_2 . append ( arg_10 ) return arg_2","id_":252442,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/cache\/security_groups_client.py#L49-L93","negative":"Releases renderer resources associated with this image."} {"query":"Parse the result element of the observation type","positive":"def Func ( arg_0 ) : if arg_0 . result is not None : arg_1 = arg_0 . result . find ( nspv ( \"wml2:MeasurementTimeseries\" ) ) arg_0 . result = MeasurementTimeseries ( arg_1 )","id_":252443,"task_name":"https:\/\/github.com\/geopython\/OWSLib\/blob\/96d47842401a129f1e86fa9f66dccef5a5a6872c\/owslib\/swe\/observation\/waterml2.py#L36-L41","negative":"Process the logic and structuration of the mining database."} {"query":"Return a circuit with a barrier before last measurements .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ 'measure' , 'barrier' ] arg_3 = [ ] for arg_4 in arg_1 . named_nodes ( * arg_2 ) : arg_5 = True for arg_6 , arg_7 in arg_1 . bfs_successors ( arg_4 ) : if any ( arg_8 . type == 'op' and arg_8 . name not in arg_2 for arg_8 in arg_7 ) : arg_5 = False break if arg_5 : arg_3 . append ( arg_4 ) if not arg_3 : return arg_1 arg_9 = DAGCircuit ( ) for arg_10 in arg_1 . qregs . values ( ) : arg_9 . add_qreg ( arg_10 ) for arg_11 in arg_1 . cregs . values ( ) : arg_9 . add_creg ( arg_11 ) arg_12 = set ( arg_15 . qargs [ 0 ] for arg_15 in arg_3 ) arg_9 . apply_operation_back ( Barrier ( len ( arg_12 ) ) , list ( arg_12 ) , [ ] ) arg_13 = [ node for node in arg_1 . topological_op_nodes ( ) if node in set ( arg_3 ) ] for arg_14 in arg_13 : arg_9 . apply_operation_back ( arg_14 . op , arg_14 . qargs , arg_14 . cargs ) for arg_15 in arg_3 : arg_1 . remove_op_node ( arg_15 ) arg_1 . extend_back ( arg_9 ) arg_16 = MergeAdjacentBarriers ( ) return arg_16 . Func ( arg_1 )","id_":252444,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/transpiler\/passes\/mapping\/barrier_before_final_measurements.py#L25-L76","negative":"use values in opts data to generate instances of publication services."} {"query":"Convert config options to stdin args .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ( '--extra-settings' , '--languages' , '--requirements' , '--template' , '--timezone' ) arg_3 = [ ] for arg_4 , arg_5 in arg_0 . items ( SECTION ) : arg_6 = '--{0}' . format ( arg_4 ) arg_7 = arg_1 . _option_string_actions [ arg_6 ] if arg_7 . const : try : if arg_0 . getboolean ( SECTION , arg_4 ) : arg_3 . append ( arg_6 ) except ValueError : arg_3 . extend ( [ arg_6 , arg_5 ] ) elif any ( [ arg_8 for arg_8 in arg_2 if arg_8 in arg_7 . option_strings ] ) : if arg_5 != '' : arg_3 . extend ( [ arg_6 , arg_5 ] ) else : arg_3 . 
extend ( [ arg_6 , arg_5 ] ) return arg_3","id_":252445,"task_name":"https:\/\/github.com\/nephila\/djangocms-installer\/blob\/9fec66d5f8b1e9a0f3c0ec66dd777db578fab07e\/djangocms_installer\/config\/ini.py#L96-L123","negative":"Unregister an extension code. For testing only."} {"query":"Basic mathematical operation to apply operator on column_1 and column_2 Both can be either a number or the name of a column of df Will create a new column named new_column","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : if not isinstance ( arg_2 , ( str , int , float ) ) : raise TypeError ( f'column_1 must be a string, an integer or a float' ) if not isinstance ( arg_3 , ( str , int , float ) ) : raise TypeError ( f'column_2 must be a string, an integer or a float' ) if isinstance ( arg_2 , str ) : arg_2 = arg_0 [ arg_2 ] if isinstance ( arg_3 , str ) : arg_3 = arg_0 [ arg_3 ] arg_5 = getattr ( _operator , arg_4 ) arg_0 [ arg_1 ] = arg_5 ( arg_2 , arg_3 ) return arg_0","id_":252446,"task_name":"https:\/\/github.com\/ToucanToco\/toucan-data-sdk\/blob\/c3ca874e1b64f4bdcc2edda750a72d45d1561d8a\/toucan_data_sdk\/utils\/postprocess\/math.py#L7-L24","negative":"Request the api endpoint to retrieve information about the inventory\n\n :return: Main Collection\n :rtype: Collection"} {"query":"Create a heatmaps object from an heatmap array containing values ranging from 0 to 255 .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0.0 , arg_3 = 1.0 ) : arg_4 = arg_0 . astype ( np . float32 ) \/ 255.0 return HeatmapsOnImage . from_0to1 ( arg_4 , arg_1 , arg_2 = arg_2 , arg_3 = arg_3 )","id_":252447,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/heatmaps.py#L409-L441","negative":"Return the metadata for the specified course run.\n\n The course run needs to be included in the specified EnterpriseCustomerCatalog\n in order for metadata to be returned from this endpoint."} {"query":"Find and return the topology given its cluster environ topology name and an optional role . Raises exception if topology is not found or more than one are found .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = list ( filter ( lambda t : t . name == arg_4 and t . cluster == arg_1 and ( not arg_2 or t . execution_state . role == arg_2 ) and t . environ == arg_3 , arg_0 . topologies ) ) if not arg_5 or len ( arg_5 ) > 1 : if arg_2 is not None : raise Exception ( \"Topology not found for {0}, {1}, {2}, {3}\" . format ( arg_1 , arg_2 , arg_3 , arg_4 ) ) else : raise Exception ( \"Topology not found for {0}, {1}, {2}\" . 
format ( arg_1 , arg_3 , arg_4 ) ) return arg_5 [ 0 ]","id_":252448,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/tracker.py#L161-L180","negative":"Cycles through notifications with latest results from data feeds."} {"query":"Will make any functions return an iterable objects by wrapping its result in a list .","positive":"def Func ( arg_0 ) : def wrapper ( * arg_1 , ** arg_2 ) : arg_3 = arg_0 ( * arg_1 , ** arg_2 ) if hasattr ( arg_3 , '__iter__' ) : return arg_3 else : return [ arg_3 ] return wrapper","id_":252449,"task_name":"https:\/\/github.com\/RRZE-HPC\/kerncraft\/blob\/c60baf8043e4da8d8d66da7575021c2f4c6c78af\/kerncraft\/kernel.py#L157-L165","negative":"Plot the temporal distance cumulative density function.\n\n Returns\n -------\n fig: matplotlib.Figure"} {"query":"Add conversation tab if not present and optionally switch to it .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = arg_0 . get_conv_widget ( arg_1 ) arg_0 . _tabbed_window . set_tab ( arg_3 , arg_2 = arg_2 , title = arg_3 . title )","id_":252450,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/ui\/__main__.py#L209-L213","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"A hack to allow us to rename paths in a case - insensitive filesystem like HFS .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = tempfile . mkdtemp ( ) shutil . rmtree ( arg_2 ) shutil . move ( arg_0 , arg_2 ) shutil . move ( arg_2 , arg_1 )","id_":252451,"task_name":"https:\/\/github.com\/gamechanger\/dusty\/blob\/dc12de90bb6945023d6f43a8071e984313a1d984\/dusty\/path.py#L31-L36","negative":"Stop the profiler."} {"query":"A decorator for annotating a function that can take the selected properties from a config_value in to an instance of a custom type .","positive":"def Func ( arg_0 ) : arg_1 = resolve_config_cls_arg ( arg_0 ) check . param_invariant ( arg_1 . is_selector , 'config_cls' ) def _wrap ( arg_2 ) : def _selector ( arg_3 , arg_4 ) : arg_5 , arg_6 = single_item ( arg_4 ) return arg_2 ( arg_3 , arg_5 , arg_6 ) return _create_input_schema ( arg_1 , _selector ) return _wrap","id_":252452,"task_name":"https:\/\/github.com\/dagster-io\/dagster\/blob\/4119f8c773089de64831b1dfb9e168e353d401dc\/python_modules\/dagster\/dagster\/core\/types\/config_schema.py#L85-L103","negative":"Release the semaphore\n\n :param tag: A tag identifying what is releasing the semaphore\n :param acquire_token: The token returned from when the semaphore was\n acquired. Note that this is not really needed to directly use this\n class but is needed for API compatibility with the\n SlidingWindowSemaphore implementation."} {"query":"Runs statistical profiler on a function .","positive":"def Func ( arg_0 ) : with _StatProfiler ( ) as prof : arg_1 = arg_0 . _run_object ( * arg_0 . _run_args , ** arg_0 . _run_kwargs ) arg_2 = prof . call_tree return { 'objectName' : arg_0 . _object_name , 'sampleInterval' : _SAMPLE_INTERVAL , 'runTime' : prof . run_time , 'callStats' : arg_2 , 'totalSamples' : arg_2 . get ( 'sampleCount' , 0 ) , 'result' : arg_1 , 'timestamp' : int ( time . 
time ( ) ) }","id_":252453,"task_name":"https:\/\/github.com\/nvdv\/vprof\/blob\/4c3ff78f8920ab10cb9c00b14143452aa09ff6bb\/vprof\/flame_graph.py#L169-L183","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Check whether a function exists or not and return its config","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . get ( 'function_name' ) arg_2 = arg_0 . get ( 'profile' ) arg_3 = arg_0 . get ( 'aws_access_key_id' ) arg_4 = arg_0 . get ( 'aws_secret_access_key' ) arg_5 = get_client ( 'lambda' , arg_2 , arg_3 , arg_4 , arg_0 . get ( 'region' ) , ) try : return arg_5 . get_function ( FunctionName = arg_1 ) except arg_5 . exceptions . ResourceNotFoundException as e : if 'Function not found' in str ( e ) : return False","id_":252454,"task_name":"https:\/\/github.com\/nficano\/python-lambda\/blob\/b0bd25404df70212d7fa057758760366406d64f2\/aws_lambda\/aws_lambda.py#L729-L745","negative":"Gets back all response headers."} {"query":"Returns key contents and modify time","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . _changed ( ) : arg_0 . _read ( ) if arg_1 in arg_0 . store : return tuple ( arg_0 . store [ arg_1 ] ) else : return None","id_":252455,"task_name":"https:\/\/github.com\/calston\/tensor\/blob\/7c0c99708b5dbff97f3895f705e11996b608549d\/tensor\/utils.py#L349-L357","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"Creates a trace activity based on this activity .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 = None , arg_6 : arg_3 = None , arg_7 : arg_3 = None , ) -> arg_1 : arg_8 = ( ChannelAccount ( id = arg_0 . recipient . id , arg_2 = arg_0 . recipient . name ) if arg_0 . recipient is not None else ChannelAccount ( ) ) if arg_6 is None and arg_4 is not None : arg_6 = type ( arg_4 ) . __name__ arg_9 = arg_1 ( type = ActivityTypes . trace , timestamp = datetime . utcnow ( ) , arg_8 = arg_8 , recipient = ChannelAccount ( id = arg_0 . from_property . id , arg_2 = arg_0 . from_property . name ) , reply_to_id = arg_0 . id , service_url = arg_0 . service_url , channel_id = arg_0 . channel_id , conversation = ConversationAccount ( is_group = arg_0 . conversation . is_group , id = arg_0 . conversation . id , arg_2 = arg_0 . conversation . name , ) , arg_2 = arg_2 , arg_7 = arg_7 , arg_6 = arg_6 , arg_4 = arg_4 , ) return arg_9","id_":252456,"task_name":"https:\/\/github.com\/Microsoft\/botbuilder-python\/blob\/274663dd91c811bae6ac4488915ba5880771b0a7\/libraries\/botbuilder-ai\/botbuilder\/ai\/luis\/activity_util.py#L16-L69","negative":"Attempts to fetch streams repeatedly\n until some are returned or limit hit."} {"query":"Run the GIES algorithm .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . arguments [ '{SCORE}' ] = arg_0 . scores [ arg_0 . score ] arg_0 . arguments [ '{VERBOSE}' ] = str ( arg_0 . verbose ) . upper ( ) arg_3 = arg_0 . _run_gies ( arg_1 , verbose = arg_0 . verbose ) return nx . relabel_nodes ( nx . DiGraph ( arg_3 ) , { arg_4 : arg_5 for arg_4 , arg_5 in enumerate ( arg_1 . columns ) } )","id_":252457,"task_name":"https:\/\/github.com\/Diviyan-Kalainathan\/CausalDiscoveryToolbox\/blob\/be228b078ba9eb76c01b3ccba9a1c0ad9e9e5ed1\/cdt\/causality\/graph\/GIES.py#L128-L144","negative":"Creates and connects the underlying text widget."} {"query":"is this token valid?","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = time . 
time ( ) if 'Bearer ' in arg_1 : arg_1 = arg_1 [ 7 : ] arg_3 = None for arg_4 in arg_0 . secrets : try : arg_3 = jwt . decode ( arg_1 , arg_4 ) break except jwt . DecodeError : continue except jwt . ExpiredSignatureError : raise JwtFailed ( \"Jwt expired\" ) if not arg_3 : raise JwtFailed ( \"Jwt cannot be decoded\" ) arg_5 = arg_3 . get ( 'exp' ) if not arg_5 : raise JwtFailed ( \"Jwt missing expiration (exp)\" ) if arg_2 - arg_5 > arg_0 . age : raise JwtFailed ( \"Jwt bad expiration - greater than I want to accept\" ) arg_6 = arg_3 . get ( 'jti' ) if not arg_6 : raise JwtFailed ( \"Jwt missing one-time id (jti)\" ) if arg_0 . already_used ( arg_6 ) : raise JwtFailed ( \"Jwt re-use disallowed (jti={})\" . format ( arg_6 ) ) return arg_3","id_":252458,"task_name":"https:\/\/github.com\/srevenant\/onetimejwt\/blob\/f3ed561253eb4a8e1522c64f59bf64d275e9d315\/onetimejwt\/__init__.py#L140-L174","negative":"Load a configuration module and return a Config"} {"query":"Fetch the comments of a given event .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = urijoin ( arg_1 , arg_0 . REVENTS , arg_2 , arg_0 . RCOMMENTS ) arg_4 = { arg_0 . PPAGE : arg_0 . max_items } for arg_5 in arg_0 . _fetch ( arg_3 , arg_4 ) : yield arg_5","id_":252459,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/meetup.py#L395-L405","negative":"Reshape input and output dimensions of operator.\n\n Arg:\n input_dims (tuple): new subsystem input dimensions.\n output_dims (tuple): new subsystem output dimensions.\n\n Returns:\n Operator: returns self with reshaped input and output dimensions.\n\n Raises:\n QiskitError: if combined size of all subsystem input dimension or\n subsystem output dimensions is not constant."} {"query":"Encapsulate characters to make markdown look as expected .","positive":"def Func ( arg_0 ) : arg_0 . replace ( '\\\\' , '\\\\\\\\' ) arg_1 = re . sub ( \"([<>*_()\\[\\]#])\" , r\"\\\\\\1\" , arg_0 ) return arg_1","id_":252460,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/generator.py#L207-L218","negative":"Save an image to self.storage at `save_path`.\n\n Arguments:\n `imagefile`: Raw image data, typically a BytesIO instance.\n `save_path`: The path within self.storage where the image should\n be saved.\n `file_ext`: The file extension of the image-to-be-saved.\n `mime_type`: A valid image mime type (as found in\n versatileimagefield.utils)"} {"query":"Read resource information into self . _cache for cached access .","positive":"def Func ( arg_0 ) : arg_0 . provider . _count_get_resource_instFunc += 1 arg_1 , arg_2 = arg_0 . provider . _split_path ( arg_0 . path ) arg_3 = \"Unknown\" arg_4 = \"\" arg_5 = \"text\/html\" if arg_1 is None : arg_3 = \"Database\" elif arg_2 is None : arg_3 = \"Database Table\" else : arg_5 = \"text\/csv\" if arg_2 == \"_ENTIRE_CONTENTS\" : arg_3 = \"Database Table Contents\" arg_4 = \"CSV Representation of Table Contents\" else : arg_3 = \"Database Record\" arg_4 = \"Attributes available as properties\" arg_6 = arg_2 is None arg_0 . _cache = { \"content_length\" : None , \"contentType\" : arg_5 , \"created\" : time . time ( ) , \"display_name\" : arg_0 . name , \"etag\" : hashlib . md5 ( ) . update ( arg_0 . path ) . hexdigest ( ) , \"modified\" : None , \"support_ranges\" : False , \"display_info\" : { \"type\" : arg_3 , \"typeComment\" : arg_4 } , } if not arg_6 : arg_0 . _cache [ \"modified\" ] = time . 
time ( ) _logger . debug ( \"---> Func, nc=%s\" % arg_0 . provider . _countFuncConnection )","id_":252461,"task_name":"https:\/\/github.com\/mar10\/wsgidav\/blob\/cec0d84222fc24bea01be1cea91729001963f172\/wsgidav\/samples\/mysql_dav_provider.py#L90-L136","negative":"Shut down the server.\n\n This method checks if the H2O cluster is still running, and if it does shuts it down (via a REST API call).\n\n :param prompt: A logical value indicating whether to prompt the user before shutting down the H2O server."} {"query":"Called when builder group collect files Resolves absolute url if relative passed","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_0 . abs_path : arg_3 = utils . prepare_path ( arg_0 . rel_bundle_path ) arg_0 . abs_bundle_path = utils . prepare_path ( [ arg_2 . config . input_dir , arg_3 ] ) arg_0 . abs_path = True arg_0 . input_dir = arg_2 . config . input_dir","id_":252462,"task_name":"https:\/\/github.com\/Rikanishu\/static-bundle\/blob\/2f6458cb9d9d9049b4fd829f7d6951a45d547c68\/static_bundle\/bundles.py#L48-L60","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Plots a probability distribution for the event of making a profitable trade .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = np . linspace ( 0 , 1. , 500 ) arg_0 [ 'profitable' ] = arg_0 . pnl > 0 arg_3 = sp . stats . beta ( arg_0 . profitable . sum ( ) , ( ~ arg_0 . profitable ) . sum ( ) ) arg_4 = arg_3 . pdf ( arg_2 ) arg_5 = arg_3 . ppf ( .025 ) arg_6 = arg_3 . ppf ( .975 ) arg_7 = arg_3 . ppf ( .001 ) arg_8 = arg_3 . ppf ( .999 ) if arg_1 is None : arg_1 = plt . subplot ( ) arg_1 . plot ( arg_2 , arg_4 ) arg_1 . axvline ( arg_5 , color = '0.5' ) arg_1 . axvline ( arg_6 , color = '0.5' ) arg_1 . set_xlabel ( 'Probability of making a profitable decision' ) arg_1 . set_ylabel ( 'Belief' ) arg_1 . set_xlim ( arg_7 , arg_8 ) arg_1 . set_ylim ( ( 0 , arg_4 . max ( ) + 1. ) ) return arg_1","id_":252463,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/plotting.py#L1813-L1857","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"source record and index must have been set","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = tuple ( ( ref , arg_1 . initial_hook_value ) for ref in arg_1 . hook_references ) for arg_3 in arg_2 : if arg_3 in arg_0 . _record_hooks : arg_1 . set_target ( target_record = arg_0 . _record_hooks [ arg_3 ] . target_record ) break else : for arg_3 in arg_2 : if arg_3 in arg_0 . _table_hooks : arg_1 . set_target ( target_table = arg_0 . _table_hooks [ arg_3 ] ) break else : arg_4 = arg_1 . source_record . get_field_descriptor ( arg_1 . source_index ) raise FieldValidationError ( f\"No object found with any of given references : {keys}. \" f\"{field_descriptor.get_error_location_message(link.initial_hook_value)}\" ) if arg_1 . source_record not in arg_0 . _links_by_source : arg_0 . _links_by_source [ arg_1 . source_record ] = set ( ) arg_0 . _links_by_source [ arg_1 . source_record ] . add ( arg_1 ) if arg_1 . target not in arg_0 . _links_by_target : arg_0 . _links_by_target [ arg_1 . target ] = set ( ) arg_0 . _links_by_target [ arg_1 . target ] . 
add ( arg_1 )","id_":252464,"task_name":"https:\/\/github.com\/openergy\/oplus\/blob\/f095868d1990c1d126e906ada6acbab26348b3d3\/oplus\/epm\/relations_manager.py#L65-L99","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Attempts to get a local protocol by connection identifier .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . localFactories : try : return arg_2 . protocols [ arg_1 ] except KeyError : continue raise NoSuchConnection ( )","id_":252465,"task_name":"https:\/\/github.com\/lvh\/txampext\/blob\/a7d6cb9f1e9200dba597378cd40eb6a2096d4fd9\/txampext\/multiplexing.py#L363-L373","negative":"Builds fake MNIST-style data for unit testing."} {"query":"Tells the CPU to set up a concrete unicorn emulator and use it to execute instructions until target is reached .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) : arg_0 . _concrete = True arg_0 . _break_unicorn_at = arg_1 if arg_0 . emu : arg_0 . emu . _stop_at = arg_1","id_":252466,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/abstractcpu.py#L583-L593","negative":"Returns the dictionary of CORS specific app configurations."} {"query":"Add data to the graph object . May be called several times to add additional data sets .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . validate_data ( arg_1 ) arg_0 . process_data ( arg_1 ) arg_0 . data . append ( arg_1 )","id_":252467,"task_name":"https:\/\/github.com\/jaraco\/svg.charts\/blob\/23053497b3f1af4e760f355050107ae3bc05909d\/svg\/charts\/graph.py#L100-L109","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Convert numeric and literal version information to numeric format","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = None arg_3 = None try : arg_2 = Decimal ( arg_1 ) except ( ValueError , InvalidOperation ) : try : arg_2 = CMS_VERSION_MATRIX [ str ( arg_1 ) ] except KeyError : pass try : arg_3 = Decimal ( arg_0 ) except ( ValueError , InvalidOperation ) : try : arg_3 = DJANGO_VERSION_MATRIX [ str ( arg_0 ) ] except KeyError : pass try : if ( arg_2 and arg_3 and not ( LooseVersion ( VERSION_MATRIX [ compat . unicode ( arg_2 ) ] [ 0 ] ) <= LooseVersion ( compat . unicode ( arg_3 ) ) <= LooseVersion ( VERSION_MATRIX [ compat . unicode ( arg_2 ) ] [ 1 ] ) ) ) : raise RuntimeError ( 'Django and django CMS versions doesn\\'t match: ' 'Django {0} is not supported by django CMS {1}' . format ( arg_3 , arg_2 ) ) except KeyError : raise RuntimeError ( 'Django and django CMS versions doesn\\'t match: ' 'Django {0} is not supported by django CMS {1}' . format ( arg_3 , arg_2 ) ) return ( compat . unicode ( arg_3 ) if arg_3 else arg_3 , compat . unicode ( arg_2 ) if arg_2 else arg_2 )","id_":252468,"task_name":"https:\/\/github.com\/nephila\/djangocms-installer\/blob\/9fec66d5f8b1e9a0f3c0ec66dd777db578fab07e\/djangocms_installer\/utils.py#L51-L93","negative":"Downloads the sprites data and returns the saved filepath."} {"query":"Show current installed versions","positive":"def Func ( ) : if logger . root . isEnabledFor ( logging . DEBUG ) : if sys . platform == \"darwin\" : arg_0 = \"macOS {0}\" . format ( platform . mac_ver ( ) [ 0 ] ) elif sys . platform . startswith ( \"win\" ) : arg_0 = \"{0} {1}\" . format ( platform . system ( ) , platform . release ( ) ) else : arg_0 = platform . platform ( ) log . debug ( \"OS: {0}\" . format ( arg_0 ) ) log . debug ( \"Python: {0}\" . format ( platform . python_version ( ) ) ) log . 
debug ( \"Streamlink: {0}\" . format ( streamlink_version ) ) log . debug ( \"Requests({0}), Socks({1}), Websocket({2})\" . format ( requests . __version__ , socks_version , websocket_version ) )","id_":252469,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink_cli\/main.py#L923-L940","negative":"UserWarning if array contains non-finite elements"} {"query":"Utility function to rewrite rows in tsv files .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : if not isinstance ( arg_0 , pathlib . Path ) : assert isinstance ( arg_0 , string_types ) arg_0 = pathlib . Path ( arg_0 ) assert arg_0 . is_file ( ) with tempfile . NamedTemporaryFile ( delete = False ) as fp : arg_3 = pathlib . Path ( fp . name ) with UnicodeReader ( arg_0 , ** arg_2 ) as reader_ : with UnicodeWriter ( arg_3 , ** arg_2 ) as writer : for arg_4 , arg_5 in enumerate ( reader_ ) : arg_5 = arg_1 ( arg_4 , arg_5 ) if arg_5 is not None : writer . writerow ( arg_5 ) shutil . move ( str ( arg_3 ) , str ( arg_0 ) )","id_":252470,"task_name":"https:\/\/github.com\/cldf\/csvw\/blob\/181c94b6c599575945e52d370a415f12f3433eab\/src\/csvw\/dsv.py#L361-L383","negative":"Close the policy instance and its shared database connection."} {"query":"Returns Gcp Video Intelligence Service client","positive":"def Func ( arg_0 ) : if not arg_0 . _conn : arg_0 . _conn = VideoIntelligenceServiceClient ( credentials = arg_0 . _get_credentials ( ) ) return arg_0 . _conn","id_":252471,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_video_intelligence_hook.py#L41-L49","negative":"Convert native python ``datetime.date`` object to a format supported by the API"} {"query":"convert a list of = - spaced command - line arguments to a dictionary evaluating python objects when possible","positive":"def Func ( arg_0 ) : def parse ( arg_1 ) : assert isinstance ( arg_1 , str ) try : return eval ( arg_1 ) except ( NameError , SyntaxError ) : return arg_1 return { arg_2 : parse ( arg_1 ) for arg_2 , arg_1 in parse_unknown_args ( arg_0 ) . items ( ) }","id_":252472,"task_name":"https:\/\/github.com\/openai\/baselines\/blob\/3301089b48c42b87b396e246ea3f56fa4bfc9678\/baselines\/run.py#L180-L192","negative":"Raises OrderError if no package or file defined.\n Raises CardinalityError if more than one type set.\n Raises SPDXValueError if type is unknown."} {"query":"Return a new TimeslotCollection merged with a specified timeslots","positive":"def Func ( arg_0 , arg_1 : 'TimeslotCollection' ) -> 'TimeslotCollection' : arg_2 = [ Timeslot ( arg_3 . interval , arg_3 . channel ) for arg_3 in arg_0 . timeslots ] arg_2 . extend ( [ Timeslot ( arg_3 . interval , arg_3 . channel ) for arg_3 in arg_1 . timeslots ] ) return TimeslotCollection ( * arg_2 )","id_":252473,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/timeslots.py#L223-L231","negative":"Fetch items using the given backend.\n\n Generator to get items using the given backend class. When\n an archive manager is given, this function will store\n the fetched items in an `Archive`. 
If an exception is raised,\n this archive will be removed to avoid corrupted archives.\n\n The parameters needed to initialize the `backend` class and\n get the items are given using `backend_args` dict parameter.\n\n :param backend_class: backend class to fetch items\n :param backend_args: dict of arguments needed to fetch the items\n :param category: category of the items to retrieve.\n If None, it will use the default backend category\n :param filter_classified: remove classified fields from the resulting items\n :param manager: archive manager needed to store the items\n\n :returns: a generator of items"} {"query":"Returns list of Amazon Alexa compatible states of the RichMessage instance nested controls .","positive":"def Func ( arg_0 ) -> list : arg_1 = [ control . Func ( ) for control in arg_0 . controls ] return arg_1","id_":252474,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/agent\/rich_content.py#L148-L157","negative":"This returns an array of each sector and performance for the current trading day. Performance is based on each sector ETF.\n\n https:\/\/iexcloud.io\/docs\/api\/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result"} {"query":"Compute the saturation for a continuous level . This breaks the level into multiple regions and computes the saturation level for each region .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if not arg_2 : arg_0 = arg_0 . reshape ( arg_1 ) arg_3 = SM32 ( arg_0 ) else : if len ( arg_0 ) > 0 : assert ( arg_0 . max ( ) < arg_1 [ 0 ] * arg_1 [ 1 ] ) arg_3 = SM32 ( 1 , arg_1 [ 0 ] * arg_1 [ 1 ] ) arg_3 . setRowFromSparse ( 0 , arg_0 , [ 1 ] * len ( arg_0 ) ) arg_3 . reshape ( arg_1 [ 0 ] , arg_1 [ 1 ] ) arg_4 = 15 arg_5 = xrange ( arg_4 + 1 , arg_1 [ 0 ] + 1 , arg_4 ) arg_6 = xrange ( arg_4 + 1 , arg_1 [ 1 ] + 1 , arg_4 ) arg_7 = arg_3 . nNonZerosPerBox ( arg_5 , arg_6 ) ( arg_8 , arg_9 ) = arg_7 . tolist ( ) arg_9 \/= float ( arg_4 * arg_4 ) arg_10 = list ( arg_9 ) arg_11 = [ ] arg_12 = set ( arg_8 ) for ( arg_13 , arg_14 ) in itertools . izip ( arg_8 , arg_9 ) : ( arg_15 , arg_16 ) = arg_13 if ( arg_15 - 1 , arg_16 ) in arg_12 and ( arg_15 , arg_16 - 1 ) in arg_12 and ( arg_15 + 1 , arg_16 ) in arg_12 and ( arg_15 , arg_16 + 1 ) in arg_12 : arg_11 . append ( arg_14 ) return ( arg_10 , arg_11 )","id_":252475,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/fdrutilities.py#L1201-L1257","negative":"Returns a list of the dicom files within root_path\n\n Parameters\n ----------\n root_path: str\n Path to the directory to be recursively searched for DICOM files.\n\n Returns\n -------\n dicoms: set\n Set of DICOM absolute file paths"} {"query":"Stop experiment .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 , arg_4 = get_project_experiment_or_local ( arg_0 . obj . get ( 'project' ) , arg_0 . obj . get ( 'experiment' ) ) if not arg_1 and not click . confirm ( \"Are sure you want to Func \" \"experiment `{}`\" . format ( arg_4 ) ) : click . echo ( 'Existing without Funcping experiment.' ) sys . exit ( 0 ) try : PolyaxonClient ( ) . experiment . Func ( arg_2 , arg_3 , arg_4 ) except ( PolyaxonHTTPError , PolyaxonShouldExitError , PolyaxonClientException ) as e : Printer . print_error ( 'Could not Func experiment `{}`.' . format ( arg_4 ) ) Printer . print_error ( 'Error message `{}`.' . 
format ( e ) ) sys . exit ( 1 ) Printer . print_success ( \"Experiment is being Funcped.\" )","id_":252476,"task_name":"https:\/\/github.com\/polyaxon\/polyaxon-cli\/blob\/a7f5eed74d4d909cad79059f3c21c58606881449\/polyaxon_cli\/cli\/experiment.py#L263-L294","negative":"Revoke the token and remove the cookie."} {"query":"Performable template tag .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . split_contents ( ) if len ( arg_2 ) > 1 : raise TemplateSyntaxError ( \"'%s' takes no arguments\" % arg_2 [ 0 ] ) return PerformableNode ( )","id_":252477,"task_name":"https:\/\/github.com\/jazzband\/django-analytical\/blob\/5487fd677bd47bc63fc2cf39597a0adc5d6c9ab3\/analytical\/templatetags\/performable.py#L41-L52","negative":"Read attribute from sysfs and return as string"} {"query":"Delete a milestone request","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { 'action' : 'delete' , } arg_3 = 'milestone_requests\/{}' . format ( arg_1 ) arg_4 = make_put_request ( arg_0 , arg_3 , arg_2 = arg_2 ) arg_5 = arg_4 . json ( ) if arg_4 . status_code == 200 : return arg_5 [ 'status' ] else : raise MilestoneRequestNotDeletedException ( message = arg_5 [ 'message' ] , error_code = arg_5 [ 'error_code' ] , request_id = arg_5 [ 'request_id' ] )","id_":252478,"task_name":"https:\/\/github.com\/freelancer\/freelancer-sdk-python\/blob\/e09034936d6f13b3909a9464ee329c81c1834941\/freelancersdk\/resources\/projects\/projects.py#L680-L698","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Performs a GET request and returns the response .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = HTTPRequest ( ) arg_3 . method = 'GET' arg_3 . host = arg_0 . host arg_3 . path = arg_1 arg_3 . path , arg_3 . query = arg_0 . _httpclient . _update_request_uri_query ( arg_3 ) arg_3 . headers = arg_0 . _update_management_header ( arg_3 , arg_2 ) arg_8 = arg_0 . _perform_request ( arg_3 ) return arg_8","id_":252479,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/servicemanagementclient.py#L158-L177","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Returns the number of transits across the Gaia focal plane averaged over ecliptic longitude .","positive":"def Func ( arg_0 ) : arg_1 = array ( floor ( abs ( sin ( arg_0 ) ) * arg_2 ) , dtype = int ) arg_1 [ ( arg_1 == arg_2 ) ] = arg_2 - 1 return _averageTransitNumber [ arg_1 ]","id_":252480,"task_name":"https:\/\/github.com\/agabrown\/PyGaia\/blob\/ae972b0622a15f713ffae471f925eac25ccdae47\/pygaia\/errors\/utils.py#L87-L103","negative":"Process the logic and structuration of the mining database."} {"query":"Process the logic and structuration of the mining database .","positive":"def Func ( arg_0 ) : if PyFunceble . CONFIGURATION [ \"mining\" ] : arg_1 = arg_0 . mine ( ) if arg_1 : arg_0 . _add ( arg_1 ) arg_0 . 
_backup ( )","id_":252481,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/mining.py#L376-L394","negative":"Return the length of the indentation on the given token's line."} {"query":"Calculates the number of pixels to use for J at a given memory usage .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 1 , arg_3 = 1e9 , arg_4 = 20 ) : arg_5 = int ( arg_3 \/\/ 8 \/\/ arg_1 ) arg_6 = arg_4 * arg_1 arg_7 = arg_0 . residuals . size \/\/ arg_2 if arg_6 > arg_5 : raise RuntimeError ( 'Insufficient max_mem for desired redundancy.' ) arg_8 = np . clip ( arg_7 , arg_6 , arg_5 ) . astype ( 'int' ) return arg_8","id_":252482,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/opt\/optimize.py#L142-L183","negative":"Same as `send_stream_error`, but expects `lock` acquired."} {"query":"Display debug information for the storage","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = True , arg_3 = False ) : import codecs import locale import sys if six . PY2 : arg_4 = codecs . getwriter ( locale . getpreferredencoding ( ) ) ( sys . stderr ) else : arg_4 = sys . stderr arg_5 = inspect . stack ( ) [ 1 ] [ 3 ] arg_4 . write ( 'in %s\\n' % arg_5 ) if arg_1 : arg_4 . write ( u' base level : %d\\n' % arg_0 [ 'base_level' ] ) arg_4 . write ( u' base dir : %s\\n' % arg_0 [ 'base_dir' ] ) if arg_3 : arg_4 . write ( u' runs : %s\\n' % list ( arg_0 [ 'runs' ] ) ) if arg_2 : arg_6 = u' Chars : ' for arg_7 in arg_0 [ 'chars' ] : if arg_7 != '\\n' : arg_6 += arg_7 [ 'ch' ] else : arg_6 += 'C' arg_4 . write ( arg_6 + u'\\n' ) arg_6 = u' Res. levels : %s\\n' % u'' . join ( [ six . text_type ( arg_7 [ 'level' ] ) for arg_7 in arg_0 [ 'chars' ] ] ) arg_4 . write ( arg_6 ) arg_8 = [ arg_7 [ 'type' ] . ljust ( 3 ) for arg_7 in arg_0 [ 'chars' ] ] for arg_9 in range ( 3 ) : if arg_9 : arg_6 = u' %s\\n' else : arg_6 = u' Res. types : %s\\n' arg_4 . write ( arg_6 % u'' . join ( [ arg_10 [ arg_9 ] for arg_10 in arg_8 ] ) )","id_":252483,"task_name":"https:\/\/github.com\/MeirKriheli\/python-bidi\/blob\/a0e265bb465c1b7ad628487991e33b5ebe364641\/bidi\/algorithm.py#L62-L104","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Remove a contact from the roster .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None ) : arg_4 = arg_0 . roster [ arg_1 ] if arg_1 not in arg_0 . roster : raise KeyError ( arg_1 ) arg_4 = RosterItem ( arg_1 , subscription = \"remove\" ) arg_0 . 
_roster_set ( arg_4 , arg_2 , arg_3 )","id_":252484,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/roster.py#L886-L904","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"write lines one by one separated by \\ n to device","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . replace ( '\\r' , '' ) . split ( '\\n' ) for arg_3 in arg_2 : arg_0 . __exchange ( arg_3 )","id_":252485,"task_name":"https:\/\/github.com\/kmpm\/nodemcu-uploader\/blob\/557a25f37b1fb4e31a745719e237e42fff192834\/nodemcu_uploader\/uploader.py#L362-L366","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Adds the header template to the master template string","positive":"def Func ( arg_0 ) : logger . debug ( \"===============\" ) logger . debug ( \"Building header\" ) logger . debug ( \"===============\" ) arg_0 . template += hs . header","id_":252486,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/engine.py#L518-L525","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"check size of inheritance hierarchy and number of instance attributes","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = len ( list ( arg_1 . ancestors ( ) ) ) if arg_2 > arg_0 . config . max_parents : arg_0 . add_message ( \"too-many-ancestors\" , arg_1 = arg_1 , args = ( arg_2 , arg_0 . config . max_parents ) , ) if len ( arg_1 . instance_attrs ) > arg_0 . config . max_attributes : arg_0 . add_message ( \"too-many-instance-attributes\" , arg_1 = arg_1 , args = ( len ( arg_1 . instance_attrs ) , arg_0 . config . max_attributes ) , )","id_":252487,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/design_analysis.py#L320-L336","negative":"Configures the learning process. Must be called before fit or evaluate.\n\n # Arguments\n optimizer: Optimization method to be used. One can alternatively pass in the corresponding\n string representation, such as 'sgd'.\n loss: Criterion to be used. One can alternatively pass in the corresponding string\n representation, such as 'mse'.\n metrics: List of validation methods to be used. Default is None. One can alternatively use ['accuracy']."} {"query":"r Note transcription evaluation","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = 'pitch_contour' arg_0 = coerce_annotation ( arg_0 , arg_3 ) arg_1 = coerce_annotation ( arg_1 , arg_3 ) arg_4 , arg_5 = arg_0 . to_interval_values ( ) arg_6 , arg_7 = arg_1 . to_interval_values ( ) arg_8 = np . asarray ( [ p [ 'frequency' ] * ( - 1 ) ** ( ~ p [ 'voiced' ] ) for p in arg_5 ] ) arg_9 = np . 
asarray ( [ p [ 'frequency' ] * ( - 1 ) ** ( ~ p [ 'voiced' ] ) for p in arg_7 ] ) return mir_eval . Func . evaluate ( arg_4 , arg_8 , arg_6 , arg_9 , ** arg_2 )","id_":252488,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/eval.py#L498-L542","negative":"Reset the parameters."} {"query":"Mark validation status for a variant .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 ) : if not arg_6 in SANGER_OPTIONS : LOG . warning ( \"Invalid validation string: %s\" , arg_6 ) LOG . info ( \"Validation options: %s\" , ', ' . join ( SANGER_OPTIONS ) ) return arg_7 = arg_0 . variant_collection . find_one_and_update ( { '_id' : arg_5 [ '_id' ] } , { '$set' : { 'validation' : arg_6 } } , return_document = pymongo . ReturnDocument . AFTER ) arg_0 . create_event ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , category = 'variant' , verb = 'Func' , arg_5 = arg_5 , subject = arg_5 [ 'display_name' ] , ) return arg_7","id_":252489,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/variant_events.py#L221-L257","negative":"Check the first message matches the expected handshake.\n\n Note:\n The handshake is provided as :py:attr:`RTM_HANDSHAKE`.\n\n Arguments:\n msg (:py:class:`aiohttp.Message`): The message to validate.\n\n Raises:\n :py:class:`SlackApiError`: If the data doesn't match the\n expected handshake."} {"query":"Output the names to the given file","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 in get_Func ( arg_0 = arg_0 , arg_2 = arg_2 ) : click . echo ( arg_3 , file = arg_1 )","id_":252490,"task_name":"https:\/\/github.com\/cthoyt\/ols-client\/blob\/8c6bb54888675652d25324184967392d00d128fc\/src\/ols_client\/cli.py#L19-L22","negative":"Create a plot of weights, visualized as \"bottom-level\" pixel arrays."} {"query":"Return the next aggregated record if any","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = None arg_4 = None if arg_1 is not None : arg_0 . _inIdx += 1 if arg_0 . _filter != None and not arg_0 . _filter [ 0 ] ( arg_0 . _filter [ 1 ] , arg_1 ) : return ( None , None ) if arg_0 . _nullAggregation : return ( arg_1 , arg_2 ) arg_5 = arg_1 [ arg_0 . _timeFieldIdx ] if arg_0 . _firstSequenceStartTime == None : arg_0 . _firstSequenceStartTime = arg_5 if arg_0 . _startTime is None : arg_0 . _startTime = arg_5 if arg_0 . _endTime is None : arg_0 . _endTime = arg_0 . _getEndTime ( arg_5 ) assert arg_0 . _endTime > arg_5 if arg_0 . _resetFieldIdx is not None : arg_9 = arg_1 [ arg_0 . _resetFieldIdx ] else : arg_9 = None if arg_0 . _sequenceIdFieldIdx is not None : arg_10 = arg_1 [ arg_0 . _sequenceIdFieldIdx ] else : arg_10 = None arg_11 = ( arg_9 == 1 and arg_0 . _inIdx > 0 ) or arg_0 . _sequenceId != arg_10 or arg_0 . _inIdx == 0 if arg_11 : arg_0 . _sequenceId = arg_10 arg_13 = ( arg_5 >= arg_0 . _endTime or arg_5 < arg_0 . _startTime ) if ( arg_11 or arg_13 ) and len ( arg_0 . _slice ) > 0 : for arg_14 , arg_15 in enumerate ( arg_0 . _fields ) : arg_16 = arg_15 [ 0 ] if arg_16 == arg_0 . _timeFieldIdx : arg_0 . _slice [ arg_14 ] [ 0 ] = arg_0 . _startTime break arg_3 = arg_0 . _createAggregateRecord ( ) arg_4 = arg_0 . _aggrInputBookmark arg_0 . _slice = defaultdict ( list ) for arg_14 , arg_15 in enumerate ( arg_0 . _fields ) : arg_16 = arg_15 [ 0 ] arg_0 . _slice [ arg_14 ] . append ( arg_1 [ arg_16 ] ) arg_0 . _aggrInputBookmark = arg_2 if arg_11 : arg_0 . _startTime = arg_5 arg_0 . 
_endTime = arg_0 . _getEndTime ( arg_5 ) if arg_13 : if arg_5 < arg_0 . _startTime : arg_0 . _endTime = arg_0 . _firstSequenceStartTime while arg_5 >= arg_0 . _endTime : arg_0 . _startTime = arg_0 . _endTime arg_0 . _endTime = arg_0 . _getEndTime ( arg_0 . _endTime ) if arg_3 is not None : return ( arg_3 , arg_4 ) elif arg_0 . _slice : for arg_14 , arg_15 in enumerate ( arg_0 . _fields ) : arg_16 = arg_15 [ 0 ] if arg_16 == arg_0 . _timeFieldIdx : arg_0 . _slice [ arg_14 ] [ 0 ] = arg_0 . _startTime break arg_3 = arg_0 . _createAggregateRecord ( ) arg_4 = arg_0 . _aggrInputBookmark arg_0 . _slice = defaultdict ( list ) return ( arg_3 , arg_4 )","id_":252491,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/aggregator.py#L515-L716","negative":"hyperpolarization step. Use to calculate tau and stuff."} {"query":"Return the path to this directory .","positive":"def Func ( arg_0 ) : arg_1 = '' if arg_0 . _parent and arg_0 . _parent . Func : arg_1 = os . Func . join ( arg_1 , arg_0 . _parent . Func ) if arg_0 . _base : arg_1 = os . Func . join ( arg_1 , arg_0 . _base ) if arg_0 . _Func : arg_1 = os . Func . join ( arg_1 , arg_0 . _Func ) return arg_1","id_":252492,"task_name":"https:\/\/github.com\/snare\/scruffy\/blob\/0fedc08cfdb6db927ff93c09f25f24ce5a04c541\/scruffy\/file.py#L299-L312","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."} {"query":"return the error if there is a corresponding exception","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , dict ) : if 'errors' in arg_0 : arg_1 = arg_0 [ 'errors' ] [ 0 ] else : arg_1 = arg_0 . get ( 'error' , None ) if isinstance ( arg_1 , dict ) : if arg_1 . get ( 'code' ) in errors : return arg_1","id_":252493,"task_name":"https:\/\/github.com\/odrling\/peony-twitter\/blob\/967f98e16e1889389540f2e6acbf7cc7a1a80203\/peony\/exceptions.py#L8-L18","negative":"Adjust contrast of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n contrast_factor (float): How much to adjust the contrast. Can be any\n non negative number. 0 gives a solid gray image, 1 gives the\n original image while 2 increases the contrast by a factor of 2.\n\n Returns:\n PIL Image: Contrast adjusted image."} {"query":"Returns the names of all positional arguments to the given function .","positive":"def Func ( arg_0 ) : arg_1 = _get_cached_arg_spec ( arg_0 ) arg_2 = arg_1 . args if arg_1 . defaults : arg_2 = arg_2 [ : - len ( arg_1 . defaults ) ] return arg_2","id_":252494,"task_name":"https:\/\/github.com\/google\/gin-config\/blob\/17a170e0a6711005d1c78e67cf493dc44674d44f\/gin\/config.py#L695-L701","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Asserts that val is string and contains the given item or items .","positive":"def Func ( arg_0 , * arg_1 ) : if len ( arg_1 ) == 0 : raise ValueError ( 'one or more args must be given' ) if isinstance ( arg_0 . val , str_types ) : if len ( arg_1 ) == 1 : if not isinstance ( arg_1 [ 0 ] , str_types ) : raise TypeError ( 'given arg must be a string' ) if arg_1 [ 0 ] . lower ( ) not in arg_0 . val . lower ( ) : arg_0 . _err ( 'Expected <%s> to case-insensitive contain item <%s>, but did not.' % ( arg_0 . 
val , arg_1 [ 0 ] ) ) else : arg_2 = [ ] for arg_3 in arg_1 : if not isinstance ( arg_3 , str_types ) : raise TypeError ( 'given args must all be strings' ) if arg_3 . lower ( ) not in arg_0 . val . lower ( ) : arg_2 . append ( arg_3 ) if arg_2 : arg_0 . _err ( 'Expected <%s> to case-insensitive contain items %s, but did not contain %s.' % ( arg_0 . val , arg_0 . _fmt_items ( arg_1 ) , arg_0 . _fmt_items ( arg_2 ) ) ) elif isinstance ( arg_0 . val , Iterable ) : arg_2 = [ ] for arg_3 in arg_1 : if not isinstance ( arg_3 , str_types ) : raise TypeError ( 'given args must all be strings' ) arg_4 = False for arg_5 in arg_0 . val : if not isinstance ( arg_5 , str_types ) : raise TypeError ( 'val items must all be strings' ) if arg_3 . lower ( ) == arg_5 . lower ( ) : arg_4 = True break if not arg_4 : arg_2 . append ( arg_3 ) if arg_2 : arg_0 . _err ( 'Expected <%s> to case-insensitive contain items %s, but did not contain %s.' % ( arg_0 . val , arg_0 . _fmt_items ( arg_1 ) , arg_0 . _fmt_items ( arg_2 ) ) ) else : raise TypeError ( 'val is not a string or iterable' ) return arg_0","id_":252495,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L570-L607","negative":"This method is called before first step of simulation."} {"query":"Get a unique hash depending on the state of the data .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = arg_2 , arg_4 = False , arg_5 = arg_2 , arg_6 = False ) : if arg_6 and isinstance ( arg_0 , six . string_types ) : try : arg_0 = json . dumps ( arg_0 ) except TypeError as ex : pass arg_3 = _rectify_base ( arg_3 ) arg_5 = _rectify_hashlen ( arg_5 ) arg_1 = _rectify_hasher ( arg_1 ) ( ) _update_hasher ( arg_1 , arg_0 , arg_4 = arg_4 ) arg_7 = _digest_hasher ( arg_1 , arg_5 , arg_3 ) return arg_7","id_":252496,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_hash.py#L709-L769","negative":"Return an existing CA bundle path, or None"} {"query":"Use arguments to route constructor .","positive":"def Func ( * arg_0 , ** arg_1 ) : if 'mode' in arg_1 : arg_2 = arg_1 [ 'mode' ] if arg_2 not in constructors : raise ValueError ( 'Mode %s not supported' % arg_2 ) del arg_1 [ 'mode' ] return constructors [ arg_2 ] else : for arg_2 , arg_3 in constructors : if arg_3 . _argcheck ( * arg_0 , ** arg_1 ) : return arg_3 return ConstructLocal","id_":252497,"task_name":"https:\/\/github.com\/bolt-project\/bolt\/blob\/9cd7104aa085498da3097b72696184b9d3651c51\/bolt\/factory.py#L37-L55","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"The oembed endpoint or the url to which requests for metadata are passed . Third parties will want to access this view with URLs for your site s content and be returned OEmbed metadata .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = dict ( arg_0 . GET . items ( ) ) arg_4 = arg_3 . pop ( 'callback' , None ) arg_5 = arg_3 . pop ( 'url' , None ) if not arg_5 : return HttpResponseBadRequest ( 'Required parameter missing: URL' ) try : arg_6 = oembed . site . provider_for_url ( arg_5 ) if not arg_6 . 
provides : raise OEmbedMissingEndpoint ( ) except OEmbedMissingEndpoint : raise Http404 ( 'No provider found for %s' % arg_5 ) arg_7 = dict ( [ ( smart_str ( k ) , smart_str ( v ) ) for k , v in arg_3 . items ( ) if v ] ) try : arg_8 = oembed . site . embed ( arg_5 , ** arg_7 ) except OEmbedException , e : raise Http404 ( 'Error embedding %s: %s' % ( arg_5 , str ( e ) ) ) arg_9 = HttpResponse ( mimetype = 'application\/json' ) Func = arg_8 . json if arg_4 : arg_9 . write ( '%s(%s)' % ( defaultfilters . force_escape ( arg_4 ) , Func ) ) else : arg_9 . write ( Func ) return arg_9","id_":252498,"task_name":"https:\/\/github.com\/worldcompany\/djangoembed\/blob\/f3f2be283441d91d1f89db780444dc75f7b51902\/oembed\/views.py#L19-L56","negative":"Send buffered metrics in batch requests over TCP"} {"query":"Extract the raw traceback from the current stack frame .","positive":"def Func ( arg_0 = None , arg_1 = None ) : if arg_0 is None : try : raise ZeroDivisionError except ZeroDivisionError : arg_0 = sys . exc_info ( ) [ 2 ] . tb_frame . f_back if arg_1 is None : if hasattr ( sys , 'tracebacklimit' ) : arg_1 = sys . tracebacklimit arg_2 = [ ] arg_3 = 0 while arg_0 is not None and ( arg_1 is None or arg_3 < arg_1 ) : arg_4 = arg_0 . f_lineno arg_5 = arg_0 . f_code arg_6 = arg_5 . co_filename arg_7 = arg_5 . co_name linecache . checkcache ( arg_6 ) arg_8 = linecache . getline ( arg_6 , arg_4 , arg_0 . f_globals ) if arg_8 : arg_8 = arg_8 . strip ( ) else : arg_8 = None arg_2 . append ( ( arg_6 , arg_4 , arg_7 , arg_8 ) ) arg_0 = arg_0 . f_back arg_3 = arg_3 + 1 arg_2 . reverse ( ) return arg_2","id_":252499,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/traceback.py#L285-L317","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"Scan the footpaths originating from stop_id","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 , arg_4 , arg_5 in arg_0 . _walk_network . edges_iter ( nbunch = [ arg_1 ] , arg_5 = True ) : arg_6 = arg_5 [ \"d_walk\" ] arg_7 = arg_2 + arg_6 \/ arg_0 . _walk_speed arg_0 . _update_stop_label ( arg_4 , arg_7 )","id_":252500,"task_name":"https:\/\/github.com\/CxAalto\/gtfspy\/blob\/bddba4b74faae6c1b91202f19184811e326547e5\/gtfspy\/routing\/connection_scan.py#L92-L103","negative":"Checks if an blob_name is updated in Google Cloud Storage.\n\n :param bucket_name: The Google cloud storage bucket where the object is.\n :type bucket_name: str\n :param object_name: The name of the object to check in the Google cloud\n storage bucket.\n :type object_name: str\n :param ts: The timestamp to check against.\n :type ts: datetime.datetime"} {"query":"flush ignored control replies","positive":"def Func ( arg_0 ) : while arg_0 . _ignored_control_replies > 0 : arg_0 . session . recv ( arg_0 . _control_socket ) arg_0 . 
_ignored_control_replies -= 1","id_":252501,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/client\/client.py#L840-L844","negative":"Wrap a reader function in a decorator to supply line and column\n information along with relevant forms."} {"query":"Render left blocks","positive":"def Func ( arg_0 ) : arg_0 . log . debug ( \"Rendering left blocks\" ) arg_1 = arg_0 . left_panel arg_1 . render ( ) arg_2 = arg_0 . left_panel_width - arg_1 . width arg_3 = [ ] arg_4 = ' ' * int ( arg_2 \/ 2 ) if not arg_1 . lines : arg_3 = [ ( '' ) , ( arg_0 . markup . RED + 'BROKEN LEFT PANEL' + arg_0 . markup . RESET ) ] else : while arg_0 . left_panel . lines : arg_5 = arg_0 . left_panel . lines . pop ( 0 ) arg_6 = arg_4 + arg_0 . __truncate ( arg_5 , arg_0 . left_panel_width ) arg_7 = ' ' * ( arg_0 . left_panel_width - len ( arg_0 . markup . clean_markup ( arg_6 ) ) ) arg_6 += arg_7 + arg_0 . markup . RESET arg_3 . append ( arg_6 ) return arg_3","id_":252502,"task_name":"https:\/\/github.com\/yandex\/yandex-tank\/blob\/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b\/yandextank\/plugins\/Console\/screen.py#L365-L383","negative":"Appends transactions that close out all positions at the end of\n the timespan covered by positions data. Utilizes pricing information\n in the positions DataFrame to determine closing price.\n\n Parameters\n ----------\n positions : pd.DataFrame\n The positions that the strategy takes over time.\n transactions : pd.DataFrame\n Prices and amounts of executed round_trips. One row per trade.\n - See full explanation in tears.create_full_tear_sheet\n\n Returns\n -------\n closed_txns : pd.DataFrame\n Transactions with closing transactions appended."} {"query":"list profiles that are bundled with IPython .","positive":"def Func ( ) : arg_0 = os . path . join ( get_ipython_package_dir ( ) , u'config' , u'profile' ) arg_1 = os . listdir ( arg_0 ) arg_2 = [ ] for arg_3 in arg_1 : arg_4 = os . path . join ( arg_0 , arg_3 ) if os . path . isdir ( arg_4 ) and arg_3 != \"__pycache__\" : arg_2 . append ( arg_3 ) return arg_2","id_":252503,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/profileapp.py#L108-L117","negative":"This function returns a dictionary representation of a docker-compose.yml file, based on assembled_specs from\n the spec_assembler, and port_specs from the port_spec compiler"} {"query":"Read data by dataset_reader from specified config .","positive":"def Func ( arg_0 : arg_1 ) : arg_2 = arg_0 . get ( 'dataset' , None ) if arg_2 : arg_0 . pop ( 'dataset' ) arg_3 = arg_2 [ 'type' ] if arg_3 == 'classification' : arg_4 = { 'class_name' : 'basic_classification_reader' } arg_5 = { 'class_name' : 'basic_classification_iterator' } arg_0 [ 'dataset_reader' ] = { ** arg_2 , ** arg_4 } arg_0 [ 'dataset_iterator' ] = { ** arg_2 , ** arg_5 } else : raise Exception ( \"Unsupported dataset type: {}\" . format ( arg_3 ) ) try : arg_6 = arg_1 ( arg_0 [ 'dataset_reader' ] ) except KeyError : raise ConfigError ( \"No dataset reader is provided in the JSON config.\" ) arg_4 = get_model ( arg_6 . pop ( 'class_name' ) ) ( ) arg_7 = arg_6 . pop ( 'data_path' , '' ) if isinstance ( arg_7 , list ) : arg_7 = [ expand_path ( x ) for x in arg_7 ] else : arg_7 = expand_path ( arg_7 ) return arg_4 . 
read ( arg_7 , ** arg_6 )","id_":252504,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/commands\/train.py#L31-L58","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Fill missing rates of a currency with the closest available ones .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _rates [ arg_1 ] arg_3 , arg_4 = arg_0 . bounds [ arg_1 ] for arg_5 in list_dates_between ( arg_3 , arg_4 ) : if arg_5 not in arg_2 : arg_2 [ arg_5 ] = None if arg_0 . verbose : arg_6 = len ( [ r for r in itervalues ( arg_2 ) if r is None ] ) if arg_6 : print ( '{0}: {1} missing rates from {2} to {3} ({4} days)' . format ( arg_1 , arg_6 , arg_3 , arg_4 , 1 + ( arg_4 - arg_3 ) . days ) )","id_":252505,"task_name":"https:\/\/github.com\/alexprengere\/currencyconverter\/blob\/e3cb0d693819c0c824214225b23a47e9380f71df\/currency_converter\/currency_converter.py#L192-L206","negative":"Show the transaction as plain json"} {"query":"Calculate the heat capacity of a phase of the compound at a specified temperature .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 not in arg_0 . _phases : raise Exception ( \"The phase '%s' was not found in compound '%s'.\" % ( arg_1 , arg_0 . formula ) ) return arg_0 . _phases [ arg_1 ] . Func ( arg_2 )","id_":252506,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/tools\/chemistry\/thermochemistry.py#L471-L486","negative":"delete a backend, and update the secrets file"} {"query":"Returns a CSV string built from the summaries of the Intervals .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = True , arg_3 = False , arg_4 = \",\" , arg_5 = True ) : if ( arg_1 is None ) : if ( not arg_2 ) : raise StriplogError ( \"You must provide a filename or set as_text to True.\" ) else : arg_2 = False if arg_2 : arg_6 = StringIO ( ) else : arg_6 = open ( arg_1 , 'w' ) arg_7 = [ 'Top' , 'Base' , 'Component' ] arg_8 = csv . DictWriter ( arg_6 , delimiter = arg_4 , arg_7 = arg_7 , quoting = csv . QUOTE_MINIMAL ) if arg_5 : arg_8 . writeheader ( ) for arg_9 in arg_0 . __list : if arg_3 and arg_9 . description : arg_10 = arg_9 . description elif arg_9 . primary : arg_10 = arg_9 . primary . summary ( ) else : arg_10 = '' arg_11 = { j : k for j , k in zip ( arg_7 , [ arg_9 . top . z , arg_9 . base . z , arg_10 ] ) } arg_8 . writerow ( arg_11 ) if arg_2 : return arg_6 . getvalue ( ) else : arg_6 . close return None","id_":252507,"task_name":"https:\/\/github.com\/agile-geoscience\/striplog\/blob\/8033b673a151f96c29802b43763e863519a3124c\/striplog\/striplog.py#L1027-L1080","negative":"Deletes the given local filename.\n\n .. note:: If file doesn't exist this method has no effect.\n\n :param unicode target_filename:\n A local filename\n\n :raises NotImplementedForRemotePathError:\n If trying to delete a non-local path\n\n :raises FileOnlyActionError:\n Raised when filename refers to a directory."} {"query":"Returns the client in async mode .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : return arg_0 ( arg_1 , arg_2 = arg_2 , is_async = True , ** arg_3 )","id_":252508,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/royaleapi\/client.py#L94-L96","negative":"Downloads all variable star observations by a given observer.\n\n Performs a series of HTTP requests to AAVSO's WebObs search and\n downloads the results page by page. 
Each page is then passed to\n :py:class:`~pyaavso.parsers.webobs.WebObsResultsParser` and parse results\n are added to the final observation list."} {"query":"Consumes a signed 32bit integer number .","positive":"def Func ( arg_0 ) : try : arg_1 = ParseInteger ( arg_0 . token , is_signed = True , is_long = False ) except ValueError as e : raise arg_0 . _ParseError ( str ( e ) ) arg_0 . NextToken ( ) return arg_1","id_":252509,"task_name":"https:\/\/github.com\/ibelie\/typy\/blob\/3616845fb91459aacd8df6bf82c5d91f4542bee7\/typy\/google\/protobuf\/text_format.py#L875-L889","negative":"Creates an error from the given code, and args and kwargs.\n\n :param code: The acknowledgement code\n :param args: Exception args\n :param kwargs: Exception kwargs\n :return: the error for the given acknowledgement code"} {"query":"Scaling Draw Object","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . drawer . append ( pgmagick . DrawableScaling ( float ( arg_1 ) , float ( arg_2 ) ) )","id_":252510,"task_name":"https:\/\/github.com\/hhatto\/pgmagick\/blob\/5dce5fa4681400b4c059431ad69233e6a3e5799a\/pgmagick\/api.py#L925-L931","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Processes a future import statement returning set of flags it defines .","positive":"def Func ( arg_0 ) : assert isinstance ( arg_0 , ast . ImportFrom ) assert arg_0 . module == '__future__' arg_1 = FutureFeatures ( ) for arg_2 in arg_0 . names : arg_3 = arg_2 . name if arg_3 in _FUTURE_FEATURES : if arg_3 not in _IMPLEMENTED_FUTURE_FEATURES : arg_4 = 'future feature {} not yet implemented by grumpy' . format ( arg_3 ) raise util . ParseError ( arg_0 , arg_4 ) setattr ( arg_1 , arg_3 , True ) elif arg_3 == 'braces' : raise util . ParseError ( arg_0 , 'not a chance' ) elif arg_3 not in _REDUNDANT_FUTURE_FEATURES : arg_4 = 'future feature {} is not defined' . format ( arg_3 ) raise util . ParseError ( arg_0 , arg_4 ) return arg_1","id_":252511,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/compiler\/imputil.py#L276-L293","negative":"Raise an exception if string doesn't match a part's regex\n\n :param string: str\n :param part: a key in the PARTS dict\n :raises: ValueError, TypeError"} {"query":"Adds a given item to the tree irrespective of the subtree .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 = True , arg_7 = True ) : arg_4 = list ( arg_4 ) arg_8 = True arg_9 = '' arg_10 = None arg_11 = None arg_12 = arg_2 == LINK if arg_12 : arg_9 = arg_4 [ 0 ] arg_10 = arg_4 [ 1 ] arg_8 = False elif len ( arg_4 ) == 1 and len ( arg_5 ) == 0 : arg_13 = arg_4 [ 0 ] try : arg_9 = arg_13 . v_full_name arg_10 = arg_13 arg_8 = False except AttributeError : pass if arg_8 : if len ( arg_4 ) > 0 and inspect . isclass ( arg_4 [ 0 ] ) : arg_11 = arg_4 . pop ( 0 ) if len ( arg_4 ) > 0 and isinstance ( arg_4 [ 0 ] , str ) : arg_9 = arg_4 . pop ( 0 ) elif 'name' in arg_5 : arg_9 = arg_5 . pop ( 'name' ) elif 'full_name' in arg_5 : arg_9 = arg_5 . pop ( 'full_name' ) else : raise ValueError ( 'Could not determine a name of the new item you want to add. ' 'Either pass the name as positional argument or as a keyword ' 'argument `name`.' ) arg_14 = arg_9 . split ( '.' ) if arg_7 : for arg_15 , arg_9 in enumerate ( arg_14 ) : arg_16 , arg_9 = arg_0 . _translate_shortcut ( arg_9 ) arg_17 , arg_9 = arg_0 . _replace_wildcards ( arg_9 ) if arg_16 or arg_17 : arg_14 [ arg_15 ] = arg_9 arg_18 = arg_0 . 
_check_names ( arg_14 , arg_1 ) if arg_18 : arg_19 = '.' . join ( arg_14 ) raise ValueError ( 'Your Parameter\/Result\/Node `%s` contains the following not admissible names: ' '%s please choose other names.' % ( arg_19 , arg_18 ) ) if arg_12 : if arg_10 is None : raise ValueError ( 'You must provide an instance to link to!' ) if arg_10 . v_is_root : raise ValueError ( 'You cannot create a link to the root node' ) if arg_1 . v_is_root and arg_9 in SUBTREE_MAPPING : raise ValueError ( '`%s` is a reserved name for a group under root.' % arg_9 ) if not arg_0 . _root_instance . f_contains ( arg_10 , with_links = False , shortcuts = False ) : raise ValueError ( 'You can only link to items within the trajectory tree!' ) if arg_6 : arg_14 = arg_0 . _add_prefix ( arg_14 , arg_1 , arg_3 ) if arg_3 == GROUP : arg_20 = arg_2 != arg_3 and not arg_12 arg_3 , arg_2 = arg_0 . _determine_types ( arg_1 , arg_14 [ 0 ] , arg_20 , arg_12 ) if arg_0 . _root_instance . _is_run and arg_2 in SENSITIVE_TYPES : raise TypeError ( 'You are not allowed to add config or parameter data or groups ' 'during a single run.' ) return arg_0 . _add_to_tree ( arg_1 , arg_14 , arg_2 , arg_3 , arg_10 , arg_11 , arg_4 , arg_5 )","id_":252512,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L1093-L1230","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Extracts the values in datavol that are in the ROI with value roivalue in roivol . The ROI can be masked by maskvol .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = True ) : if arg_3 is not None : arg_5 = ( arg_1 == arg_2 ) * ( arg_3 > 0 ) else : arg_5 = arg_1 == arg_2 if arg_0 . ndim == 4 : arg_6 = arg_0 [ arg_5 , : ] else : arg_6 = arg_0 [ arg_5 ] if arg_4 : if arg_0 . ndim == 4 : arg_6 = arg_6 [ arg_6 . sum ( axis = 1 ) != 0 , : ] return arg_6","id_":252513,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/nifti\/roi.py#L350-L394","negative":"Start listening for events from Marathon, running a sync when we first\n successfully subscribe and triggering a sync on API request events."} {"query":"Does google - lint on a single file .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : _SetVerboseLevel ( arg_1 ) _BackupFilters ( ) if not ProcessConfigOverrides ( arg_0 ) : _RestoreFilters ( ) return arg_3 = [ ] arg_4 = [ ] try : if arg_0 == '-' : arg_5 = codecs . StreamReaderWriter ( sys . stdin , codecs . getreader ( 'utf8' ) , codecs . getwriter ( 'utf8' ) , 'replace' ) . read ( ) . split ( '\\n' ) else : arg_5 = codecs . open ( arg_0 , 'r' , 'utf8' , 'replace' ) . read ( ) . split ( '\\n' ) for arg_6 in range ( len ( arg_5 ) - 1 ) : if arg_5 [ arg_6 ] . endswith ( '\\r' ) : arg_5 [ arg_6 ] = arg_5 [ arg_6 ] . rstrip ( '\\r' ) arg_4 . append ( arg_6 + 1 ) else : arg_3 . append ( arg_6 + 1 ) except IOError : _cpplint_state . PrintError ( \"Skipping input '%s': Can't open for reading\\n\" % arg_0 ) _RestoreFilters ( ) return arg_7 = arg_0 [ arg_0 . rfind ( '.' ) + 1 : ] if arg_0 != '-' and arg_7 not in GetAllExtensions ( ) : arg_8 = set ( [ \"external\/local_config_cc\/libtool\" , \"external\/local_config_cc\/make_hashed_objlist.py\" , \"external\/local_config_cc\/wrapped_ar\" , \"external\/local_config_cc\/wrapped_clang\" , \"external\/local_config_cc\/xcrunwrapper.sh\" , ] ) if not arg_0 in arg_8 : _cpplint_state . 
PrintError ( 'Ignoring %s; not a valid file name ' '(%s)\\n' % ( arg_0 , ', ' . join ( GetAllExtensions ( ) ) ) ) else : FuncData ( arg_0 , arg_7 , arg_5 , Error , arg_2 ) if arg_3 and arg_4 : for arg_6 in arg_4 : Error ( arg_0 , arg_6 , 'whitespace\/newline' , 1 , 'Unexpected \\\\r (^M) found; better to use only \\\\n' ) _RestoreFilters ( )","id_":252514,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L6188-L6282","negative":"Return a AzureDLFileSystem object."} {"query":"Retrieves service account info for invalid credentials .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . invalid : arg_2 = _metadata . get_service_account_info ( arg_1 , service_account = arg_0 . service_account_email or 'default' ) arg_0 . invalid = False arg_0 . service_account_email = arg_2 [ 'email' ] arg_0 . scopes = arg_2 [ 'scopes' ]","id_":252515,"task_name":"https:\/\/github.com\/googleapis\/oauth2client\/blob\/50d20532a748f18e53f7d24ccbe6647132c979a9\/oauth2client\/contrib\/gce.py#L102-L114","negative":"Sets the review comment. Raises CardinalityError if\n already set. OrderError if no reviewer defined before."} {"query":"Merge two lists of statements into one","positive":"def Func ( arg_0 : arg_1 [ \"HdlStatement\" ] , arg_2 : arg_1 [ \"HdlStatement\" ] ) -> arg_1 [ \"HdlStatement\" ] : if arg_0 is None and arg_2 is None : return None arg_3 = [ ] arg_4 = iter ( arg_0 ) arg_5 = iter ( arg_2 ) arg_6 = None arg_7 = None arg_8 = False arg_9 = False while not arg_8 and not arg_9 : while not arg_8 : arg_6 = next ( arg_4 , None ) if arg_6 is None : arg_8 = True break elif arg_6 . rank == 0 : arg_3 . append ( arg_6 ) arg_6 = None else : break while not arg_9 : arg_7 = next ( arg_5 , None ) if arg_7 is None : arg_9 = True break elif arg_7 . rank == 0 : arg_3 . append ( arg_7 ) arg_7 = None else : break if arg_6 is not None or arg_7 is not None : arg_6 . _merge_with_other_stm ( arg_7 ) arg_3 . append ( arg_6 ) arg_6 = None arg_7 = None return arg_3","id_":252516,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/statements.py#L372-L423","negative":"Get analog data."} {"query":"Condition to stop when any batch member converges or all have failed .","positive":"def Func ( arg_0 , arg_1 ) : return ( tf . reduce_any ( input_tensor = arg_0 ) | tf . reduce_all ( input_tensor = arg_1 ) )","id_":252517,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/bfgs_utils.py#L36-L39","negative":"Return the maximum file descriptor value."} {"query":"Gets the position of the text the ParseNode processed . If the ParseNode does not have its own position it looks to its first child for its position .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _Func if arg_1 is None and arg_0 . children : arg_2 = arg_0 . children [ 0 ] if isinstance ( arg_2 , ParseNode ) : arg_1 = arg_2 . Func return arg_1","id_":252518,"task_name":"https:\/\/github.com\/treycucco\/pyebnf\/blob\/3634ddabbe5d73508bcc20f4a591f86a46634e1d\/pyebnf\/primitive.py#L49-L61","negative":"Clone throttles without memory"} {"query":"Attempt to detect requirements files in the current working directory","positive":"def Func ( arg_0 ) : if arg_0 . _is_valid_requirements_file ( 'requirements.txt' ) : arg_0 . filenames . append ( 'requirements.txt' ) if arg_0 . _is_valid_requirements_file ( 'requirements.pip' ) : arg_0 . 
filenames . append ( 'requirements.pip' ) if os . path . isdir ( 'requirements' ) : for arg_1 in os . listdir ( 'requirements' ) : arg_2 = os . path . join ( 'requirements' , arg_1 ) if arg_0 . _is_valid_requirements_file ( arg_2 ) : arg_0 . filenames . append ( arg_2 ) arg_0 . _check_inclusions_recursively ( )","id_":252519,"task_name":"https:\/\/github.com\/simion\/pip-upgrader\/blob\/716adca65d9ed56d4d416f94ede8a8e4fa8d640a\/pip_upgrader\/requirements_detector.py#L32-L45","negative":"Return True if we should retry, False otherwise."} {"query":"Send data synchronous to an ADS - device from data name .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = adsSyncReadWriteReqEx2 ( arg_0 , arg_1 , ADSIGRP_SYM_HNDBYNAME , 0x0 , PLCTYPE_UDINT , arg_2 , PLCTYPE_STRING , ) adsSyncWriteReqEx ( arg_0 , arg_1 , ADSIGRP_SYM_VALBYHND , arg_5 , arg_3 , arg_4 ) adsSyncWriteReqEx ( arg_0 , arg_1 , ADSIGRP_SYM_RELEASEHND , 0 , arg_5 , PLCTYPE_UDINT )","id_":252520,"task_name":"https:\/\/github.com\/stlehmann\/pyads\/blob\/44bd84394db2785332ac44b2948373916bea0f02\/pyads\/pyads_ex.py#L595-L622","negative":"Seek through the file to find how many data blocks there are in the file\n\n Returns:\n n_blocks (int): number of data blocks in the file"} {"query":"Wait until all pending messages have been sent .","positive":"async def Func ( arg_0 ) : if not arg_0 . running : await arg_0 . open ( ) try : arg_1 = arg_0 . _handler . _pending_messages [ : ] await arg_0 . _handler . wait_async ( ) arg_2 = [ ] for arg_3 in arg_1 : if arg_3 . state == constants . MessageState . SendFailed : arg_2 . append ( ( False , MessageSendFailed ( arg_3 . _response ) ) ) else : arg_2 . append ( ( True , None ) ) return arg_2 except Exception as e : raise MessageSendFailed ( e )","id_":252521,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicebus\/azure\/servicebus\/aio\/async_send_handler.py#L151-L182","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Get 3D markers with residual .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : return arg_0 . _get_3d_markers ( RT3DMarkerPositionResidual , arg_1 , arg_2 , arg_3 )","id_":252522,"task_name":"https:\/\/github.com\/qualisys\/qualisys_python_sdk\/blob\/127d7eeebc2b38b5cafdfa5d1d0198437fedd274\/qtm\/packet.py#L484-L490","negative":"Remove unwanted logbooks from list."} {"query":"Loads annotations from disk .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . _all_get_from_attrs ( arg_2 , HDF5StorageService . ANNOTATED ) if arg_3 : arg_4 = arg_1 . v_annotations if not arg_4 . f_is_empty ( ) : raise TypeError ( 'Loading into non-empty annotations!' ) arg_5 = arg_2 . _v_attrs for arg_6 in arg_5 . _v_attrnames : if arg_6 . startswith ( HDF5StorageService . ANNOTATION_PREFIX ) : arg_7 = arg_6 arg_7 = arg_7 . replace ( HDF5StorageService . 
ANNOTATION_PREFIX , '' ) arg_8 = getattr ( arg_5 , arg_6 ) setattr ( arg_4 , arg_7 , arg_8 )","id_":252523,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/storageservice.py#L3516-L3538","negative":"increments the branches counter and checks boolean expressions"} {"query":"r Method to calculate molar volume of a solid mixture at temperature T pressure P mole fractions zs and weight fractions ws with a given method .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : if arg_5 == SIMPLE : arg_6 = [ i ( arg_1 , arg_2 ) for i in arg_0 . VolumeSolids ] return mixing_simple ( arg_3 , arg_6 ) else : raise Exception ( 'Method not valid' )","id_":252524,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/volume.py#L2521-L2552","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"a simple plotting routine to plot the PSD versus frequency .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = None , arg_4 = None , ** arg_5 ) : import pylab from pylab import arg_3 as plt_ylim arg_6 = arg_0 . psd if arg_4 is not None : if arg_4 not in arg_0 . _sides_choices : raise errors . SpectrumChoiceError ( arg_4 , arg_0 . _sides_choices ) if arg_4 is None or arg_4 == arg_0 . sides : arg_7 = arg_0 . frequencies ( ) arg_8 = arg_0 . psd arg_4 = arg_0 . sides elif arg_4 is not None : if arg_0 . datatype == 'complex' : if arg_4 == 'onesided' : raise ValueError ( \"sides cannot be one-sided with complex data\" ) logging . debug ( \"sides is different from the one provided. Converting PSD\" ) arg_7 = arg_0 . frequencies ( arg_4 = arg_4 ) arg_8 = arg_0 . get_converted_psd ( arg_4 ) if len ( arg_8 ) != len ( arg_7 ) : raise ValueError ( \"PSD length is %s and freq length is %s\" % ( len ( arg_8 ) , len ( arg_7 ) ) ) if 'ax' in list ( arg_5 . keys ( ) ) : arg_9 = pylab . gca ( ) pylab . sca ( arg_5 [ 'ax' ] ) arg_10 = True del arg_5 [ 'ax' ] else : arg_10 = False if arg_2 : pylab . Func ( arg_7 , 10 * stools . log10 ( arg_8 \/ max ( arg_8 ) ) , ** arg_5 ) else : pylab . Func ( arg_7 , 10 * stools . log10 ( arg_8 ) , ** arg_5 ) pylab . xlabel ( 'Frequency' ) pylab . ylabel ( 'Power (dB)' ) pylab . grid ( True ) if arg_3 : plt_ylim ( arg_3 ) if arg_4 == 'onesided' : pylab . xlim ( 0 , arg_0 . sampling \/ 2. ) elif arg_4 == 'twosided' : pylab . xlim ( 0 , arg_0 . sampling ) elif arg_4 == 'centerdc' : pylab . xlim ( - arg_0 . sampling \/ 2. , arg_0 . sampling \/ 2. ) if arg_1 : pylab . savefig ( arg_1 ) if arg_10 : pylab . sca ( arg_9 ) del arg_8 , arg_7","id_":252525,"task_name":"https:\/\/github.com\/cokelaer\/spectrum\/blob\/bad6c32e3f10e185098748f67bb421b378b06afe\/src\/spectrum\/psd.py#L627-L715","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Checks that the year is within 50 years from now .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_0 not in xrange ( ( now . year - 50 ) , ( now . year + 51 ) ) : arg_0 = now . year arg_1 = now . 
month arg_2 = arg_3 return arg_0 , arg_1 , arg_2","id_":252526,"task_name":"https:\/\/github.com\/wreckage\/django-happenings\/blob\/7bca5576efa6cd4c4e87356bf9e5b8cd538ae91d\/happenings\/utils\/common.py#L115-L121","negative":"Delete the specified InactivityAlert\n\n :param tag_id: The tag ID to delete\n :type tag_id: str\n\n :raises: This will raise a\n :class:`ServerException `\n if there is an error from Logentries"} {"query":"get the ip_address of an inserted instance . Will try three times with delay to give the instance time to start up .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 3 , arg_3 = 3 ) : for arg_4 in range ( arg_2 ) : arg_5 = arg_0 . _get_instances ( ) for arg_6 in arg_5 [ 'items' ] : if arg_6 [ 'name' ] == arg_1 : for arg_7 in arg_6 [ 'networkInterfaces' ] : if arg_7 [ 'name' ] == 'nic0' : for arg_8 in arg_7 [ 'accessConfigs' ] : if arg_8 [ 'name' ] == 'External NAT' : if 'natIP' in arg_8 : return arg_8 [ 'natIP' ] sleep ( arg_3 ) bot . warning ( 'Did not find IP address, check Cloud Console!' )","id_":252527,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/google_storage\/build.py#L197-L229","negative":"Returns a new DataFrame where the virtual column is turned into an in memory numpy array.\n\n Example:\n\n >>> x = np.arange(1,4)\n >>> y = np.arange(2,5)\n >>> df = vaex.from_arrays(x=x, y=y)\n >>> df['r'] = (df.x**2 + df.y**2)**0.5 # 'r' is a virtual column (computed on the fly)\n >>> df = df.materialize('r') # now 'r' is a 'real' column (i.e. a numpy array)\n\n :param inplace: {inplace}"} {"query":"Construct an id for agency using its tags .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . tags . get ( 'operator' ) if arg_1 : return int ( hashlib . sha256 ( arg_1 . encode ( 'utf-8' ) ) . hexdigest ( ) , 16 ) % 10 ** 8 return - 1","id_":252528,"task_name":"https:\/\/github.com\/hiposfer\/o2g\/blob\/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3\/o2g\/osm\/builders\/route_builder.py#L54-L59","negative":"Get experiment or experiment job logs.\n\n Uses [Caching](\/references\/polyaxon-cli\/#caching)\n\n Examples for getting experiment logs:\n\n \\b\n ```bash\n $ polyaxon experiment logs\n ```\n\n \\b\n ```bash\n $ polyaxon experiment -xp 10 -p mnist logs\n ```\n\n Examples for getting experiment job logs:\n\n \\b\n ```bash\n $ polyaxon experiment -xp 1 -j 1 logs\n ```"} {"query":"Visualization of a default node .","positive":"def Func ( arg_0 , Func , arg_2 = 1.0 ) : if arg_0 . depth : try : colors . shadow ( dx = 5 , dy = 5 , blur = 10 , arg_2 = 0.5 * arg_2 ) except : pass arg_0 . _ctx . nofill ( ) arg_0 . _ctx . nostroke ( ) if arg_0 . fill : arg_0 . _ctx . fill ( arg_0 . fill . r , arg_0 . fill . g , arg_0 . fill . b , arg_0 . fill . a * arg_2 ) if arg_0 . stroke : arg_0 . _ctx . strokewidth ( arg_0 . strokewidth ) arg_0 . _ctx . stroke ( arg_0 . stroke . r , arg_0 . stroke . g , arg_0 . stroke . b , arg_0 . stroke . a * arg_2 * 3 ) arg_3 = Func . r arg_0 . _ctx . oval ( Func . x - arg_3 , Func . 
y - arg_3 , arg_3 * 2 , arg_3 * 2 )","id_":252529,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/graph\/style.py#L223-L250","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Validate a python object against an OPF json schema file","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = os . path . join ( os . path . dirname ( __file__ ) , \"jsonschema\" , arg_1 ) jsonhelpers . validate ( arg_0 , schemaPath = arg_2 ) return","id_":252530,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/frameworks\/opf\/opf_utils.py#L354-L372","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."} {"query":"REFACTOR status to project init result ENUM jelenleg ha a project init False akkor torlunk minden adatot a projectrol de van egy atmenet mikor csak a lang init nem sikerult erre valo jelenleg a status . ez rossz","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = { } arg_1 . init ( arg_2 , arg_5 , arg_3 , arg_4 = arg_4 ) arg_6 = [ ] for arg_7 , arg_8 in list ( arg_5 . items ( ) ) : if arg_8 is False and arg_7 not in arg_6 : arg_6 . append ( arg_7 ) return arg_6","id_":252531,"task_name":"https:\/\/github.com\/voidpp\/vcp\/blob\/5538cdb7b43029db9aac9edad823cd87afd89ab5\/vcp\/commands.py#L73-L89","negative":"Check the spacing of a single equals sign."} {"query":"Calls Fortran function that reads attribute data . data_offset translates unsigned into signed . If number read in is negative offset added .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 = None ) : arg_10 , = np . where ( arg_2 == arg_7 ) if len ( arg_10 ) > 0 : arg_11 = arg_3 [ arg_10 ] . max ( ) arg_12 = arg_3 [ arg_10 ] arg_13 = np . array ( arg_1 ) [ arg_10 ] arg_14 = np . array ( arg_6 ) [ arg_10 ] arg_15 = arg_4 [ arg_10 ] arg_16 = arg_5 [ arg_10 ] arg_17 , arg_18 = arg_8 ( arg_0 . fname , arg_16 , arg_15 , len ( arg_16 ) , arg_11 , len ( arg_0 . fname ) ) if ( arg_17 == 0 ) . all ( ) : if arg_9 is not None : arg_18 = arg_18 . astype ( int ) arg_10 , arg_19 , = np . where ( arg_18 < 0 ) arg_18 [ arg_10 , arg_19 ] += arg_9 arg_0 . _process_return_multi_z_attr ( arg_18 , arg_13 , arg_14 , arg_12 ) else : arg_10 , = np . where ( arg_17 != 0 ) raise IOError ( fortran_cdf . statusreporter ( arg_17 [ arg_10 ] [ 0 ] ) )","id_":252532,"task_name":"https:\/\/github.com\/rstoneback\/pysatCDF\/blob\/479839f719dbece8e52d6bf6a466cb9506db6719\/pysatCDF\/_cdf.py#L487-L521","negative":"This functions returns a list of jobs"} {"query":"Decorator to run some code in a bot instance .","positive":"def Func ( ** arg_0 ) : def decorator ( arg_1 ) : def run ( ) : from shoebot import ShoebotInstallError print ( \" Shoebot - %s:\" % arg_1 . __name__ . replace ( \"_\" , \" \" ) ) try : import shoebot arg_2 = \"\/tmp\/shoebot-%s.png\" % arg_1 . __name__ arg_3 = shoebot . create_bot ( arg_2 = arg_2 ) arg_1 ( arg_3 ) arg_3 . finish ( ) print ( ' [passed] : %s' % arg_2 ) print ( '' ) except ShoebotInstallError as e : print ( ' [failed]' , e . 
args [ 0 ] ) print ( '' ) except Exception : print ( ' [failed] - traceback:' ) for arg_4 in traceback . format_exc ( ) . splitlines ( ) : print ( ' %s' % arg_4 ) print ( '' ) return run return decorator","id_":252533,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/diagnose.py#L122-L150","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Add one or more files or URLs to the manifest . If files contains a glob it is expanded .","positive":"def Func ( arg_0 , * arg_1 ) : def _is_url ( arg_2 ) : arg_3 = urlparse ( arg_2 ) return bool ( arg_3 . scheme ) for arg_2 in arg_1 : arg_2 = os . path . expanduser ( arg_2 ) if _is_url ( arg_2 ) : arg_0 . Func_url ( arg_2 ) elif os . path . isfile ( arg_2 ) : arg_0 . Func_file ( arg_2 ) elif os . path . isdir ( arg_2 ) : for arg_4 in os . listdir ( arg_2 ) : arg_0 . Func_file ( arg_4 ) elif glob . glob ( arg_2 ) : for arg_4 in glob . glob ( arg_2 ) : arg_0 . Func_file ( arg_4 ) else : raise ValueError ( 'Path: \"{0}\" is not a valid format or does not exist. ' 'Manifest paths must be files, directories, or URLs.' . format ( arg_2 ) )","id_":252534,"task_name":"https:\/\/github.com\/solvebio\/solvebio-python\/blob\/b29614643043afd19c1d8074e8f25c6700d51a73\/solvebio\/resource\/manifest.py#L47-L76","negative":"Set the presence status."} {"query":"Help calculate network - wide concordance","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 [ arg_6 ] = None , ) -> Tuple [ int , int , int , int ] : arg_7 = defaultdict ( int ) for arg_8 , arg_9 , arg_10 , arg_11 in arg_0 . edges ( keys = True , data = True ) : arg_12 = edge_concords ( arg_0 , arg_8 , arg_9 , arg_10 , arg_11 , arg_2 , arg_4 = arg_4 ) arg_7 [ arg_12 ] += 1 return ( arg_7 [ Concordance . correct ] , arg_7 [ Concordance . incorrect ] , arg_7 [ Concordance . ambiguous ] , arg_7 [ Concordance . unassigned ] , )","id_":252535,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/concordance.py#L140-L163","negative":"Reraises `exception`, appending `message` to its string representation."} {"query":"Predict inside or outside AD for X .","positive":"def Func ( arg_0 , arg_1 ) : check_is_fitted ( arg_0 , [ 'inverse_influence_matrix' ] ) arg_1 = check_array ( arg_1 ) return arg_0 . __find_leverages ( arg_1 , arg_0 . inverse_influence_matrix ) <= arg_0 . threshold_value","id_":252536,"task_name":"https:\/\/github.com\/stsouko\/CIMtools\/blob\/cbb46e68eaa1fe7e7b6cb311fc7063e97096bdf3\/CIMtools\/applicability_domain\/leverage.py#L151-L170","negative":"Semver tag triggered deployment helper"} {"query":"Connect to a SAMP Hub and wait for a single table load event disconnect download the table and return the DataFrame .","positive":"def Func ( arg_0 = None , arg_1 = None ) : print ( \"Waiting for SAMP message...\" ) import vaex . samp arg_2 = vaex . samp . single_table ( arg_0 = arg_0 , arg_1 = arg_1 ) return from_astropy_table ( arg_2 . to_table ( ) )","id_":252537,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/__init__.py#L217-L225","negative":"Request the api endpoint to retrieve information about the inventory\n\n :return: Main Collection\n :rtype: Collection"} {"query":"Created to take some of the load off of _handle_weekly_repeat_out","positive":"def Func ( arg_0 ) : arg_0 . num = 14 arg_2 = arg_0 . repeat_biweekly ( ) if arg_2 : if arg_0 . 
event . is_chunk ( ) and min ( arg_2 ) not in xrange ( 1 , 8 ) : arg_2 = _chunk_fill_out_first_week ( arg_0 . year , arg_0 . month , arg_2 , arg_0 . event , diff = arg_0 . event . start_end_diff , ) for arg_3 , arg_4 in arg_2 . items ( ) : for arg_5 in arg_4 : arg_0 . count [ arg_3 ] . append ( arg_5 )","id_":252538,"task_name":"https:\/\/github.com\/wreckage\/django-happenings\/blob\/7bca5576efa6cd4c4e87356bf9e5b8cd538ae91d\/happenings\/utils\/handlers.py#L230-L242","negative":"Get the bounding box for the mesh"} {"query":"Do a reverse look up for an item containing the requested data","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . _wx_data_map [ arg_2 ] if wx . VERSION < ( 3 , 0 , 0 ) or 'classic' in wx . version ( ) : arg_4 = arg_0 . FindItemData ( arg_1 , arg_3 ) else : arg_4 = arg_0 . FindItem ( arg_1 , arg_3 ) return arg_4","id_":252539,"task_name":"https:\/\/github.com\/reingart\/gui2py\/blob\/aca0a05f6fcde55c94ad7cc058671a06608b01a4\/gui\/controls\/listview.py#L72-L81","negative":"Creates the Nginx configuration for the project"} {"query":"Prepend a child element with the specified name .","positive":"def Func ( arg_0 , arg_1 ) : return XMLElement ( lib . lsl_Func ( arg_0 . e , str . encode ( arg_1 ) ) )","id_":252540,"task_name":"https:\/\/github.com\/labstreaminglayer\/liblsl-Python\/blob\/1ff6fe2794f8dba286b7491d1f7a4c915b8a0605\/pylsl\/pylsl.py#L1022-L1024","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Stop streaming frames .","positive":"async def Func ( arg_0 ) : arg_0 . _protocol . set_on_packet ( None ) arg_1 = \"streamframes stop\" await arg_0 . _protocol . send_command ( arg_1 , callback = False )","id_":252541,"task_name":"https:\/\/github.com\/qualisys\/qualisys_python_sdk\/blob\/127d7eeebc2b38b5cafdfa5d1d0198437fedd274\/qtm\/qrt.py#L175-L181","negative":"Remove hidden notes and tag a CERN if detected."} {"query":"Create a copy of this pen .","positive":"def Func ( arg_0 ) : arg_1 = Pen ( ) arg_1 . __dict__ = arg_0 . __dict__ . 
Func ( ) return arg_1","id_":252542,"task_name":"https:\/\/github.com\/jrfonseca\/xdot.py\/blob\/6248c81c21a0fe825089311b17f2c302eea614a2\/xdot\/ui\/pen.py#L46-L50","negative":"This function returns a dictionary representation of a docker-compose.yml file, based on assembled_specs from\n the spec_assembler, and port_specs from the port_spec compiler"} {"query":"Deletes a granule of an existing imagemosaic","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None ) : arg_5 = dict ( ) arg_6 = arg_4 if isinstance ( arg_2 , basestring ) : arg_7 = arg_2 else : arg_7 = arg_2 . name arg_6 = arg_2 . workspace . name if arg_6 is None : raise ValueError ( \"Must specify workspace\" ) arg_8 = build_url ( arg_0 . service_url , [ \"workspaces\" , arg_6 , \"coveragestores\" , arg_7 , \"coverages\" , arg_1 , \"index\/granules\" , arg_3 , \".json\" ] , arg_5 ) arg_9 = { \"Content-type\" : \"application\/json\" , \"Accept\" : \"application\/json\" } arg_10 = arg_0 . http_request ( arg_8 , method = 'delete' , arg_9 = arg_9 ) if arg_10 . status_code != 200 : FailedRequestError ( 'Failed to delete granule from mosaic {} : {}, {}' . format ( arg_2 , arg_10 . status_code , arg_10 . text ) ) arg_0 . _cache . clear ( ) return None","id_":252543,"task_name":"https:\/\/github.com\/boundlessgeo\/gsconfig\/blob\/532f561f32b91ea8debea0573c503dd20988bf40\/src\/geoserver\/catalog.py#L671-L713","negative":"_get_key_for_index - Returns the key name that would hold the indexes on a value\n\t\t\tInternal - does not validate that indexedFields is actually indexed. Trusts you. Don't let it down.\n\n\t\t\t@param indexedField - string of field name\n\t\t\t@param val - Value of field\n\n\t\t\t@return - Key name string, potentially hashed."} {"query":"Nova annotation for adding function to process nova notification .","positive":"def Func ( * arg_0 ) : check_event_type ( Openstack . Nova , * arg_0 ) arg_1 = arg_0 [ 0 ] def decorator ( arg_2 ) : if arg_1 . find ( \"*\" ) != - 1 : arg_3 = pre_compile ( arg_1 ) arg_4 [ arg_3 ] = arg_2 else : arg_5 [ arg_1 ] = arg_2 log . info ( \"add function {0} to process event_type:{1}\" . format ( arg_2 . __name__ , arg_1 ) ) @ functools . wraps ( arg_2 ) def wrapper ( * arg_6 , ** arg_7 ) : arg_2 ( * arg_6 , ** arg_7 ) return wrapper return decorator","id_":252544,"task_name":"https:\/\/github.com\/ndrlslz\/ternya\/blob\/c05aec10029e645d63ff04313dbcf2644743481f\/ternya\/annotation.py#L53-L79","negative":"Returns the value specified in the XDG_CONFIG_HOME environment variable\n or the appropriate default."} {"query":"Deletes validation log .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_0 . _fill_project_info ( arg_1 ) arg_0 . db . ValidLog . delete_many ( arg_1 ) logging . info ( \"[Database] Delete ValidLog SUCCESS\" )","id_":252545,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/db.py#L495-L509","negative":"Given a single spinn3r feed entry, produce a single StreamItem.\n\n Returns 'None' if a complete item can't be constructed."} {"query":"Create and configure a new security group .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . 
create_Func ( GroupName = \"private-subnet\" , Description = \"security group for remote executors\" ) arg_3 = [ { 'CidrIp' : '10.0.0.0\/16' } ] arg_4 = [ { 'IpProtocol' : 'TCP' , 'FromPort' : 0 , 'ToPort' : 65535 , 'IpRanges' : arg_3 , } , { 'IpProtocol' : 'UDP' , 'FromPort' : 0 , 'ToPort' : 65535 , 'IpRanges' : arg_3 , } , { 'IpProtocol' : 'ICMP' , 'FromPort' : - 1 , 'ToPort' : - 1 , 'IpRanges' : [ { 'CidrIp' : '0.0.0.0\/0' } ] , } , { 'IpProtocol' : 'TCP' , 'FromPort' : 22 , 'ToPort' : 22 , 'IpRanges' : [ { 'CidrIp' : '0.0.0.0\/0' } ] , } ] arg_5 = [ { 'IpProtocol' : 'TCP' , 'FromPort' : 0 , 'ToPort' : 65535 , 'IpRanges' : [ { 'CidrIp' : '0.0.0.0\/0' } ] , } , { 'IpProtocol' : 'TCP' , 'FromPort' : 0 , 'ToPort' : 65535 , 'IpRanges' : arg_3 , } , { 'IpProtocol' : 'UDP' , 'FromPort' : 0 , 'ToPort' : 65535 , 'IpRanges' : arg_3 , } , ] arg_2 . authorize_ingress ( IpPermissions = arg_4 ) arg_2 . authorize_egress ( IpPermissions = arg_5 ) arg_0 . sg_id = arg_2 . id return arg_2","id_":252546,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/providers\/aws\/aws.py#L336-L413","negative":"Update disease terms in mongo database."} {"query":"Synchronize content metadata using the Degreed course content API .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_3 , arg_4 = getattr ( arg_0 , '_' + arg_2 ) ( urljoin ( arg_0 . enterprise_configuration . degreed_base_url , arg_0 . global_degreed_config . course_api_path ) , arg_1 , arg_0 . CONTENT_PROVIDER_SCOPE ) except requests . exceptions . RequestException as exc : raise ClientError ( 'DegreedAPIClient request failed: {error} {message}' . format ( error = exc . __class__ . __name__ , message = str ( exc ) ) ) if arg_3 >= 400 : raise ClientError ( 'DegreedAPIClient request failed with status {status_code}: {message}' . format ( arg_3 = arg_3 , message = arg_4 ) )","id_":252547,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/degreed\/client.py#L127-L158","negative":"All keys in DB, or all keys matching a glob"} {"query":"Run the excel_to_html function from the command - line .","positive":"def Func ( ) : arg_0 = argparse . ArgumentParser ( prog = 'excel_to_html' ) arg_0 . add_argument ( '-p' , nargs = '?' , help = 'Path to an excel file for conversion.' ) arg_0 . add_argument ( '-s' , nargs = '?' , help = 'The name of a sheet in our excel file. Defaults to \"Sheet1\".' , ) arg_0 . add_argument ( '-css' , nargs = '?' , help = 'Space separated css classes to append to the table.' ) arg_0 . add_argument ( '-m' , action = 'store_true' , help = 'Merge, attempt to combine merged cells.' ) arg_0 . add_argument ( '-c' , nargs = '?' , help = 'Caption for creating an accessible table.' ) arg_0 . add_argument ( '-d' , nargs = '?' , help = 'Two strings separated by a | character. The first string \\ is for the html \"summary\" attribute and the second string is for the html \"details\" attribute. \\ both values must be provided and nothing more.' , ) arg_0 . add_argument ( '-r' , action = 'store_true' , help = 'Row headers. Does the table have row headers?' ) arg_1 = arg_0 . parse_args ( ) arg_2 = { 'p' : arg_1 . p , 's' : arg_1 . s , 'css' : arg_1 . css , 'm' : arg_1 . m , 'c' : arg_1 . c , 'd' : arg_1 . d , 'r' : arg_1 . 
r , } arg_3 = arg_2 [ 'p' ] arg_4 = arg_2 [ 's' ] if arg_2 [ 's' ] else 'Sheet1' arg_5 = arg_2 [ 'css' ] if arg_2 [ 'css' ] else '' arg_6 = arg_2 [ 'm' ] if arg_2 [ 'm' ] else False arg_7 = arg_2 [ 'c' ] if arg_2 [ 'c' ] else '' arg_8 = arg_2 [ 'd' ] . split ( '|' ) if arg_2 [ 'd' ] else [ ] arg_9 = arg_2 [ 'r' ] if arg_2 [ 'r' ] else False arg_10 = fp . excel_to_html ( arg_3 , sheetname = arg_4 , css_classes = arg_5 , caption = arg_7 , details = arg_8 , row_headers = arg_9 , merge = arg_6 ) print ( arg_10 )","id_":252548,"task_name":"https:\/\/github.com\/bbusenius\/Diablo-Python\/blob\/646ac5a6f1c79cf9b928a4e2a7979988698b6c82\/commands.py#L47-L117","negative":"Pad dimensions of event tensors for mixture distributions.\n\n See `Mixture._sample_n` and `MixtureSameFamily._sample_n` for usage examples.\n\n Args:\n x: event tensor to pad.\n mixture_distribution: Base distribution of the mixture.\n categorical_distribution: `Categorical` distribution that mixes the base\n distribution.\n event_ndims: Integer specifying the number of event dimensions in the event\n tensor.\n\n Returns:\n A padded version of `x` that can broadcast with `categorical_distribution`."} {"query":"Accept the method if its attributes match .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_1 . im_class except AttributeError : return False return arg_0 . validateAttrib ( arg_1 , arg_2 )","id_":252549,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/plugins\/attrib.py#L279-L286","negative":"Return a AzureDLFileSystem object."} {"query":"Deconstruct the Constraint instance to a tuple .","positive":"def Func ( arg_0 ) : return ( arg_0 . selector , COMPARISON_MAP . get ( arg_0 . comparison , arg_0 . comparison ) , arg_0 . argument )","id_":252550,"task_name":"https:\/\/github.com\/sergedomk\/fiql_parser\/blob\/499dd7cd0741603530ce5f3803d92813e74ac9c3\/fiql_parser\/constraint.py#L105-L115","negative":"Downloads a MP4 or WebM file that is associated with the video at the URL passed.\n\n :param str url: URL of the video to be downloaded\n :return str: Filename of the file in local storage"} {"query":"This functions returns a list of jobs","positive":"def Func ( arg_0 ) : arg_0 . spawn ( arg_0 . _heartbeat ) arg_0 . spawn ( arg_0 . _heartbeat_timeout )","id_":252551,"task_name":"https:\/\/github.com\/abourget\/gevent-socketio\/blob\/1cdb1594a315326987a17ce0924ea448a82fab01\/socketio\/virtsocket.py#L477-L480","negative":"Compares and exchanges.\n\n Compares the value in the AL, AX, EAX or RAX register (depending on the\n size of the operand) with the first operand (destination operand). If\n the two values are equal, the second operand (source operand) is loaded\n into the destination operand. Otherwise, the destination operand is\n loaded into the AL, AX, EAX or RAX register.\n\n The ZF flag is set if the values in the destination operand and\n register AL, AX, or EAX are equal; otherwise it is cleared. 
The CF, PF,\n AF, SF, and OF flags are set according to the results of the comparison\n operation::\n\n (* accumulator = AL, AX, EAX or RAX, depending on whether *)\n (* a byte, word, a doubleword or a 64bit comparison is being performed*)\n IF accumulator == DEST\n THEN\n ZF = 1\n DEST = SRC\n ELSE\n ZF = 0\n accumulator = DEST\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Calculate the md5 hash for this file .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . open ( 'rb' ) try : arg_2 = md5 ( ) while True : arg_3 = arg_1 . read ( 8192 ) if not arg_3 : break arg_2 . update ( arg_3 ) finally : arg_1 . close ( ) return arg_2 . digest ( )","id_":252552,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/external\/path\/_path.py#L737-L752","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Make scheduler service","positive":"def Func ( arg_0 ) : arg_1 = tainternet . TimerService ( arg_0 [ 'frequency' ] , runProcess , arg_0 [ 'args' ] , arg_0 [ 'timeout' ] , arg_0 [ 'grace' ] , tireactor ) arg_2 = service . MultiService ( ) arg_1 . setName ( 'scheduler' ) arg_1 . setServiceParent ( arg_2 ) heart . maybeAddHeart ( arg_2 ) return arg_2","id_":252553,"task_name":"https:\/\/github.com\/ncolony\/ncolony\/blob\/6ac71bda1de6706fb34244ae4972e36db5f062d3\/ncolony\/schedulelib.py#L129-L141","negative":"Coincidence matrix.\n\n Parameters\n ----------\n value_counts : ndarray, with shape (N, V)\n Number of coders that assigned a certain value to a determined unit, where N is the number of units\n and V is the value count.\n\n value_domain : array_like, with shape (V,)\n Possible values V the units can take.\n If the level of measurement is not nominal, it must be ordered.\n\n dtype : data-type\n Result and computation data-type.\n\n Returns\n -------\n o : ndarray, with shape (V, V)\n Coincidence matrix."} {"query":"Reimplemented to support prompt requests .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 [ 'parent_header' ] . get ( 'msg_id' ) arg_3 = arg_0 . _request_info [ 'execute' ] . get ( arg_2 ) if arg_3 and arg_3 . kind == 'prompt' : arg_4 = arg_1 [ 'content' ] [ 'execution_count' ] + 1 arg_0 . _show_interpreter_prompt ( arg_4 ) arg_0 . _request_info [ 'execute' ] . pop ( arg_2 ) else : super ( IPythonWidget , arg_0 ) . Func ( arg_1 )","id_":252554,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/ipython_widget.py#L166-L176","negative":"Get top centrality dictionary."} {"query":"Adds a logger with a given name .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_2 = arg_0 . __class__ arg_1 = '%s.%s' % ( arg_2 . __module__ , arg_2 . __name__ ) arg_0 . _logger = logging . getLogger ( arg_1 )","id_":252555,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/pypetlogging.py#L311-L321","negative":"Get the context for this view."} {"query":"Registers the given widget .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : if not issubclass ( arg_1 , DashboardWidgetBase ) : raise ImproperlyConfigured ( 'DashboardWidgets must be subclasses of DashboardWidgetBase,' ' {0} is not.' . format ( arg_1 ) ) arg_3 = arg_1 ( ** arg_2 ) arg_4 = arg_3 . get_name ( ) if arg_4 in arg_0 . 
widgets : raise WidgetAlreadyRegistered ( 'Cannot register {0}, a plugin with this name {1} is already ' 'registered.' . format ( arg_1 , arg_4 ) ) arg_0 . widgets [ arg_4 ] = arg_3","id_":252556,"task_name":"https:\/\/github.com\/bitlabstudio\/django-dashboard-app\/blob\/ed98f2bca91a4ced36d0dd1aa1baee78e989cf64\/dashboard_app\/widget_pool.py#L69-L91","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Attempt to authenticate a set of credentials .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None ) : arg_2 = arg_2 or arg_4 try : arg_5 = models . EmailAddress . objects . get ( is_verified = True , arg_2 = arg_2 ) except models . EmailAddress . DoesNotExist : return None arg_6 = arg_5 . user if arg_6 . check_password ( arg_3 ) : return arg_6 return None","id_":252557,"task_name":"https:\/\/github.com\/cdriehuys\/django-rest-email-auth\/blob\/7e752c4d77ae02d2d046f214f56e743aa12ab23f\/rest_email_auth\/authentication.py#L40-L74","negative":"A list of row indices to remove. There are two caveats. First, this is\n a potentially slow operation. Second, pattern indices will shift if\n patterns before them are removed."} {"query":"Add all of the members of the complex abundances to the graph .","positive":"def Func ( arg_0 : arg_1 ) -> None : arg_2 = list ( get_nodes_by_function ( arg_0 , COMPLEX ) ) for arg_3 in arg_2 : for arg_4 in arg_3 . members : arg_0 . add_has_component ( arg_3 , arg_4 )","id_":252558,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/mutation\/expansion.py#L216-L221","negative":"Attempt to re-establish a connection using previously acquired tokens.\n\n If the Skype token is valid but the registration token is invalid, a new endpoint will be registered.\n\n Raises:\n .SkypeAuthException: if the token file cannot be used to authenticate"} {"query":"Get the underlying botocore . Credentials object .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 , arg_3 = arg_0 . _Func ( arg_1 ) return arg_2 . Func ( ) . get_frozen_credentials ( )","id_":252559,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/aws_hook.py#L183-L192","negative":"Reorder a track or a group of tracks in a playlist.\n\n Parameters\n ----------\n playlist : Union[str, Playlist]\n The playlist to modify\n start : int\n The position of the first track to be reordered.\n insert_before : int\n The position where the tracks should be inserted.\n length : Optional[int]\n The amount of tracks to be reordered. Defaults to 1 if not set.\n snapshot_id : str\n The playlist\u2019s snapshot ID against which you want to make the changes.\n\n Returns\n -------\n snapshot_id : str\n The snapshot id of the playlist."} {"query":"Processes a single track .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 0 ) : if isinstance ( arg_0 , six . string_types ) : arg_0 = io . FileStruct ( arg_0 ) arg_5 = arg_0 . est_file arg_6 = arg_0 . ref_file assert os . path . basename ( arg_5 ) [ : - 4 ] == os . path . basename ( arg_6 ) [ : - 4 ] , \"File names are different %s --- %s\" % ( os . path . basename ( arg_5 ) [ : - 4 ] , os . path . basename ( arg_6 ) [ : - 4 ] ) if not os . path . 
isfile ( arg_6 ) : raise NoReferencesError ( \"Reference file %s does not exist. You must \" \"have annotated references to run \" \"evaluations.\" % arg_6 ) arg_7 = compute_gt_results ( arg_5 , arg_6 , arg_1 , arg_2 , arg_3 , arg_4 = arg_4 ) return arg_7","id_":252560,"task_name":"https:\/\/github.com\/urinieto\/msaf\/blob\/9dbb57d77a1310465a65cc40f1641d083ca74385\/msaf\/eval.py#L236-L278","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."} {"query":"Execute a device . Used if the time between executions is greater than DEFAULT_DELAY","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . src . lower ( ) arg_1 = arg_0 . devices [ arg_2 ] threading . Thread ( target = arg_1 . Func , kwargs = { 'root_allowed' : arg_0 . root_allowed } ) . start ( )","id_":252561,"task_name":"https:\/\/github.com\/Nekmo\/amazon-dash\/blob\/0e2bdc24ff8ea32cecb2f5f54f5cc1c0f99c197b\/amazon_dash\/listener.py#L140-L150","negative":"Delete existing messages.\n\n http:\/\/dev.wheniwork.com\/#delete-existing-message"} {"query":"When a position sentence is received it will be passed to the callback function","positive":"def Func ( arg_0 , arg_1 , arg_2 = True , arg_3 = False , arg_4 = False ) : if not arg_0 . _connected : raise ConnectionError ( \"not connected to a server\" ) arg_5 = b'' while True : try : for arg_5 in arg_0 . _socket_readlines ( arg_2 ) : if arg_5 [ 0 : 1 ] != b'#' : if arg_4 : arg_1 ( arg_5 ) else : arg_1 ( arg_0 . _parse ( arg_5 ) ) else : arg_0 . logger . debug ( \"Server: %s\" , arg_5 . decode ( 'utf8' ) ) except ParseError as exp : arg_0 . logger . log ( 11 , \"%s\\n Packet: %s\" , exp . message , exp . packet ) except UnknownFormat as exp : arg_0 . logger . log ( 9 , \"%s\\n Packet: %s\" , exp . message , exp . packet ) except LoginError as exp : arg_0 . logger . error ( \"%s: %s\" , exp . __class__ . __name__ , exp . message ) except ( KeyboardInterrupt , SystemExit ) : raise except ( ConnectionDrop , ConnectionError ) : arg_0 . close ( ) if not arg_3 : raise else : arg_0 . connect ( arg_2 = arg_2 ) continue except GenericError : pass except StopIteration : break except : arg_0 . logger . error ( \"APRS Packet: %s\" , arg_5 ) raise if not arg_2 : break","id_":252562,"task_name":"https:\/\/github.com\/rossengeorgiev\/aprs-python\/blob\/94b89a6da47a322129484efcaf1e82f6a9932891\/aprslib\/inet.py#L160-L213","negative":"Unregister an extension code. For testing only."} {"query":"Convert our options into the actual circle object","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . marker = Circle ( __id__ = arg_1 ) arg_0 . parent ( ) . markers [ arg_1 ] = arg_0 arg_0 . marker . setTag ( arg_1 ) arg_5 = arg_0 . declaration if arg_5 . clickable : arg_0 . set_clickable ( arg_5 . 
clickable ) del arg_0 . options","id_":252563,"task_name":"https:\/\/github.com\/codelv\/enaml-native-maps\/blob\/5b6dda745cede05755dd40d29775cc0544226c29\/src\/googlemaps\/android\/android_map_view.py#L1151-L1164","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Prompts a user for input . This is a convenience function that can be used to prompt a user for input later .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = False , arg_4 = None , arg_5 = None , arg_6 = ': ' , arg_7 = True , arg_8 = False ) : arg_9 = None def prompt_func ( arg_0 ) : arg_10 = arg_2 and hidden_prompt_func or visible_prompt_func try : echo ( arg_0 , nl = False , arg_8 = arg_8 ) return arg_10 ( '' ) except ( KeyboardInterrupt , EOFError ) : if arg_2 : echo ( None , arg_8 = arg_8 ) raise Abort ( ) if arg_5 is None : arg_5 = convert_type ( arg_4 , arg_1 ) Func = _build_prompt ( arg_0 , arg_6 , arg_7 , arg_1 ) while 1 : while 1 : arg_12 = prompt_func ( Func ) if arg_12 : break elif arg_1 is not None : return arg_1 try : arg_9 = arg_5 ( arg_12 ) except UsageError as e : echo ( 'Error: %s' % e . message , arg_8 = arg_8 ) continue if not arg_3 : return arg_9 while 1 : arg_13 = prompt_func ( 'Repeat for confirmation: ' ) if arg_13 : break if arg_12 == arg_13 : return arg_9 echo ( 'Error: the two entered values do not match' , arg_8 = arg_8 )","id_":252564,"task_name":"https:\/\/github.com\/cpenv\/cpenv\/blob\/afbb569ae04002743db041d3629a5be8c290bd89\/cpenv\/packages\/click\/termui.py#L34-L110","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."} {"query":"Asserts that val is a dict and does not contain the given value or values .","positive":"def Func ( arg_0 , * arg_1 ) : arg_0 . _check_dict_like ( arg_0 . val , check_getitem = False ) if len ( arg_1 ) == 0 : raise ValueError ( 'one or more value args must be given' ) else : arg_2 = [ ] for arg_3 in arg_1 : if arg_3 in arg_0 . val . values ( ) : arg_2 . append ( arg_3 ) if arg_2 : arg_0 . _err ( 'Expected <%s> to not contain values %s, but did contain %s.' % ( arg_0 . val , arg_0 . _fmt_items ( arg_1 ) , arg_0 . _fmt_items ( arg_2 ) ) ) return arg_0","id_":252565,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L801-L813","negative":"Get information about the users current playback.\n\n Returns\n -------\n player : Player\n A player object representing the current playback."} {"query":"Build a set of color attributes in a class .","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 in color_templates : setattr ( arg_0 , arg_1 , arg_0 . _base % arg_2 )","id_":252566,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/coloransi.py#L48-L54","negative":"This function will create the VM directory where a repo will be mounted, if it\n doesn't exist. If wait_for_server is set, it will wait up to 10 seconds for\n the nfs server to start, by retrying mounts that fail with 'Connection Refused'.\n\n If wait_for_server is not set, it will attempt to run the mount command once"} {"query":"Initialize bluez DBus communication . Must be called before any other calls are made!","positive":"def Func ( arg_0 ) : GObject . threads_init ( ) dbus . mainloop . glib . threads_init ( ) arg_0 . _mainloop = dbus . mainloop . glib . 
DBusGMainLoop ( set_as_default = True ) arg_0 . _bus = dbus . SystemBus ( ) arg_0 . _bluez = dbus . Interface ( arg_0 . _bus . get_object ( 'org.bluez' , '\/' ) , 'org.freedesktop.DBus.ObjectManager' )","id_":252567,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_BluefruitLE\/blob\/34fc6f596371b961628369d78ce836950514062f\/Adafruit_BluefruitLE\/bluez_dbus\/provider.py#L58-L72","negative":"Maintains the context of the runtime settings for invoking\n a command.\n\n This should be called by a click.option callback, and only\n called once for each setting for each command invocation.\n\n If the setting exists, it follows that the runtime settings are\n stale, so the entire runtime settings are reset."} {"query":"Parses a string of numbers and ranges into a list of integers . Ranges are separated by dashes and inclusive of both the start and end number .","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 in arg_0 . split ( \",\" ) : for arg_3 in arg_2 . split ( \" \" ) : if len ( arg_3 ) == 0 : continue if \"-\" in arg_3 : arg_4 , arg_5 = arg_3 . split ( \"-\" ) arg_6 = int ( arg_4 . strip ( ) ) arg_7 = int ( arg_5 . strip ( ) ) arg_1 . extend ( range ( arg_6 , arg_7 + 1 ) ) else : arg_1 . append ( int ( arg_3 . strip ( ) ) ) return arg_1","id_":252568,"task_name":"https:\/\/github.com\/openvax\/mhctools\/blob\/b329b4dccd60fae41296816b8cbfe15d6ca07e67\/mhctools\/cli\/parsing_helpers.py#L17-L37","negative":"Apply updates to the next tuple metrics"} {"query":"Cleans the working directory of unwanted temporary files","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ f for f in os . listdir ( \".\" ) if f . endswith ( \"_U.fastq.gz\" ) ] for arg_3 in arg_2 : os . remove ( arg_3 ) arg_4 = [ f for f in os . listdir ( \".\" ) if f . endswith ( \"_trim.fastq.gz\" ) ] if arg_1 == \"true\" and len ( arg_4 ) == 2 : for arg_5 in arg_0 : arg_6 = os . path . realpath ( arg_5 ) logger . debug ( \"Removing temporary fastq file path: {}\" . format ( arg_6 ) ) if re . match ( \".*\/work\/.{2}\/.{30}\/.*\" , arg_6 ) : os . remove ( arg_6 )","id_":252569,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/templates\/trimmomatic.py#L242-L262","negative":"Converts a unit-length quaternion to a sequence\n of ZYZ Euler angles.\n\n Returns:\n ndarray: Array of Euler angles."} {"query":"Send a command to the server","positive":"def Func ( arg_0 , arg_1 , arg_2 = 5 ) : logger . info ( u'Sending %s' % arg_1 ) arg_3 , arg_4 , arg_5 = select . select ( [ ] , [ arg_0 . sock ] , [ ] , arg_2 ) if not arg_4 : raise SendTimeoutError ( ) arg_4 [ 0 ] . Funcall ( arg_1 + '\\n' )","id_":252570,"task_name":"https:\/\/github.com\/Diaoul\/pyjulius\/blob\/48f2752ff4e0f3bd7b578754b1c583cabdc24b09\/pyjulius\/core.py#L144-L154","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. 
plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Converts a voxel list to an ndarray .","positive":"def Func ( arg_0 ) : arg_1 = len ( arg_0 [ 0 ] ) for arg_2 in range ( len ( arg_1 ) ) : size . append ( max ( [ arg_3 [ arg_2 ] for arg_3 in arg_0 ] ) ) arg_4 = numpy . zeros ( arg_1 ) for arg_5 in arg_0 : arg_4 [ arg_5 ] = 1 return arg_4","id_":252571,"task_name":"https:\/\/github.com\/neurodata\/ndio\/blob\/792dd5816bc770b05a3db2f4327da42ff6253531\/ndio\/convert\/volume.py#L20-L41","negative":"Return a set of all nodes that have both an in-degree > 0 and out-degree > 0.\n\n This means that they are an integral part of a pathway, since they are both produced and consumed."} {"query":"Converts a hms string into seconds .","positive":"def Func ( arg_0 ) : if arg_0 == \"-\" : return 0 if arg_0 . endswith ( \"ms\" ) : return float ( arg_0 . rstrip ( \"ms\" ) ) \/ 1000 arg_1 = list ( map ( float , re . split ( \"[dhms]\" , arg_0 ) [ : - 1 ] ) ) if len ( arg_1 ) == 4 : return arg_1 [ 0 ] * 24 * 3600 + arg_1 [ 1 ] * 3600 + arg_1 [ 2 ] * 60 + arg_1 [ 3 ] if len ( arg_1 ) == 3 : return arg_1 [ 0 ] * 3600 + arg_1 [ 1 ] * 60 + arg_1 [ 2 ] elif len ( arg_1 ) == 2 : return arg_1 [ 0 ] * 60 + arg_1 [ 1 ] else : return arg_1 [ 0 ]","id_":252572,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/inspect.py#L327-L357","negative":"Computes the log-normalizer of the distribution."} {"query":"generate a generic flat file html for an ABF parent . You could give this a single ABF ID its parent ID or a list of ABF IDs . If a child ABF is given the parent will automatically be used .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , arg_3 = False ) : if type ( arg_1 ) is str : arg_1 = [ arg_1 ] for arg_4 in cm . abfSort ( arg_1 ) : arg_5 = cm . parent ( arg_0 . groups , arg_4 ) arg_6 = os . path . abspath ( \"%s\/%s_basic.html\" % ( arg_0 . folder2 , arg_5 ) ) if arg_3 is False and os . path . basename ( arg_6 ) in arg_0 . files2 : continue arg_7 = cm . filesByType ( arg_0 . groupFiles [ arg_5 ] ) arg_8 = \"\" arg_8 += '
<div>' arg_8 += '<span>summary of data from: %s<\/span><\/br>' % arg_5 arg_8 += '<code>%s<\/code>' % os . path . abspath ( arg_0 . folder1 + \"\/\" + arg_5 + \".abf\" ) arg_8 += '<\/div>' arg_9 = [ \"experiment\" , \"plot\" , \"tif\" , \"other\" ] arg_10 = cm . list_order_by ( arg_7 . keys ( ) , arg_9 ) for arg_11 in [ x for x in arg_10 if len ( arg_7 [ x ] ) ] : if arg_11 == 'experiment' : arg_8 += \"<br><h3>Experimental Data:<\/h3>\" elif arg_11 == 'plot' : arg_8 += \"<br><h3>Intrinsic Properties:<\/h3>\" elif arg_11 == 'tif' : arg_8 += \"<br><h3>Micrographs:<\/h3>\" elif arg_11 == 'other' : arg_8 += \"<br><h3>Additional Files:<\/h3>\" else : arg_8 += \"<br><h3>????:<\/h3>\" for arg_12 in arg_7 [ arg_11 ] : arg_8 += arg_0 . htmlFor ( arg_12 ) arg_8 += '<br>
' * 3 print ( \"creating\" , arg_6 , '...' ) style . save ( arg_8 , arg_6 , arg_2 = arg_2 )","id_":252573,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/swhlab\/indexing\/indexing.py#L178-L216","negative":"Apply updates to the next tuple metrics"} {"query":"Helper function to append functions into a given list .","positive":"def Func ( arg_0 , arg_1 ) : [ arg_0 . append ( arg_2 ) for arg_2 in arg_1 if isfunction ( arg_2 ) or ismethod ( arg_2 ) ]","id_":252574,"task_name":"https:\/\/github.com\/h2non\/pook\/blob\/e64094e41e4d89d98d2d29af7608ef27dc50cf19\/pook\/mock.py#L16-L25","negative":"Parse an ID3v1 tag, returning a list of ID3v2.4 frames."} {"query":"Return a term resource for the passed SIS ID .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . get_all_terms ( ) : if arg_2 . sis_term_id == arg_1 : return arg_2","id_":252575,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/terms.py#L25-L31","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Update number of trials for missing values","positive":"def Func ( ** arg_0 ) : arg_1 = os . environ . get ( BBG_ROOT , '' ) . replace ( '\\\\' , '\/' ) if not arg_1 : return if len ( arg_0 ) == 0 : return arg_2 = f'{data_path}\/Logs\/{missing_info(**kwargs)}' arg_3 = len ( files . all_files ( arg_2 ) ) + 1 files . create_folder ( arg_2 ) open ( f'{log_path}\/{cnt}.log' , 'a' ) . close ( )","id_":252576,"task_name":"https:\/\/github.com\/alpha-xone\/xbbg\/blob\/70226eb19a72a08144b5d8cea9db4913200f7bc5\/xbbg\/core\/missing.py#L30-L42","negative":"Respond when the server indicates that the client is out of sync.\n\n The server can request a sync when this client sends a message that \n fails the check() on the server. If the reason for the failure isn't \n very serious, then the server can decide to send it as usual in the \n interest of a smooth gameplay experience. When this happens, the \n server sends out an extra response providing the clients with the\n information they need to resync themselves."} {"query":"Fetches a field from extras and returns it . This is some Airflow magic . The grpc hook type adds custom UI elements to the hook page which allow admins to specify scopes credential pem files etc . They get formatted as shown below .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = 'extra__grpc__{}' . format ( arg_1 ) if arg_3 in arg_0 . extras : return arg_0 . extras [ arg_3 ] else : return arg_2","id_":252577,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/grpc_hook.py#L112-L123","negative":"The estimated signal-to-noise_maps mappers of the image."} {"query":"This is a parallel driver for periodicvar_recovery .","positive":"def Func ( arg_0 , arg_1 = 1.0e-3 , arg_2 = None , arg_3 = None , arg_4 = None ) : arg_5 = os . path . join ( arg_0 , 'periodfinding' ) if not os . path . exists ( arg_5 ) : LOGERROR ( 'no \"periodfinding\" subdirectory in %s, can\\'t continue' % arg_0 ) return None arg_6 = glob . glob ( os . path . 
join ( arg_5 , '*periodfinding*pkl*' ) ) if len ( arg_6 ) > 0 : if arg_2 : arg_6 = arg_6 [ arg_2 : ] if arg_3 : arg_6 = arg_6 [ : arg_3 ] arg_7 = [ ( x , arg_0 , arg_1 ) for x in arg_6 ] arg_8 = mp . Pool ( arg_4 ) arg_9 = arg_8 . map ( periodrec_worker , arg_7 ) arg_8 . close ( ) arg_8 . join ( ) arg_10 = { x [ 'objectid' ] : x for x in arg_9 if x is not None } arg_11 = np . array ( [ x [ 'objectid' ] for x in arg_9 if ( x is not None and x [ 'actual_vartype' ] in PERIODIC_VARTYPES ) ] , dtype = np . unicode_ ) arg_12 = np . array ( [ x [ 'objectid' ] for x in arg_9 if ( x is not None and 'actual' in x [ 'best_recovered_status' ] ) ] , dtype = np . unicode_ ) arg_13 = np . array ( [ x [ 'objectid' ] for x in arg_9 if ( x is not None and 'twice' in x [ 'best_recovered_status' ] ) ] , dtype = np . unicode_ ) arg_14 = np . array ( [ x [ 'objectid' ] for x in arg_9 if ( x is not None and 'half' in x [ 'best_recovered_status' ] ) ] , dtype = np . unicode_ ) arg_15 = [ x [ 'objectid' ] for x in arg_9 ] arg_16 = { 'simbasedir' : os . path . abspath ( arg_0 ) , 'objectids' : arg_15 , 'period_tolerance' : arg_1 , 'actual_periodicvars' : arg_11 , 'recovered_periodicvars' : arg_12 , 'alias_twice_periodicvars' : arg_13 , 'alias_half_periodicvars' : arg_14 , 'details' : arg_10 } arg_17 = os . path . join ( arg_0 , 'periodicvar-recovery.pkl' ) with open ( arg_17 , 'wb' ) as outfd : pickle . dump ( arg_16 , outfd , pickle . HIGHEST_PROTOCOL ) return arg_16 else : LOGERROR ( 'no periodfinding result pickles found in %s, can\\'t continue' % arg_5 ) return None","id_":252578,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/fakelcs\/recovery.py#L1950-L2064","negative":"Checks if a key exists in a bucket\n\n :param key: S3 key that will point to the file\n :type key: str\n :param bucket_name: Name of the bucket in which the file is stored\n :type bucket_name: str"} {"query":"Design an FIR highpass filter using remez with order determination . The filter order is determined based on f_pass Hz fstop Hz and the desired passband ripple d_pass dB and stopband attenuation d_stop dB all relative to a sampling rate of fs Hz . Mark Wickert October 2016 updated October 2018","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 1.0 , arg_5 = 5 ) : arg_6 = arg_4 \/ 2. - arg_1 arg_7 = arg_4 \/ 2. - arg_0 arg_8 , arg_9 , arg_10 , arg_11 = lowpass_order ( arg_6 , arg_7 , arg_2 , arg_3 , fsamp = arg_4 ) arg_12 = arg_8 arg_12 += arg_5 arg_13 = signal . remez ( arg_12 , arg_9 , arg_10 [ 0 : : 2 ] , arg_11 , Hz = 2 ) arg_8 = np . arange ( len ( arg_13 ) ) arg_13 *= ( - 1 ) ** arg_8 print ( 'Remez filter taps = %d.' % arg_12 ) return arg_13","id_":252579,"task_name":"https:\/\/github.com\/mwickert\/scikit-dsp-comm\/blob\/5c1353412a4d81a8d7da169057564ecf940f8b5b\/sk_dsp_comm\/fir_design_helper.py#L327-L350","negative":"Provide the run IDs of failed jobs\n\n\n Returns\n -------\n None"} {"query":"Push a PLY lexer on the stack to parse filename .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . lexer . qasm_file = arg_0 . filename arg_0 . lexer . qasm_line = arg_0 . lineno arg_0 . stack . append ( arg_0 . lexer ) arg_0 . 
__mklexer__ ( arg_1 )","id_":252580,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/qasm\/qasmlexer.py#L68-L73","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Retrieve a list of scaling ips .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = [ 'id' ] , arg_4 = None , arg_5 = None , arg_6 = False ) : LOG . info ( 'Func for tenant %s filters %s fields %s' % ( arg_0 . tenant_id , arg_1 , arg_2 ) ) arg_7 = _get_ips_by_type ( arg_0 , ip_types . SCALING , arg_1 = arg_1 , arg_2 = arg_2 ) return [ v . _make_scaling_ip_dict ( arg_8 ) for arg_8 in arg_7 ]","id_":252581,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/plugin_modules\/floating_ips.py#L610-L635","negative":"Compares and exchanges.\n\n Compares the value in the AL, AX, EAX or RAX register (depending on the\n size of the operand) with the first operand (destination operand). If\n the two values are equal, the second operand (source operand) is loaded\n into the destination operand. Otherwise, the destination operand is\n loaded into the AL, AX, EAX or RAX register.\n\n The ZF flag is set if the values in the destination operand and\n register AL, AX, or EAX are equal; otherwise it is cleared. The CF, PF,\n AF, SF, and OF flags are set according to the results of the comparison\n operation::\n\n (* accumulator = AL, AX, EAX or RAX, depending on whether *)\n (* a byte, word, a doubleword or a 64bit comparison is being performed*)\n IF accumulator == DEST\n THEN\n ZF = 1\n DEST = SRC\n ELSE\n ZF = 0\n accumulator = DEST\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Init openstack glance mq","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . enable_component_notification ( Openstack . Glance ) : log . debug ( \"disable listening glance notification\" ) return for arg_2 in range ( arg_0 . config . glance_mq_consumer_count ) : arg_1 . create_consumer ( arg_0 . config . glance_mq_exchange , arg_0 . config . glance_mq_queue , ProcessFactory . process ( Openstack . Glance ) ) log . debug ( \"enable listening openstack glance notification.\" )","id_":252582,"task_name":"https:\/\/github.com\/ndrlslz\/ternya\/blob\/c05aec10029e645d63ff04313dbcf2644743481f\/ternya\/ternya.py#L165-L183","negative":"Render the sourcecode."} {"query":"Builds the file bundle .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = 'config.yaml' , arg_4 = None , ) : arg_5 = os . path . join ( arg_0 , arg_3 ) arg_6 = read_cfg ( arg_5 , arg_4 ) arg_7 = arg_6 . get ( 'dist_directory' , 'dist' ) arg_8 = os . path . join ( arg_0 , arg_7 ) mkdir ( arg_8 ) arg_9 = arg_6 . get ( 'function_name' ) arg_10 = '{0}-{1}.zip' . format ( timestamp ( ) , arg_9 ) arg_11 = mkdtemp ( prefix = 'aws-lambda' ) pip_install_to_target ( arg_11 , arg_1 = arg_1 , arg_2 = arg_2 , ) if 'zope' in os . listdir ( arg_11 ) : print ( 'Zope packages detected; fixing Zope package paths to ' 'make them importable.' , ) with open ( os . path . join ( arg_11 , 'zope\/__init__.py' ) , 'wb' ) : pass arg_10 = ( '{0}.zip' . format ( arg_10 ) if not arg_10 . endswith ( '.zip' ) else arg_10 ) arg_12 = defaultdict ( ** arg_6 . get ( 'Func' , { } ) ) arg_13 = arg_12 . get ( 'source_directories' , '' ) arg_13 = ( arg_13 if arg_13 is not None else '' ) arg_14 = [ d . 
strip ( ) for d in arg_13 . split ( ',' ) ] arg_15 = [ ] for arg_16 in os . listdir ( arg_0 ) : if os . path . isfile ( arg_16 ) : if arg_16 == '.DS_Store' : continue if arg_16 == arg_3 : continue print ( 'Bundling: %r' % arg_16 ) arg_15 . append ( os . path . join ( arg_0 , arg_16 ) ) elif os . path . isdir ( arg_16 ) and arg_16 in arg_14 : print ( 'Bundling directory: %r' % arg_16 ) arg_15 . append ( os . path . join ( arg_0 , arg_16 ) ) os . chdir ( arg_11 ) for arg_17 in arg_15 : if os . path . isfile ( arg_17 ) : arg_18 , arg_16 = os . path . split ( arg_17 ) copyfile ( arg_17 , os . path . join ( arg_11 , arg_16 ) ) copystat ( arg_17 , os . path . join ( arg_11 , arg_16 ) ) elif os . path . isdir ( arg_17 ) : arg_19 = os . path . join ( arg_11 , arg_17 [ len ( arg_0 ) + 1 : ] ) copytree ( arg_17 , arg_19 ) arg_20 = archive ( '.\/' , arg_8 , arg_10 ) return arg_20","id_":252583,"task_name":"https:\/\/github.com\/nficano\/python-lambda\/blob\/b0bd25404df70212d7fa057758760366406d64f2\/aws_lambda\/aws_lambda.py#L272-L366","negative":"Initialize all ephemerals used by derived classes."} {"query":"Lists all of the service certificates associated with the specified hosted service .","positive":"def Func ( arg_0 , arg_1 ) : _validate_not_none ( 'service_name' , arg_1 ) return arg_0 . _perform_get ( '\/' + arg_0 . subscription_id + '\/services\/hostedservices\/' + _str ( arg_1 ) + '\/certificates' , Certificates )","id_":252584,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/servicemanagementservice.py#L890-L902","negative":"Handle marking messages as read and keeping client active."} {"query":"Sample a colormap from matplotlib","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = cm . cmap_d [ arg_0 ] for arg_4 in np . linspace ( 0 , 1 , arg_1 ) : arg_2 . append ( arg_3 ( arg_4 ) ) return arg_2","id_":252585,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/utils.py#L533-L542","negative":"This endpoint is used to set the health of an allocation that is in the deployment manually. In some use\n cases, automatic detection of allocation health may not be desired. As such those task groups can be marked\n with an upgrade policy that uses health_check = \"manual\". Those allocations must have their health marked\n manually using this endpoint. Marking an allocation as healthy will allow the rolling upgrade to proceed.\n Marking it as failed will cause the deployment to fail.\n\n https:\/\/www.nomadproject.io\/docs\/http\/deployments.html\n\n arguments:\n - id\n - healthy_allocations, Specifies the set of allocation that should be marked as healthy.\n - unhealthy_allocations, Specifies the set of allocation that should be marked as unhealthy.\n returns: dict\n raises:\n - nomad.api.exceptions.BaseNomadException\n - nomad.api.exceptions.URLNotFoundNomadException"} {"query":"Updates a NIC with the parameters provided .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , ** arg_4 ) : arg_5 = { } for arg_6 , arg_7 in arg_4 . items ( ) : arg_5 [ arg_0 . _underscore_to_camelcase ( arg_6 ) ] = arg_7 arg_9 = arg_0 . _perform_request ( url = '\/datacenters\/%s\/servers\/%s\/nics\/%s' % ( arg_1 , arg_2 , arg_3 ) , method = 'PATCH' , arg_5 = json . 
dumps ( arg_5 ) ) return arg_9","id_":252586,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1181-L1209","negative":"Clears out the current store and gets a cookie. Set the cross site\n request forgery token for each subsequent request.\n\n :return: A response having cleared the current store.\n :rtype: requests.Response"} {"query":"Return a disease term","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { } try : arg_1 = int ( arg_1 ) arg_2 [ 'disease_nr' ] = arg_1 except ValueError : arg_2 [ '_id' ] = arg_1 return arg_0 . Func_collection . find_one ( arg_2 )","id_":252587,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/hpo.py#L106-L124","negative":"Handle Error messages and log them accordingly.\n\n :param data:\n :param ts:"} {"query":"Insert raw SVG data into the widet .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_3 = svg_to_image ( arg_2 ) except ValueError : arg_0 . _insert_plain_text ( arg_1 , 'Received invalid SVG data.' ) else : arg_4 = arg_0 . _add_image ( arg_3 ) arg_0 . _name_to_svg_map [ arg_4 . name ( ) ] = arg_2 arg_1 . insertBlock ( ) arg_1 . insertImage ( arg_4 ) arg_1 . insertBlock ( )","id_":252588,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/rich_ipython_widget.py#L301-L313","negative":"return the user input's value from a 'compiled' value"} {"query":"Gather information from common gene information .","positive":"def Func ( arg_0 ) : arg_1 = set ( ) for arg_2 in arg_0 . get ( 'genes' , [ ] ) : arg_1 . update ( arg_2 . get ( 'manual_inheritance' , [ ] ) ) return list ( arg_1 )","id_":252589,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/variants\/controllers.py#L856-L861","negative":"Updates the target temperature on the NuHeat API\n\n :param temperature: The desired temperature in NuHeat format\n :param permanent: Permanently hold the temperature. If set to False, the schedule will\n resume at the next programmed event"} {"query":"Return an iterator on interfaces implemented by the given class node .","positive":"def Func ( arg_0 , arg_1 = True , arg_2 = arg_3 ) : try : arg_4 = bases . Instance ( arg_0 ) . getattr ( \"__implements__\" ) [ 0 ] except exceptions . NotFoundError : return if not arg_1 and arg_4 . frame ( ) is not arg_0 : return arg_5 = set ( ) arg_6 = False for arg_7 in node_classes . unpack_infer ( arg_4 ) : if arg_7 is astroid . Uninferable : arg_6 = True continue if arg_7 not in arg_5 and arg_2 ( arg_7 ) : arg_5 . add ( arg_7 ) yield arg_7 if arg_6 : raise exceptions . InferenceError ( )","id_":252590,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/pyreverse\/inspector.py#L41-L60","negative":"Modify an existing lock's timeout.\n\n token:\n Valid lock token.\n timeout:\n Suggested lifetime in seconds (-1 for infinite).\n The real expiration time may be shorter than requested!\n Returns:\n Lock dictionary.\n Raises ValueError, if token is invalid."} {"query":"ASCII adjust AX before division .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_1 = 10 else : arg_1 = arg_1 . read ( ) arg_0 . AL += arg_0 . AH * arg_1 arg_0 . AH = 0 arg_0 . 
_calculate_logic_flags ( 8 , arg_0 . AL )","id_":252591,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L1062-L1097","negative":"Shift the model result and return the new instance.\n\n Queues up the T(i+1) prediction value and emits a T(i)\n input\/prediction pair, if possible. E.g., if the previous T(i-1)\n iteration was learn-only, then we would not have a T(i) prediction in our\n FIFO and would not be able to emit a meaningful input\/prediction pair.\n\n :param modelResult: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult`\n instance to shift.\n :return: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult` instance that\n has been shifted"} {"query":"Render to cookie strings .","positive":"def Func ( arg_0 ) : arg_1 = '' for arg_2 , arg_3 in arg_0 . items ( ) : arg_1 += '{}={};' . format ( arg_2 , arg_3 ) return arg_1","id_":252592,"task_name":"https:\/\/github.com\/tokibito\/funkload-friendly\/blob\/a60e8d2b76ba5ad6c16f8cfc347bd200bd45c189\/src\/funkload_friendly\/cookie.py#L29-L35","negative":"Parses a string into a Tag"} {"query":"Returns a zero - length range located just before the beginning of this range .","positive":"def Func ( arg_0 ) : return Range ( arg_0 . source_buffer , arg_0 . Func_pos , arg_0 . Func_pos , expanded_from = arg_0 . expanded_from )","id_":252593,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/pythonparser\/source.py#L126-L131","negative":"Remove rows with NAs from the H2OFrame.\n\n :returns: new H2OFrame with all rows from the original frame containing any NAs removed."} {"query":"Returns a list of two actions per gcs bucket to mount .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] for arg_4 in arg_1 : arg_5 = arg_4 . value [ len ( 'gs:\/\/' ) : ] arg_6 = arg_4 . docker_path arg_3 . extend ( [ google_v2_pipelines . build_action ( name = 'mount-{}' . format ( arg_5 ) , flags = [ 'ENABLE_FUSE' , 'RUN_IN_BACKGROUND' ] , image_uri = _GCSFUSE_IMAGE , arg_1 = [ arg_2 ] , commands = [ '--implicit-dirs' , '--foreground' , '-o ro' , arg_5 , os . path . join ( providers_util . DATA_MOUNT_POINT , arg_6 ) ] ) , google_v2_pipelines . build_action ( name = 'mount-wait-{}' . format ( arg_5 ) , flags = [ 'ENABLE_FUSE' ] , image_uri = _GCSFUSE_IMAGE , arg_1 = [ arg_2 ] , commands = [ 'wait' , os . path . join ( providers_util . DATA_MOUNT_POINT , arg_6 ) ] ) ] ) return arg_3","id_":252594,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/providers\/google_v2.py#L585-L611","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Convert an OpenSSL library failure into a Python exception .","positive":"def Func ( arg_0 ) : arg_1 = [ ] while True : arg_2 = lib . ERR_get_error ( ) if arg_2 == 0 : break arg_1 . append ( ( text ( lib . ERR_lib_error_string ( arg_2 ) ) , text ( lib . ERR_func_error_string ( arg_2 ) ) , text ( lib . 
ERR_reason_error_string ( arg_2 ) ) ) ) raise arg_0 ( arg_1 )","id_":252595,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/_util.py#L34-L54","negative":"Return the next aggregated record, if any\n\n Parameters:\n ------------------------------------------------------------------------\n record: The input record (values only) from the input source, or\n None if the input has reached EOF (this will cause this\n method to force completion of and return any partially\n aggregated time period)\n curInputBookmark: The bookmark to the next input record\n retval:\n (outputRecord, inputBookmark)\n\n outputRecord: the aggregated record\n inputBookmark: a bookmark to the last position from the input that\n contributed to this aggregated record.\n\n If we don't have any aggregated records yet, returns (None, None)\n\n\n The caller should generally do a loop like this:\n while True:\n inRecord = reader.getNextRecord()\n bookmark = reader.getBookmark()\n\n (aggRecord, aggBookmark) = aggregator.next(inRecord, bookmark)\n\n # reached EOF?\n if inRecord is None and aggRecord is None:\n break\n\n if aggRecord is not None:\n proessRecord(aggRecord, aggBookmark)\n\n\n This method makes use of the self._slice member variable to build up\n the values we need to aggregate. This is a dict of lists. The keys are\n the field indices and the elements of each list are the values for that\n field. For example:\n\n self._siice = { 0: [42, 53], 1: [4.0, 5.1] }"} {"query":"Update a single parameter or group of parameters params with values .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return super ( State , arg_0 ) . Func ( arg_1 , arg_2 )","id_":252596,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/states.py#L204-L217","negative":"Return a AzureDLFileSystem object."} {"query":"Returns true if the given parameter with name key has transitioned to the given value .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : arg_4 = getattr ( arg_0 . last_manifest , arg_1 ) arg_5 = arg_0 . current_manifest . get ( arg_1 ) if arg_3 is not None : return arg_4 == arg_3 and arg_5 == arg_2 return arg_4 != arg_2 and arg_5 == arg_2","id_":252597,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/common.py#L1109-L1117","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"a function for the client to announce him or herself depending on the level specified . If you want your client to have additional announced things here then implement the class _speak for your client .","positive":"def Func ( arg_0 ) : if arg_0 . quiet is False : bot . info ( '[client|%s] [database|%s]' % ( arg_0 . client_name , arg_0 . database ) ) arg_0 . _Func ( )","id_":252598,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/base\/__init__.py#L85-L97","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. 
The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Cancel or un - schedule a task .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . registry . remove ( arg_1 ) arg_0 . _scheduler . cancel_job_task ( arg_1 ) logger . info ( \"Task %s canceled\" , arg_1 )","id_":252599,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-kingarthur\/blob\/9d6a638bee68d5e5c511f045eeebf06340fd3252\/arthur\/scheduler.py#L314-L325","negative":"Returns the data for a certain media item.\n\n :param media_id: id that identifies the media item to be accessed.\n :param fields: list of the media\"s field to be returned. By default the\n API returns some fields, but others are not returned unless they are\n explicity asked for. I have no real documentation on the fields, but\n they all seem to start with the \"media.\" prefix (e.g. media.name,\n media.stream_data).\n :param schema: validation schema to use"} {"query":"ASCII adjust after addition .","positive":"def Func ( arg_0 ) : arg_0 . AF = Operators . OR ( arg_0 . AL & 0x0F > 9 , arg_0 . AF ) arg_0 . CF = arg_0 . AF arg_0 . AH = Operators . ITEBV ( 8 , arg_0 . AF , arg_0 . AH + 1 , arg_0 . AH ) arg_0 . AL = Operators . ITEBV ( 8 , arg_0 . AF , arg_0 . AL + 6 , arg_0 . AL ) arg_0 . AL = arg_0 . AL & 0x0f","id_":252600,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L1013-L1059","negative":"Generate a new random masterkey, encrypt it with the password and\n store it in the store.\n\n :param str password: Password to use for en-\/de-cryption"} {"query":"Write the cobra model to a file in YAML format .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , ** arg_3 ) : arg_4 = model_to_dict ( arg_0 , arg_2 = arg_2 ) arg_4 [ \"version\" ] = YAML_SPEC if isinstance ( arg_1 , string_types ) : with io . open ( arg_1 , \"w\" ) as file_handle : yaml . dump ( arg_4 , file_handle , ** arg_3 ) else : yaml . dump ( arg_4 , arg_1 , ** arg_3 )","id_":252601,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/io\/yaml.py#L83-L111","negative":"Modify an existing lock's timeout.\n\n token:\n Valid lock token.\n timeout:\n Suggested lifetime in seconds (-1 for infinite).\n The real expiration time may be shorter than requested!\n Returns:\n Lock dictionary.\n Raises ValueError, if token is invalid."} {"query":"Combine arguments and turn them into gromacs tool arguments .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = arg_0 . _combineargs ( * arg_1 , ** arg_2 ) return arg_0 . _build_arg_list ( ** arg_3 )","id_":252602,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/core.py#L621-L624","negative":"returns the classified labeling of record"} {"query":"Dequantize an array .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = arg_5 . float64 ) : if not ( isinstance ( arg_3 , int ) and arg_3 > 1 ) : raise ValueError ( 'levels must be a positive integer, but got {}' . format ( arg_3 ) ) if arg_1 >= arg_2 : raise ValueError ( 'min_val ({}) must be smaller than max_val ({})' . format ( arg_1 , arg_2 ) ) arg_7 = ( arg_0 + 0.5 ) . 
astype ( arg_4 ) * ( arg_2 - arg_1 ) \/ arg_3 + arg_1 return arg_7","id_":252603,"task_name":"https:\/\/github.com\/open-mmlab\/mmcv\/blob\/0d77f61450aab4dde8b8585a577cc496acb95d7f\/mmcv\/arraymisc\/quantization.py#L32-L56","negative":"Create Flask application class.\n\n Invenio-Files-REST needs to patch the Werkzeug form parsing in order to\n support streaming large file uploads. This is done by subclassing the Flask\n application class."} {"query":"Get the existing embedded document if it exists else created it .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = getattr ( arg_1 , arg_3 , False ) if not arg_5 : arg_5 = arg_1 . _fields [ arg_3 ] . document_type_obj ( ) arg_6 , arg_7 = trim_field_key ( arg_5 , arg_4 ) arg_0 . process_document ( arg_5 , arg_2 , make_key ( arg_6 , arg_7 ) ) setattr ( arg_1 , arg_3 , arg_5 )","id_":252604,"task_name":"https:\/\/github.com\/jazzband\/django-mongonaut\/blob\/5485b2e029dff8ae267a4cb39c92d0a72cb5b144\/mongonaut\/mixins.py#L208-L217","negative":"Called when socket is read-ready"} {"query":"Ensure we are authenticated .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 . user = arg_2 = None if arg_1 . method == 'OPTIONS' : return for arg_3 in arg_0 . meta . authentication : arg_2 = arg_3 . authenticate ( arg_1 ) if arg_2 is False : continue if arg_2 is None and not arg_3 . allow_anonymous : arg_3 . unauthenticated ( ) arg_1 . user = arg_2 return if not arg_2 and not arg_3 . allow_anonymous : arg_3 . unauthenticated ( )","id_":252605,"task_name":"https:\/\/github.com\/armet\/python-armet\/blob\/d61eca9082256cb1e7f7f3c7f2fbc4b697157de7\/armet\/resources\/resource\/base.py#L501-L529","negative":"Inherits the data from the parent."} {"query":"Return True if there are any more good sprints still being explored . A good sprint is one that is earlier than where we detected an increase in error from sprint to subsequent sprint .","positive":"def Func ( arg_0 ) : if arg_0 . _state [ 'lastGoodSprint' ] is not None : arg_1 = arg_0 . _state [ 'sprints' ] [ 0 : arg_0 . _state [ 'lastGoodSprint' ] + 1 ] else : arg_1 = arg_0 . _state [ 'sprints' ] for arg_2 in arg_1 : if arg_2 [ 'status' ] == 'active' : arg_3 = True break else : arg_3 = False return arg_3","id_":252606,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/hypersearch\/hs_state.py#L620-L637","negative":"Wait for the event, run the task, trigger the next task."} {"query":"Return a list of results for a given project ID .","positive":"def Func ( arg_0 , arg_1 = 100 , arg_2 = 0 , arg_3 = None ) : if arg_3 is not None : arg_4 = dict ( arg_1 = arg_1 , arg_3 = arg_3 ) else : arg_4 = dict ( arg_1 = arg_1 , arg_2 = arg_2 ) print ( OFFSET_WARNING ) arg_4 [ 'project_id' ] = arg_0 try : arg_5 = _pybossa_req ( 'get' , 'result' , arg_4 = arg_4 ) if type ( arg_5 ) . __name__ == 'list' : return [ Result ( arg_6 ) for arg_6 in arg_5 ] else : return arg_5 except : raise","id_":252607,"task_name":"https:\/\/github.com\/Scifabric\/pybossa-client\/blob\/998d7cb0207ff5030dc800f0c2577c5692316c2c\/pbclient\/__init__.py#L621-L649","negative":"Return an open file-object to the index file"} {"query":"Returns the GET array s contents for the specified variable .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : arg_3 = arg_0 . GET . 
getlist ( arg_1 ) if not arg_3 : if arg_2 : return [ ] else : raise Exception , _ ( \"No array called '%(varname)s' in GET variables\" ) % { 'varname' : arg_1 } return arg_3","id_":252608,"task_name":"https:\/\/github.com\/praekelt\/django-analytics\/blob\/29c22d03374ccc0ec451650e2c2886d324f6e5c6\/analytics\/geckoboard_views.py#L14-L26","negative":"Generate a tag for the alignment of the geometry of the bulge and disk of a bulge-disk system, to customize \\ \n phase names based on the bulge-disk model. This adds together the bulge_disk tags generated in the 3 functions\n above"} {"query":"Stops a timer if it hasn t fired yet","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 in arg_0 . _timer_callbacks : arg_2 = arg_0 . _timer_callbacks [ arg_1 ] arg_2 . cancel ( ) del arg_0 . _timer_callbacks [ arg_1 ]","id_":252609,"task_name":"https:\/\/github.com\/arcticfoxnv\/slackminion\/blob\/62ea77aba5ac5ba582793e578a379a76f7d26cdb\/slackminion\/plugin\/base.py#L78-L87","negative":"Translates the given metrics value to JSON string\n\n metrics: A list of dictionaries per OPFTaskDriver.getMetrics():\n\n Returns: JSON string representing the given metrics object."} {"query":"Create a new record from dump .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = arg_1 . latest arg_4 = Record . create ( arg_3 ) arg_4 . model . created = arg_1 . created . replace ( tzinfo = None ) arg_4 . model . updated = arg_2 . replace ( tzinfo = None ) RecordIdentifier . insert ( arg_1 . recid ) PersistentIdentifier . create ( pid_type = 'recid' , pid_value = str ( arg_1 . recid ) , object_type = 'rec' , object_uuid = str ( arg_4 . id ) , status = PIDStatus . REGISTERED ) db . session . commit ( ) return arg_0 . update_record ( revisions = arg_1 . rest , arg_4 = arg_4 , arg_6 = arg_1 . created )","id_":252610,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-migrator\/blob\/6902c6968a39b747d15e32363f43b7dffe2622c2\/invenio_migrator\/records.py#L97-L115","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"Run DMESG job","positive":"def Func ( arg_0 , arg_1 = True , arg_2 = True ) : if env ( ) : return 1 cij . emph ( \"cij.dmesg.start: shell: %r, cmd: %r\" % ( arg_1 , arg_0 . __prefix + arg_0 . __suffix ) ) return cij . ssh . command ( arg_0 . __prefix , arg_1 , arg_2 , arg_0 . __suffix )","id_":252611,"task_name":"https:\/\/github.com\/refenv\/cijoe\/blob\/21d7b2ed4ff68e0a1457e7df2db27f6334f1a379\/modules\/cij\/dmesg.py#L28-L36","negative":"Set renewal, rebinding times."} {"query":"Progress to the next identifier and return the current one .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _current arg_0 . _current = arg_0 . readfunc ( ) return arg_1","id_":252612,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/xtuml\/tools.py#L41-L47","negative":"Convert this exception to a dictionary.\n\n Returns:\n dist: A dictionary of information about this exception,\n Has a 'reason' key, a 'type' key and a dictionary of params"} {"query":"Start the web server .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_6 = dict ( MONGO_HOST = arg_0 . obj [ 'host' ] , MONGO_PORT = arg_0 . obj [ 'port' ] , MONGO_DBNAME = arg_0 . obj [ 'mongodb' ] , MONGO_USERNAME = arg_0 . obj [ 'username' ] , MONGO_PASSWORD = arg_0 . 
obj [ 'password' ] , ) arg_7 = check_connection ( arg_2 = arg_6 [ 'MONGO_HOST' ] , arg_3 = arg_6 [ 'MONGO_PORT' ] , username = arg_6 [ 'MONGO_USERNAME' ] , password = arg_6 [ 'MONGO_PASSWORD' ] , authdb = arg_0 . obj [ 'authdb' ] , ) log . info ( \"Test if mongod is running\" ) if not arg_7 : log . warning ( \"Connection could not be established\" ) log . info ( \"Is mongod running?\" ) arg_0 . abort ( ) arg_1 = os . path . abspath ( arg_1 ) if arg_1 else None arg_8 = create_app ( arg_1 = arg_6 , config_file = arg_1 ) if arg_5 : arg_9 = Server ( arg_8 . wsgi_app ) arg_9 . Func ( arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 ) else : arg_8 . run ( arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 )","id_":252613,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/commands\/serve.py#L22-L53","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"returns a list of absolute paths to the cleansed attachements","positive":"def Func ( arg_0 ) : if exists ( arg_0 . fs_cleansed_attachment_container ) : return [ join ( arg_0 . fs_cleansed_attachment_container , arg_1 ) for arg_1 in listdir ( arg_0 . fs_cleansed_attachment_container ) ] else : return [ ]","id_":252614,"task_name":"https:\/\/github.com\/ZeitOnline\/briefkasten\/blob\/ce6b6eeb89196014fe21d68614c20059d02daa11\/application\/briefkasten\/dropbox.py#L467-L473","negative":"Deserialize a dataframe.\n\n Parameters\n ----------\n reader : file\n File-like object to read from. Must be opened in binary mode.\n data_type_id : dict\n Serialization format of the raw data.\n See the azureml.DataTypeIds class for constants.\n\n Returns\n -------\n pandas.DataFrame\n Dataframe object."} {"query":"Saves the lineage to XCom and if configured to do so sends it to the backend .","positive":"def Func ( arg_0 ) : arg_1 = _get_backend ( ) @ wraps ( arg_0 ) def wrapper ( arg_2 , arg_3 , * arg_4 , ** arg_5 ) : arg_2 . log . debug ( \"Backend: %s, Lineage called with inlets: %s, outlets: %s\" , arg_1 , arg_2 . inlets , arg_2 . outlets ) arg_6 = arg_0 ( arg_2 , arg_3 , * arg_4 , ** arg_5 ) arg_7 = [ x . as_dict ( ) for x in arg_2 . outlets ] arg_8 = [ x . as_dict ( ) for x in arg_2 . inlets ] if len ( arg_2 . outlets ) > 0 : arg_2 . xcom_push ( arg_3 , key = PIPELINE_OUTLETS , value = arg_7 , execution_date = arg_3 [ 'ti' ] . execution_date ) if len ( arg_2 . inlets ) > 0 : arg_2 . xcom_push ( arg_3 , key = PIPELINE_INLETS , value = arg_8 , execution_date = arg_3 [ 'ti' ] . execution_date ) if arg_1 : arg_1 . send_lineage ( operator = arg_2 , arg_8 = arg_2 . inlets , arg_7 = arg_2 . outlets , arg_3 = arg_3 ) return arg_6 return wrapper","id_":252615,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/lineage\/__init__.py#L48-L82","negative":"Return a list of examples which violate the schema."} {"query":"Run a shell command","positive":"def Func ( arg_0 ) : arg_0 = [ pipes . quote ( c ) for c in arg_0 ] arg_0 = \" \" . join ( arg_0 ) arg_0 += \"; exit 0\" try : arg_1 = subprocess . check_output ( arg_0 , stderr = subprocess . STDOUT , shell = True ) except subprocess . CalledProcessError as e : arg_1 = e . output arg_1 = arg_1 . decode ( 'utf-8' ) arg_1 = arg_1 . 
strip ( ) return arg_1","id_":252616,"task_name":"https:\/\/github.com\/pingali\/dgit\/blob\/ecde01f40b98f0719dbcfb54452270ed2f86686d\/dgitcore\/helper.py#L130-L147","negative":"Raises OrderError if no package or file defined.\n Raises CardinalityError if more than one type set.\n Raises SPDXValueError if type is unknown."} {"query":"Modify the content of filepath replacing old for new .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 1 ) : with open ( arg_0 , 'r' ) as f : arg_4 = f . read ( ) arg_4 = arg_4 . replace ( arg_1 , arg_2 , arg_3 ) with open ( arg_0 , 'w' ) as f : f . write ( arg_4 )","id_":252617,"task_name":"https:\/\/github.com\/PythonSanSebastian\/docstamp\/blob\/b43808f2e15351b0b2f0b7eade9c7ef319c9e646\/docstamp\/file_utils.py#L198-L220","negative":"Returns a list of the dicom files within root_path\n\n Parameters\n ----------\n root_path: str\n Path to the directory to be recursively searched for DICOM files.\n\n Returns\n -------\n dicoms: set\n Set of DICOM absolute file paths"} {"query":"Retrieves a list of load balancers in the data center .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 1 ) : arg_3 = arg_0 . _perform_request ( '\/datacenters\/%s\/loadbalancers?depth=%s' % ( arg_1 , str ( arg_2 ) ) ) return arg_3","id_":252618,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L883-L898","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Yield all of the importable Python files in dirname recursively .","positive":"def Func ( arg_0 ) : for arg_1 , ( arg_2 , arg_3 , arg_4 ) in enumerate ( os . walk ( arg_0 ) ) : if arg_1 > 0 and '__init__.py' not in arg_4 : del arg_3 [ : ] continue for arg_5 in arg_4 : if re . match ( r\"^[^.#~!$@%^&*()+=,]+\\.pyw?$\" , arg_5 ) : yield os . path . join ( arg_2 , arg_5 )","id_":252619,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/files.py#L288-L309","negative":"Calculate the seconds to reset the token requests, by obtaining the different\n between the current date and the next date when the token is fully regenerated."} {"query":"Flatten a list of kernels which may contain _SumKernel instances .","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 in arg_0 : if isinstance ( arg_2 , _SumKernel ) : arg_1 += arg_2 . kernels else : arg_1 . append ( arg_2 ) return arg_1","id_":252620,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/positive_semidefinite_kernels\/positive_semidefinite_kernel.py#L608-L624","negative":"Dumps and loads a database snapshot simultaneously.\n Requires that the destination server has direct database access\n to the source server.\n\n This is better than a serial dump+load when:\n 1. The network connection is reliable.\n 2. You don't need to save the dump file.\n\n The benefits of this over a dump+load are:\n 1. Usually runs faster, since the load and dump happen in parallel.\n 2. 
Usually takes up less disk space since no separate dump file is\n downloaded."} {"query":"Create a Sequence from Iterable s .","positive":"def Func ( arg_0 : arg_1 ) -> ISeq [ Any ] : try : arg_2 = iter ( arg_0 ) return _Sequence ( arg_2 , next ( arg_2 ) ) except StopIteration : return EMPTY","id_":252621,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/seq.py#L170-L176","negative":"Sends a POST request to initialize the live reports\n\n Parameters\n ----------\n report_id : str\n Hash of the report JSON as retrieved from :func:`~_get_report_hash`"} {"query":"Get a temp filename for atomic download .","positive":"def Func ( arg_0 ) : arg_1 = '%s-%s.tmp' % ( arg_0 , '' . join ( random . Random ( ) . sample ( \"0123456789abcdefghijklmnopqrstuvwxyz\" , 15 ) ) ) TEMP_FILES . add ( arg_1 ) return arg_1","id_":252622,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L192-L196","negative":"For a 2D array and mask, map the values of all unmasked pixels to a 1D array.\n\n The pixel coordinate origin is at the top left corner of the 2D array and goes right-wards and downwards, such\n that for an array of shape (3,3) where all pixels are unmasked:\n\n - pixel [0,0] of the 2D array will correspond to index 0 of the 1D array.\n - pixel [0,1] of the 2D array will correspond to index 1 of the 1D array.\n - pixel [1,0] of the 2D array will correspond to index 4 of the 1D array.\n\n Parameters\n ----------\n mask : ndarray\n A 2D array of bools, where *False* values mean unmasked and are included in the mapping.\n array_2d : ndarray\n The 2D array of values which are mapped to a 1D array.\n\n Returns\n --------\n ndarray\n A 1D array of values mapped from the 2D array with dimensions (total_unmasked_pixels).\n\n Examples\n --------\n mask = np.array([[True, False, True],\n [False, False, False]\n [True, False, True]])\n\n array_2d = np.array([[1.0, 2.0, 3.0],\n [4.0, 5.0, 6.0],\n [7.0, 8.0, 9.0]])\n\n array_1d = map_2d_array_to_masked_1d_array_from_array_2d_and_mask(mask=mask, array_2d=array_2d)"} {"query":"Create a new user instance .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_4 = arg_0 . _get_manager ( arg_1 ) return arg_4 . create_user ( * arg_2 , ** arg_3 )","id_":252623,"task_name":"https:\/\/github.com\/cdriehuys\/django-rest-email-auth\/blob\/7e752c4d77ae02d2d046f214f56e743aa12ab23f\/rest_email_auth\/factories.py#L59-L77","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Generates constant - sampled SamplePulse .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 = None ) -> SamplePulse : return _sampled_Func_pulse ( arg_0 , arg_2 , arg_4 = arg_4 )","id_":252624,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/pulse_lib\/discrete.py#L23-L33","negative":"Add the attachments from the message from the commandline options."} {"query":"trick to compute the formatting of children layout before actually writing it","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . out try : for arg_3 in arg_1 . children : arg_4 = StringIO ( ) arg_0 . out = arg_4 arg_3 . accept ( arg_0 ) yield arg_4 . getvalue ( ) finally : arg_0 . 
out = arg_2","id_":252625,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/reporters\/ureports\/__init__.py#L79-L96","negative":"Return True if we should retry. False otherwise.\n\n Args:\n exception: An exception to test for transience.\n\n Returns:\n True if we should retry. False otherwise."} {"query":"Format block by splitting on individual characters .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 60 , arg_3 = arg_4 ) : if arg_2 < 1 : arg_2 = 1 arg_1 = ( arg_0 . text if arg_1 is None else arg_1 ) or '' arg_1 = ' ' . join ( arg_1 . split ( '\\n' ) ) arg_5 = get_codes ( arg_1 ) if not arg_5 : yield from ( arg_3 ( arg_1 [ arg_6 : arg_6 + arg_2 ] ) for arg_6 in range ( 0 , len ( arg_1 ) , arg_2 ) ) else : arg_7 = 0 arg_8 = [ ] for arg_6 , arg_9 in enumerate ( get_indices_list ( arg_1 ) ) : arg_8 . append ( arg_9 ) if len ( arg_9 ) == 1 : arg_7 += 1 if arg_7 == arg_2 : yield '' . join ( arg_8 ) arg_8 = [ ] arg_7 = 0 if arg_8 : yield '' . join ( arg_8 )","id_":252626,"task_name":"https:\/\/github.com\/welbornprod\/fmtblock\/blob\/92a5529235d557170ed21e058e3c5995197facbe\/fmtblock\/formatters.py#L209-L236","negative":"Get a single publication."} {"query":"Parses the given DSL string and returns parsed results .","positive":"def Func ( arg_0 , arg_1 = '' ) : arg_2 = Funcr . Func ( arg_0 ) arg_3 = ChatlVisitor ( arg_1 ) visit_Func_tree ( arg_2 , arg_3 ) return arg_3 . Funcd","id_":252627,"task_name":"https:\/\/github.com\/atlassistant\/pychatl\/blob\/e2b5600c3183830be266f55fd110dc5e75a86e1c\/pychatl\/parser.py#L37-L54","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Get an IO write task for the requested set of data","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : return IOWriteTask ( arg_0 . _transfer_coordinator , main_kwargs = { 'fileobj' : arg_1 , 'data' : arg_2 , 'offset' : arg_3 , } )","id_":252628,"task_name":"https:\/\/github.com\/boto\/s3transfer\/blob\/2aead638c8385d8ae0b1756b2de17e8fad45fffa\/s3transfer\/download.py#L105-L129","negative":"Remove all binary files in the adslib directory."} {"query":"Returns the first row returned for the given query .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_4 = arg_0 . _query ( arg_1 , arg_2 , arg_3 ) if not arg_4 : return None elif not isinstance ( arg_4 , list ) : raise MySQLError ( \"Query is not a select query\" ) elif len ( arg_4 ) > 1 : raise MySQLError ( \"Multiple rows returned for Database.Func() query\" ) else : return arg_4 [ 0 ]","id_":252629,"task_name":"https:\/\/github.com\/memsql\/memsql-python\/blob\/aac223a1b937d5b348b42af3c601a6c685ca633a\/memsql\/common\/database.py#L124-L134","negative":"Input file list validation function. Checks that object is a list and\n contains valid filepaths that can be processed by SoX.\n\n Parameters\n ----------\n input_filepath_list : list\n A list of filepaths."} {"query":"Get a modelx object from its full name .","positive":"def Func ( arg_0 : arg_1 ) : arg_2 = arg_0 . split ( \".\" ) arg_3 = get_models ( ) [ arg_2 . pop ( 0 ) ] while len ( arg_2 ) > 0 : arg_4 = arg_2 . 
pop ( 0 ) arg_3 = getattr ( arg_3 , arg_4 ) return arg_3","id_":252630,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/core\/api.py#L194-L203","negative":"Returns all of the items from queryset where the user has a\n product invoking that item's condition in one of their carts."}
{"query":"Transform data feature to high level","positive":"def Func ( arg_0 , arg_1 = 'polynomial' , arg_2 = 1 ) : if arg_0 . status != 'load_train_data' : print ( \"Please load train data first.\" ) return arg_0 . train_X arg_0 . feature_transform_mode = arg_1 arg_0 . feature_transform_degree = arg_2 arg_0 . train_X = arg_0 . train_X [ : , 1 : ] arg_0 . train_X = utility . DatasetLoader . feature_transform ( arg_0 . train_X , arg_0 . feature_transform_mode , arg_0 . feature_transform_degree ) return arg_0 . train_X","id_":252631,"task_name":"https:\/\/github.com\/fukuball\/fuku-ml\/blob\/0da15ad7af76adf344b5a6b3f3dbabbbab3446b0\/FukuML\/MLBase.py#L34-L55","negative":"Trim all the annotations inside the jam and return as a new `JAMS`\n object.\n\n See `Annotation.trim` for details about how the annotations\n are trimmed.\n\n This operation is also documented in the jam-level sandbox\n with a list keyed by ``JAMS.sandbox.trim`` containing a tuple for each\n jam-level trim of the form ``(start_time, end_time)``.\n\n This function also copies over all of the file metadata from the\n original jam.\n\n Note: trimming does not affect the duration of the jam, i.e. the value\n of ``JAMS.file_metadata.duration`` will be the same for the original\n and trimmed jams.\n\n Parameters\n ----------\n start_time : float\n The desired start time for the trimmed annotations in seconds.\n end_time\n The desired end time for trimmed annotations in seconds. Must be\n greater than ``start_time``.\n strict : bool\n When ``False`` (default) observations that lie at the boundaries of\n the trimming range (see `Annotation.trim` for details), will have\n their time and\/or duration adjusted such that only the part of the\n observation that lies within the trim range is kept. When ``True``\n such observations are discarded and not included in the trimmed\n annotation.\n\n Returns\n -------\n jam_trimmed : JAMS\n The trimmed jam with trimmed annotations, returned as a new JAMS\n object."}
{"query":"alternative to reify and property decorators . caches the value when it s generated . It cashes it as instance . _name_of_the_property .","positive":"def Func ( arg_0 ) -> property : arg_1 = \"_\" + arg_0 . __name__ @ property def wrapper ( arg_2 ) : try : return getattr ( arg_2 , arg_1 ) except AttributeError : arg_3 = arg_0 ( arg_2 ) setattr ( arg_2 , arg_1 , arg_3 ) return arg_3 return wrapper","id_":252632,"task_name":"https:\/\/github.com\/ninjaaron\/libaaron\/blob\/a2ee417b784ca72c89c05bddb2e3e815a6b95154\/libaaron\/libaaron.py#L42-L57","negative":"Expand dimensions by iteratively append empty axes.\n\n Parameters\n ----------\n arry : ndarray\n The original array\n\n extra : int\n The number of empty axes to append"}
{"query":"Retrieve descriptor .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 if arg_1 is None : arg_1 = { } if isinstance ( arg_1 , six . string_types ) : try : if os . path . isfile ( arg_1 ) : with open ( arg_1 , 'r' ) as f : arg_1 = json . load ( f ) else : arg_2 = requests . get ( arg_1 ) arg_2 . raise_for_status ( ) arg_2 . encoding = 'utf8' arg_1 = arg_2 . json ( ) except ( IOError , requests . exceptions . RequestException ) as error : arg_4 = 'Unable to load JSON at \"%s\"' % arg_0 six . raise_from ( exceptions . DataPackageException ( arg_4 ) , error ) except ValueError as error : arg_4 = 'Unable to parse JSON at \"%s\". %s' % ( arg_0 , error ) six . raise_from ( exceptions . DataPackageException ( arg_4 ) , error ) if hasattr ( arg_1 , 'read' ) : try : arg_1 = json . load ( arg_1 ) except ValueError as e : six . raise_from ( exceptions . DataPackageException ( str ( e ) ) , e ) if not isinstance ( arg_1 , dict ) : arg_5 = 'Data must be a \\'dict\\', but was a \\'{0}\\'' raise exceptions . DataPackageException ( arg_5 . format ( type ( arg_1 ) . __name__ ) ) return arg_1","id_":252633,"task_name":"https:\/\/github.com\/frictionlessdata\/datapackage-py\/blob\/aca085ea54541b087140b58a81332f8728baeeb2\/datapackage\/helpers.py#L41-L78","negative":"Revoke the token and remove the cookie."}
{"query":"Validate min and max bounds are within waveform s independent variable vector .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 , arg_4 = False , False if arg_1 is None : arg_1 = arg_0 . _indep_vector [ 0 ] arg_3 = True if arg_2 is None : arg_2 = arg_0 . _indep_vector [ - 1 ] arg_4 = True if arg_3 and arg_4 : return arg_1 , arg_2 arg_5 = pexdoc . exh . addex ( RuntimeError , \"Incongruent `indep_min` and `indep_max` arguments\" ) arg_6 = pexdoc . exh . addai ( \"indep_min\" ) arg_7 = pexdoc . exh . addai ( \"indep_max\" ) arg_5 ( bool ( arg_1 >= arg_2 ) ) arg_6 ( bool ( ( arg_1 < arg_0 . _indep_vector [ 0 ] ) and ( not np . isclose ( arg_1 , arg_0 . _indep_vector [ 0 ] , FP_RTOL , FP_ATOL ) ) ) ) arg_7 ( bool ( ( arg_2 > arg_0 . _indep_vector [ - 1 ] ) and ( not np . isclose ( arg_2 , arg_0 . _indep_vector [ - 1 ] , FP_RTOL , FP_ATOL ) ) ) ) return arg_1 , arg_2","id_":252634,"task_name":"https:\/\/github.com\/pmacosta\/peng\/blob\/976935377adaa3de26fc5677aceb2cdfbd6f93a7\/peng\/wave_functions.py#L101-L130","negative":"Returns protobuf mapcontainer. Read from translation file."}
{"query":"Fetch commits .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = arg_4 , arg_5 = arg_6 , arg_7 = None , arg_8 = False , arg_9 = False ) : if not arg_3 : arg_3 = arg_4 if not arg_5 : arg_5 = arg_6 arg_10 = { 'from_date' : arg_3 , 'to_date' : arg_5 , 'branches' : arg_7 , 'latest_items' : arg_8 , 'no_update' : arg_9 } arg_11 = super ( ) . Func ( arg_1 , ** arg_10 ) return arg_11","id_":252635,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/git.py#L78-L132","negative":"Revoke the token and remove the cookie."}
{"query":"Downloads a MP4 or WebM file that is associated with the video at the URL passed .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = YouTube ( arg_1 ) except RegexMatchError : log . error ( f\"Cannot Func file at {url}\" ) else : arg_3 = arg_2 . streams . first ( ) log . info ( f\"Download for {stream.default_filename} has started\" ) arg_4 = time ( ) arg_3 . Func ( ) arg_5 = time ( ) log . info ( f\"Download for {stream.default_filename} has finished in {end_time - start_time} seconds\" ) return arg_3 . default_filename","id_":252636,"task_name":"https:\/\/github.com\/Music-Moo\/music2storage\/blob\/de12b9046dd227fc8c1512b5060e7f5fcd8b0ee2\/music2storage\/service.py#L49-L68","negative":"Update boost factors when local inhibition is used"}
{"query":"Join an iterable by a delimiter replacing instances of delimiter in items with escape + delimiter .","positive":"def Func ( arg_0 , arg_1 = \" \" , arg_2 = \"\\\\\" ) : arg_3 = arg_2 + arg_1 return arg_1 . join ( arg_4 . replace ( arg_1 , arg_3 ) for arg_4 in arg_0 )","id_":252637,"task_name":"https:\/\/github.com\/treycucco\/pyebnf\/blob\/3634ddabbe5d73508bcc20f4a591f86a46634e1d\/pyebnf\/util.py#L32-L37","negative":"Return output for the combined time and result summary statistics."}
{"query":"Adds two sequences of trits together .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = max ( len ( arg_0 ) , len ( arg_1 ) ) arg_3 = [ 0 ] * arg_2 arg_0 += [ 0 ] * ( arg_2 - len ( arg_0 ) ) arg_1 += [ 0 ] * ( arg_2 - len ( arg_1 ) ) arg_4 = 0 for arg_5 in range ( len ( arg_3 ) ) : arg_3 [ arg_5 ] , arg_4 = _full_Func ( arg_0 [ arg_5 ] , arg_1 [ arg_5 ] , arg_4 ) return arg_3","id_":252638,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/trits.py#L21-L44","negative":"Plot the temporal distance cumulative density function.\n\n Returns\n -------\n fig: matplotlib.Figure"}
{"query":"Serialize a value from an xtuml metamodel instance .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = arg_1 . upper ( ) arg_2 = { 'BOOLEAN' : False , 'INTEGER' : 0 , 'REAL' : 0.0 , 'STRING' : '' , 'UNIQUE_ID' : 0 } arg_3 = { 'BOOLEAN' : lambda v : '%d' % int ( v ) , 'INTEGER' : lambda v : '%d' % v , 'REAL' : lambda v : '%f' % v , 'STRING' : lambda v : \"'%s'\" % v . replace ( \"'\" , \"''\" ) , 'UNIQUE_ID' : lambda v : '\"%s\"' % uuid . UUID ( int = v ) } if arg_0 is None : arg_0 = arg_2 [ arg_1 ] return arg_3 [ arg_1 ] ( arg_0 )","id_":252639,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/xtuml\/persist.py#L32-L57","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."}
{"query":"collections . OrderedDict dumper .","positive":"def Func ( arg_0 , arg_1 , arg_2 = \"collections.OrderedDict\" ) : return { \"$\" + arg_2 : [ ( arg_3 , arg_0 . _json_convert ( arg_4 ) ) for arg_3 , arg_4 in iteritems ( arg_1 ) ] }","id_":252640,"task_name":"https:\/\/github.com\/MacHu-GWU\/superjson-project\/blob\/782ca4b2edbd4b4018b8cedee42eeae7c921b917\/superjson\/_superjson.py#L476-L484","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."}
{"query":"Get a list of most queried decks","positive":"def Func ( arg_0 , ** arg_1 : arg_2 ) : arg_3 = arg_0 . api . POPULAR + '\/decks' return arg_0 . _get_model ( arg_3 , ** arg_1 )","id_":252641,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/royaleapi\/client.py#L740-L758","negative":"Converts py_zipkin's annotations dict to protobuf.\n\n :param annotations: annotations dict.\n :type annotations: dict\n :return: corresponding protobuf's list of annotations.\n :rtype: list"}
{"query":"Merges config with templates","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_2 : return arg_1 if not isinstance ( arg_2 , list ) : raise TypeError ( 'templates argument must be an instance of list' ) arg_3 = { } arg_4 = arg_2 + [ arg_1 ] for arg_5 in arg_4 : arg_3 = merge_config ( arg_3 , arg_0 . _load ( arg_5 ) , arg_0 . list_identifiers ) return arg_3","id_":252642,"task_name":"https:\/\/github.com\/openwisp\/netjsonconfig\/blob\/c23ce9732720856e2f6dc54060db71a8182c7d4b\/netjsonconfig\/backends\/base\/backend.py#L66-L80","negative":"Read attribute from sysfs and return as string"}
{"query":"This method generates a dictionary of the query string parameters contained in a given editable URL .","positive":"def Func ( arg_0 ) : arg_1 = re . compile ( r\"[\\?#&](?P[^&=]+)=(?P[^&=]+)\" ) arg_2 = arg_1 . findall ( arg_0 ) if arg_2 : arg_3 = dict ( ) for arg_4 in arg_2 : ( arg_5 , arg_6 ) = arg_4 if arg_5 in arg_3 : raise Exception ( \"%s option already defined\" % arg_5 ) arg_3 [ arg_5 ] = arg_6 return arg_3 return None","id_":252643,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/pip\/req\/req_install.py#L1042-L1059","negative":"This returns an array of each sector and performance for the current trading day. Performance is based on each sector ETF.\n\n https:\/\/iexcloud.io\/docs\/api\/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result"}
{"query":"Send buffered metrics in batch requests","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . remote_address while len ( arg_0 . _batches ) > 0 : arg_0 . _socket . sendto ( arg_0 . _batches [ 0 ] , arg_1 ) arg_0 . _batches . popleft ( ) return arg_0","id_":252644,"task_name":"https:\/\/github.com\/farzadghanei\/statsd-metrics\/blob\/153ff37b79777f208e49bb9d3fb737ba52b99f98\/statsdmetrics\/client\/__init__.py#L360-L368","negative":"Return a list of column names\n\n Example:\n\n >>> import vaex\n >>> df = vaex.from_scalars(x=1, x2=2, y=3, s='string')\n >>> df['r'] = (df.x**2 + df.y**2)**2\n >>> df.get_column_names()\n ['x', 'x2', 'y', 's', 'r']\n >>> df.get_column_names(virtual=False)\n ['x', 'x2', 'y', 's']\n >>> df.get_column_names(regex='x.*')\n ['x', 'x2']\n\n :param virtual: If False, skip virtual columns\n :param hidden: If False, skip hidden columns\n :param strings: If False, skip string columns\n :param regex: Only return column names matching the (optional) regular expression\n :rtype: list of str\n\n Example:\n >>> import vaex\n >>> df = vaex.from_scalars(x=1, x2=2, y=3, s='string')\n >>> df['r'] = (df.x**2 + df.y**2)**2\n >>> df.get_column_names()\n ['x', 'x2', 'y', 's', 'r']\n >>> df.get_column_names(virtual=False)\n ['x', 'x2', 'y', 's']\n >>> df.get_column_names(regex='x.*')\n ['x', 'x2']"}
{"query":"Construct a function that checks a directory for process configuration","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = filepath . FilePath ( arg_0 ) arg_3 = set ( ) arg_4 = { } def _check ( arg_2 ) : arg_5 = set ( arg_8 for arg_8 in os . listdir ( arg_0 ) if not arg_8 . endswith ( '.new' ) ) arg_6 = arg_3 - arg_5 arg_7 = arg_5 - arg_3 for arg_8 in arg_7 : arg_9 = arg_2 . child ( arg_8 ) . getContent ( ) arg_4 [ arg_8 ] = arg_9 arg_1 . add ( arg_8 , arg_9 ) for arg_8 in arg_6 : arg_1 . remove ( arg_8 ) arg_10 = arg_5 & arg_3 for arg_8 in arg_10 : arg_11 = arg_2 . child ( arg_8 ) . getContent ( ) arg_12 = arg_4 [ arg_8 ] if arg_11 == arg_12 : continue arg_1 . remove ( arg_8 ) arg_4 [ arg_8 ] = arg_11 arg_1 . add ( arg_8 , arg_11 ) arg_3 . clear ( ) arg_3 . update ( arg_5 ) return functools . partial ( _check , arg_2 )","id_":252645,"task_name":"https:\/\/github.com\/ncolony\/ncolony\/blob\/6ac71bda1de6706fb34244ae4972e36db5f062d3\/ncolony\/directory_monitor.py#L15-L52","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."}
{"query":"Censor any values outside of range with None","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : return [ arg_3 if arg_1 [ 0 ] <= arg_3 <= arg_1 [ 1 ] else arg_2 for arg_3 in arg_0 ]","id_":252646,"task_name":"https:\/\/github.com\/has2k1\/mizani\/blob\/312d0550ee0136fd1b0384829b33f3b2065f47c8\/mizani\/bounds.py#L363-L368","negative":"Load a configuration module and return a Config"}
{"query":"Get the items for this checklist . Returns a list of ChecklistItem objects .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = arg_0 . get_card ( ) arg_3 = [ ] for arg_4 in arg_0 . get_items ( arg_1 ) : arg_3 . append ( arg_0 . create_checklist_item ( arg_2 . id , arg_0 . id , arg_4 ) ) return arg_3","id_":252647,"task_name":"https:\/\/github.com\/its-rigs\/Trolly\/blob\/483dc94c352df40dc05ead31820b059b2545cf82\/trolly\/checklist.py#L47-L56","negative":"Stop the timer\n\n Returns:\n The time the timer was stopped"}
{"query":"Delete size bytes of empty space starting at offset .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 2 ** 16 ) : arg_4 = False assert 0 < arg_1 assert 0 <= arg_2 arg_0 . seek ( 0 , 2 ) arg_5 = arg_0 . tell ( ) arg_6 = arg_5 - arg_2 - arg_1 assert 0 <= arg_6 try : if arg_6 > 0 : arg_0 . flush ( ) try : import mmap arg_7 = mmap . mmap ( arg_0 . fileno ( ) , arg_5 ) try : arg_7 . move ( arg_2 , arg_2 + arg_1 , arg_6 ) finally : arg_7 . close ( ) except ( ValueError , EnvironmentError , ImportError ) : arg_4 = lock ( arg_0 ) arg_0 . seek ( arg_2 + arg_1 ) arg_8 = arg_0 . read ( arg_3 ) while arg_8 : arg_0 . seek ( arg_2 ) arg_0 . write ( arg_8 ) arg_2 += len ( arg_8 ) arg_0 . seek ( arg_2 + arg_1 ) arg_8 = arg_0 . read ( arg_3 ) arg_0 . truncate ( arg_5 - arg_1 ) arg_0 . flush ( ) finally : if arg_4 : unlock ( arg_0 )","id_":252648,"task_name":"https:\/\/github.com\/LordSputnik\/mutagen\/blob\/38e62c8dc35c72b16554f5dbe7c0fde91acc3411\/mutagen\/_util.py#L210-L250","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."}
{"query":"Gets the base class for the custom database back - end .","positive":"def Func ( ) : arg_0 = getattr ( settings , 'POSTGRES_EXTRA_DB_BACKEND_BASE' , 'django.db.backends.postgresql' ) arg_1 = importlib . import_module ( arg_0 + '.base' ) arg_2 = getattr ( arg_1 , 'DatabaseWrapper' , None ) if not arg_2 : raise ImproperlyConfigured ( ( '\\'%s\\' is not a valid database back-end.' ' The module does not define a DatabaseWrapper class.' ' Check the value of POSTGRES_EXTRA_DB_BACKEND_BASE.' ) % arg_0 ) if isinstance ( arg_2 , Psycopg2DatabaseWrapper ) : raise ImproperlyConfigured ( ( '\\'%s\\' is not a valid database back-end.' ' It does inherit from the PostgreSQL back-end.' ' Check the value of POSTGRES_EXTRA_DB_BACKEND_BASE.' ) % arg_0 ) return arg_2","id_":252649,"task_name":"https:\/\/github.com\/SectorLabs\/django-postgres-extra\/blob\/eef2ed5504d225858d4e4f5d77a838082ca6053e\/psqlextra\/backend\/base.py#L16-L51","negative":"Read the file and perform any transforms to get a loaded image"}
{"query":"Return an existing CA bundle path or None","positive":"def Func ( ) : if os . name == 'nt' : return get_win_certfile ( ) else : for arg_0 in cert_paths : if os . path . isfile ( arg_0 ) : return arg_0 try : return pkg_resources . resource_filename ( 'certifi' , 'cacert.pem' ) except ( ImportError , ResolutionError , ExtractionError ) : return None","id_":252650,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/setuptools\/ssl_support.py#L230-L241","negative":"Dynamically adjust the reader max_in_flight. Set to 0 to immediately disable a Reader"}
{"query":"Register sample aggregations .","positive":"def Func ( ) : return [ dict ( aggregation_name = 'file-download-agg' , templates = 'invenio_stats.contrib.aggregations.aggr_file_download' , aggregator_class = StatAggregator , aggregator_config = dict ( client = current_search_client , event = 'file-download' , aggregation_field = 'unique_id' , aggregation_interval = 'day' , copy_fields = dict ( file_key = 'file_key' , bucket_id = 'bucket_id' , file_id = 'file_id' , ) , metric_aggregation_fields = { 'unique_count' : ( 'cardinality' , 'unique_session_id' , { 'precision_threshold' : 1000 } ) , 'volume' : ( 'sum' , 'size' , { } ) , } , ) ) , dict ( aggregation_name = 'record-view-agg' , templates = 'invenio_stats.contrib.aggregations.aggr_record_view' , aggregator_class = StatAggregator , aggregator_config = dict ( client = current_search_client , event = 'record-view' , aggregation_field = 'unique_id' , aggregation_interval = 'day' , copy_fields = dict ( record_id = 'record_id' , pid_type = 'pid_type' , pid_value = 'pid_value' , ) , metric_aggregation_fields = { 'unique_count' : ( 'cardinality' , 'unique_session_id' , { 'precision_threshold' : 1000 } ) , } , ) ) ]","id_":252651,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-stats\/blob\/d877ae5462084abb4a28a20f1ebb3d636769c1bc\/invenio_stats\/contrib\/registrations.py#L45-L84","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."}
{"query":"Remove all of the non - descendants operation nodes of node .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , int ) : warnings . warn ( 'Calling Func() with a node id is deprecated,' ' use a DAGNode instead' , DeprecationWarning , 2 ) arg_1 = arg_0 . _id_to_node [ arg_1 ] arg_2 = nx . descendants ( arg_0 . _multi_graph , arg_1 ) arg_3 = list ( set ( arg_0 . _multi_graph . nodes ( ) ) - set ( arg_2 ) ) for arg_4 in arg_3 : if arg_4 . type == \"op\" : arg_0 . remove_op_node ( arg_4 )","id_":252652,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/dagcircuit\/dagcircuit.py#L1219-L1231","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"}
{"query":"Apply cleaner - > tokenizer .","positive":"def Func ( arg_0 , arg_1 : arg_2 [ arg_3 ] ) -> arg_2 [ arg_2 [ arg_3 ] ] : arg_4 = process_text_constructor ( cleaner = arg_0 . cleaner , tokenizer = arg_0 . tokenizer , append_indicators = arg_0 . append_indicators , start_tok = arg_0 . start_tok , end_tok = arg_0 . end_tok ) arg_5 = arg_0 . num_cores return flattenlist ( apply_parallel ( arg_4 , arg_1 , arg_5 ) )","id_":252653,"task_name":"https:\/\/github.com\/hamelsmu\/ktext\/blob\/221f09f5b1762705075fd1bd914881c0724d5e02\/ktext\/preprocess.py#L227-L235","negative":"Verifies that `parts` don't broadcast."}
{"query":"Verifies that parts don t broadcast .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 = tuple ( arg_0 ) if not arg_1 : return arg_0 arg_2 = 'Broadcasting probably indicates an error in model specification.' arg_3 = tuple ( arg_8 . shape for arg_8 in arg_0 ) if all ( tensorshape_util . is_fully_defined ( arg_4 ) for arg_4 in arg_3 ) : if not all ( arg_5 == arg_6 for arg_5 , arg_6 in zip ( arg_3 [ 1 : ] , arg_3 [ : - 1 ] ) ) : raise ValueError ( arg_2 ) return arg_0 arg_7 = [ assert_util . assert_equal ( arg_5 , arg_6 , message = arg_2 ) for arg_5 , arg_6 in zip ( arg_3 [ 1 : ] , arg_3 [ : - 1 ] ) ] with tf . control_dependencies ( arg_7 ) : return tuple ( tf . identity ( arg_8 ) for arg_8 in arg_0 )","id_":252654,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/joint_distribution.py#L324-L341","negative":"Enumerate all possible resonance forms and return them as a list.\n\n :param mol: The input molecule.\n :type mol: rdkit.Chem.rdchem.Mol\n :return: A list of all possible resonance forms of the molecule.\n :rtype: list of rdkit.Chem.rdchem.Mol"}
{"query":"Draw node and children","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = 0 ) : if arg_0 . mesh : arg_0 . mesh . Func ( arg_1 = arg_1 , view_matrix = arg_0 . matrix_global_bytes , arg_2 = arg_2 , arg_3 = arg_3 ) for arg_4 in arg_0 . children : arg_4 . Func ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 )","id_":252655,"task_name":"https:\/\/github.com\/Contraz\/demosys-py\/blob\/6466128a3029c4d09631420ccce73024025bd5b6\/demosys\/scene\/node.py#L19-L40","negative":"Parses a file and returns a document object.\n File, a file like object."}
{"query":"Check if a given string is in the correct URL format or not","positive":"def Func ( arg_0 ) : arg_1 = re . compile ( r'^(?:http|ftp)s?:\/\/' r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?)|' r'localhost|' r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})' r'(?::\\d+)?' r'(?:\/?|[\/?]\\S+)$' , re . IGNORECASE ) if arg_1 . match ( arg_0 ) : logger . info ( \"URL given as config\" ) return True else : return False","id_":252656,"task_name":"https:\/\/github.com\/linkedin\/naarad\/blob\/261e2c0760fd6a6b0ee59064180bd8e3674311fe\/src\/naarad\/utils.py#L84-L101","negative":"Bring the interrupt pin on the GPIO into Linux userspace."}
{"query":"Compares string operands .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = { 8 : 'SI' , 32 : 'ESI' , 64 : 'RSI' } [ arg_0 . address_bit_size ] arg_4 = { 8 : 'DI' , 32 : 'EDI' , 64 : 'RDI' } [ arg_0 . address_bit_size ] arg_5 , arg_6 , arg_7 = arg_0 . get_descriptor ( arg_0 . DS ) arg_8 = arg_0 . read_register ( arg_3 ) + arg_5 arg_9 = arg_0 . read_register ( arg_4 ) + arg_5 arg_10 = arg_1 . size arg_11 = arg_0 . read_int ( arg_9 , arg_10 ) arg_12 = arg_0 . read_int ( arg_8 , arg_10 ) arg_13 = ( arg_12 - arg_11 ) & ( ( 1 << arg_10 ) - 1 ) arg_0 . _calculate_CMP_flags ( arg_10 , arg_13 , arg_12 , arg_11 ) arg_14 = Operators . ITEBV ( arg_0 . address_bit_size , arg_0 . DF , - arg_10 \/\/ 8 , arg_10 \/\/ 8 ) arg_0 . write_register ( arg_3 , arg_0 . read_register ( arg_3 ) + arg_14 ) arg_0 . write_register ( arg_4 , arg_0 . read_register ( arg_4 ) + arg_14 )","id_":252657,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L4263-L4324","negative":"Write the index.html file for this report."}
{"query":"Dumps data into a nicely formatted JSON string .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = True , ** arg_3 ) : if arg_1 is None : arg_1 = ALWAYS_DUMP_YAML if arg_1 : Func = yaml . safe_dump if arg_2 else yaml . dump else : Func = json . dumps arg_3 . update ( indent = 4 , sort_keys = True ) if not arg_2 : arg_3 . update ( default = repr ) return Func ( arg_0 , ** arg_3 )","id_":252658,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/util\/data_file.py#L9-L28","negative":"Unregister an extension code. For testing only."}
{"query":"Apply a lambda expression to an H2OFrame .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 0 ) : from . astfun import lambda_to_expr assert_is_type ( arg_2 , 0 , 1 ) assert_is_type ( arg_1 , FunctionType ) assert_satisfies ( arg_1 , arg_1 . __name__ == \"<lambda>\" ) arg_3 = lambda_to_expr ( arg_1 ) return H2OFrame . _expr ( expr = ExprNode ( \"Func\" , arg_0 , 1 + ( arg_2 == 0 ) , * arg_3 ) )","id_":252659,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/frame.py#L3302-L3315","negative":"use previously calculated variation of the rate to estimate\n the uncertainty in a particular numdate due to rate variation.\n\n Parameters\n ----------\n node : PhyloTree.Clade\n node for which the confidence interval is to be calculated\n interval : tuple, optional\n Array of length two, or tuple, defining the bounds of the confidence interval"}
{"query":"Return the generation index of the first generation in the given swarm that does not have numParticles particles in it either still in the running state or completed . This does not include orphaned particles .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_1 in arg_0 . _swarmNumParticlesPerGeneration : return None arg_3 = arg_0 . _swarmNumParticlesPerGeneration [ arg_1 ] arg_3 = numpy . array ( arg_3 ) arg_4 = numpy . where ( arg_3 < arg_2 ) [ 0 ] if len ( arg_4 ) == 0 : return len ( arg_3 ) else : return arg_4 [ 0 ]","id_":252660,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/hypersearch_v2.py#L632-L657","negative":"Creates and connects the underlying text widget."}
{"query":"Generate a solution representation of the current solver state .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = False ) : check_solver_status ( arg_0 . solver . status , arg_3 = arg_3 ) if arg_1 is None : arg_1 = arg_0 . reactions if arg_2 is None : arg_2 = arg_0 . metabolites arg_4 = list ( ) arg_5 = empty ( len ( arg_1 ) ) arg_6 = empty ( len ( arg_1 ) ) arg_7 = arg_0 . solver . primal_values arg_8 = empty ( len ( arg_2 ) ) if arg_0 . solver . is_integer : arg_6 . fill ( nan ) arg_8 . fill ( nan ) for ( arg_9 , arg_10 ) in enumerate ( arg_1 ) : arg_4 . append ( arg_10 . id ) arg_5 [ arg_9 ] = arg_7 [ arg_10 . id ] - arg_7 [ arg_10 . reverse_id ] arg_11 = [ arg_16 . id for arg_16 in arg_2 ] else : arg_12 = arg_0 . solver . reduced_costs for ( arg_9 , arg_10 ) in enumerate ( arg_1 ) : arg_13 = arg_10 . id arg_14 = arg_10 . reverse_id arg_4 . append ( arg_13 ) arg_5 [ arg_9 ] = arg_7 [ arg_13 ] - arg_7 [ arg_14 ] arg_6 [ arg_9 ] = arg_12 [ arg_13 ] - arg_12 [ arg_14 ] arg_11 = list ( ) arg_15 = arg_0 . solver . shadow_prices for ( arg_9 , arg_16 ) in enumerate ( arg_2 ) : arg_11 . append ( arg_16 . id ) arg_8 [ arg_9 ] = arg_15 [ arg_16 . id ] return Solution ( arg_0 . solver . objective . value , arg_0 . solver . status , Series ( index = arg_4 , data = arg_5 , name = \"fluxes\" ) , Series ( index = arg_4 , data = arg_6 , name = \"reduced_costs\" ) , Series ( index = arg_11 , data = arg_8 , name = \"shadow_prices\" ) )","id_":252661,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/core\/solution.py#L196-L257","negative":"Converts py_zipkin's annotations dict to protobuf.\n\n :param annotations: annotations dict.\n :type annotations: dict\n :return: corresponding protobuf's list of annotations.\n :rtype: list"}
{"query":"Test whether an href string meets criteria specified by configuration parameters require_abs_url which means does it look like it is probably an absolute URL? and domain_substrings . It searches for each of the domain_substrings in the href individually and if any match then returns True .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . config [ 'require_abs_url' ] : if not arg_1 . lower ( ) . startswith ( ( 'http:\/\/' , 'https:\/\/' ) ) : return False if arg_0 . config [ 'all_domains' ] : return True if arg_0 . config [ 'domain_substrings' ] : arg_2 = arg_1 . split ( '\/' ) if len ( arg_2 ) < 3 : return False arg_3 = arg_2 [ 2 ] . lower ( ) for arg_4 in arg_0 . config [ 'domain_substrings' ] : try : if arg_4 in arg_3 : return True except Exception , exc : logger . warn ( '%r in %r raised' , arg_4 , arg_3 , exc_info = True ) return False","id_":252662,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/_hyperlink_labels.py#L159-L189","negative":"Generator that reads a block of data from the server.\n\n It first attempts to read from the internal buffer. If there is not\n enough data in the internal buffer it then requests more data from the\n server and adds it to the buffer.\n\n Args:\n length: An optional amount of data to retrieve. A length of 0 (the\n default) will retrieve a least one buffer of data.\n\n Yields:\n A block of data when enough data becomes available.\n\n Note:\n If a length of 0 is supplied then the size of the yielded buffer can\n vary. If there is data in the internal buffer it will yield all of\n that data otherwise it will yield the the data returned by a recv\n on the socket."}
{"query":"Pairwise distance between each point in a and each point in b","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : def sq ( arg_3 ) : return ( arg_3 * arg_3 ) arg_4 = sq ( arg_1 [ : , 0 ] [ : , None ] - arg_2 [ : , 0 ] [ None , : ] ) for arg_3 , arg_5 in zip ( arg_1 . T [ 1 : ] , arg_2 . T [ 1 : ] ) : arg_4 += sq ( arg_3 [ : , None ] - arg_5 [ None , : ] ) return arg_4","id_":252663,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/interpolation.py#L192-L201","negative":"Write the index.html file for this report."}
{"query":"Load the Certificate object from DigitalOcean .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . get_data ( \"certificates\/%s\" % arg_0 . id ) arg_2 = arg_1 [ \"certificate\" ] for arg_3 in arg_2 . keys ( ) : setattr ( arg_0 , arg_3 , arg_2 [ arg_3 ] ) return arg_0","id_":252664,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Certificate.py#L69-L81","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"}
{"query":"Recursively kill the descendants of a process before killing it .","positive":"def Func ( arg_0 ) : if sys . platform == \"win32\" : try : subprocess . check_output ( [ \"taskkill\" , \"\/F\" , \"\/T\" , \"\/PID\" , str ( arg_0 ) ] , stderr = None ) except subprocess . CalledProcessError as e : if e . returncode not in [ 1 , 128 , 255 ] : raise elif e . returncode == 1 : try : os . kill ( arg_0 , signal . SIGTERM ) except OSError as e : if e . errno != errno . ESRCH : raise else : try : arg_1 = subprocess . check_output ( [ \"pgrep\" , \"-P\" , str ( arg_0 ) ] , stderr = None ) except subprocess . CalledProcessError as e : if e . returncode == 1 : arg_1 = b'' else : raise arg_1 = arg_1 . decode ( ) . split ( '\\n' ) [ : - 1 ] for arg_2 in arg_1 : arg_2 = int ( arg_2 ) Func ( arg_2 ) try : os . kill ( arg_0 , signal . SIGTERM ) except OSError as e : if e . errno != errno . ESRCH : raise","id_":252665,"task_name":"https:\/\/github.com\/tomMoral\/loky\/blob\/dc2d941d8285a96f3a5b666a4bd04875b0b25984\/loky\/backend\/utils.py#L63-L116","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."}
{"query":"Heuristic to decide whether an AST Call is a logging call .","positive":"def Func ( arg_0 , arg_1 ) : try : if arg_0 . get_id_attr ( arg_1 . func . value ) == \"warnings\" : return None if arg_1 . func . attr in LOGGING_LEVELS : return arg_1 . func . attr except AttributeError : pass return None","id_":252666,"task_name":"https:\/\/github.com\/globality-corp\/flake8-logging-format\/blob\/3c6ce53d0ff1ec369799cff0ed6d048343252e40\/logging_format\/visitor.py#L184-L197","negative":"Adds all parameters to `traj`"}
{"query":"A defaultdict with for each job a list of its tasks .","positive":"def Func ( arg_0 ) : arg_1 = collections . defaultdict ( list ) for arg_2 in arg_0 : arg_1 [ arg_2 . get_field ( 'job-id' ) ] . append ( arg_2 ) return arg_1","id_":252667,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/commands\/dsub.py#L835-L840","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."}
{"query":"Execute Main . Source .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : try : arg_3 = int ( arg_0 . exec_command ( 'main' , 'source' , arg_1 , arg_2 ) ) return arg_3 except ( ValueError , TypeError ) : pass return None","id_":252668,"task_name":"https:\/\/github.com\/joopert\/nad_receiver\/blob\/416de0173a330c75cc73f9c90b0c5df32e5e0ba3\/nad_receiver\/__init__.py#L107-L119","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."}
{"query":"If the given object is an instance of Child add it to self and register self as a parent .","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , ChildMixin ) : raise TypeError ( 'Requires instance of TreeElement. ' 'Got {}' . format ( type ( arg_1 ) ) ) arg_1 . parent = arg_0 arg_0 . _children . append ( arg_1 )","id_":252669,"task_name":"https:\/\/github.com\/hackebrot\/poyo\/blob\/4c7338a87c692c317b3b5bc726d731dd96689298\/poyo\/_nodes.py#L26-L36","negative":"Not accurate false due to spikes are observed"}
{"query":"This queries the GAIA TAP service for a list of objects near the coords .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = ( 'source_id' , 'ra' , 'dec' , 'phot_g_mean_mag' , 'l' , 'b' , 'parallax' , 'parallax_error' , 'pmra' , 'pmra_error' , 'pmdec' , 'pmdec_error' ) , arg_5 = None , arg_6 = 'csv' , arg_7 = False , arg_8 = '~\/.astrobase\/gaia-cache' , arg_9 = True , arg_10 = 15.0 , arg_11 = 2.0 , arg_12 = 300.0 , arg_13 = 3 , arg_14 = True ) : arg_15 = ( \"select {columns}, \" \"(DISTANCE(POINT('ICRS', \" \"{{table}}.ra, {{table}}.dec), \" \"POINT('ICRS', {ra_center:.5f}, {decl_center:.5f})))*3600.0 \" \"AS dist_arcsec \" \"from {{table}} where \" \"CONTAINS(POINT('ICRS',{{table}}.ra, {{table}}.dec),\" \"CIRCLE('ICRS',{ra_center:.5f},{decl_center:.5f},\" \"{search_radius:.6f}))=1 \" \"{extra_filter_str}\" \"ORDER by dist_arcsec asc \" ) if arg_5 is not None : arg_16 = ' and %s ' % arg_5 else : arg_16 = '' arg_17 = arg_15 . format ( ra_center = arg_0 , decl_center = arg_1 , search_radius = arg_2 \/ 3600.0 , arg_16 = arg_16 , arg_4 = ', ' . join ( arg_4 ) ) return tap_query ( arg_17 , arg_3 = arg_3 , arg_6 = arg_6 , arg_7 = arg_7 , arg_8 = arg_8 , arg_9 = arg_9 , arg_10 = arg_10 , arg_11 = arg_11 , arg_12 = arg_12 , arg_13 = arg_13 , arg_14 = arg_14 )","id_":252670,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/services\/gaia.py#L835-L980","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"}
{"query":"Adjusts enthalpy of vaporization of enthalpy for another temperature for one temperature .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 0.38 ) : arg_5 = arg_0 \/ arg_3 arg_6 = arg_2 \/ arg_3 arg_7 = arg_1 * ( ( 1 - arg_5 ) \/ ( 1 - arg_6 ) ) ** arg_4 return arg_7","id_":252671,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/phase_change.py#L854-L861","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."}
{"query":"Returns a dictionary with all the past baking statuses of a single book .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . matchdict [ 'uuid' ] try : UUID ( arg_1 ) except ValueError : raise httpexceptions . HTTPBadRequest ( '{} is not a valid uuid' . format ( arg_1 ) ) arg_2 , arg_3 = get_baking_statuses_sql ( { 'uuid' : arg_1 } ) with db_connect ( cursor_factory = DictCursor ) as db_conn : with db_conn . cursor ( ) as cursor : cursor . execute ( arg_2 , arg_3 ) arg_4 = cursor . fetchall ( ) if len ( arg_4 ) == 0 : raise httpexceptions . HTTPBadRequest ( '{} is not a book' . format ( arg_1 ) ) arg_5 = [ ] arg_6 = arg_4 [ 0 ] for arg_7 in arg_4 : arg_8 = '' arg_9 = arg_7 [ 'state' ] or 'PENDING' if arg_9 == 'FAILURE' : if arg_7 [ 'traceback' ] is not None : arg_8 = arg_7 [ 'traceback' ] arg_10 = arg_7 [ 'latest_recipe_id' ] arg_11 = arg_7 [ 'recipe_id' ] if ( arg_10 is not None and arg_11 != arg_10 ) : arg_9 += ' stale_recipe' arg_5 . append ( { 'version' : arg_7 [ 'current_version' ] , 'recipe' : arg_7 [ 'recipe' ] , 'created' : str ( arg_7 [ 'created' ] ) , 'state' : arg_9 , 'state_message' : arg_8 , } ) return { 'uuid' : str ( arg_6 [ 'uuid' ] ) , 'title' : arg_6 [ 'name' ] . decode ( 'utf-8' ) , 'authors' : format_authors ( arg_6 [ 'authors' ] ) , 'print_style' : arg_6 [ 'print_style' ] , 'current_recipe' : arg_6 [ 'recipe_id' ] , 'current_ident' : arg_6 [ 'module_ident' ] , 'current_state' : arg_5 [ 0 ] [ 'state' ] , 'states' : arg_5 }","id_":252672,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/views\/admin\/content_status.py#L243-L292","negative":"Init a uniform noise variable."}
{"query":"update the base including the URL for GitLab and the API endpoint .","positive":"def Func ( arg_0 ) : arg_0 . base = arg_0 . _get_and_update_setting ( 'SREGISTRY_GITLAB_BASE' , \"https:\/\/gitlab.com\/\" ) arg_0 . api_base = \"%s\/api\/v4\" % arg_0 . base . strip ( '\/' ) arg_0 . artifacts = arg_0 . _get_and_update_setting ( 'SREGISTRY_GITLAB_FOLDER' , 'build' ) arg_0 . job = arg_0 . _get_and_update_setting ( 'SREGISTRY_GITLAB_JOB' , 'build' ) bot . debug ( ' Api: %s' % arg_0 . api_base ) bot . debug ( 'Artifacts: %s' % arg_0 . artifacts ) bot . debug ( ' Job: %s' % arg_0 . job )","id_":252673,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/gitlab\/__init__.py#L28-L41","negative":"Set the rotation of this body using a rotation matrix.\n\n Parameters\n ----------\n rotation : sequence of 9 floats\n The desired rotation matrix for this body."}
{"query":"Iterate Pages .","positive":"def Func ( arg_0 ) : try : while True : yield arg_0 . _query ( ItemPage = arg_0 . current_page , ** arg_0 . kwargs ) arg_0 . current_page += 1 except NoMorePages : pass","id_":252674,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/amazon\/api.py#L239-L253","negative":"Revoke the token and remove the cookie."}
{"query":"Determines which method of getting the query object for use","positive":"def Func ( arg_0 ) : if hasattr ( arg_0 . model , 'Func' ) : return arg_0 . model . Func else : return arg_0 . session . Func ( arg_0 . model )","id_":252675,"task_name":"https:\/\/github.com\/lepture\/flask-oauthlib\/blob\/9e6f152a5bb360e7496210da21561c3e6d41b0e1\/flask_oauthlib\/contrib\/oauth2.py#L203-L208","negative":"Resets builder's state for building new documents.\n Must be called between usage with different documents."}
{"query":"Get the clan badge image URL","positive":"def Func ( arg_0 , arg_1 : arg_2 ) : try : arg_3 = arg_1 . clan . badge_id except AttributeError : try : arg_3 = arg_1 . badge_id except AttributeError : return 'https:\/\/i.imgur.com\/Y3uXsgj.png' if arg_3 is None : return 'https:\/\/i.imgur.com\/Y3uXsgj.png' for arg_4 in arg_0 . constants . alliance_badges : if arg_4 . id == arg_3 : return 'https:\/\/royaleapi.github.io\/cr-api-assets\/badges\/' + arg_4 . name + '.png'","id_":252676,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/official_api\/client.py#L532-L557","negative":"Restore Python settings to the original states"}
{"query":"Writes a series of security group rules to a redis server .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : LOG . info ( \"Applying security group rules for device %s with MAC %s\" % ( arg_1 , arg_2 ) ) arg_4 = { SECURITY_GROUP_RULE_KEY : arg_3 } arg_5 = arg_0 . vif_key ( arg_1 , arg_2 ) arg_0 . set_field ( arg_5 , SECURITY_GROUP_HASH_ATTR , arg_4 ) arg_0 . set_field_raw ( arg_5 , SECURITY_GROUP_ACK , False )","id_":252677,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/cache\/security_groups_client.py#L150-L159","negative":"Clear the output directory."}
{"query":"Connection to Earth explorer without proxy","positive":"def Func ( arg_0 ) : logger . info ( \"Establishing connection to Earthexplorer\" ) print ( \"\\n Establishing connection to Earthexplorer\" ) try : arg_1 = urllib . request . build_opener ( urllib . request . HTTPCookieProcessor ( ) ) urllib . request . install_opener ( arg_1 ) arg_2 = urllib . parse . urlencode ( dict ( username = arg_0 . user , password = arg_0 . password ) ) arg_2 = arg_2 . encode ( 'utf-8' ) arg_3 = arg_1 . open ( \"https:\/\/ers.cr.usgs.gov\/login\" , arg_2 ) arg_4 = arg_3 . read ( ) . decode ( 'utf-8' ) arg_3 . close ( ) if arg_4 . find ( 'You must sign in as a registered user to download data or place orders for USGS EROS products' ) > 0 : print ( \"\\n Authentification failed\" ) logger . error ( \"Authentification failed\" ) raise AutenticationUSGSFailed ( 'Authentification USGS failed' ) print ( 'User %s connected with USGS' % arg_0 . user ) logger . debug ( 'User %s connected with USGS' % arg_0 . user ) return except Exception as e : print ( '\\nError when trying to connect USGS: %s' % e ) raise logger . error ( 'Error when trying to connect USGS: %s' % e )","id_":252678,"task_name":"https:\/\/github.com\/lucaslamounier\/USGSDownload\/blob\/0969483ea9f9648aa17b099f36d2e1010488b2a4\/usgsdownload\/usgs.py#L193-L215","negative":"Start listening for events from Marathon, running a sync when we first\n successfully subscribe and triggering a sync on API request events."}
{"query":"Load a JSON stream and return a generator yielding one object at a time .","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , string_type ) : arg_0 = open ( arg_0 , 'rb' ) for arg_1 in arg_0 : arg_1 = arg_1 . strip ( ) if arg_1 : if isinstance ( arg_1 , bytes ) : arg_1 = arg_1 . decode ( 'utf-8' ) yield json . loads ( arg_1 )","id_":252679,"task_name":"https:\/\/github.com\/LuminosoInsight\/luminoso-api-client-python\/blob\/3bedf2a454aee39214c11fbf556ead3eecc27881\/luminoso_api\/v4_json_stream.py#L175-L186","negative":"Stops a timer if it hasn't fired yet\n\n * func - the function passed in start_timer"}
{"query":"Rewrite local file URIs as required by the rewrite_uris method .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = os . path . split ( arg_0 ) arg_3 = [ ( 'file:\/\/\/' , '\/' ) , ( '~\/' , os . getenv ( 'HOME' ) ) , ( '.\/' , '' ) , ( 'file:\/' , '\/' ) ] arg_4 = arg_1 for arg_5 , arg_6 in arg_3 : if arg_4 . startswith ( arg_5 ) : arg_4 = os . path . join ( arg_6 , arg_4 [ len ( arg_5 ) : ] ) arg_7 = directory_fmt ( os . path . abspath ( arg_4 ) ) arg_7 = os . path . join ( arg_7 , arg_2 ) arg_8 = [ ( r'\/\\.\\.' , '\/_dotdot_' ) , ( r'^\\.\\.' , '_dotdot_' ) , ( r'^~\/' , '_home_\/' ) , ( r'^file:\/' , '' ) ] arg_9 = os . path . normpath ( arg_1 ) for arg_10 , arg_6 in arg_8 : arg_9 = re . sub ( arg_10 , arg_6 , arg_9 ) arg_9 = arg_9 . lstrip ( '.\/' ) arg_9 = directory_fmt ( 'file\/' + arg_9 ) + arg_2 return arg_7 , arg_9","id_":252680,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/lib\/param_util.py#L308-L367","negative":"Return a list of all enrollments for the passed section_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/enrollments.html#method.enrollments_api.index"}
{"query":"Supply a file name for the class object .","positive":"def Func ( arg_0 , Func = None , arg_2 = None , arg_3 = False , arg_4 = False ) : if Func is None : if not hasattr ( arg_0 , '_filename' ) : arg_0 . _filename = None if arg_0 . _filename : Func = arg_0 . _filename else : raise ValueError ( \"A file name is required because no default file name was defined.\" ) arg_6 = None else : Func , arg_6 = os . path . splitext ( Func ) if arg_3 : arg_0 . _filename = Func if arg_6 and arg_4 : arg_2 = arg_6 if arg_2 is not None : if arg_2 . startswith ( os . extsep ) : arg_2 = arg_2 [ 1 : ] if arg_2 != \"\" : Func = Func + os . extsep + arg_2 return Func","id_":252681,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/utilities.py#L545-L589","negative":"Whether a connection can be established between those two meshes."}
{"query":"Provides a connection string for database .","positive":"def Func ( arg_0 , arg_1 = None ) : return ' ' . join ( \"%s=%s\" % ( arg_2 , arg_3 ) for arg_2 , arg_3 in arg_0 . _connect_options ( arg_1 ) )","id_":252682,"task_name":"https:\/\/github.com\/drkjam\/pydba\/blob\/986c4b1315d6b128947c3bc3494513d8e5380ff0\/pydba\/postgres.py#L243-L256","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."}
{"query":"Find synonyms using a word2vec model .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 20 ) : arg_3 = h2o . api ( \"GET \/3\/Word2VecSynonyms\" , data = { 'model' : arg_0 . model_id , 'word' : arg_1 , 'count' : arg_2 } ) return OrderedDict ( sorted ( zip ( arg_3 [ 'synonyms' ] , arg_3 [ 'scores' ] ) , key = lambda t : t [ 1 ] , reverse = True ) )","id_":252683,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/model\/word_embedding.py#L17-L27","negative":"Override of clean method to perform additional validation"}
{"query":"repeated membrane tests likely with drug added . Maybe IPSCs .","positive":"def Func ( arg_0 = arg_1 ) : standard_inspect ( arg_0 ) swhlab . memtest . memtest ( arg_0 ) swhlab . memtest . checkSweep ( arg_0 ) swhlab . plot . save ( arg_0 , tag = 'check' , resize = False ) swhlab . memtest . plot_standard4 ( arg_0 ) swhlab . plot . save ( arg_0 , tag = 'memtests' )","id_":252684,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/doc\/oldcode\/indexing\/standard.py#L139-L146","negative":"Return a AzureDLFileSystem object."}
{"query":"Maintain selection during context","positive":"def Func ( ) : arg_0 = cmds . ls ( selection = True ) try : yield finally : if arg_0 : cmds . select ( arg_0 , replace = True , noExpand = True ) else : cmds . select ( deselect = True , noExpand = True )","id_":252685,"task_name":"https:\/\/github.com\/pyblish\/pyblish-maya\/blob\/75db8b5d8de9d53ae95e74195a788b5f6db2cb5f\/pyblish_maya\/lib.py#L209-L230","negative":"Apply a quick patch-up to a Filterbank header by overwriting a header value\n\n\n Args:\n filename (str): name of file to open and fix. WILL BE MODIFIED.\n keyword (stt): header keyword to update\n new_value (long, double, angle or string): New value to write.\n\n Notes:\n This will overwrite the current value of the blimpy with a desired\n 'fixed' version. Note that this has limited support for patching\n string-type values - if the length of the string changes, all hell will\n break loose."}
{"query":"waits on one or more jobs for up to timeout seconds .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = - 1 ) : arg_3 = time . time ( ) if arg_1 is None : arg_4 = arg_0 . outstanding else : if isinstance ( arg_1 , ( int , basestring , AsyncResult ) ) : arg_1 = [ arg_1 ] arg_4 = set ( ) for arg_5 in arg_1 : if isinstance ( arg_5 , int ) : arg_5 = arg_0 . history [ arg_5 ] elif isinstance ( arg_5 , AsyncResult ) : map ( arg_4 . add , arg_5 . msg_ids ) continue arg_4 . add ( arg_5 ) if not arg_4 . intersection ( arg_0 . outstanding ) : return True arg_0 . spin ( ) while arg_4 . intersection ( arg_0 . outstanding ) : if arg_2 >= 0 and ( time . time ( ) - arg_3 ) > arg_2 : break time . sleep ( 1e-3 ) arg_0 . spin ( ) return len ( arg_4 . intersection ( arg_0 . outstanding ) ) == 0","id_":252686,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/client\/client.py#L1021-L1064","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."}
{"query":"Returns the largest possible clique for the node with given id .","positive":"def Func ( arg_0 , arg_1 ) : Func = [ arg_1 ] for arg_3 in arg_0 . nodes : arg_4 = True for arg_1 in Func : if arg_3 . id == arg_1 or arg_0 . edge ( arg_3 . id , arg_1 ) == None : arg_4 = False break if arg_4 : Func . append ( arg_3 . id ) return Func","id_":252687,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/graph\/cluster.py#L110-L125","negative":"Returns an authorized HTTP object to be used to build a Google cloud\n service hook connection."}
{"query":"Attach a class to a parsing class and register it as a parser directive .","positive":"def Func ( arg_0 = None ) : global _Funcs arg_1 = _Funcs def wrapper ( arg_2 ) : nonlocal arg_0 if arg_0 is None : arg_0 = arg_2 . __name__ arg_2 . ns_name = arg_0 set_one ( arg_1 , arg_0 , arg_2 ) return arg_2 return wrapper","id_":252688,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/meta.py#L189-L204","negative":"Generate a new random masterkey, encrypt it with the password and\n store it in the store.\n\n :param str password: Password to use for en-\/de-cryption"}
{"query":"Computes the number of elements in a tensor with shape event_shape .","positive":"def Func ( arg_0 , arg_1 = None ) : with tf . compat . v1 . name_scope ( arg_1 , 'event_size' , [ arg_0 ] ) : arg_0 = tf . convert_to_tensor ( value = arg_0 , dtype = tf . int32 , arg_1 = 'event_shape' ) arg_2 = tf . get_static_value ( arg_0 ) if arg_2 is not None : return np . prod ( arg_2 ) else : return tf . reduce_prod ( input_tensor = arg_0 )","id_":252689,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/layers\/distribution_layer.py#L65-L86","negative":"simple timer. returns a time object, or a string."}
{"query":"Attempts to list all of the classes within a given module namespace . This method unlike list_classes will recurse into discovered submodules .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = list ( ) arg_3 = rlist_modules ( arg_0 ) for arg_4 in arg_3 : [ arg_2 . append ( arg_5 ) for arg_5 in list_classes ( arg_4 , arg_1 ) ] return arg_2","id_":252690,"task_name":"https:\/\/github.com\/zinic\/pynsive\/blob\/15bc8b35a91be5817979eb327427b6235b1b411e\/pynsive\/reflection.py#L220-L240","negative":"Enable the joint motors in this skeleton.\n\n This method sets the maximum force that can be applied by each joint to\n attain the desired target velocities. It also enables torque feedback\n for all joint motors.\n\n Parameters\n ----------\n max_force : float\n The maximum force that each joint is allowed to apply to attain its\n target velocity."}
{"query":"Writes self . cfg to self . config_file .","positive":"def Func ( arg_0 ) : with open ( arg_0 . config_file , \"w\" ) as config_file : arg_0 . cfg . write ( config_file )","id_":252691,"task_name":"https:\/\/github.com\/buckket\/twtxt\/blob\/6c8ad8ef3cbcf0dd335a12285d8b6bbdf93ce851\/twtxt\/config.py#L95-L98","negative":"Destroy nDestroy synapses on the specified segment, but don't destroy\n synapses to the \"excludeCells\"."}
{"query":"Enumerate the keys found at any scope for the current plugin .","positive":"def Func ( arg_0 ) : arg_1 = set ( ) try : for arg_2 in arg_0 . idb . Func ( ) : if arg_2 not in arg_1 : yield arg_2 arg_1 . add ( arg_2 ) except ( PermissionError , EnvironmentError ) : pass try : for arg_2 in arg_0 . directory . Func ( ) : if arg_2 not in arg_1 : yield arg_2 arg_1 . add ( arg_2 ) except ( PermissionError , EnvironmentError ) : pass try : for arg_2 in arg_0 . user . Func ( ) : if arg_2 not in arg_1 : yield arg_2 arg_1 . add ( arg_2 ) except ( PermissionError , EnvironmentError ) : pass try : for arg_2 in arg_0 . system . Func ( ) : if arg_2 not in arg_1 : yield arg_2 arg_1 . add ( arg_2 ) except ( PermissionError , EnvironmentError ) : pass","id_":252692,"task_name":"https:\/\/github.com\/williballenthin\/ida-settings\/blob\/ddfeab5bd0b6f6f177d0d50f8078c585602b1d9e\/ida_settings\/ida_settings.py#L659-L696","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."}
{"query":"Call API in PythonBigDL","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : arg_3 = _get_gateway ( ) arg_4 = Exception ( \"Cannot find function: %s\" % arg_1 ) for arg_5 in JavaCreator . instance ( arg_0 , arg_3 ) . value : try : arg_6 = getattr ( arg_5 , arg_1 ) arg_7 = callJavaFunc ( arg_6 , * arg_2 ) except Exception as e : arg_4 = e if \"does not exist\" not in str ( e ) : raise e else : return arg_7 raise arg_4","id_":252693,"task_name":"https:\/\/github.com\/intel-analytics\/BigDL\/blob\/e9c19788285986ab789a2e2998f9a85d7524779f\/pyspark\/bigdl\/util\/common.py#L576-L592","negative":"Produces a list of ports to be updated async."}
{"query":"Wrapper for scikit - learn classification functions Imlements various types of classification and cross validation","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'ERF' , arg_3 = None , arg_4 = 'summary_clf' , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = None , arg_9 = 'accuracy' , arg_10 = True , arg_11 = None ) : arg_12 = Classifier ( arg_2 , arg_3 , arg_8 ) if arg_5 is not None : arg_13 = arg_12 . cross_val_fit ( arg_0 , arg_1 , arg_5 , arg_9 = arg_9 , arg_11 = arg_11 , arg_6 = arg_6 ) else : arg_13 = arg_12 . fit ( arg_0 , arg_1 , arg_6 = arg_6 ) . score ( arg_0 , arg_1 ) from collections import Counter if arg_4 == 'clf' : return arg_12 else : if arg_4 == 'summary' : arg_4 = { 'score' : arg_13 , 'n' : dict ( Counter ( arg_1 ) ) } elif arg_4 == 'summary_clf' : arg_4 = { 'score' : arg_13 , 'n' : dict ( Counter ( arg_1 ) ) , 'clf' : arg_12 , 'features_selected' : arg_12 . features_selected , 'predictions' : arg_12 . predictions } return arg_4","id_":252694,"task_name":"https:\/\/github.com\/neurosynth\/neurosynth\/blob\/948ce7edce15d7df693446e76834e0c23bfe8f11\/neurosynth\/analysis\/classify.py#L212-L248","negative":"This method is called before first step of simulation."}
{"query":"Schedule a job in the given queue .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 0 ) : arg_0 . _rwlock . writer_acquire ( ) arg_5 = arg_0 . _generate_job_id ( arg_2 ) arg_6 = arg_0 . _scheduler . enter ( arg_4 , 1 , arg_0 . _enqueue_job , argument = ( arg_1 , arg_5 , arg_3 , ) ) arg_0 . _jobs [ arg_5 ] = arg_6 arg_0 . _tasks [ arg_2 ] = arg_5 arg_0 . _rwlock . writer_release ( ) logging . debug ( \"Job #%s (task: %s) scheduled on %s (wait: %s)\" , arg_5 , arg_2 , arg_1 , arg_4 ) return arg_5","id_":252695,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-kingarthur\/blob\/9d6a638bee68d5e5c511f045eeebf06340fd3252\/arthur\/scheduler.py#L117-L134","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."}
{"query":"Printing of img or imgs","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , string_types ) : return arg_0 if isinstance ( arg_0 , collections . Iterable ) : return '[{}]' . format ( ', ' . join ( Func ( arg_1 ) for arg_1 in arg_0 ) ) try : arg_2 = arg_0 . get_filename ( ) if arg_2 is not None : arg_3 = \"{}('{}')\" . format ( arg_0 . __class__ . __name__ , arg_2 ) else : arg_3 = \"{}(shape={}, affine={})\" . format ( arg_0 . __class__ . __name__ , repr ( get_shape ( arg_0 ) ) , repr ( arg_0 . get_affine ( ) ) ) except Exception as exc : log . error ( 'Error reading attributes from img.get_filename()' ) return repr ( arg_0 ) else : return arg_3","id_":252696,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/nifti\/check.py#L295-L316","negative":"Check for roster related features in the stream features received\n and set `server_features` accordingly."}
{"query":"Decrypt the encrypted masterkey","positive":"def Func ( arg_0 ) : arg_1 = AESCipher ( arg_0 . password ) arg_2 , arg_3 = arg_0 . config [ arg_0 . config_key ] . split ( \"$\" ) try : arg_4 = arg_1 . decrypt ( arg_3 ) except Exception : arg_0 . _raise_wrongmasterpassexception ( ) if arg_2 != arg_0 . _derive_checksum ( arg_4 ) : arg_0 . _raise_wrongmasterpassexception ( ) arg_0 . decrypted_master = arg_4","id_":252697,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenestorage\/masterpassword.py#L96-L107","negative":"Create a traceback for an Octave evaluation error."}
{"query":"This generates fake sinusoidal light curves .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = { 'period' : arg_4 . uniform ( arg_6 = 0.04 , arg_7 = 500.0 ) , 'fourierorder' : [ 2 , 10 ] , 'amplitude' : arg_4 . uniform ( arg_6 = 0.1 , arg_7 = 0.9 ) , 'phioffset' : 0.0 , } , arg_8 = False ) : if arg_1 is None : arg_1 = np . full_like ( arg_0 , 0.0 ) if arg_2 is None : arg_2 = np . full_like ( arg_0 , 0.0 ) arg_9 = npr . random ( ) * ( arg_0 . max ( ) - arg_0 . min ( ) ) + arg_0 . min ( ) arg_10 = arg_3 [ 'period' ] . rvs ( size = 1 ) arg_11 = npr . randint ( arg_3 [ 'fourierorder' ] [ 0 ] , high = arg_3 [ 'fourierorder' ] [ 1 ] ) arg_12 = arg_3 [ 'amplitude' ] . rvs ( size = 1 ) if arg_8 and arg_12 < 0.0 : arg_12 = - arg_12 elif not arg_8 and arg_12 > 0.0 : arg_12 = - arg_12 arg_13 = [ abs ( arg_12 \/ 2.0 ) \/ float ( x ) for x in range ( 1 , arg_11 + 1 ) ] arg_14 = [ arg_3 [ 'phioffset' ] * float ( x ) for x in range ( 1 , arg_11 + 1 ) ] arg_15 , arg_16 , arg_17 , arg_18 , arg_19 = sinusoidal . sine_series_sum ( [ arg_10 , arg_9 , arg_13 , arg_14 ] , arg_0 , arg_1 , arg_2 ) arg_20 = np . argsort ( arg_17 ) arg_21 = arg_17 [ arg_20 ] arg_22 = arg_15 [ arg_20 ] arg_23 = arg_19 [ arg_20 ] arg_24 = arg_16 [ arg_20 ] arg_25 = { 'vartype' : 'sinusoidal' , 'params' : { x : y for x , y in zip ( [ 'period' , 'epoch' , 'amplitude' , 'fourierorder' , 'fourieramps' , 'fourierphases' ] , [ arg_10 , arg_9 , arg_12 , arg_11 , arg_13 , arg_14 ] ) } , 'times' : arg_21 , 'mags' : arg_22 , 'errs' : arg_23 , 'phase' : arg_24 , 'varperiod' : arg_10 , 'varamplitude' : arg_12 } return arg_25","id_":252698,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/fakelcs\/generation.py#L545-L698","negative":"Call the disambiguation service in order to process a pdf file .\n\n Args:\n pdf (file): PDF file to be disambiguated.\n language (str): language of text (if known)\n\n Returns:\n dict, int: API response and API status."}
{"query":"Collect Python starred arguments into a Basilisp list .","positive":"def Func ( arg_0 ) -> ISeq : if isinstance ( arg_0 , tuple ) : return llist . list ( arg_0 ) raise TypeError ( \"Python variadic arguments should always be a tuple\" )","id_":252699,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L1152-L1156","negative":"Turn a mongodb-style search dict into an SQL query."}
{"query":"Loads the tables from the gtfs object and counts the number of rows that have null values in fields that should not be null . Stores the number of null rows in warnings_container","positive":"def Func ( arg_0 ) : for arg_1 in DB_TABLE_NAMES : arg_2 = \"Null values in must-have columns in table {table}\" . format ( arg_1 = arg_1 ) arg_3 = \"Null values in good-to-have columns in table {table}\" . format ( arg_1 = arg_1 ) arg_4 = DB_TABLE_NAME_TO_FIELDS_WHERE_NULL_NOT_OK [ arg_1 ] arg_5 = DB_TABLE_NAME_TO_FIELDS_WHERE_NULL_OK_BUT_WARN [ arg_1 ] arg_6 = arg_0 . gtfs . get_table ( arg_1 ) for arg_7 , arg_8 in zip ( [ arg_2 , arg_3 ] , [ arg_4 , arg_5 ] ) : arg_9 = arg_6 [ arg_8 ] arg_10 = arg_9 . isnull ( ) . any ( 1 ) if sum ( arg_10 ) > 0 : arg_11 = arg_6 [ arg_10 . values ] arg_0 . warnings_container . add_warning ( arg_7 , arg_11 , len ( arg_11 ) )","id_":252700,"task_name":"https:\/\/github.com\/CxAalto\/gtfspy\/blob\/bddba4b74faae6c1b91202f19184811e326547e5\/gtfspy\/import_validator.py#L207-L226","negative":"Runs the consumer."}
{"query":"Display record view .","positive":"def Func ( arg_0 = None , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None , ** arg_5 ) : try : arg_6 , arg_7 = arg_1 . resolve ( arg_0 ) except ( PIDDoesNotExistError , PIDUnregistered ) : abort ( 404 ) except PIDMissingObjectError as e : current_app . logger . exception ( \"No object assigned to {0}.\" . format ( e . pid ) , extra = { 'pid' : e . pid } ) abort ( 500 ) except PIDRedirectedError as e : try : return redirect ( url_for ( '.{0}' . format ( e . destination_pid . pid_type ) , arg_0 = e . destination_pid . pid_value ) ) except BuildError : current_app . logger . exception ( \"Invalid redirect - pid_type '{0}' endpoint missing.\" . format ( e . destination_pid . pid_type ) , extra = { 'pid' : e . pid , 'destination_pid' : e . destination_pid , } ) abort ( 500 ) arg_3 = arg_3 or current_permission_factory if arg_3 : if not arg_3 ( arg_7 ) . can ( ) : from flask_login import current_user if not current_user . is_authenticated : return redirect ( url_for ( current_app . config [ 'RECORDS_UI_LOGIN_ENDPOINT' ] , next = request . url ) ) abort ( 403 ) return arg_4 ( arg_6 , arg_7 , arg_2 = arg_2 , ** arg_5 )","id_":252701,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-records-ui\/blob\/ae92367978f2e1e96634685bd296f0fd92b4da54\/invenio_records_ui\/views.py#L136-L205","negative":"Sets the player's paused state."}
{"query":"BACKPORT FROM PYTHON3 FTPLIB .","positive":"def Func ( arg_0 , arg_1 = \"\" , arg_2 = None ) : arg_2 = arg_2 or [ ] if arg_2 : arg_0 . sendcmd ( \"OPTS MLST \" + \";\" . join ( arg_2 ) + \";\" ) if arg_1 : arg_3 = \"MLSD %s\" % arg_1 else : arg_3 = \"MLSD\" arg_4 = [ ] arg_0 . retrlines ( arg_3 , arg_4 . append ) for arg_5 in arg_4 : arg_6 , arg_7 , arg_8 = arg_5 . rstrip ( ftplib . CRLF ) . partition ( ' ' ) arg_9 = { } for arg_10 in arg_6 [ : - 1 ] . split ( \";\" ) : arg_11 , arg_7 , arg_12 = arg_10 . partition ( \"=\" ) arg_9 [ arg_11 . lower ( ) ] = arg_12 yield ( arg_8 , arg_9 )","id_":252702,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/ftp_hook.py#L28-L58","negative":"Regenerates the primary or secondary access key for the specified\n storage account.\n\n service_name:\n Name of the storage service account.\n key_type:\n Specifies which key to regenerate. Valid values are:\n Primary, Secondary"}
{"query":"puts stats from pickles into a dictionary","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : with open ( arg_0 , 'r' ) as infile : arg_3 , arg_4 = pickle . load ( infile ) arg_5 , arg_6 , arg_7 , arg_8 , arg_9 = arg_2 arg_5 [ arg_1 ] += arg_3 arg_10 , arg_11 , arg_12 , arg_13 = arg_4 arg_6 . update ( arg_10 ) arg_7 . update ( arg_11 ) arg_8 . update ( arg_12 ) arg_9 . update ( arg_13 ) arg_2 = arg_5 , arg_6 , arg_7 , arg_8 , arg_9 return arg_2","id_":252703,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/demultiplex.py#L1479-L1502","negative":"Check that a binary operator is surrounded by exactly one space."}
{"query":"Configure the null keyring as the default .","positive":"def Func ( ) : arg_0 = platform . config_root ( ) try : os . makedirs ( arg_0 ) except OSError : pass arg_1 = os . path . join ( arg_0 , 'keyringrc.cfg' ) if os . path . exists ( arg_1 ) : arg_2 = \"Refusing to overwrite {filename}\" . format ( ** locals ( ) ) raise RuntimeError ( arg_2 ) with open ( arg_1 , 'w' ) as file : file . write ( '[backend]\\ndefault-keyring=keyring.backends.null.Keyring' )","id_":252704,"task_name":"https:\/\/github.com\/jaraco\/keyring\/blob\/71c798378e365286b7cc03c06e4d7d24c7de8fc4\/keyring\/core.py#L35-L49","negative":"Check whether the certificate has expired.\n\n :return: ``True`` if the certificate has expired, ``False`` otherwise.\n :rtype: bool"}
{"query":"Authenticate the gmusicapi Mobileclient instance .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : arg_4 = type ( arg_0 ) . __name__ if arg_1 is None : arg_1 = input ( \"Enter your Google username or email address: \" ) if arg_2 is None : arg_2 = getpass . getpass ( \"Enter your Google Music password: \" ) if arg_3 is None : arg_3 = Mobileclient . FROM_MAC_ADDRESS try : arg_0 . api . Func ( arg_1 , arg_2 , arg_3 ) except OSError : logger . exception ( \"{} authentication failed.\" . format ( arg_4 ) ) if not arg_0 . is_authenticated : logger . warning ( \"{} authentication failed.\" . format ( arg_4 ) ) return False logger . info ( \"{} authentication succeeded.\\n\" . format ( arg_4 ) ) return True","id_":252705,"task_name":"https:\/\/github.com\/thebigmunch\/gmusicapi-wrapper\/blob\/8708683cd33955def1378fc28319ef37805b851d\/gmusicapi_wrapper\/mobileclient.py#L29-L67","negative":"Sleep for the time specified in the exception. If not specified, wait\n for 60 seconds."}
{"query":"Assert that recur forms do not appear in any position of this or child AST nodes .","positive":"def Func ( arg_0 : arg_1 ) -> None : if arg_0 . op == NodeOp . RECUR : raise ParserException ( \"recur must appear in tail position\" , form = arg_0 . form , lisp_ast = arg_0 ) elif arg_0 . op in { NodeOp . FN , NodeOp . LOOP } : pass else : arg_0 . visit ( Func )","id_":252706,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/parser.py#L1501-L1511","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."}
{"query":"Convert to human readable version of CLINSIG evaluation .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 [ 'clnsig' ] : if isinstance ( arg_1 [ 'accession' ] , int ) : arg_2 = \"https:\/\/www.ncbi.nlm.nih.gov\/clinvar\/variation\/{}\" else : arg_2 = \"https:\/\/www.ncbi.nlm.nih.gov\/clinvar\/{}\" arg_3 = 'not provided' if arg_1 . get ( 'value' ) : try : int ( arg_1 [ 'value' ] ) arg_3 = CLINSIG_MAP . get ( arg_1 [ 'value' ] , 'not provided' ) except ValueError : arg_3 = arg_1 [ 'value' ] arg_1 [ 'human' ] = arg_3 arg_1 [ 'link' ] = arg_2 . format ( arg_1 [ 'accession' ] ) yield arg_1","id_":252707,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/variants\/controllers.py#L722-L746","negative":"Called when a device is disconnected."}
{"query":"Parse HStruct type to this transaction template instance","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_4 ) : for arg_5 in arg_1 . fields : arg_6 = arg_5 . dtype arg_7 = arg_5 arg_8 = arg_5 . name is None if arg_8 : arg_9 = arg_6 . bit_length ( ) arg_3 += arg_9 else : arg_10 = TransTmpl ( arg_6 , arg_3 , parent = arg_0 , arg_7 = arg_7 ) arg_0 . children . append ( arg_10 ) arg_3 = arg_10 . bitAddrEnd return arg_3","id_":252708,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/transTmpl.py#L140-L159","negative":"Return a list of not null values from the `col_name` column of `df`."}
{"query":"Resets the state to allow building new documents","positive":"def Func ( arg_0 ) : arg_0 . doc_version_set = False arg_0 . doc_comment_set = False arg_0 . doc_namespace_set = False arg_0 . doc_data_lics_set = False arg_0 . doc_name_set = False arg_0 .
doc_spdx_id_set = False","id_":252709,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/tagvaluebuilders.py#L163-L171","negative":"Retrieve the last analog data value received for the specified pin.\n\n :param pin: Selected pin\n\n :return: The last value entered into the analog response table."} {"query":"Rename key src to dst","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 == arg_2 : return arg_0 . rename ( arg_1 + \"{\" + arg_1 + \"}\" , arg_1 ) if not arg_0 . exists ( arg_1 ) : return arg_0 . rename ( arg_1 + \"{\" + arg_1 + \"}\" , arg_1 ) arg_0 . delete ( arg_2 ) arg_3 = arg_0 . type ( arg_1 ) arg_4 = arg_0 . ttl ( arg_1 ) if arg_3 == b ( 'none' ) : return False if arg_3 == b ( 'string' ) : arg_0 . set ( arg_2 , arg_0 . get ( arg_1 ) ) elif arg_3 == b ( 'hash' ) : arg_0 . hmset ( arg_2 , arg_0 . hgetall ( arg_1 ) ) elif arg_3 == b ( 'list' ) : for arg_5 in arg_0 . lrange ( arg_1 , 0 , - 1 ) : arg_0 . rpush ( arg_2 , arg_5 ) elif arg_3 == b ( 'set' ) : for arg_5 in arg_0 . smembers ( arg_1 ) : arg_0 . sadd ( arg_2 , arg_5 ) elif arg_3 == b ( 'zset' ) : for arg_5 , arg_6 in arg_0 . zrange ( arg_1 , 0 , - 1 , withscores = True ) : arg_0 . zadd ( arg_2 , arg_6 , arg_5 ) arg_4 = - 1 if arg_4 is None or arg_4 < 0 else int ( arg_4 ) if arg_4 != - 1 : arg_0 . expire ( arg_2 , arg_4 ) return arg_0 . delete ( arg_1 )","id_":252710,"task_name":"https:\/\/github.com\/salimane\/rediscluster-py\/blob\/4fe4d928cd6fe3e7564f7362e3996898bda5a285\/rediscluster\/cluster_client.py#L441-L476","negative":"Fetch the comments of a given event."} {"query":"Parse command line options and launch the prebuilder .","positive":"def Func ( ) : arg_0 = optparse . OptionParser ( usage = \"%prog [options] [another_model_path..]\" , version = xtuml . version . complete_string , formatter = optparse . TitledHelpFormatter ( ) ) arg_0 . add_option ( \"-v\" , \"--verbosity\" , dest = 'verbosity' , action = \"count\" , help = \"increase debug logging level\" , default = 1 ) arg_0 . add_option ( \"-o\" , \"--output\" , dest = \"output\" , metavar = \"PATH\" , help = \"set output to PATH\" , action = \"store\" , default = None ) ( arg_1 , arg_2 ) = arg_0 . parse_args ( ) if len ( arg_2 ) == 0 or arg_1 . output is None : arg_0 . print_help ( ) sys . exit ( 1 ) arg_3 = { 0 : logging . ERROR , 1 : logging . WARNING , 2 : logging . INFO , 3 : logging . DEBUG , } logging . basicConfig ( level = arg_3 . get ( arg_1 . verbosity , logging . DEBUG ) ) arg_4 = ooaofooa . load_metamodel ( arg_2 ) prebuild_model ( arg_4 ) xtuml . persist_instances ( arg_4 , arg_1 . output )","id_":252711,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/bridgepoint\/prebuild.py#L1853-L1887","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Write the specified byte value to the GPIO registor . If no value specified the current buffered value will be written .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is not None : arg_0 . gpio = arg_1 arg_0 . _device . writeList ( arg_0 . GPIO , arg_0 . 
gpio )","id_":252712,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_GPIO\/blob\/a92a23d6b5869663b2bc1ccf78bb11585076a9c4\/Adafruit_GPIO\/MCP230xx.py#L119-L125","negative":"Return True if `line` is a dict entry that uses `key`.\n\n Return False for multiline cases where the line should not be removed by\n itself."} {"query":"Returns an array of length size and type dtype that is everywhere 0 except in the indices listed in sequence pos . The non - zero indices contain a normalized distribution based on the counts .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = numpy . zeros ( arg_1 , arg_3 = arg_3 ) if hasattr ( arg_0 , '__iter__' ) : arg_5 = 0 for arg_6 in arg_0 : arg_5 += arg_2 [ arg_6 ] arg_5 = float ( arg_5 ) for arg_6 in arg_0 : arg_4 [ arg_6 ] = arg_2 [ arg_6 ] \/ arg_5 else : arg_4 [ arg_0 ] = 1 return arg_4","id_":252713,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/math\/stats.py#L156-L183","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Invalidate httpBL cache","positive":"def Func ( arg_0 ) : if arg_0 . _use_cache : arg_0 . _cache_version += 1 arg_0 . _cache . increment ( 'cached_httpbl_{0}_version' . format ( arg_0 . _api_key ) )","id_":252714,"task_name":"https:\/\/github.com\/dlancer\/django-cached-httpbl\/blob\/b32106f4283f9605122255f2c9bfbd3bff465fa5\/cached_httpbl\/api.py#L180-L187","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Gets whether the field with the specified name is a HStoreField .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> Tuple [ bool , Optional [ models . Field ] ] : arg_3 = None for arg_4 in arg_0 . model . _meta . local_concrete_fields : if arg_4 . name == arg_1 or arg_4 . column == arg_1 : arg_3 = arg_4 break return isinstance ( arg_3 , HStoreField ) , arg_3","id_":252715,"task_name":"https:\/\/github.com\/SectorLabs\/django-postgres-extra\/blob\/eef2ed5504d225858d4e4f5d77a838082ca6053e\/psqlextra\/query.py#L133-L149","negative":"Cycles through notifications with latest results from data feeds."} {"query":"Given an list of words this function highlights the matched text in the given string .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'Funced' ) : if not arg_1 : return arg_0 if not arg_0 : return '' arg_3 , arg_4 = get_text_tokenizer ( arg_1 ) arg_5 = Func_text ( arg_3 , arg_0 , arg_2 ) return arg_5","id_":252716,"task_name":"https:\/\/github.com\/un33k\/django-toolware\/blob\/973f3e003dc38b812897dab88455bee37dcaf931\/toolware\/templatetags\/highlight.py#L38-L47","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Reimplemented to connect signal handlers and event filter .","positive":"def Func ( arg_0 , arg_1 ) : super ( CompletionWidget , arg_0 ) . Func ( arg_1 ) arg_0 . _text_edit . cursorPositionChanged . connect ( arg_0 . _update_current ) arg_0 . _text_edit . installEventFilter ( arg_0 )","id_":252717,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/completion_widget.py#L72-L77","negative":"Write the index.html file for this report."} {"query":"Call the segmenter in order to split text in sentences .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { 'text' : arg_1 } arg_3 , arg_4 = arg_0 . post ( arg_0 . 
Funcation_service , arg_2 = arg_2 ) if arg_4 != 200 : logger . debug ( 'Segmentation failed.' ) return arg_0 . decode ( arg_3 ) , arg_4","id_":252718,"task_name":"https:\/\/github.com\/hirmeos\/entity-fishing-client-python\/blob\/cd5c6e10c6c4e653669e11d735d5773766986bda\/nerd\/nerd_client.py#L320-L337","negative":"Update boost factors when local inhibition is used"} {"query":"Return endpoints grouped by the class which handles them .","positive":"def Func ( ) : arg_0 = defaultdict ( list ) for arg_1 in endpoints ( ) : arg_0 [ arg_1 [ \"class_name\" ] ] . append ( arg_1 ) return arg_0","id_":252719,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-bindings\/bin\/bindings.py#L298-L303","negative":"Downloads the latest Raspbian image and writes it to a microSD card.\n\n Based on the instructions from:\n\n https:\/\/www.raspberrypi.org\/documentation\/installation\/installing-images\/linux.md"} {"query":"Reload children .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . children [ : ] : arg_0 . removeChild ( arg_1 ) arg_0 . _fetched = False","id_":252720,"task_name":"https:\/\/github.com\/4degrees\/riffle\/blob\/e5a0d908df8c93ff1ee7abdda8875fd1667df53d\/source\/riffle\/model.py#L133-L140","negative":"Revoke the token and remove the cookie."} {"query":"Convert column name to index .","positive":"def Func ( arg_0 ) : arg_1 = string . ascii_uppercase . index arg_2 = 0 for arg_3 in arg_0 . upper ( ) : arg_2 = arg_2 * 26 + arg_1 ( arg_3 ) + 1 return arg_2","id_":252721,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/io\/excel.py#L23-L30","negative":"r'''\n Generate statistics for a single run\n\n This is a stand-alone helper function to evolve a single sample state\n (realization) and return the cluster statistics.\n\n Parameters\n ----------\n spanning_cluster : bool, optional\n Whether to detect a spanning cluster or not.\n Defaults to ``True``.\n\n kwargs : keyword arguments\n Piped through to :func:`sample_states`\n\n Returns\n -------\n\n ret : dict\n Cluster statistics\n\n ret['N'] : int\n Total number of sites\n\n ret['M'] : int\n Total number of bonds\n\n ret['max_cluster_size'] : 1-D :py:class:`numpy.ndarray` of int, size ``ret['M'] + 1``\n Array of the sizes of the largest cluster (absolute number of sites) at\n the respective occupation number.\n\n ret['has_spanning_cluster'] : 1-D :py:class:`numpy.ndarray` of bool, size ``ret['M'] + 1``\n Array of booleans for each occupation number.\n The respective entry is ``True`` if there is a spanning cluster,\n ``False`` otherwise.\n Only exists if `spanning_cluster` argument is set to ``True``.\n\n ret['moments'] : 2-D :py:class:`numpy.ndarray` of int\n Array of shape ``(5, ret['M'] + 1)``.\n The ``(k, m)``-th entry is the ``k``-th raw moment of the (absolute)\n cluster size distribution, with ``k`` ranging from ``0`` to ``4``, at\n occupation number ``m``.\n\n See Also\n --------\n\n sample_states"} {"query":"Called when socket is read - ready","positive":"def Func ( arg_0 ) : try : pyngus . read_socket_input ( arg_0 . connection , arg_0 . socket ) except Exception as e : LOG . error ( \"Exception on socket read: %s\" , str ( e ) ) arg_0 . connection . close_input ( ) arg_0 . connection . close ( ) arg_0 . connection . process ( time . 
time ( ) )","id_":252722,"task_name":"https:\/\/github.com\/kgiusti\/pyngus\/blob\/5392392046989f1bb84ba938c30e4d48311075f1\/examples\/rpc-server.py#L92-L100","negative":"Initialize the archive manager.\n\n :param archive_path: path where the archive manager is located"} {"query":"Factory for creating the argument parser","positive":"def Func ( ) : arg_0 = \"Converts a completezip to a litezip\" arg_1 = argparse . ArgumentParser ( arg_0 = arg_0 ) arg_2 = arg_1 . add_mutually_exclusive_group ( ) arg_2 . add_argument ( '-v' , '--verbose' , action = 'store_true' , dest = 'verbose' , default = None , help = \"increase verbosity\" ) arg_2 . add_argument ( '-q' , '--quiet' , action = 'store_false' , dest = 'verbose' , default = None , help = \"print nothing to stdout or stderr\" ) arg_1 . add_argument ( 'location' , help = \"Location of the unpacked litezip\" ) return arg_1","id_":252723,"task_name":"https:\/\/github.com\/openstax\/cnx-litezip\/blob\/5e613f486f29fe350999d6b990d32847ac16a1b8\/litezip\/cli\/validate.py#L9-L25","negative":"Sets the package verification code, if not already set.\n code - A string.\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.\n Raises Value error if doesn't match verifcode form"} {"query":"Update annotations of discretized continuous pulse function with duration .","positive":"def Func ( arg_0 : arg_1 ) -> arg_1 : arg_2 = list ( arg_0 . __annotations__ . items ( ) ) arg_3 = arg_2 [ 1 : ] arg_3 . insert ( 0 , ( 'duration' , int ) ) arg_0 . __annotations__ = dict ( arg_3 ) return arg_0","id_":252724,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/samplers\/decorators.py#L135-L145","negative":"Returns a link to a view that moves the passed in object up in rank.\n\n :param obj:\n Object to move\n :param link_text:\n Text to display in the link. Defaults to \"up\"\n :returns:\n HTML link code to view for moving the object"} {"query":"View decorator which terminates stale TPA sessions .","positive":"def Func ( arg_0 ) : @ wraps ( arg_0 ) def wrapper ( arg_1 , * arg_2 , ** arg_3 ) : if not arg_1 . GET . get ( FRESH_LOGIN_PARAMETER ) : arg_4 = get_enterprise_customer_or_404 ( arg_3 . get ( 'enterprise_uuid' ) ) arg_5 = arg_4 . identity_provider or '' arg_6 = get_identity_provider ( arg_5 ) if arg_6 : arg_7 , arg_8 , arg_9 , arg_10 , arg_11 , arg_12 = urlparse ( arg_1 . get_full_path ( ) ) arg_13 = urlunparse ( ( arg_7 , arg_8 , quote ( arg_9 ) , arg_10 , arg_11 , arg_12 ) ) return redirect ( '{logout_url}?{params}' . format ( logout_url = '\/logout' , arg_10 = urlencode ( { 'redirect_url' : arg_13 } ) ) ) return arg_0 ( arg_1 , * arg_2 , ** arg_3 ) return wrapper","id_":252725,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/decorators.py#L161-L221","negative":"Defines the JSON body to match.\n\n ``json`` argument can be an JSON string, a JSON serializable\n Python structure, such as a ``dict`` or ``list`` or it can be\n a regular expression used to match the body.\n\n Arguments:\n json (str|dict|list|regex): body JSON to match.\n\n Returns:\n self: current Mock instance."} {"query":"Add a new color scheme to the table .","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , ColorScheme ) : raise ValueError , 'ColorSchemeTable only accepts ColorScheme instances' arg_0 [ arg_1 . 
name ] = arg_1","id_":252726,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/coloransi.py#L157-L161","negative":"Shannon entropy in nats."} {"query":"A simpler version of data to avoid infinite recursion in some cases .","positive":"def Func ( arg_0 ) : if arg_0 . is_caching : return arg_0 . cache with open ( arg_0 . path , \"r\" ) as f : return json . load ( f )","id_":252727,"task_name":"https:\/\/github.com\/controversial\/livejson\/blob\/91021de60903d2d8b2cfb7d8d8910bcf27ec003b\/livejson.py#L195-L203","negative":"setting baudrate if supported"} {"query":"Download a remote file from S3 .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False ) : arg_2 = path ( arg_2 ) arg_4 = open_s3 ( arg_0 ) arg_5 = arg_2 . dirname ( ) arg_5 . makedirs ( ) arg_6 = arg_4 . get_key ( arg_1 ) if arg_2 . exists ( ) : arg_7 = arg_2 . bytes ( ) arg_8 , arg_9 = arg_6 . get_md5_from_hexdigest ( hashlib . md5 ( arg_7 ) . hexdigest ( ) ) try : arg_10 = arg_6 . etag . replace ( '\"' , '' ) except KeyError : pass else : if arg_10 == arg_8 : info ( 'Hash is the same. Skipping %s' % arg_2 ) return elif not arg_3 : arg_11 = datetime . datetime ( * time . strptime ( arg_6 . last_modified , '%a, %d %b %Y %H:%M:%S %Z' ) [ 0 : 6 ] ) arg_12 = datetime . datetime . utcfromtimestamp ( arg_2 . stat ( ) . st_mtime ) if arg_11 < arg_12 : info ( \"File at %s is less recent than the local version.\" % ( arg_1 ) ) return info ( \"Downloading %s...\" % ( arg_1 ) ) try : with open ( arg_2 , 'w' ) as fo : arg_6 . get_contents_to_file ( fo ) except Exception as e : error ( \"Failed: %s\" % e ) raise","id_":252728,"task_name":"https:\/\/github.com\/eykd\/paved\/blob\/f04f8a4248c571f3d5ce882b325884a3e5d80203\/paved\/s3.py#L97-L138","negative":"Fetch LAtools reference data from online repository.\n\n Parameters\n ----------\n name : str<\n Which data to download. Can be one of 'culture_reference',\n 'culture_test', 'downcore_reference', 'downcore_test', 'iolite_reference'\n or 'zircon_reference'.\n If None, all are downloaded and returned as a dict.\n\n Returns\n -------\n pandas.DataFrame or dict."} {"query":"Ensure image_rendition is added to the global context .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = super ( RenditionAwareStructBlock , arg_0 ) . Func ( arg_1 ) arg_2 [ 'image_rendition' ] = arg_0 . rendition . image_rendition or 'original' return arg_2","id_":252729,"task_name":"https:\/\/github.com\/WGBH\/wagtail-streamfieldtools\/blob\/192f86845532742b0b7d432bef3987357833b8ed\/streamfield_tools\/blocks\/struct_block.py#L118-L123","negative":"Remove rows with NAs from the H2OFrame.\n\n :returns: new H2OFrame with all rows from the original frame containing any NAs removed."} {"query":"Setting up and running pc with all arguments .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = True ) : if ( arg_0 . arguments [ '{CITEST}' ] == arg_0 . dir_CI_test [ 'hsic' ] and arg_0 . arguments [ '{METHOD_INDEP}' ] == arg_0 . dir_method_indep [ 'corr' ] ) : warnings . warn ( 'Selected method for indep is unfit for the hsic test,' ' setting the hsic.gamma method.' ) arg_0 . arguments [ '{METHOD_INDEP}' ] = arg_0 . dir_method_indep [ 'hsic_gamma' ] elif ( arg_0 . arguments [ '{CITEST}' ] == arg_0 . dir_CI_test [ 'gaussian' ] and arg_0 . arguments [ '{METHOD_INDEP}' ] != arg_0 . dir_method_indep [ 'corr' ] ) : warnings . 
warn ( 'Selected method for indep is unfit for the selected test,' ' setting the classic correlation-based method.' ) arg_0 . arguments [ '{METHOD_INDEP}' ] = arg_0 . dir_method_indep [ 'corr' ] arg_6 = str ( uuid . uuid4 ( ) ) os . makedirs ( '\/tmp\/cdt_pc' + arg_6 + '\/' ) arg_0 . arguments [ '{FOLDER}' ] = '\/tmp\/cdt_pc' + arg_6 + '\/' def retrieve_result ( ) : return read_csv ( '\/tmp\/cdt_pc' + arg_6 + '\/result.csv' , delimiter = ',' ) . values try : arg_1 . to_csv ( '\/tmp\/cdt_pc' + arg_6 + '\/data.csv' , header = False , index = False ) if arg_3 is not None and arg_2 is not None : arg_3 . to_csv ( '\/tmp\/cdt_pc' + arg_6 + '\/fixedgaps.csv' , index = False , header = False ) arg_2 . to_csv ( '\/tmp\/cdt_pc' + arg_6 + '\/fixededges.csv' , index = False , header = False ) arg_0 . arguments [ '{SKELETON}' ] = 'TRUE' else : arg_0 . arguments [ '{SKELETON}' ] = 'FALSE' arg_7 = launch_R_script ( \"{}\/R_templates\/pc.R\" . format ( os . path . dirname ( os . path . realpath ( __file__ ) ) ) , arg_0 . arguments , output_function = retrieve_result , arg_4 = arg_4 ) except Exception as e : rmtree ( '\/tmp\/cdt_pc' + arg_6 + '' ) raise e except KeyboardInterrupt : rmtree ( '\/tmp\/cdt_pc' + arg_6 + '\/' ) raise KeyboardInterrupt rmtree ( '\/tmp\/cdt_pc' + arg_6 + '' ) return arg_7","id_":252730,"task_name":"https:\/\/github.com\/Diviyan-Kalainathan\/CausalDiscoveryToolbox\/blob\/be228b078ba9eb76c01b3ccba9a1c0ad9e9e5ed1\/cdt\/causality\/graph\/PC.py#L233-L276","negative":"Given an email address, check the email_remapping table to see if the email\n should be sent to a different address. This function also handles overriding\n the email domain if ignore_vcs_email_domain is set or the domain was missing"} {"query":"Checks that the given zone contains the required fields","positive":"def Func ( arg_0 ) : if not has_valid_id ( arg_0 ) : raise InvalidZone ( \"%s must contain a valid 'id' attribute\" % arg_0 . __name__ ) if not has_valid_name ( arg_0 ) : raise InvalidZone ( \"%s must contain a valid 'name' attribute\" % arg_0 . __name__ )","id_":252731,"task_name":"https:\/\/github.com\/ubyssey\/dispatch\/blob\/8da6084fe61726f20e9cf675190480cfc45ee764\/dispatch\/theme\/validators.py#L35-L42","negative":"Create required links from a sensor region to a classifier region."} {"query":"Returns a decoder for a MessageSet item .","positive":"def Func ( arg_0 ) : arg_1 = encoder . TagBytes ( 2 , wire_format . WIRETYPE_VARINT ) arg_2 = encoder . TagBytes ( 3 , wire_format . WIRETYPE_LENGTH_DELIMITED ) arg_3 = encoder . TagBytes ( 1 , wire_format . WIRETYPE_END_GROUP ) arg_4 = ReadTag arg_5 = _DecodeVarint arg_6 = SkipField def DecodeItem ( arg_7 , arg_8 , arg_9 , arg_10 , arg_11 ) : arg_12 = arg_8 arg_13 = - 1 arg_14 = - 1 arg_15 = - 1 while 1 : ( arg_16 , arg_8 ) = arg_4 ( arg_7 , arg_8 ) if arg_16 == arg_1 : ( arg_13 , arg_8 ) = arg_5 ( arg_7 , arg_8 ) elif arg_16 == arg_2 : ( arg_17 , arg_14 ) = arg_5 ( arg_7 , arg_8 ) arg_8 = arg_15 = arg_14 + arg_17 elif arg_16 == arg_3 : break else : arg_8 = SkipField ( arg_7 , arg_8 , arg_9 , arg_16 ) if arg_8 == - 1 : raise _DecodeError ( 'Missing group end tag.' ) if arg_8 > arg_9 : raise _DecodeError ( 'Truncated message.' ) if arg_13 == - 1 : raise _DecodeError ( 'MessageSet item missing type_id.' ) if arg_14 == - 1 : raise _DecodeError ( 'MessageSet item missing message.' ) arg_18 = arg_0 . get ( arg_13 ) if arg_18 is not None : arg_19 = arg_11 . get ( arg_18 ) if arg_19 is None : arg_19 = arg_11 . setdefault ( arg_18 , arg_18 . message_type . 
_concrete_class ( ) ) if arg_19 . _InternalParse ( arg_7 , arg_14 , arg_15 ) != arg_15 : raise _DecodeError ( 'Unexpected end-group tag.' ) else : if not arg_10 . _unknown_fields : arg_10 . _unknown_fields = [ ] arg_10 . _unknown_fields . append ( ( MESSAGE_SET_ITEM_TAG , arg_7 [ arg_12 : arg_8 ] ) ) return arg_8 return DecodeItem","id_":252732,"task_name":"https:\/\/github.com\/ibelie\/typy\/blob\/3616845fb91459aacd8df6bf82c5d91f4542bee7\/typy\/google\/protobuf\/internal\/decoder.py#L645-L715","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"send the model to a MATLAB workspace through pymatbridge","positive":"def Func ( arg_0 , arg_1 = \"model\" , arg_2 = None ) : if scipy_sparse is None : raise ImportError ( \"`Func` requires scipy!\" ) if arg_2 is None : from IPython import get_ipython arg_2 = get_ipython ( ) . magics_manager . registry [ \"MatlabMagics\" ] . Matlab arg_3 = create_mat_dict ( arg_0 ) arg_4 = arg_3 [ \"S\" ] . todok ( ) arg_3 [ \"S\" ] = 0 arg_5 = \"cobra_pymatbridge_temp_\" + uuid4 ( ) . hex _check ( arg_2 . set_variable ( arg_1 , arg_3 ) ) _check ( arg_2 . set_variable ( arg_5 , arg_4 ) ) _check ( arg_2 . run_code ( \"%s.S = %s;\" % ( arg_1 , arg_5 ) ) ) for arg_6 in arg_3 . keys ( ) : if arg_6 == \"S\" : continue _check ( arg_2 . run_code ( \"{0}.{1} = {0}.{1}';\" . format ( arg_1 , arg_6 ) ) ) _check ( arg_2 . run_code ( \"clear %s;\" % arg_5 ) )","id_":252733,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/io\/mat.py#L268-L302","negative":"Protected get. Get an item from Q.\n Will block. but if the process group has errors,\n raise an StopProcessGroup exception.\n\n A slave process will terminate upon StopProcessGroup.\n The master process shall read the error from the process group."} {"query":"Make api method docs inheritted .","positive":"def Func ( arg_0 , arg_1 ) : if sys . version_info >= ( 3 , 5 ) : return arg_1 if not issubclass ( arg_1 , arg_0 ) : raise KappaError ( 'Cannot fix docs of class that is not decendent.' ) for arg_2 , arg_3 in vars ( arg_1 ) . items ( ) : if callable ( arg_3 ) and not arg_3 . __doc__ : if arg_2 in arg_0 . __abstractmethods__ : arg_4 = getattr ( arg_0 , arg_2 ) arg_3 . __doc__ = arg_4 . __doc__ return arg_1","id_":252734,"task_name":"https:\/\/github.com\/Kappa-Dev\/KaSim\/blob\/12a01c616a47e3046323103625795fb2fca8273a\/python\/kappy\/kappa_common.py#L244-L264","negative":"Filter tags according between_tags option.\n\n :param list(dict) all_tags: Pre-filtered tags.\n :rtype: list(dict)\n :return: Filtered tags."} {"query":"Clears the input and output buffers","positive":"def Func ( arg_0 ) : try : arg_0 . _port . reset_input_buffer ( ) arg_0 . _port . reset_output_buffer ( ) except AttributeError : arg_0 . _port . flushInput ( ) arg_0 . _port . 
flushOutput ( )","id_":252735,"task_name":"https:\/\/github.com\/kmpm\/nodemcu-uploader\/blob\/557a25f37b1fb4e31a745719e237e42fff192834\/nodemcu_uploader\/uploader.py#L110-L118","negative":"iterator for the most significant key phrases"} {"query":"Get item creator according registered item type .","positive":"def Func ( arg_0 ) : if arg_0 not in Pipe . pipe_item_types : for arg_1 in Pipe . pipe_item_types : if issubclass ( arg_0 , arg_1 ) : return Pipe . pipe_item_types [ arg_1 ] return None else : return Pipe . pipe_item_types [ arg_0 ]","id_":252736,"task_name":"https:\/\/github.com\/GaryLee\/cmdlet\/blob\/5852a63fc2c7dd723a3d7abe18455f8dacb49433\/cmdlet\/cmdlet.py#L199-L212","negative":"Bring the interrupt pin on the GPIO into Linux userspace."} {"query":"Draws the graph .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , ** arg_3 ) : if not HAS_MATPLOTLIB : raise ImportError ( \"Matplotlib is required to draw the graph.\" ) arg_4 = plt . figure ( figsize = arg_3 . get ( 'figsize' , ( 7 , 7 ) ) ) arg_5 = arg_4 . gca ( ) arg_6 = { 'line_kwargs' : arg_1 , 'scatter_kwargs' : arg_2 , 'pos' : arg_3 . get ( 'pos' ) } arg_1 , arg_2 = arg_0 . lines_scatter_args ( ** arg_6 ) arg_7 = LineCollection ( ** arg_1 ) arg_5 . add_collection ( arg_7 ) arg_5 . scatter ( ** arg_2 ) if hasattr ( arg_5 , 'set_facecolor' ) : arg_5 . set_facecolor ( arg_3 . get ( 'bgcolor' , [ 1 , 1 , 1 , 1 ] ) ) else : arg_5 . set_axis_bgcolor ( arg_3 . get ( 'bgcolor' , [ 1 , 1 , 1 , 1 ] ) ) arg_5 . get_xaxis ( ) . set_visible ( False ) arg_5 . get_yaxis ( ) . set_visible ( False ) if 'fname' in arg_3 : arg_8 = { k : v for k , v in arg_3 . items ( ) if k in SAVEFIG_KWARGS } arg_4 . savefig ( arg_3 [ 'fname' ] , ** arg_8 ) else : plt . ion ( ) plt . show ( )","id_":252737,"task_name":"https:\/\/github.com\/djordon\/queueing-tool\/blob\/ccd418cf647ac03a54f78ba5e3725903f541b808\/queueing_tool\/graph\/graph_wrapper.py#L356-L425","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Raises a ValidationError for any ActivatableModel that has ForeignKeys or OneToOneFields that will cause cascading deletions to occur . This function also raises a ValidationError if the activatable model has not defined a Boolean field with the field name defined by the ACTIVATABLE_FIELD_NAME variable on the model .","positive":"def Func ( ) : for arg_0 in get_activatable_models ( ) : arg_1 = next ( ( f for f in arg_0 . _meta . fields if f . __class__ == models . BooleanField and f . name == arg_0 . 
ACTIVATABLE_FIELD_NAME ) , None ) if arg_1 is None : raise ValidationError ( ( 'Model {0} is an activatable model. It must define an activatable BooleanField that ' 'has a field name of model.ACTIVATABLE_FIELD_NAME (which defaults to is_active)' . format ( arg_0 ) ) ) if not arg_0 . ALLOW_CASCADE_DELETE : for arg_2 in arg_0 . _meta . fields : if arg_2 . __class__ in ( models . ForeignKey , models . OneToOneField ) : if arg_2 . remote_field . on_delete == models . CASCADE : raise ValidationError ( ( 'Model {0} is an activatable model. All ForeignKey and OneToOneFields ' 'must set on_delete methods to something other than CASCADE (the default). ' 'If you want to explicitely allow cascade deletes, then you must set the ' 'ALLOW_CASCADE_DELETE=True class variable on your model.' ) . format ( arg_0 ) )","id_":252738,"task_name":"https:\/\/github.com\/ambitioninc\/django-activatable-model\/blob\/2c142430949a923a69201f4914a6b73a642b4b48\/activatable_model\/validation.py#L14-L43","negative":"Add an HTTP header to response object.\n\n Arguments:\n name (str): HTTP header field name\n value (str): HTTP header field value"} {"query":"Checks the mail folder for mails containing attachments with the given name .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'INBOX' , arg_3 = False ) : arg_4 = arg_0 . _retrieve_mails_attachments_by_name ( arg_1 , arg_2 , arg_3 , latest_only = True ) return len ( arg_4 ) > 0","id_":252739,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/imap_hook.py#L49-L66","negative":"Whether a connection can be established between those two meshes."} {"query":"Return a JSON representation of a Python string","positive":"def Func ( arg_0 ) : def replace ( arg_1 ) : return ESCAPE_DCT [ arg_1 . group ( 0 ) ] return '\"' + ESCAPE . sub ( replace , arg_0 ) + '\"'","id_":252740,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/json\/encoder.py#L41-L47","negative":"Scans String.\n\n Compares the byte, word, or double word specified with the memory operand\n with the value in the AL, AX, EAX, or RAX register, and sets the status flags\n according to the results. The memory operand address is read from either\n the ES:RDI, ES:EDI or the ES:DI registers (depending on the address-size\n attribute of the instruction, 32 or 16, respectively)::\n\n IF (byte comparison)\n THEN\n temp = AL - SRC;\n SetStatusFlags(temp);\n THEN IF DF = 0\n THEN (E)DI = (E)DI + 1;\n ELSE (E)DI = (E)DI - 1;\n FI;\n ELSE IF (word comparison)\n THEN\n temp = AX - SRC;\n SetStatusFlags(temp)\n THEN IF DF = 0\n THEN (E)DI = (E)DI + 2;\n ELSE (E)DI = (E)DI - 2;\n FI;\n ELSE (* doubleword comparison *)\n temp = EAX - SRC;\n SetStatusFlags(temp)\n THEN IF DF = 0\n THEN\n (E)DI = (E)DI + 4;\n ELSE\n (E)DI = (E)DI - 4;\n FI;\n FI;\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Change password for logged in django staff user","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = PasswordChangeForm ( arg_1 . user , data = arg_1 . data ) if not arg_2 . is_valid ( ) : raise serializers . ValidationError ( arg_2 . errors ) arg_2 . save ( ) update_session_auth_hash ( arg_1 , arg_2 . user ) return Response ( status = status . 
HTTP_204_NO_CONTENT )","id_":252741,"task_name":"https:\/\/github.com\/5monkeys\/django-bananas\/blob\/cfd318c737f6c4580036c13d2acf32bca96654bf\/bananas\/admin\/api\/views.py#L103-L117","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."} {"query":"Decorate a CLI function that might require authentication .","positive":"def Func ( arg_0 ) : @ wraps ( arg_0 ) def wrapper ( arg_1 ) : try : arg_2 = arg_0 ( arg_1 ) except UnauthorizedException as e : arg_3 = config_from_env ( config_from_file ( ) ) arg_4 = _get_username ( arg_1 , arg_3 ) if arg_4 is None : sys . exit ( \"Please set a username (run `osf -h` for details).\" ) else : sys . exit ( \"You are not authorized to access this project.\" ) return arg_2 return wrapper","id_":252742,"task_name":"https:\/\/github.com\/osfclient\/osfclient\/blob\/44b9a87e8c1ae6b63cdecd27a924af3fc2bf94cf\/osfclient\/cli.py#L82-L103","negative":"Leave a one-to-one conversation.\n\n One-to-one conversations are \"sticky\"; they can't actually be deleted.\n This API clears the event history of the specified conversation up to\n ``delete_upper_bound_timestamp``, hiding it if no events remain."} {"query":"Get privileges of a local file","positive":"def Func ( arg_0 , arg_1 ) : try : return str ( oct ( os . stat ( arg_1 ) . st_mode ) [ - 3 : ] ) except Exception as e : raise Failure ( 'Could not get stat for %s, error_message = %s' , arg_1 , e )","id_":252743,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L1278-L1283","negative":"For a 2D array and mask, map the values of all unmasked pixels to a 1D array.\n\n The pixel coordinate origin is at the top left corner of the 2D array and goes right-wards and downwards, such\n that for an array of shape (3,3) where all pixels are unmasked:\n\n - pixel [0,0] of the 2D array will correspond to index 0 of the 1D array.\n - pixel [0,1] of the 2D array will correspond to index 1 of the 1D array.\n - pixel [1,0] of the 2D array will correspond to index 4 of the 1D array.\n\n Parameters\n ----------\n mask : ndarray\n A 2D array of bools, where *False* values mean unmasked and are included in the mapping.\n array_2d : ndarray\n The 2D array of values which are mapped to a 1D array.\n\n Returns\n --------\n ndarray\n A 1D array of values mapped from the 2D array with dimensions (total_unmasked_pixels).\n\n Examples\n --------\n mask = np.array([[True, False, True],\n [False, False, False]\n [True, False, True]])\n\n array_2d = np.array([[1.0, 2.0, 3.0],\n [4.0, 5.0, 6.0],\n [7.0, 8.0, 9.0]])\n\n array_1d = map_2d_array_to_masked_1d_array_from_array_2d_and_mask(mask=mask, array_2d=array_2d)"} {"query":"Retrieves a dataset file matching a provided file path","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : return arg_0 . Funcs ( arg_1 , \"^{}$\" . format ( arg_2 ) , version_number = arg_3 ) [ 0 ]","id_":252744,"task_name":"https:\/\/github.com\/CitrineInformatics\/python-citrination-client\/blob\/409984fc65ce101a620f069263f155303492465c\/citrination_client\/data\/client.py#L202-L215","negative":"Gets back all response headers."} {"query":"Renders the selected social widget . You can specify optional settings that will be passed to widget template .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . 
split_contents ( ) arg_3 = arg_2 [ 0 ] if len ( arg_2 ) < 2 : raise TemplateSyntaxError ( \"'%s' takes at least one argument\" % arg_3 ) arg_4 = [ ] arg_5 = { } arg_2 = arg_2 [ 1 : ] if len ( arg_2 ) : for arg_6 in arg_2 : arg_7 = kwarg_re . match ( arg_6 ) if not arg_7 : raise TemplateSyntaxError ( \"Malformed arguments to %s tag\" % arg_3 ) arg_8 , arg_9 = arg_7 . groups ( ) if arg_8 : arg_8 = arg_8 . replace ( '-' , '_' ) arg_5 [ arg_8 ] = arg_0 . compile_filter ( arg_9 ) else : arg_4 . append ( arg_0 . compile_filter ( arg_9 ) ) return SocialWidgetNode ( arg_4 , arg_5 )","id_":252745,"task_name":"https:\/\/github.com\/creafz\/django-social-widgets\/blob\/785c599621549f7b111d98f28ce3c7958c747dd1\/social_widgets\/templatetags\/social_widgets.py#L122-L159","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."} {"query":"Set item of the dictionary .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None ) : with arg_0 . _lock : logger . debug ( \"expdict.__setitem__({0!r}, {1!r}, {2!r}, {3!r})\" . format ( arg_1 , arg_2 , arg_3 , arg_4 ) ) if not arg_3 : arg_3 = arg_0 . _default_timeout arg_0 . _timeouts [ arg_1 ] = ( time . time ( ) + arg_3 , arg_4 ) return dict . __setitem__ ( arg_0 , arg_1 , arg_2 )","id_":252746,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/expdict.py#L88-L110","negative":"Extracts the category from a GitHub item.\n\n This backend generates two types of item which are\n 'issue' and 'pull_request'."} {"query":"This endpoint is used to set the health of an allocation that is in the deployment manually . In some use cases automatic detection of allocation health may not be desired . As such those task groups can be marked with an upgrade policy that uses health_check = manual . Those allocations must have their health marked manually using this endpoint . Marking an allocation as healthy will allow the rolling upgrade to proceed . Marking it as failed will cause the deployment to fail .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 ( ) , arg_4 = arg_3 ( ) ) : arg_5 = { \"HealthyAllocationIDs\" : arg_2 , \"UnHealthyAllocationIDs\" : arg_4 , \"DeploymentID\" : arg_1 } return arg_0 . request ( \"allocation-health\" , arg_1 , json = arg_5 , method = \"post\" ) . json ( )","id_":252747,"task_name":"https:\/\/github.com\/jrxFive\/python-nomad\/blob\/37df37e4de21e6f8ac41c6154e7f1f44f1800020\/nomad\/api\/deployment.py#L144-L165","negative":"Get the underlying `botocore.Credentials` object.\n\n This contains the following authentication attributes: access_key, secret_key and token."} {"query":"Delete the external tool identified by external_tool_id .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_1 . format ( arg_2 ) + \"\/external_tools\/{}\" . format ( arg_3 ) arg_5 = arg_0 . 
_delete_resource ( arg_4 ) return True","id_":252748,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/external_tools.py#L102-L114","negative":"Return a response object from the given JSON data.\n\n :param data: Data to JSON-encode.\n :type data: mixed\n :param headers: Dict of headers to include in the requests.\n :type headers: dict\n :param status_code: HTTP status code.\n :type status_code: int\n :rtype: requests.Response"} {"query":"Follow a set of marker data yielding kinematic joint angles .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = 1e100 , arg_3 = None , arg_4 = 20 ) : arg_5 = None if arg_4 > 0 : arg_0 . skeleton . enable_motors ( arg_4 ) arg_5 = np . zeros ( arg_0 . skeleton . num_dofs ) for arg_6 in arg_0 . follow_markers ( arg_1 , arg_2 , arg_3 ) : if arg_5 is not None : arg_0 . skeleton . set_target_angles ( arg_5 ) yield arg_0 . skeleton . joint_angles","id_":252749,"task_name":"https:\/\/github.com\/EmbodiedCognition\/pagoda\/blob\/8892f847026d98aba8646ecbc4589397e6dec7bd\/pagoda\/cooper.py#L598-L631","negative":"Provides permissions for mongoadmin for use in the context"} {"query":"Randomly reorder SRV records using their weights .","positive":"def Func ( arg_0 ) : if not arg_0 : return [ ] arg_1 = [ ] while len ( arg_0 ) > 1 : arg_2 = 0 for arg_3 in arg_0 : arg_2 += arg_3 . weight + 0.1 arg_4 = random . random ( ) * arg_2 arg_2 = 0 for arg_3 in arg_0 : arg_2 += arg_3 . weight + 0.1 if arg_4 < arg_2 : arg_0 . remove ( arg_3 ) arg_1 . append ( arg_3 ) break arg_1 . append ( arg_0 [ 0 ] ) return arg_1","id_":252750,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/resolver.py#L71-L97","negative":"Get experiment or experiment job logs.\n\n Uses [Caching](\/references\/polyaxon-cli\/#caching)\n\n Examples for getting experiment logs:\n\n \\b\n ```bash\n $ polyaxon experiment logs\n ```\n\n \\b\n ```bash\n $ polyaxon experiment -xp 10 -p mnist logs\n ```\n\n Examples for getting experiment job logs:\n\n \\b\n ```bash\n $ polyaxon experiment -xp 1 -j 1 logs\n ```"} {"query":"Find addresses matching the command parameters .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_6 = AddressGenerator ( arg_1 , arg_4 , arg_5 ) if arg_3 is None : for arg_7 in arg_6 . create_iterator ( start = arg_2 ) : arg_8 = FindTransactionsCommand ( arg_0 . adapter ) ( addresses = [ arg_7 . address ] , ) if not arg_8 . get ( 'hashes' ) : return [ arg_7 ] return arg_6 . get_addresses ( start = arg_2 , arg_3 = arg_3 )","id_":252751,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/commands\/extended\/get_new_addresses.py#L53-L73","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Properly Format Time for permlinks","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , float ) : return datetime . utcfromtimestamp ( arg_0 ) . strftime ( timeFormat ) if isinstance ( arg_0 , datetime ) : return arg_0 . 
strftime ( timeFormat )","id_":252752,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenecommon\/utils.py#L9-L15","negative":"Clip input array with a vector list.\n\n Parameters\n ----------\n array : array\n input raster data\n array_affine : Affine\n Affine object describing the raster's geolocation\n geometries : iterable\n iterable of dictionaries, where every entry has a 'geometry' and\n 'properties' key.\n inverted : bool\n invert clip (default: False)\n clip_buffer : integer\n buffer (in pixels) geometries before clipping\n\n Returns\n -------\n clipped array : array"} {"query":"Registers the up and down handlers . Also registers a scheduled function every 60th of a second causing pyglet to redraw your window with 60fps .","positive":"def Func ( arg_0 ) : arg_0 . peng . keybinds . add ( arg_0 . peng . cfg [ \"controls.controls.crouch\" ] , \"peng3d:actor.%s.player.controls.crouch\" % arg_0 . actor . uuid , arg_0 . on_crouch_down , False ) arg_0 . peng . keybinds . add ( arg_0 . peng . cfg [ \"controls.controls.jump\" ] , \"peng3d:actor.%s.player.controls.jump\" % arg_0 . actor . uuid , arg_0 . on_jump_down , False ) pyglet . clock . schedule_interval ( arg_0 . update , 1.0 \/ 60 )","id_":252753,"task_name":"https:\/\/github.com\/not-na\/peng3d\/blob\/1151be665b26cc8a479f6307086ba919e4d32d85\/peng3d\/actor\/player.py#L155-L165","negative":"write triples into a translation file."} {"query":"This function handles the retrieval of whether or not a chemical can be absorbed through the skin relevant to chemical safety calculations .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = None ) : def list_methods ( ) : arg_3 = [ ] if arg_0 in _OntarioExposureLimits : arg_3 . append ( ONTARIO ) arg_3 . append ( NONE ) return arg_3 if arg_1 : return list_methods ( ) if not arg_2 : arg_2 = list_methods ( ) [ 0 ] if arg_2 == ONTARIO : arg_4 = ( _OntarioExposureLimits [ arg_0 ] [ \"Func\" ] ) elif arg_2 == NONE : arg_4 = None else : raise Exception ( 'Failure in in function' ) return arg_4","id_":252754,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/safety.py#L405-L436","negative":"Convert UTC datetime into user interface string."} {"query":"Given a single spinn3r feed entry produce a single StreamItem .","positive":"def Func ( arg_0 ) : if not hasattr ( arg_0 , 'permalink_entry' ) : return None arg_1 = arg_0 . permalink_entry arg_2 = streamcorpus . make_stream_item ( arg_1 . date_found [ : - 1 ] + '.0Z' , arg_1 . canonical_link . href . encode ( 'utf8' ) ) if not arg_2 . stream_time : logger . debug ( 'failed to generate stream_time from {0!r}' . format ( arg_1 . date_found ) ) return None if not arg_2 . abs_url : logger . debug ( 'failed to generate abs_url from {0!r}' . format ( arg_1 . canonical_link . href ) ) return None arg_2 . body = _make_content_item ( arg_1 . content , alternate_data = arg_0 . feed_entry . content . data ) if not arg_2 . body : return None if not arg_2 . body . raw : return None if arg_1 . content_extract . data : arg_2 . other_content [ 'extract' ] = _make_content_item ( arg_1 . content_extract ) arg_2 . other_content [ 'title' ] = streamcorpus . ContentItem ( raw = arg_1 . title . encode ( 'utf8' ) , media_type = arg_1 . content_extract . mime_type , encoding = 'UTF-8' ) arg_2 . other_content [ 'feed_entry_title' ] = streamcorpus . ContentItem ( raw = arg_0 . feed_entry . title . encode ( 'utf8' ) , media_type = arg_0 . feed_entry . 
content . mime_type , encoding = 'UTF-8' ) if arg_0 . feed_entry . content . data : arg_2 . other_content [ 'feed_entry' ] = _make_content_item ( arg_0 . feed_entry . content ) arg_2 . source_metadata [ 'lang' ] = arg_1 . lang [ 0 ] . code arg_2 . source_metadata [ 'author' ] = json . dumps ( dict ( name = arg_1 . author [ 0 ] . name , email = arg_1 . author [ 0 ] . email , link = arg_1 . author [ 0 ] . link [ 0 ] . href , ) ) arg_2 . source = arg_0 . source . publisher_type return arg_2","id_":252755,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/_spinn3r_feed_storage.py#L243-L298","negative":"Change the given swarm's state to 'newState'. If 'newState' is\n 'completed', then bestModelId and bestErrScore must be provided.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: swarm Id\n newStatus: new status, either 'active', 'completing', 'completed', or\n 'killed'"} {"query":"Replace child nodes on original function call with their partials","positive":"def Func ( arg_0 ) : for arg_1 in filter ( lambda n : len ( n . arg_name ) , arg_0 . child_list ) : arg_0 . data [ \"bound_args\" ] . arguments [ arg_1 . arg_name ] = arg_1 . partial ( ) arg_0 . updated = True","id_":252756,"task_name":"https:\/\/github.com\/datacamp\/pythonwhat\/blob\/ffbf7f8436a51f77c22f3bed75ba3bc37a5c666f\/pythonwhat\/probe.py#L126-L131","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Inserts a small icon to QR Code image","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 4 , arg_3 = None , arg_4 = None ) : arg_5 , arg_6 = arg_0 . size arg_7 = int ( arg_5 ) \/ int ( arg_2 ) arg_8 = int ( arg_6 ) \/ int ( arg_2 ) try : arg_9 = os . path . join ( arg_1 ) if arg_4 : arg_9 = os . path . join ( arg_4 , arg_1 ) if arg_1 . split ( \":\/\/\" ) [ 0 ] in [ \"http\" , \"https\" , \"ftp\" ] : arg_9 = BytesIO ( urlopen ( arg_1 ) . read ( ) ) arg_10 = Image . open ( arg_9 ) except : return arg_0 arg_11 , arg_12 = arg_10 . size arg_11 = arg_7 if arg_11 > arg_7 else arg_11 arg_12 = arg_8 if arg_12 > arg_8 else arg_12 arg_10 = arg_10 . resize ( ( int ( arg_11 ) , int ( arg_12 ) ) , Image . ANTIALIAS ) arg_10 = arg_10 . convert ( \"RGBA\" ) arg_13 = int ( ( arg_5 - arg_11 ) \/ 2 ) arg_14 = int ( ( arg_6 - arg_12 ) \/ 2 ) arg_3 = ( int ( arg_3 [ 0 ] ) , int ( arg_3 [ 1 ] ) ) if arg_3 else ( arg_13 , arg_14 ) arg_0 . paste ( im = arg_10 , box = arg_3 , mask = arg_10 ) return arg_0","id_":252757,"task_name":"https:\/\/github.com\/marcoagner\/Flask-QRcode\/blob\/fbedf5a671d86cae7e446b10d612e319fc21162b\/flask_qrcode\/__init__.py#L162-L190","negative":"Return the most recent timestamp in the operation."} {"query":"Load all Service Fabric commands","positive":"def Func ( arg_0 , arg_1 ) : with CommandSuperGroup ( __name__ , arg_0 , 'rcctl.custom_cluster#{}' ) as super_group : with super_group . group ( 'cluster' ) as group : group . command ( 'select' , 'select' ) with CommandSuperGroup ( __name__ , arg_0 , 'rcctl.custom_reliablecollections#{}' , client_factory = client_create ) as super_group : with super_group . group ( 'dictionary' ) as group : group . command ( 'query' , 'query_reliabledictionary' ) group . command ( 'execute' , 'execute_reliabledictionary' ) group . command ( 'schema' , 'get_reliabledictionary_schema' ) group . command ( 'list' , 'get_reliabledictionary_list' ) group . 
command ( 'type-schema' , 'get_reliabledictionary_type_schema' ) with ArgumentsContext ( arg_0 , 'dictionary' ) as ac : ac . argument ( 'application_name' , options_list = [ '--application-name' , '-a' ] ) ac . argument ( 'service_name' , options_list = [ '--service-name' , '-s' ] ) ac . argument ( 'dictionary_name' , options_list = [ '--dictionary-name' , '-d' ] ) ac . argument ( 'output_file' , options_list = [ '--output-file' , '-out' ] ) ac . argument ( 'input_file' , options_list = [ '--input-file' , '-in' ] ) ac . argument ( 'query_string' , options_list = [ '--query-string' , '-q' ] ) ac . argument ( 'type_name' , options_list = [ '--type-name' , '-t' ] ) return OrderedDict ( arg_0 . command_table )","id_":252758,"task_name":"https:\/\/github.com\/shalabhms\/reliable-collections-cli\/blob\/195d69816fb5a6e1e9ab0ab66b606b1248b4780d\/rcctl\/rcctl\/commands.py#L33-L60","negative":"Setup a handler to be called when a trait changes.\n\n This is used to setup dynamic notifications of trait changes.\n\n Static handlers can be created by creating methods on a HasTraits\n subclass with the naming convention '_[traitname]_changed'. Thus,\n to create static handler for the trait 'a', create the method\n _a_changed(self, name, old, new) (fewer arguments can be used, see\n below).\n\n Parameters\n ----------\n handler : callable\n A callable that is called when a trait changes. Its\n signature can be handler(), handler(name), handler(name, new)\n or handler(name, old, new).\n name : list, str, None\n If None, the handler will apply to all traits. If a list\n of str, handler will apply to all names in the list. If a\n str, the handler will apply just to that name.\n remove : bool\n If False (the default), then install the handler. If True\n then unintall it."} {"query":"Write the value of the Boolean object to the output stream .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_1_0 ) : try : arg_1 . write ( pack ( '!Q' , arg_0 . value ) ) except Exception : arg_0 . logger . error ( \"Error writing boolean value to buffer\" ) raise","id_":252759,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/primitives.py#L754-L770","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Sets the beam moments directly .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_0 . _sx = arg_1 arg_0 . _sxp = arg_2 arg_0 . _sxxp = arg_3 arg_7 = _np . sqrt ( arg_1 ** 2 * arg_2 ** 2 - arg_3 ** 2 ) arg_0 . 
_store_emit ( arg_7 = arg_7 )","id_":252760,"task_name":"https:\/\/github.com\/joelfrederico\/SciSalt\/blob\/7bf57c49c7dde0a8b0aa337fbd2fbd527ce7a67f\/scisalt\/PWFA\/beam.py#L133-L150","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Upload given metrics function into H2O cluster .","positive":"def Func ( arg_0 , arg_1 = \"metrics.py\" , arg_2 = None , arg_3 = None , arg_4 = None ) : import tempfile import inspect if not arg_4 : arg_4 = _default_source_provider arg_5 = \"\"\"# Generated codeimport water.udf.CMetricFunc as MetricFunc# User given metric function as a class implementing# 3 methods defined by interface CMetricFunc{}# Generated user metric which satisfies the interface# of Java MetricFuncclass {}Wrapper({}, MetricFunc, object): pass\"\"\" assert_satisfies ( arg_0 , inspect . isclass ( arg_0 ) or isinstance ( arg_0 , str ) , \"The argument func needs to be string or class !\" ) assert_satisfies ( arg_1 , arg_1 is not None , \"The argument func_file is missing!\" ) assert_satisfies ( arg_1 , arg_1 . endswith ( '.py' ) , \"The argument func_file needs to end with '.py'\" ) arg_6 = None arg_7 = None arg_8 = arg_1 [ : - 3 ] if isinstance ( arg_0 , str ) : assert_satisfies ( arg_3 , arg_3 is not None , \"The argument class_name is missing! \" + \"It needs to reference the class in given string!\" ) arg_6 = arg_5 . format ( arg_0 , arg_3 , arg_3 ) arg_7 = \"metrics_{}\" . format ( arg_3 ) arg_3 = \"{}.{}Wrapper\" . format ( arg_8 , arg_3 ) else : assert_satisfies ( arg_0 , inspect . isclass ( arg_0 ) , \"The parameter `func` should be str or class\" ) for arg_9 in [ 'map' , 'reduce' , 'metric' ] : assert_satisfies ( arg_0 , arg_9 in arg_0 . __dict__ , \"The class `func` needs to define method `{}`\" . format ( arg_9 ) ) assert_satisfies ( arg_3 , arg_3 is None , \"If class is specified then class_name parameter needs to be None\" ) arg_3 = \"{}.{}Wrapper\" . format ( arg_8 , arg_0 . __name__ ) arg_7 = \"metrics_{}\" . format ( arg_0 . __name__ ) arg_6 = arg_5 . format ( arg_4 ( arg_0 ) , arg_0 . __name__ , arg_0 . __name__ ) if not arg_2 : arg_2 = arg_7 arg_10 = tempfile . mkdtemp ( prefix = \"h2o-func\" ) arg_11 = _create_zip_file ( \"{}\/func.jar\" . format ( arg_10 ) , ( arg_1 , arg_6 ) ) arg_12 = _put_key ( arg_11 , arg_12 = arg_2 ) return \"python:{}={}\" . format ( arg_12 , arg_3 )","id_":252761,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/h2o.py#L1430-L1531","negative":"Downloads the latest Raspbian image and writes it to a microSD card.\n\n Based on the instructions from:\n\n https:\/\/www.raspberrypi.org\/documentation\/installation\/installing-images\/linux.md"} {"query":"Returns a list of the ancestors of this node but does not pass the root node even if the root has parents due to cycles .","positive":"def Func ( arg_0 ) : if arg_0 . is_root ( ) : return [ ] arg_1 = set ( [ ] ) arg_0 . _depth_ascend ( arg_0 , arg_1 , True ) try : arg_1 . remove ( arg_0 ) except KeyError : pass return list ( arg_1 )","id_":252762,"task_name":"https:\/\/github.com\/cltrudeau\/django-flowr\/blob\/d077b90376ede33721db55ff29e08b8a16ed17ae\/flowr\/models.py#L238-L252","negative":"_get_key_for_index - Returns the key name that would hold the indexes on a value\n\t\t\tInternal - does not validate that indexedFields is actually indexed. Trusts you. 
Don't let it down.\n\n\t\t\t@param indexedField - string of field name\n\t\t\t@param val - Value of field\n\n\t\t\t@return - Key name string, potentially hashed."} {"query":"Load crs object from esri code via spatialreference . org . Parses based on the proj4 representation .","positive":"def Func ( arg_0 ) : arg_0 = str ( arg_0 ) arg_1 = utils . crscode_to_string ( \"esri\" , arg_0 , \"proj4\" ) arg_2 = from_proj4 ( arg_1 ) return arg_2","id_":252763,"task_name":"https:\/\/github.com\/karimbahgat\/PyCRS\/blob\/d6a8bb9c28787a25b4a1d59a7e4603db3221eaef\/pycrs\/parse.py#L43-L60","negative":"Convert data to web output.\n\n Parameters\n ----------\n data : array\n\n Returns\n -------\n web data : array"} {"query":"Similar to smart_unicode except that lazy instances are resolved to strings rather than kept as lazy objects .","positive":"def Func ( arg_0 , arg_1 = 'utf-8' , arg_2 = False , arg_3 = 'strict' ) : if isinstance ( arg_0 , str ) : return arg_0 if arg_2 and is_protected_type ( arg_0 ) : return arg_0 try : if not isinstance ( arg_0 , str ) : if hasattr ( arg_0 , '__unicode__' ) : arg_0 = arg_0 . __unicode__ ( ) else : try : arg_0 = str ( arg_0 , arg_1 , arg_3 ) except UnicodeEncodeError : if not isinstance ( arg_0 , Exception ) : raise arg_0 = ' ' . join ( [ Func ( arg , arg_1 , arg_2 , arg_3 ) for arg in arg_0 ] ) elif not isinstance ( arg_0 , str ) : arg_0 = arg_0 . decode ( arg_1 , arg_3 ) except UnicodeDecodeError as ex : if not isinstance ( arg_0 , Exception ) : raise DjangoUnicodeDecodeError ( arg_0 , * ex . args ) else : arg_0 = ' ' . join ( [ Func ( arg , arg_1 , arg_2 , arg_3 ) for arg in arg_0 ] ) return arg_0","id_":252764,"task_name":"https:\/\/github.com\/goose3\/goose3\/blob\/e6994b1b1826af2720a091d1bff5ca15594f558d\/goose3\/utils\/encoding.py#L52-L101","negative":"Backtrack detected onset events to the nearest preceding local\n minimum of an energy function.\n\n This function can be used to roll back the timing of detected onsets\n from a detected peak amplitude to the preceding minimum.\n\n This is most useful when using onsets to determine slice points for\n segmentation, as described by [1]_.\n\n .. [1] Jehan, Tristan.\n \"Creating music by listening\"\n Doctoral dissertation\n Massachusetts Institute of Technology, 2005.\n\n Parameters\n ----------\n events : np.ndarray, dtype=int\n List of onset event frame indices, as computed by `onset_detect`\n\n energy : np.ndarray, shape=(m,)\n An energy function\n\n Returns\n -------\n events_backtracked : np.ndarray, shape=events.shape\n The input events matched to nearest preceding minima of `energy`.\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file(),\n ... offset=30, duration=2.0)\n >>> oenv = librosa.onset.onset_strength(y=y, sr=sr)\n >>> # Detect events without backtracking\n >>> onset_raw = librosa.onset.onset_detect(onset_envelope=oenv,\n ... 
backtrack=False)\n >>> # Backtrack the events using the onset envelope\n >>> onset_bt = librosa.onset.onset_backtrack(onset_raw, oenv)\n >>> # Backtrack the events using the RMS values\n >>> rms = librosa.feature.rms(S=np.abs(librosa.stft(y=y)))\n >>> onset_bt_rms = librosa.onset.onset_backtrack(onset_raw, rms[0])\n\n >>> # Plot the results\n >>> import matplotlib.pyplot as plt\n >>> plt.figure()\n >>> plt.subplot(2,1,1)\n >>> plt.plot(oenv, label='Onset strength')\n >>> plt.vlines(onset_raw, 0, oenv.max(), label='Raw onsets')\n >>> plt.vlines(onset_bt, 0, oenv.max(), label='Backtracked', color='r')\n >>> plt.legend(frameon=True, framealpha=0.75)\n >>> plt.subplot(2,1,2)\n >>> plt.plot(rms[0], label='RMS')\n >>> plt.vlines(onset_bt_rms, 0, rms.max(), label='Backtracked (RMS)', color='r')\n >>> plt.legend(frameon=True, framealpha=0.75)"} {"query":"Convert images to another colorspace .","positive":"def Func ( arg_0 , arg_1 = \"RGB\" , arg_2 = None , arg_3 = None , arg_4 = False , arg_5 = None ) : return WithColorspace ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 )","id_":252765,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmenters\/color.py#L39-L42","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"Retrieve the list of courses contained within the catalog linked to this enterprise .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = arg_0 . get_object ( ) arg_0 . check_object_permissions ( arg_1 , arg_3 ) arg_0 . ensure_data_exists ( arg_1 , arg_3 . catalog , error_message = \"No catalog is associated with Enterprise {enterprise_name} from endpoint '{path}'.\" . format ( enterprise_name = arg_3 . name , path = arg_1 . get_full_path ( ) ) ) arg_4 = CourseCatalogApiClient ( arg_1 . user , arg_3 . site ) Func = arg_4 . get_paginated_catalog_courses ( arg_3 . catalog , arg_1 . GET ) arg_0 . ensure_data_exists ( arg_1 , Func , error_message = ( \"Unable to fetch API response for catalog courses for \" \"Enterprise {enterprise_name} from endpoint '{path}'.\" . format ( enterprise_name = arg_3 . name , path = arg_1 . get_full_path ( ) ) ) ) arg_6 = serializers . EnterpriseCatalogCoursesReadOnlySerializer ( Func ) arg_6 . update_enterprise_courses ( arg_3 , catalog_id = arg_3 . catalog ) return get_paginated_response ( arg_6 . data , arg_1 )","id_":252766,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/api\/v1\/views.py#L149-L189","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Hack in a data directory","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = list ( arg_0 . DATA_DIRS ) arg_2 . append ( arg_1 ) arg_0 . DATA_DIRS = arg_2","id_":252767,"task_name":"https:\/\/github.com\/Contraz\/demosys-py\/blob\/6466128a3029c4d09631420ccce73024025bd5b6\/demosys\/conf\/__init__.py#L75-L79","negative":"Generates a jobs_pending progress bar widget."} {"query":"Filter tags according between_tags option .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ t [ \"name\" ] for t in arg_1 ] arg_3 = [ ] for arg_4 in arg_0 . options . between_tags : try : arg_5 = arg_2 . index ( arg_4 ) except ValueError : raise ChangelogGeneratorError ( \"ERROR: can't find tag {0}, specified with \" \"--between-tags option.\" . format ( arg_4 ) ) arg_3 . append ( arg_1 [ arg_5 ] ) arg_3 = arg_0 . sort_tags_by_date ( arg_3 ) if len ( arg_3 ) == 1 : arg_3 . append ( arg_3 [ 0 ] ) arg_6 = arg_0 . 
get_time_of_tag ( arg_3 [ 1 ] ) arg_7 = arg_0 . get_time_of_tag ( arg_3 [ 0 ] ) for arg_4 in arg_1 : if arg_6 < arg_0 . get_time_of_tag ( arg_4 ) < arg_7 : arg_3 . append ( arg_4 ) if arg_6 == arg_7 : arg_3 . pop ( 0 ) return arg_3","id_":252768,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/generator.py#L1036-L1071","negative":"Network block for ResNet."} {"query":"Like union but ignores whether the two intervals intersect or not","positive":"def Func ( arg_0 , arg_1 ) : return Interval ( min ( arg_0 . start , arg_1 . start ) , max ( arg_0 . end , arg_1 . end ) )","id_":252769,"task_name":"https:\/\/github.com\/sanger-pathogens\/Fastaq\/blob\/2c775c846d2491678a9637daa320592e02c26c72\/pyfastaq\/intervals.py#L56-L58","negative":"Management of the json template."} {"query":"Splits the muscle alignment into chunks . Each chunk is run on a separate computing core . Because the largest clusters are at the beginning of the clusters file assigning equal clusters to each file would put all of the large cluster that take longer to align near the top . So instead we randomly distribute the clusters among the files . If assembly method is reference then this step is just a placeholder and nothing happens .","positive":"def Func ( arg_0 , arg_1 ) : LOGGER . info ( \"inside Func\" ) if arg_0 . paramsdict [ \"assembly_method\" ] != \"reference\" : arg_2 = os . path . join ( arg_0 . dirs . clusts , arg_1 . name + \".clust.gz\" ) with iter ( gzip . open ( arg_2 , 'rb' ) ) as arg_5 : arg_3 = sum ( 1 for i in arg_5 if \"\/\/\" in i ) \/\/ 2 arg_4 = ( arg_3 \/\/ 20 ) + ( arg_3 % 20 ) LOGGER . info ( \"optim for align chunks: %s\" , arg_4 ) arg_5 = gzip . open ( arg_2 , 'rb' ) arg_6 = iter ( arg_5 . read ( ) . strip ( ) . split ( \"\/\/\\n\/\/\\n\" ) ) arg_7 = arg_4 \/\/ 10 for arg_8 in range ( 10 ) : arg_9 = arg_4 + ( arg_8 * arg_7 ) arg_10 = arg_3 - arg_9 if arg_8 == 9 : arg_11 = list ( itertools . islice ( arg_6 , int ( 1e9 ) ) ) else : arg_11 = list ( itertools . islice ( arg_6 , arg_9 ) ) arg_3 = arg_10 arg_12 = os . path . join ( arg_0 . tmpdir , arg_1 . name + \"_chunk_{}.ali\" . format ( arg_8 ) ) with open ( arg_12 , 'wb' ) as out : out . write ( \"\/\/\\n\/\/\\n\" . join ( arg_11 ) ) arg_5 . close ( )","id_":252770,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/cluster_within.py#L1267-L1319","negative":"message_replied event is not truly a message event and does not have a message.text\n don't process such events\n\n commands may not be idempotent, so ignore message_changed events."} {"query":"Authenticate a user from a token form field","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_1 . data [ 'token' ] except KeyError : return try : arg_3 = AuthToken . objects . get ( arg_2 = arg_2 ) except AuthToken . DoesNotExist : return return ( arg_3 . user , arg_3 )","id_":252771,"task_name":"https:\/\/github.com\/incuna\/django-user-management\/blob\/6784e33191d4eff624d2cf2df9ca01db4f23c9c6\/user_management\/api\/authentication.py#L10-L27","negative":"Creates a service from a constructor and checks which kwargs are not used"} {"query":"Stop scheduling tasks because an engine has been unregistered from a pure ZMQ scheduler .","positive":"def Func ( arg_0 ) : arg_0 . _task_socket . close ( ) arg_0 . _task_socket = None arg_2 = \"An engine has been unregistered, and we are using pure \" + \"ZMQ task scheduling. Task farming will be disabled.\" if arg_0 . 
outstanding : arg_2 += \" If you were running tasks when this happened, \" + \"some `outstanding` msg_ids may never resolve.\" warnings . warn ( arg_2 , RuntimeWarning )","id_":252772,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/client\/client.py#L530-L541","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."} {"query":"Returns the front ID found in front at the given index .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = arg_1 arg_4 = arg_0 [ arg_2 , arg_3 ] if arg_4 == 0 : return - 1 else : return arg_4","id_":252773,"task_name":"https:\/\/github.com\/MaxStrange\/AudioSegment\/blob\/1daefb8de626ddff3ff7016697c3ad31d262ecd6\/algorithms\/asa.py#L577-L591","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Perform delta step by writing stacked values to signals","positive":"def Func ( arg_0 ) -> Generator [ None , None , None ] : arg_1 = arg_0 . _valuesToApply arg_0 . _applyValPlaned = False arg_3 = arg_0 . config . logApplyingValues if arg_1 and arg_3 : arg_3 ( arg_0 , arg_1 ) arg_0 . _valuesToApply = [ ] arg_5 = arg_0 . _seqProcsToRun . append for arg_6 , arg_7 , arg_8 , arg_9 in arg_1 : if arg_8 : arg_5 ( arg_9 ) else : arg_6 . simUpdateVal ( arg_0 , arg_7 ) arg_0 . _runCombProcesses ( ) if arg_0 . _valuesToApply and not arg_0 . _applyValPlaned : arg_0 . _scheduleApplyValues ( ) return yield","id_":252774,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/simulator\/hdlSimulator.py#L400-L432","negative":"Enumerate all possible resonance forms and return them as a list.\n\n :param mol: The input molecule.\n :type mol: rdkit.Chem.rdchem.Mol\n :return: A list of all possible resonance forms of the molecule.\n :rtype: list of rdkit.Chem.rdchem.Mol"} {"query":"Restores the database from a snapshot","positive":"def Func ( arg_0 ) : arg_1 = get_app ( ) if not arg_0 : arg_2 = arg_1 . get_latest_snapshot ( ) if not arg_2 : click . echo ( \"Couldn't find any snapshots for project %s\" % load_config ( ) [ 'project_name' ] ) sys . exit ( 1 ) else : arg_2 = arg_1 . get_snapshot ( arg_0 ) if not arg_2 : click . echo ( \"Couldn't find snapshot with name %s.\\n\" \"You can list snapshots with 'stellar list'\" % arg_0 ) sys . exit ( 1 ) if not arg_2 . slaves_ready : if arg_1 . is_copy_process_running ( arg_2 ) : sys . stdout . write ( 'Waiting for background process(%s) to finish' % arg_2 . worker_pid ) sys . stdout . flush ( ) while not arg_2 . slaves_ready : sys . stdout . write ( '.' ) sys . stdout . flush ( ) sleep ( 1 ) arg_1 . db . session . refresh ( arg_2 ) click . echo ( '' ) else : click . echo ( 'Background process missing, doing slow Func.' ) arg_1 . inline_slave_copy ( arg_2 ) arg_1 . Func ( arg_2 ) click . echo ( 'Restore complete.' 
)","id_":252775,"task_name":"https:\/\/github.com\/fastmonkeys\/stellar\/blob\/79f0353563c35fa6ae46a2f00886ab1dd31c4492\/stellar\/command.py#L89-L129","negative":"Generates a bytecode from an object.\n\n :param obb: The object to generate.\n :param previous: The previous bytecode to use when generating subobjects.\n :return: The generated bytecode."} {"query":"Perform param type mapping This requires a bit of logic since this isn t standardized . If a type doesn t map assume str","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = TYPE_INFO_RE . match ( arg_0 ) . groups ( ) if arg_1 in ( 'list' , 'array' ) : if arg_2 is not None : arg_2 = arg_2 . strip ( ) if not arg_2 : arg_2 = 'str' arg_3 = TYPE_INFO_RE . match ( arg_2 ) if arg_3 : arg_2 = arg_3 . group ( 1 ) . lower ( ) return [ PARAM_TYPE_MAP . setdefault ( arg_2 , string_types ) ] return PARAM_TYPE_MAP . setdefault ( arg_1 , string_types )","id_":252776,"task_name":"https:\/\/github.com\/disqus\/python-phabricator\/blob\/ad08e335081531fae053a78a1c708cd11e3e6c49\/phabricator\/__init__.py#L111-L134","negative":"Clear all matching our user_id."} {"query":"Search the handler list for handlers matching given stanza type and payload namespace . Run the handlers found ordering them by priority until the first one which returns True .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if arg_3 is None : arg_3 = arg_2 . stanza_type arg_4 = arg_2 . get_all_payload ( ) arg_5 = [ p . __class__ for p in arg_4 ] arg_6 = [ ( p . __class__ , p . handler_key ) for p in arg_4 ] for arg_7 in arg_1 : arg_8 = arg_7 . _pyxmpp_stanza_handled [ 1 ] arg_9 = arg_7 . _pyxmpp_payload_class_handled arg_10 = arg_7 . _pyxmpp_payload_key if arg_8 != arg_3 : continue if arg_9 : if arg_10 is None and arg_9 not in arg_5 : continue if arg_10 and ( arg_9 , arg_10 ) not in arg_6 : continue arg_11 = arg_7 ( arg_2 ) if arg_0 . _process_handler_result ( arg_11 ) : return True return False","id_":252777,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/stanzaprocessor.py#L240-L275","negative":"Replace target with replacement"} {"query":"Creates a snapshot of a cluster","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . get_conn ( ) . Func ( SnapshotIdentifier = arg_1 , ClusterIdentifier = arg_2 , ) return arg_3 [ 'Snapshot' ] if arg_3 [ 'Snapshot' ] else None","id_":252778,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/redshift_hook.py#L100-L113","negative":"Yield egg or source distribution objects based on basename"} {"query":"Sets the style to the specified Pygments style .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , basestring ) : arg_1 = get_style_by_name ( arg_1 ) arg_0 . _style = arg_1 arg_0 . _clear_caches ( )","id_":252779,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/pygments_highlighter.py#L129-L135","negative":"Fit structure-based AD. The training model memorizes the unique set of reaction signature.\n\n Parameters\n ----------\n X : after read rdf file\n\n Returns\n -------\n self : object"} {"query":"Automatically includes all submodules and role selectors in the top - level fabfile using spooky - scary black magic .","positive":"def Func ( ) : arg_0 = inspect . 
stack ( ) arg_1 = None for arg_2 , arg_3 , arg_4 , arg_5 , arg_5 , arg_5 in arg_0 : if 'fabfile.py' in arg_3 : arg_1 = arg_2 break if not arg_1 : return try : arg_6 = arg_1 . f_locals for arg_7 , arg_8 in sub_modules . items ( ) : arg_6 [ arg_7 ] = arg_8 for arg_9 , arg_10 in role_commands . items ( ) : assert arg_9 not in sub_modules , ( 'The role %s conflicts with a built-in submodule. ' 'Please choose a different name.' ) % ( arg_9 ) arg_6 [ arg_9 ] = arg_10 arg_6 [ 'common' ] = common arg_6 [ 'shell' ] = shell for arg_11 in common . post_import_modules : exec ( \"import %s\" % arg_11 ) arg_6 [ arg_11 ] = locals ( ) [ arg_11 ] finally : del arg_0","id_":252780,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/__init__.py#L321-L372","negative":"Update annotations of discretized continuous pulse function with duration.\n\n Args:\n discretized_pulse: Discretized decorated continuous pulse."} {"query":"This fits a univariate cubic spline to the phased light curve .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 0.01 , arg_5 = 30 , arg_6 = 30.0 , arg_7 = False , arg_8 = False , arg_9 = False , arg_10 = True ) : if arg_2 is None : arg_2 = npfull_like ( arg_1 , 0.005 ) arg_11 , arg_12 , arg_13 = sigclip_magseries ( arg_0 , arg_1 , arg_2 , arg_6 = arg_6 , arg_9 = arg_9 ) arg_14 = npnonzero ( arg_13 ) arg_11 , arg_12 , arg_13 = arg_11 [ arg_14 ] , arg_12 [ arg_14 ] , arg_13 [ arg_14 ] arg_15 , arg_16 , arg_17 , arg_18 , arg_19 = ( get_phased_quantities ( arg_11 , arg_12 , arg_13 , arg_3 ) ) arg_20 = len ( arg_15 ) arg_21 = int ( npfloor ( arg_4 * arg_20 ) ) arg_21 = arg_5 if arg_21 > arg_5 else arg_21 arg_22 = nplinspace ( arg_15 [ 0 ] + 0.01 , arg_15 [ - 1 ] - 0.01 , num = arg_21 ) arg_23 = npdiff ( arg_15 ) > 0.0 arg_24 = npconcatenate ( ( nparray ( [ True ] ) , arg_23 ) ) arg_15 , arg_16 , arg_17 = ( arg_15 [ arg_24 ] , arg_16 [ arg_24 ] , arg_17 [ arg_24 ] ) arg_25 = LSQUnivariateSpline ( arg_15 , arg_16 , t = arg_22 , w = 1.0 \/ arg_17 ) arg_26 = arg_25 ( arg_15 ) arg_27 = npsum ( ( ( arg_26 - arg_16 ) * ( arg_26 - arg_16 ) ) \/ ( arg_17 * arg_17 ) ) arg_28 = arg_27 \/ ( len ( arg_16 ) - arg_21 - 1 ) if arg_10 : LOGINFO ( 'spline fit done. nknots = %s, ' 'chisq = %.5f, reduced chisq = %.5f' % ( arg_21 , arg_27 , arg_28 ) ) if not arg_9 : arg_29 = npwhere ( arg_26 == npmax ( arg_26 ) ) else : arg_29 = npwhere ( arg_26 == npmin ( arg_26 ) ) if len ( arg_29 [ 0 ] ) > 1 : arg_29 = ( arg_29 [ 0 ] [ 0 ] , ) arg_30 = arg_18 [ arg_29 ] arg_31 = { 'fittype' : 'spline' , 'fitinfo' : { 'nknots' : arg_21 , 'fitmags' : arg_26 , 'fitepoch' : arg_30 } , 'fitchisq' : arg_27 , 'fitredchisq' : arg_28 , 'fitplotfile' : None , 'magseries' : { 'times' : arg_18 , 'phase' : arg_15 , 'mags' : arg_16 , 'errs' : arg_17 , 'magsarefluxes' : arg_9 } , } if arg_7 and isinstance ( arg_7 , str ) : make_fit_plot ( arg_15 , arg_16 , arg_17 , arg_26 , arg_3 , arg_19 , arg_30 , arg_7 , arg_9 = arg_9 ) arg_31 [ 'fitplotfile' ] = arg_7 return arg_31","id_":252781,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/lcfit\/nonphysical.py#L72-L263","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Return a WHERE clause matching the given API path and user_id .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = split_api_filepath ( arg_1 ) return and_ ( files . c . name == arg_3 , files . c . user_id == arg_0 , files . c . 
parent_name == arg_2 , )","id_":252782,"task_name":"https:\/\/github.com\/quantopian\/pgcontents\/blob\/ed36268b7917332d16868208e1e565742a8753e1\/pgcontents\/query.py#L292-L301","negative":"Write the index.html file for this report."} {"query":"Splits the modpath into the dir that must be in PYTHONPATH for the module to be imported and the modulepath relative to this directory .","positive":"def Func ( arg_0 , arg_1 = True ) : if six . PY2 : if arg_0 . endswith ( '.pyc' ) : arg_0 = arg_0 [ : - 1 ] arg_2 = abspath ( expanduser ( arg_0 ) ) if arg_1 : if not exists ( arg_2 ) : if not exists ( arg_0 ) : raise ValueError ( 'modpath={} does not exist' . format ( arg_0 ) ) raise ValueError ( 'modpath={} is not a module' . format ( arg_0 ) ) if isdir ( arg_2 ) and not exists ( join ( arg_0 , '__init__.py' ) ) : raise ValueError ( 'modpath={} is not a module' . format ( arg_0 ) ) arg_3 , arg_4 = split ( arg_2 ) arg_5 = [ arg_4 ] arg_6 = arg_3 while exists ( join ( arg_6 , '__init__.py' ) ) : arg_6 , arg_7 = split ( arg_6 ) arg_5 . append ( arg_7 ) arg_8 = arg_5 [ : : - 1 ] arg_9 = os . path . sep . join ( arg_8 ) return arg_6 , arg_9","id_":252783,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_import.py#L550-L596","negative":"Returns an aggregator connection."} {"query":"Start connection and initialize session services","positive":"def Func ( arg_0 ) : arg_1 = _get_logger ( arg_0 . debug ) arg_2 = arg_0 . _session . Func ( ) if arg_2 : arg_3 = arg_0 . _session . nextEvent ( ) arg_4 = _EVENT_DICT [ arg_3 . eventType ( ) ] arg_1 . info ( 'Event Type: {!r}' . format ( arg_4 ) ) for arg_5 in arg_3 : arg_1 . info ( 'Message Received:\\n{}' . format ( arg_5 ) ) if arg_3 . eventType ( ) != blpapi . Event . SESSION_STATUS : raise RuntimeError ( 'Expected a \"SESSION_STATUS\" event but ' 'received a {!r}' . format ( arg_4 ) ) arg_3 = arg_0 . _session . nextEvent ( ) arg_4 = _EVENT_DICT [ arg_3 . eventType ( ) ] arg_1 . info ( 'Event Type: {!r}' . format ( arg_4 ) ) for arg_5 in arg_3 : arg_1 . info ( 'Message Received:\\n{}' . format ( arg_5 ) ) if arg_3 . eventType ( ) != blpapi . Event . SESSION_STATUS : raise RuntimeError ( 'Expected a \"SESSION_STATUS\" event but ' 'received a {!r}' . format ( arg_4 ) ) else : arg_3 = arg_0 . _session . nextEvent ( arg_0 . timeout ) if arg_3 . eventType ( ) == blpapi . Event . SESSION_STATUS : for arg_5 in arg_3 : arg_1 . warning ( 'Message Received:\\n{}' . format ( arg_5 ) ) raise ConnectionError ( 'Could not Func blpapi.Session' ) arg_0 . _init_services ( ) return arg_0","id_":252784,"task_name":"https:\/\/github.com\/matthewgilbert\/pdblp\/blob\/aaef49ad6fca9af6ee44739d6e7e1cc3e7b0f8e2\/pdblp\/pdblp.py#L117-L149","negative":"Load a configuration module and return a Config"} {"query":"Set brightness of bulb .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = \"C {},,,,{},\\r\\n\" . format ( arg_0 . _zid , arg_1 ) arg_3 = arg_0 . _hub . send_command ( arg_2 ) _LOGGER . debug ( \"Set brightness %s: %s\" , repr ( arg_2 ) , arg_3 ) return arg_3","id_":252785,"task_name":"https:\/\/github.com\/lindsaymarkward\/python-yeelight-sunflower\/blob\/4ec72d005ce307f832429620ba0bcbf6b236eead\/yeelightsunflower\/main.py#L236-L241","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. 
If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Returns the summary of the learned topics .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 10 , arg_4 = 10 ) : arg_1 = np . squeeze ( arg_1 , axis = 0 ) arg_5 = np . argsort ( - arg_1 , kind = \"mergesort\" ) arg_6 = np . argsort ( - arg_0 , axis = 1 ) arg_7 = [ ] for arg_8 in arg_5 [ : arg_3 ] : arg_9 = [ \"index={} alpha={:.2f}\" . format ( arg_8 , arg_1 [ arg_8 ] ) ] arg_9 += [ arg_2 [ arg_10 ] for arg_10 in arg_6 [ arg_8 , : arg_4 ] ] arg_7 . append ( \" \" . join ( arg_9 ) ) return np . array ( arg_7 )","id_":252786,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/examples\/latent_dirichlet_allocation_edward2.py#L371-L399","negative":"Callable to configure Bokeh's show method when a proxy must be\n configured.\n\n If port is None we're asking about the URL\n for the origin header."} {"query":"Current instruction pointed by self . pc","positive":"def Func ( arg_0 ) : try : arg_1 = getattr ( arg_0 , '_decoding_cache' ) except Exception : arg_1 = arg_0 . _decoding_cache = { } arg_2 = arg_0 . pc if isinstance ( arg_2 , Constant ) : arg_2 = arg_2 . value if arg_2 in arg_1 : return arg_1 [ arg_2 ] def getcode ( ) : arg_3 = arg_0 . bytecode for arg_4 in range ( arg_2 , len ( arg_3 ) ) : yield simplify ( arg_3 [ arg_4 ] ) . value while True : yield 0 Func = EVMAsm . disassemble_one ( getcode ( ) , arg_2 = arg_2 , fork = DEFAULT_FORK ) arg_1 [ arg_2 ] = Func return Func","id_":252787,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/evm.py#L718-L747","negative":"Save the model in the given directory.\n\n :param saveModelDir: (string)\n Absolute directory path for saving the model. This directory should\n only be used to store a saved model. If the directory does not exist,\n it will be created automatically and populated with model data. A\n pre-existing directory will only be accepted if it contains previously\n saved model data. If such a directory is given, the full contents of\n the directory will be deleted and replaced with current model data."} {"query":"Writes out PNG image data in chunks to file pointer fp","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . urlopen ( ) . fp while 1 : try : arg_1 . Func ( arg_2 . next ( ) ) except StopIteration : return","id_":252788,"task_name":"https:\/\/github.com\/appknox\/google-chartwrapper\/blob\/3769aecbef6c83b6cd93ee72ece478ffe433ac57\/GChartWrapper\/GChart.py#L630-L641","negative":"Convert velocities from a cartesian to a spherical coordinate system\n\n TODO: errors\n\n :param x: name of x column (input)\n :param y: y\n :param z: z\n :param vx: vx\n :param vy: vy\n :param vz: vz\n :param vr: name of the column for the radial velocity in the r direction (output)\n :param vlong: name of the column for the velocity component in the longitude direction (output)\n :param vlat: name of the column for the velocity component in the latitude direction, positive points to the north pole (output)\n :param distance: Expression for distance, if not given defaults to sqrt(x**2+y**2+z**2), but if this column already exists, passing this expression may lead to a better performance\n :return:"} {"query":"visit an astroid . Import node","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . root ( ) . file for arg_3 in arg_1 . names : arg_4 = modutils . 
is_relative ( arg_3 [ 0 ] , arg_2 ) arg_0 . _imported_module ( arg_1 , arg_3 [ 0 ] , arg_4 )","id_":252789,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/pyreverse\/inspector.py#L240-L248","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Removes a NIC from the load balancer .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . _perform_request ( url = '\/datacenters\/%s\/loadbalancers\/%s\/balancednics\/%s' % ( arg_1 , arg_2 , arg_3 ) , method = 'DELETE' ) return arg_4","id_":252790,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1036-L1058","negative":"Parses a file and returns a document object.\n File, a file like object."} {"query":"This method increases the permanence values of synapses of columns whose activity level has been too low . Such columns are identified by having an overlap duty cycle that drops too much below those of their peers . The permanence values for such columns are increased .","positive":"def Func ( arg_0 ) : arg_1 = numpy . where ( arg_0 . _overlapDutyCycles < arg_0 . _minOverlapDutyCycles ) [ 0 ] for arg_2 in arg_1 : arg_3 = arg_0 . _permanences [ arg_2 ] . astype ( realDType ) arg_4 = numpy . where ( arg_0 . _potentialPools [ arg_2 ] > 0 ) [ 0 ] arg_3 [ arg_4 ] += arg_0 . _synPermBelowStimulusInc arg_0 . _updatePermanencesForColumn ( arg_3 , arg_2 , raisePerm = False )","id_":252791,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/spatial_pooler.py#L1177-L1190","negative":"Open the editor at the given filename, linenumber, column and\n show an error message. This is used for correcting syntax errors.\n The current implementation only has special support for the VIM editor,\n and falls back on the 'editor' hook if VIM is not used.\n\n Call ip.set_hook('fix_error_editor',youfunc) to use your own function,"} {"query":"Clean up children and remove the directory .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . _children : arg_0 . _children [ arg_1 ] . Func ( ) if arg_0 . _Func : arg_0 . remove ( True )","id_":252792,"task_name":"https:\/\/github.com\/snare\/scruffy\/blob\/0fedc08cfdb6db927ff93c09f25f24ce5a04c541\/scruffy\/file.py#L348-L358","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"Returns a list of locations .","positive":"def Func ( arg_0 ) : arg_1 = \"\/2\/locations\" arg_2 = arg_0 . _get_resource ( arg_1 ) arg_3 = [ ] for arg_4 in arg_2 [ 'locations' ] : arg_3 . append ( arg_0 . location_from_json ( arg_4 ) ) return arg_3","id_":252793,"task_name":"https:\/\/github.com\/uw-it-cte\/uw-restclients-wheniwork\/blob\/0d3ca09d5bbe808fec12e5f943596570d33a1731\/uw_wheniwork\/locations.py#L16-L29","negative":"Reassemble a Binder object coming out of the database."} {"query":"Update the internal line and column buffers after a new character is added .","positive":"def Func ( arg_0 , arg_1 ) : if newline_chars . match ( arg_1 ) : arg_0 . _col . append ( 0 ) arg_0 . _line . append ( arg_0 . _line [ - 1 ] + 1 ) else : arg_0 . _col . append ( arg_0 . _col [ - 1 ] + 1 ) arg_0 . _line . append ( arg_0 . 
_line [ - 1 ] )","id_":252794,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/reader.py#L121-L132","negative":"scan through the java output text and extract the bad java messages that may or may not have happened when\n unit tests are run. It will not record any bad java messages that are stored in g_ok_java_messages.\n\n :return: none"} {"query":"This decorator wraps descriptor methods with a new method that tries to delegate to a function of the same name defined on the owner instance for convenience for dispatcher clients .","positive":"def Func ( arg_0 ) : @ functools . wraps ( arg_0 ) def delegator ( arg_1 , * arg_2 , ** arg_3 ) : if arg_1 . Func : arg_4 = getattr ( arg_1 , 'inst' , None ) if arg_4 is not None : arg_5 = ( arg_1 . delegator_prefix or '' ) + arg_0 . __name__ arg_6 = getattr ( arg_4 , arg_5 , None ) if arg_6 is not None : return arg_6 ( * arg_2 , ** arg_3 ) return arg_0 ( arg_1 , * arg_2 , ** arg_3 ) return delegator","id_":252795,"task_name":"https:\/\/github.com\/twneale\/nmmd\/blob\/ab3b3f1290c85b8d05bbf59b54c7f22da8972b6f\/nmmd\/base.py#L66-L85","negative":"Get the context for this view."} {"query":"Is the wx event loop running .","positive":"def Func ( arg_0 = None ) : if arg_0 is None : arg_0 = get_app_wx ( ) if hasattr ( arg_0 , '_in_event_loop' ) : return arg_0 . _in_event_loop else : return arg_0 . IsMainLoopRunning ( )","id_":252796,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/guisupport.py#L85-L92","negative":"DRF view to list all catalogs.\n\n Arguments:\n request (HttpRequest): Current request\n\n Returns:\n (Response): DRF response object containing course catalogs."} {"query":"Function used to fit the exponential decay .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : return arg_1 * np . exp ( - arg_0 \/ arg_2 ) + arg_3","id_":252797,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/tools\/qcvv\/fitters.py#L23-L26","negative":"Given a KeyboardModifiers flags object, return whether the Control\n key is down.\n\n Parameters:\n -----------\n include_command : bool, optional (default True)\n Whether to treat the Command key as a (mutually exclusive) synonym\n for Control when in Mac OS."} {"query":"Dump data from Invenio legacy .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 ) : init_app_context ( ) arg_3 = arg_3 if arg_3 else '{0}_Func' . format ( arg_0 ) arg_7 = dict ( ( f . strip ( '-' ) . replace ( '-' , '_' ) , True ) for f in arg_6 ) try : arg_8 = collect_things_entry_points ( ) [ arg_0 ] except KeyError : click . Abort ( '{0} is not in the list of available things to migrate: ' '{1}' . format ( arg_0 , collect_things_entry_points ( ) ) ) click . echo ( \"Querying {0}...\" . format ( arg_0 ) ) arg_9 , arg_10 = arg_8 . get ( arg_1 , arg_2 , arg_5 = arg_5 , ** arg_7 ) arg_11 = 0 click . echo ( \"Dumping {0}...\" . format ( arg_0 ) ) with click . progressbar ( length = arg_9 ) as bar : for arg_12 , arg_13 in enumerate ( grouper ( arg_10 , arg_4 ) ) : with open ( '{0}_{1}.json' . format ( arg_3 , arg_12 ) , 'w' ) as fp : fp . write ( \"[\\n\" ) for arg_14 in arg_13 : try : json . Func ( arg_8 . Func ( arg_14 , arg_2 , ** arg_7 ) , fp , default = set_serializer ) fp . write ( \",\" ) except Exception as e : click . secho ( \"Failed Func {0} {1} ({2})\" . 
format ( arg_0 , arg_14 , e . message ) , fg = 'red' ) arg_11 += 1 bar . update ( arg_11 ) fp . seek ( fp . tell ( ) - 1 ) fp . write ( \"\\n]\" )","id_":252798,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-migrator\/blob\/6902c6968a39b747d15e32363f43b7dffe2622c2\/invenio_migrator\/legacy\/cli.py#L57-L97","negative":"Handle CLI command"} {"query":"Configure any additional default transport zone bindings .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = CONF . NVP . default_tz if not arg_4 : LOG . warn ( \"additional_default_tz_types specified, \" \"but no default_tz. Skipping \" \"Func().\" ) return if not arg_3 : LOG . warn ( \"neutron network_id not specified, skipping \" \"Func()\" ) return for arg_5 in CONF . NVP . additional_default_tz_types : if arg_5 in TZ_BINDINGS : arg_6 = TZ_BINDINGS [ arg_5 ] arg_6 . add ( arg_1 , arg_2 , arg_4 , arg_3 ) else : LOG . warn ( \"Unknown default tz type %s\" % ( arg_5 ) )","id_":252799,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/drivers\/nvp_driver.py#L636-L660","negative":"Method which indicates if the object matches specified criteria.\n\n Match accepts criteria as kwargs and looks them up on attributes.\n Actual matching is performed with fnmatch, so shell-like wildcards\n work within match strings. Examples:\n\n obj._match(AXTitle='Terminal*')\n obj._match(AXRole='TextField', AXRoleDescription='search text field')"} {"query":"Creates a new affinity group for the specified subscription .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None ) : _validate_not_none ( 'name' , arg_1 ) _validate_not_none ( 'label' , arg_2 ) _validate_not_none ( 'location' , arg_3 ) return arg_0 . _perform_post ( '\/' + arg_0 . subscription_id + '\/affinitygroups' , _XmlSerializer . Func_to_xml ( arg_1 , arg_2 , arg_4 , arg_3 ) )","id_":252800,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/servicemanagementservice.py#L1064-L1088","negative":"Common routine for reporting debugger error messages."} {"query":"Create sparse feature matrix and vocabulary where fixed_vocab = False","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . vocabulary_ arg_3 = _make_int_array ( ) arg_4 = _make_int_array ( ) arg_4 . append ( 0 ) for arg_5 in arg_1 : for arg_6 in arg_5 : try : arg_3 . append ( arg_2 [ arg_6 ] ) except KeyError : continue arg_4 . append ( len ( arg_3 ) ) arg_3 = frombuffer_empty ( arg_3 , dtype = np . intc ) arg_4 = np . frombuffer ( arg_4 , dtype = np . intc ) arg_7 = np . ones ( len ( arg_3 ) ) arg_8 = sp . csr_matrix ( ( arg_7 , arg_3 , arg_4 ) , shape = ( len ( arg_4 ) - 1 , len ( arg_2 ) ) , dtype = arg_0 . dtype ) arg_8 . sum_duplicates ( ) if arg_0 . binary : arg_8 . data . fill ( 1 ) return arg_8","id_":252801,"task_name":"https:\/\/github.com\/lensacom\/sparkit-learn\/blob\/0498502107c1f7dcf33cda0cdb6f5ba4b42524b7\/splearn\/feature_extraction\/text.py#L169-L197","negative":"Output profiler report."} {"query":"List of users of this slack team","positive":"def Func ( arg_0 ) : if not arg_0 . _Func : arg_0 . _Func = arg_0 . _call_api ( 'Func.list' ) [ 'members' ] return arg_0 . 
_Func","id_":252802,"task_name":"https:\/\/github.com\/djangobot\/djangobot\/blob\/0ec951891812ea4114c27a08c790f63d0f0fd254\/djangobot\/slack.py#L77-L83","negative":"Using the record length and appropriate start points, seek to the\n country that corresponds to the converted IP address integer.\n Return offset of record.\n\n :arg ipnum: Result of ip2long conversion"} {"query":"Filter all issues that don t have a label .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] if not arg_0 . options . add_issues_wo_labels : for arg_3 in arg_1 : if not arg_3 [ 'labels' ] : arg_2 . append ( arg_3 ) return arg_2","id_":252803,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/generator.py#L746-L759","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"It will download file specified by url using requests module","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . split ( '\/' ) [ - 1 ] if os . path . exists ( os . path . join ( os . getcwd ( ) , arg_2 ) ) : print 'File already exists' return try : arg_3 = requests . get ( arg_1 , stream = True , timeout = 200 ) except requests . exceptions . SSLError : try : arg_4 = requests . get ( arg_1 , stream = True , verify = False , timeout = 200 ) except requests . exceptions . RequestException as e : print e quit ( ) except requests . exceptions . RequestException as e : print e quit ( ) arg_5 = 1024 arg_6 = int ( arg_3 . headers [ 'Content-Length' ] ) arg_7 = arg_6 \/ arg_5 arg_8 = arg_3 . iter_content ( arg_5 = arg_5 ) arg_9 = tqdm ( iterable = arg_8 , total = arg_7 , unit = 'KB' , leave = False ) with open ( arg_2 , 'wb' ) as f : for arg_10 in arg_9 : f . write ( arg_10 ) '''print 'Total size of file to be downloaded %.2f MB '%total_size\t\ttotal_downloaded_size=0.0\t\twith open(file_name,'wb') as f:\t\t\tfor chunk in r.iter_content(chunk_size=1*1024*1024):\t\t\t\tif chunk:\t\t\t\t\tsize_of_chunk=float(len(chunk))\/(1024*1024)\t\t\t\t\ttotal_downloaded_size+=size_of_chunk\t\t\t\t\tprint '{0:.0%} Downloaded'.format(total_downloaded_size\/total_size)\t\t\t\t\tf.write(chunk)''' print 'Downloaded file %s ' % arg_2","id_":252804,"task_name":"https:\/\/github.com\/ankitmathur3193\/song-cli\/blob\/ca8ccfe547e9d702313ff6d14e81ae4355989a67\/song\/commands\/FileDownload.py#L27-L74","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"list builders or instances for the project . They should start with sregistry - builder","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 'us-west1-a' ) : arg_3 = [ ] arg_4 = arg_0 . _get_instances ( arg_1 , arg_2 ) for arg_5 in arg_4 [ 'items' ] : arg_3 . append ( [ arg_5 [ 'name' ] , arg_5 [ 'status' ] ] ) bot . info ( \"[google-compute] Found %s instances\" % ( len ( arg_3 ) ) ) bot . table ( arg_3 ) bot . newline ( )","id_":252805,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/google_storage\/build.py#L96-L114","negative":"Finishes the load job. Called automatically when the connection closes.\n\n :return: The exit code returned when applying rows to the table"} {"query":"Used by cache to get a unique key per URL","positive":"def Func ( * arg_0 , ** arg_1 ) : arg_2 = request . 
path arg_0 = str ( hash ( frozenset ( request . args . items ( ) ) ) ) return ( arg_2 + arg_0 ) . encode ( 'ascii' , 'ignore' )","id_":252806,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/www\/utils.py#L222-L228","negative":"Read a varint from file, parse it, and return the decoded integer."} {"query":"Task to send content metadata to each linked integrated channel .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = time . time ( ) arg_4 = User . objects . get ( arg_0 = arg_0 ) arg_5 = INTEGRATED_CHANNEL_CHOICES [ arg_1 ] . objects . get ( pk = arg_2 ) LOGGER . info ( 'Transmitting content metadata to integrated channel using configuration: [%s]' , arg_5 ) try : arg_5 . Func ( arg_4 ) except Exception : LOGGER . exception ( 'Transmission of content metadata failed for user [%s] and for integrated ' 'channel with code [%s] and id [%s].' , arg_0 , arg_1 , arg_2 ) arg_6 = time . time ( ) - arg_3 LOGGER . info ( 'Content metadata transmission task for integrated channel configuration [%s] took [%s] seconds' , arg_5 , arg_6 )","id_":252807,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/integrated_channel\/tasks.py#L20-L46","negative":"Return file-like object for 'member'.\n\n 'member' may be a filename or a RarInfo object."} {"query":"Checks file sizes for host","positive":"def Func ( arg_0 , arg_1 ) : if os . path . getsize ( arg_0 ) > arg_1 : return False else : return True","id_":252808,"task_name":"https:\/\/github.com\/lc-guy\/limf\/blob\/ad380feb70ef8e579a91ca09c807efec9e8af565\/limf\/parse_arguments.py#L11-L18","negative":"Discard deposit changes.\n\n #. The signal :data:`invenio_records.signals.before_record_update` is\n sent before the edit execution.\n\n #. It restores the last published version.\n\n #. The following meta information are saved inside the deposit:\n\n .. code-block:: python\n\n deposit['$schema'] = deposit_schema_from_record_schema\n\n #. The signal :data:`invenio_records.signals.after_record_update` is\n sent after the edit execution.\n\n #. The deposit index is updated.\n\n Status required: ``'draft'``.\n\n :param pid: Force a pid object. (Default: ``None``)\n :returns: A new Deposit object."} {"query":"Produce the scree plot .","positive":"def Func ( arg_0 , arg_1 = \"barplot\" , ** arg_2 ) : arg_3 = arg_2 . pop ( \"server\" ) if arg_2 : raise ValueError ( \"Unknown arguments %s to Func()\" % \", \" . join ( arg_2 . keys ( ) ) ) try : import matplotlib if arg_3 : matplotlib . use ( 'Agg' , warn = False ) import matplotlib . pyplot as plt except ImportError : print ( \"matplotlib is required for this function!\" ) return arg_4 = [ s ** 2 for s in arg_0 . _model_json [ 'output' ] [ 'importance' ] . cell_values [ 0 ] [ 1 : ] ] plt . xlabel ( 'Components' ) plt . ylabel ( 'Variances' ) plt . title ( 'Scree Plot' ) plt . xticks ( list ( range ( 1 , len ( arg_4 ) + 1 ) ) ) if arg_1 == \"barplot\" : plt . bar ( list ( range ( 1 , len ( arg_4 ) + 1 ) ) , arg_4 ) elif arg_1 == \"lines\" : plt . plot ( list ( range ( 1 , len ( arg_4 ) + 1 ) ) , arg_4 , 'b--' ) if not arg_3 : plt . show ( )","id_":252809,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/model\/dim_reduction.py#L95-L124","negative":"Sequentially update the actors, the world, and the messaging system. 
\n The theater terminates once all of the actors indicate that they are done."} {"query":"Read metadata and apply that to the next object in the input stream .","positive":"def Func ( arg_0 : arg_1 ) -> IMeta : arg_2 = arg_0 . reader . advance ( ) assert arg_2 == \"^\" arg_3 = _read_next_consuming_comment ( arg_0 ) arg_4 : Optional [ lmap . Map [ LispForm , LispForm ] ] = None if isinstance ( arg_3 , symbol . Symbol ) : arg_4 = lmap . map ( { keyword . keyword ( \"tag\" ) : arg_3 } ) elif isinstance ( arg_3 , keyword . Keyword ) : arg_4 = lmap . map ( { arg_3 : True } ) elif isinstance ( arg_3 , lmap . Map ) : arg_4 = arg_3 else : raise SyntaxError ( f\"Expected symbol, keyword, or map for metadata, not {type(meta)}\" ) arg_5 = _read_next_consuming_comment ( arg_0 ) try : return arg_5 . with_meta ( arg_4 ) except AttributeError : raise SyntaxError ( f\"Can not attach metadata to object of type {type(obj_with_meta)}\" )","id_":252810,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/reader.py#L636-L661","negative":"Converts size string into megabytes\n\n Parameters\n ----------\n s : str\n The size string can be '30KB', '20MB' or '1GB'\n\n Returns\n -------\n float\n With the size in bytes"} {"query":"Expand file patterns to a list of package_data paths .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_1 = [ '*' ] return _get_files ( arg_1 , pjoin ( HERE , arg_0 ) )","id_":252811,"task_name":"https:\/\/github.com\/jupyter-widgets\/jupyterlab-sidecar\/blob\/8889d09f1a0933e2cbee06d4874f720b075b29e8\/setupbase.py#L583-L601","negative":"Return the most recent timestamp in the operation."} {"query":"Receive data on the connection .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = _no_zero_allocator ( \"char[]\" , arg_1 ) if arg_2 is not None and arg_2 & socket . MSG_PEEK : arg_4 = _lib . SSL_peek ( arg_0 . _ssl , arg_3 , arg_1 ) else : arg_4 = _lib . SSL_read ( arg_0 . _ssl , arg_3 , arg_1 ) arg_0 . _raise_ssl_error ( arg_0 . _ssl , arg_4 ) return _ffi . buffer ( arg_3 , arg_4 ) [ : ]","id_":252812,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/SSL.py#L1778-L1793","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Get weights for this layer","positive":"def Func ( arg_0 ) : arg_1 = callBigDlFunc ( arg_0 . bigdl_type , \"getWeights\" , arg_0 . value ) if arg_1 is not None : return [ arg_2 . to_ndarray ( ) for arg_2 in arg_1 ] else : print ( \"The layer does not have weight\/bias\" ) return None","id_":252813,"task_name":"https:\/\/github.com\/intel-analytics\/BigDL\/blob\/e9c19788285986ab789a2e2998f9a85d7524779f\/pyspark\/bigdl\/nn\/layer.py#L514-L526","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Forcibly terminates all Celery processes .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . local_renderer with arg_0 . settings ( warn_only = True ) : arg_1 . sudo ( 'pkill -9 -f celery' ) arg_1 . 
sudo ( 'rm -f \/tmp\/celery*.pid' )","id_":252814,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/celery.py#L94-L101","negative":"Checks if a blob_name is updated in Google Cloud Storage.\n\n :param bucket_name: The Google cloud storage bucket where the object is.\n :type bucket_name: str\n :param object_name: The name of the object to check in the Google cloud\n storage bucket.\n :type object_name: str\n :param ts: The timestamp to check against.\n :type ts: datetime.datetime"} {"query":"Creates an empty file if it does not already exist .","positive":"def Func ( arg_0 ) : if os . path . exists ( arg_0 ) : return False else : open ( arg_0 , 'a+b' ) . close ( ) logger . info ( 'Credential file {0} created' . format ( arg_0 ) ) return True","id_":252815,"task_name":"https:\/\/github.com\/googleapis\/oauth2client\/blob\/50d20532a748f18e53f7d24ccbe6647132c979a9\/oauth2client\/contrib\/multiprocess_file_storage.py#L100-L112","negative":"TARGET power button"} {"query":"Allocate or reallocate a scaling IP .","positive":"def Func ( arg_0 , arg_1 ) : LOG . info ( 'Func for tenant %s and body %s' , arg_0 . tenant_id , arg_1 ) arg_2 = arg_1 . get ( 'scaling_network_id' ) arg_3 = arg_1 . get ( 'scaling_ip_address' ) arg_4 = arg_1 . get ( 'ports' , [ ] ) arg_5 = _get_network ( arg_0 , arg_2 ) arg_6 = { } for arg_7 in arg_4 : arg_8 = _get_port ( arg_0 , arg_7 [ 'port_id' ] ) arg_9 = _get_fixed_ip ( arg_0 , arg_7 . get ( 'fixed_ip_address' ) , arg_8 ) arg_6 [ arg_8 . id ] = { \"port\" : arg_8 , \"fixed_ip\" : arg_9 } arg_11 = _allocate_ip ( arg_0 , arg_5 , None , arg_3 , ip_types . SCALING ) _create_flip ( arg_0 , arg_11 , arg_6 ) return v . _make_scaling_ip_dict ( arg_11 )","id_":252816,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/plugin_modules\/floating_ips.py#L526-L553","negative":"Return single program by name, or None if not found.\n\n Arguments:\n program_title(string): Program title as seen by students and in Course Catalog Admin\n\n Returns:\n dict: Program data provided by Course Catalog API"} {"query":"Rebuilds the graph around the given node id .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . clear ( ) arg_0 . add_node ( arg_1 , root = True ) for arg_2 , arg_3 in arg_0 . get_links ( arg_1 ) : arg_0 . add_edge ( arg_1 , arg_3 , weight = arg_2 ) if len ( arg_0 ) > arg_0 . max : break for arg_2 , arg_3 , arg_4 in arg_0 . get_cluster ( arg_1 ) : for arg_5 in arg_4 : arg_0 . add_edge ( arg_5 , arg_3 , weight = arg_2 ) arg_0 . add_edge ( arg_1 , arg_5 , weight = arg_2 ) if len ( arg_0 ) > arg_0 . max : break if arg_0 . event . clicked : g . add_node ( arg_0 . event . clicked )","id_":252817,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/graph\/__init__.py#L672-L700","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Get the summary of exceptions for component_name and list of instances . Empty instance list will fetch all exceptions .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = [ ] , arg_4 = None ) : if not arg_1 or not arg_1 . host or not arg_1 . stats_port : return arg_5 = tmaster_pb2 . ExceptionLogRequest ( ) arg_5 . 
component_name = arg_2 if len ( arg_3 ) > 0 : arg_5 . instances . extend ( arg_3 ) arg_6 = arg_5 . SerializeToString ( ) arg_7 = str ( arg_1 . stats_port ) arg_8 = arg_1 . host arg_9 = \"http:\/\/{0}:{1}\/exceptionsummary\" . format ( arg_8 , arg_7 ) Log . debug ( \"Creating request object.\" ) arg_10 = tornado . httpclient . HTTPRequest ( arg_9 , body = arg_6 , method = 'POST' , request_timeout = 5 ) Log . debug ( 'Making HTTP call to fetch exceptionsummary url: %s' , arg_9 ) try : arg_11 = tornado . httpclient . AsyncHTTPClient ( ) arg_12 = yield arg_11 . fetch ( arg_10 ) Log . debug ( \"HTTP call complete.\" ) except tornado . httpclient . HTTPError as e : raise Exception ( str ( e ) ) arg_13 = arg_12 . code if arg_13 >= 400 : arg_14 = \"Error in getting exceptions from Tmaster, code: \" + arg_13 Log . error ( arg_14 ) raise tornado . gen . Return ( { \"message\" : arg_14 } ) arg_15 = tmaster_pb2 . ExceptionLogResponse ( ) arg_15 . ParseFromString ( arg_12 . body ) if arg_15 . status . status == common_pb2 . NOTOK : if arg_15 . status . HasField ( \"message\" ) : raise tornado . gen . Return ( { \"message\" : arg_15 . status . message } ) arg_16 = [ ] for arg_17 in arg_15 . exceptions : arg_16 . append ( { 'class_name' : arg_17 . stacktrace , 'lasttime' : arg_17 . lasttime , 'firsttime' : arg_17 . firsttime , 'count' : str ( arg_17 . count ) } ) raise tornado . gen . Return ( arg_16 )","id_":252818,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/handlers\/exceptionsummaryhandler.py#L75-L129","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Get an SMTP session with SSL .","positive":"def Func ( arg_0 ) : return smtplib . SMTP_SSL ( arg_0 . server , arg_0 . port , context = ssl . create_default_context ( ) )","id_":252819,"task_name":"https:\/\/github.com\/trp07\/messages\/blob\/7789ebc960335a59ea5d319fceed3dd349023648\/messages\/email_.py#L238-L242","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Get a named attribute from an object .","positive":"def Func ( arg_0 , arg_1 = None ) : def getter ( arg_2 ) : return _Func ( arg_2 , arg_0 , arg_1 ) return transform ( getter )","id_":252820,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/plugin\/api\/validate.py#L180-L189","negative":"Adds all parameters to `traj`"} {"query":"Reraises exception appending message to its string representation .","positive":"def Func ( arg_0 , arg_1 ) : class arg_5 ( type ( arg_0 ) ) : arg_2 = type ( arg_0 ) . __module__ def __init__ ( arg_3 ) : pass def __getattr__ ( arg_3 , arg_4 ) : return getattr ( arg_0 , arg_4 ) def __str__ ( arg_3 ) : return str ( arg_0 ) + arg_1 arg_5 . __name__ = type ( arg_0 ) . __name__ arg_7 = arg_5 ( ) if six . PY3 : arg_5 . __qualname__ = type ( arg_0 ) . __qualname__ six . raise_from ( arg_7 . with_traceback ( arg_0 . __traceback__ ) , None ) else : six . reraise ( arg_7 , None , sys . 
exc_info ( ) [ 2 ] )","id_":252821,"task_name":"https:\/\/github.com\/google\/gin-config\/blob\/17a170e0a6711005d1c78e67cf493dc44674d44f\/gin\/utils.py#L28-L51","negative":"Sparse matrix roll\n\n This operation is equivalent to ``numpy.roll``, but operates on sparse matrices.\n\n Parameters\n ----------\n x : scipy.sparse.spmatrix or np.ndarray\n The sparse matrix input\n\n shift : int\n The number of positions to roll the specified axis\n\n axis : (0, 1, -1)\n The axis along which to roll.\n\n Returns\n -------\n x_rolled : same type as `x`\n The rolled matrix, with the same format as `x`\n\n See Also\n --------\n numpy.roll\n\n Examples\n --------\n >>> # Generate a random sparse binary matrix\n >>> X = scipy.sparse.lil_matrix(np.random.randint(0, 2, size=(5,5)))\n >>> X_roll = roll_sparse(X, 2, axis=0) # Roll by 2 on the first axis\n >>> X_dense_r = roll_sparse(X.toarray(), 2, axis=0) # Equivalent dense roll\n >>> np.allclose(X_roll, X_dense_r.toarray())\n True"} {"query":"Recovers x and y coordinates from the compressed point .","positive":"def Func ( arg_0 : arg_1 ) -> G1Uncompressed : arg_2 = ( arg_0 % POW_2_383 ) \/\/ POW_2_382 if arg_2 == 1 : return Z1 arg_3 = arg_0 % POW_2_381 arg_4 = pow ( ( arg_3 ** 3 + b . n ) % q , ( q + 1 ) \/\/ 4 , q ) if pow ( arg_4 , 2 , q ) != ( arg_3 ** 3 + b . n ) % q : raise ValueError ( \"The given point is not on G1: y**2 = x**3 + b\" ) arg_5 = ( arg_0 % POW_2_382 ) \/\/ POW_2_381 if ( arg_4 * 2 ) \/\/ q != arg_5 : arg_4 = q - arg_4 return ( FQ ( arg_3 ) , FQ ( arg_4 ) , FQ ( 1 ) )","id_":252822,"task_name":"https:\/\/github.com\/ethereum\/py_ecc\/blob\/2088796c59574b256dc8e18f8c9351bc3688ca71\/py_ecc\/bls\/utils.py#L118-L140","negative":"Start performing the action."} {"query":"load the configuration information from the target hierarchy","positive":"def Func ( arg_0 ) : arg_1 = [ arg_0 . additional_config , arg_0 . app_config ] + [ t . getConfig ( ) for t in arg_0 . hierarchy ] arg_2 = [ _mirrorStructure ( arg_0 . additional_config , 'command-line config' ) , _mirrorStructure ( arg_0 . app_config , 'application\\'s config.json' ) , ] + [ _mirrorStructure ( t . getConfig ( ) , t . getName ( ) ) for t in arg_0 . hierarchy ] arg_0 . config = _mergeDictionaries ( * arg_1 ) arg_0 . config_blame = _mergeDictionaries ( * arg_2 )","id_":252823,"task_name":"https:\/\/github.com\/ARMmbed\/yotta\/blob\/56bc1e56c602fa20307b23fe27518e9cd6c11af1\/yotta\/lib\/target.py#L311-L325","negative":"Update the estimate.\n\n Parameters\n ----------\n new_val: float\n new observated value of estimated quantity."} {"query":"Associate event handlers","positive":"def Func ( arg_0 , arg_1 = None ) : if not arg_1 or isinstance ( arg_1 , dict ) : if not arg_1 : arg_1 = util . get_caller_module_dict ( ) arg_2 = arg_1 [ '__name__' ] arg_3 = arg_1 else : arg_2 = arg_1 . __class__ . __name__ arg_3 = dict ( [ ( k , getattr ( arg_1 , k ) ) for k in dir ( arg_1 ) if k . startswith ( \"on_\" ) ] ) for arg_4 in [ n for n in arg_3 if n . startswith ( \"on_\" ) ] : arg_5 = arg_4 . split ( \"_\" ) arg_6 = arg_5 . pop ( 0 ) + arg_5 . pop ( - 1 ) arg_7 = arg_0 for arg_8 in arg_5 : try : arg_7 = arg_7 [ arg_8 ] except KeyError : arg_7 = None break if not arg_7 : from . component import COMPONENTS for arg_9 , arg_7 in COMPONENTS . items ( ) : if arg_7 . name == arg_8 : print \"WARNING: %s should be %s\" % ( arg_8 , arg_9 . 
replace ( \".\" , \"_\" ) ) break else : raise NameError ( \"'%s' component not found (%s.%s)\" % ( arg_8 , arg_2 , arg_4 ) ) if arg_6 in PYTHONCARD_EVENT_MAP : arg_10 = PYTHONCARD_EVENT_MAP [ arg_6 ] print \"WARNING: %s should be %s (%s)\" % ( arg_6 , arg_10 , arg_4 ) arg_6 = arg_10 if not hasattr ( arg_7 , arg_6 ) : raise NameError ( \"'%s' event not valid (%s.%s)\" % ( arg_6 , arg_2 , arg_4 ) ) setattr ( arg_7 , arg_6 , arg_3 [ arg_4 ] )","id_":252824,"task_name":"https:\/\/github.com\/reingart\/gui2py\/blob\/aca0a05f6fcde55c94ad7cc058671a06608b01a4\/gui\/resource.py#L199-L243","negative":"Generate requested statistics for a dataset and cache to a file.\n If filename is None, then don't cache to a file"} {"query":"Returns the concordance scores for each stratified graph based on the given annotation","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : return { arg_4 : calculate_concordance ( arg_5 , arg_2 , arg_3 = arg_3 ) for arg_4 , arg_5 in get_subgraphs_by_annotation ( arg_0 , arg_1 ) . items ( ) }","id_":252825,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/concordance.py#L236-L248","negative":"Put us back at the beginning of the file again."} {"query":"Convert hex to a color name using matplotlib s colour names .","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 in defaults . COLOURS . items ( ) : if ( len ( arg_1 ) > 1 ) and ( arg_2 == arg_0 . upper ( ) ) : return arg_1 . lower ( ) return None","id_":252826,"task_name":"https:\/\/github.com\/agile-geoscience\/striplog\/blob\/8033b673a151f96c29802b43763e863519a3124c\/striplog\/utils.py#L179-L192","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Maintain current time during context","positive":"def Func ( ) : arg_0 = cmds . currentTime ( query = True ) try : yield finally : cmds . currentTime ( arg_0 , edit = True )","id_":252827,"task_name":"https:\/\/github.com\/pyblish\/pyblish-maya\/blob\/75db8b5d8de9d53ae95e74195a788b5f6db2cb5f\/pyblish_maya\/lib.py#L234-L248","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Matches the string with the pattern caching the compiled regexp .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 not in arg_2 : arg_2 [ arg_0 ] = sre_compile . compile ( arg_0 ) return arg_2 [ arg_0 ] . match ( arg_1 )","id_":252828,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L757-L764","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"List all files in all app storages .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in six . itervalues ( arg_0 . storages ) : if arg_2 . exists ( '' ) : for arg_3 in utils . 
get_files ( arg_2 , arg_1 ) : yield arg_3 , arg_2","id_":252829,"task_name":"https:\/\/github.com\/adrianoveiga\/django-media-fixtures\/blob\/a3f0d9ac84e73d491eeb0c881b23cc47ccca1b54\/django_media_fixtures\/finders.py#L121-L128","negative":"Given a dictionary mapping unique keys to amino acid sequences,\n run MHC binding predictions on all candidate epitopes extracted from\n sequences and return a EpitopeCollection.\n\n Parameters\n ----------\n fasta_dictionary : dict or string\n Mapping of protein identifiers to protein amino acid sequences.\n If string then converted to dictionary."} {"query":"Read the commits of a pack .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 'objects\/pack\/pack-' + arg_1 arg_3 = [ 'git' , 'verify-pack' , '-v' , arg_2 ] arg_4 = arg_0 . _exec ( arg_3 , cwd = arg_0 . dirpath , env = arg_0 . gitenv ) arg_4 = arg_4 . decode ( 'utf-8' , errors = 'surrogateescape' ) . rstrip ( ) arg_5 = [ line . split ( ' ' ) for line in arg_4 . split ( '\\n' ) ] arg_6 = [ parts [ 0 ] for parts in arg_5 if parts [ 1 ] == 'commit' ] arg_6 . reverse ( ) return arg_6","id_":252830,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/git.py#L1119-L1135","negative":"Generate new sequences.\n\n Args:\n batch_size: Number of sequences to generate.\n length: Number of timesteps to generate for each sequence.\n samples: Number of samples to draw from the latent distributions.\n fix_static: Boolean for whether or not to share the same random\n sample of the static latent variable `f` from its prior across\n all examples.\n fix_dynamic: Boolean for whether or not to share the same random\n sample of the dynamic latent variable `z_{1:T}` from its prior\n across all examples.\n\n Returns:\n A batched Independent distribution wrapping a set of Normal\n distributions over the pixels of the generated sequences, where\n the Independent distribution has event shape [height, width,\n channels], batch shape [samples, batch_size, timesteps], and\n sample shape [sample_shape, samples, batch_size, timesteps,\n height, width, channels]."} {"query":"Reuses the same db session","positive":"def Func ( arg_0 ) : if not arg_0 . session : arg_0 . session = dal . get_default_session ( ) return arg_0 . session","id_":252831,"task_name":"https:\/\/github.com\/MisterY\/price-database\/blob\/b4fd366b7763891c690fe3000b8840e656da023e\/pricedb\/csv.py#L95-L99","negative":"Fetches fuel prices for all stations."} {"query":"Return the next batch of items to upload .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . queue arg_2 = [ ] arg_3 = arg_0 . Func_item ( ) if arg_3 is None : return arg_2 arg_2 . append ( arg_3 ) while len ( arg_2 ) < arg_0 . upload_size and not arg_1 . empty ( ) : arg_3 = arg_0 . Func_item ( ) if arg_3 : arg_2 . append ( arg_3 ) return arg_2","id_":252832,"task_name":"https:\/\/github.com\/nyaruka\/python-librato-bg\/blob\/e541092838694de31d256becea8391a9cfe086c7\/librato_bg\/consumer.py#L59-L73","negative":"Check if the device is on."} {"query":"See docstrings for generate_files and generate_checkpoints .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 ) : arg_7 = [ ] if arg_4 is not None : arg_7 . append ( arg_1 >= arg_4 ) if arg_5 is not None : arg_7 . append ( arg_1 < arg_5 ) if arg_0 is files : arg_7 . append ( files . c . name . like ( u'%.ipynb' ) ) arg_8 = select ( [ arg_0 ] ) . order_by ( arg_1 ) for arg_9 in arg_7 : arg_8 = arg_8 . where ( arg_9 ) arg_10 = arg_2 . 
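Record 252832 ("Return the next batch of items to upload") reads more clearly with names restored. `next_item` stands in for the record's masked `Func_item` helper, and the surrounding consumer class with `upload_size` and `queue` attributes is assumed:

```python
def next_batch(self):
    """Return the next batch of items to upload, capped at self.upload_size."""
    items = []
    item = self.next_item()  # assumed helper: pop one item from the queue, or None
    if item is None:
        return items
    items.append(item)
    while len(items) < self.upload_size and not self.queue.empty():
        item = self.next_item()
        if item:  # next_item may come back empty between the check and the pop
            items.append(item)
    return items
```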
execute ( arg_8 ) for arg_11 in arg_10 : try : arg_12 = arg_11 [ 'user_id' ] arg_13 = arg_3 ( arg_12 ) . decrypt arg_14 = to_dict_with_content ( arg_0 . c , arg_11 , arg_13 ) if arg_0 is files : arg_14 [ 'path' ] = arg_14 [ 'parent_name' ] + arg_14 [ 'name' ] arg_14 [ 'last_modified' ] = arg_14 [ 'created_at' ] yield { 'id' : arg_14 [ 'id' ] , 'user_id' : arg_12 , 'path' : to_api_path ( arg_14 [ 'path' ] ) , 'last_modified' : arg_14 [ 'last_modified' ] , 'content' : reads_base64 ( arg_14 [ 'content' ] ) , } except CorruptedFile : if arg_6 is not None : arg_6 . warning ( 'Corrupted file with id %d in table %s.' % ( arg_11 [ 'id' ] , arg_0 . name ) )","id_":252833,"task_name":"https:\/\/github.com\/quantopian\/pgcontents\/blob\/ed36268b7917332d16868208e1e565742a8753e1\/pgcontents\/query.py#L770-L836","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"r Generate one of the two sequences that generated a delta .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = { 1 : \"- \" , 2 : \"+ \" } [ int ( arg_1 ) ] except KeyError : raise ValueError , ( 'unknown delta choice (must be 1 or 2): %r' % arg_1 ) arg_3 = ( \" \" , arg_2 ) for arg_4 in arg_0 : if arg_4 [ : 2 ] in arg_3 : yield arg_4 [ 2 : ]","id_":252834,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/difflib.py#L2097-L2127","negative":"Bind the server unless it is already bound, this is a read-only node, or the last attempt was too recently.\n\n :raises TransportNotReadyError if the bind attempt fails"} {"query":"Wrapper for the other log methods decide which one based on the URL parameter .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = True ) : if arg_1 is None : arg_1 = arg_0 . url if re . match ( \"file:\/\/\" , arg_1 ) : arg_0 . Func_file ( arg_1 ) elif re . match ( \"https:\/\/\" , arg_1 ) or re . match ( \"http:\/\/\" , arg_1 ) : arg_0 . Func_post ( arg_1 , arg_2 , arg_3 ) else : arg_0 . Func_stdout ( )","id_":252835,"task_name":"https:\/\/github.com\/zwischenloesung\/ardu-report-lib\/blob\/51bd4a07e036065aafcb1273b151bea3fdfa50fa\/libardurep\/datareporter.py#L34-L46","negative":"List all events occurring at or after a timestamp."} {"query":"check that the signature of the two given methods match","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : if not ( isinstance ( arg_1 , astroid . FunctionDef ) and isinstance ( arg_2 , astroid . FunctionDef ) ) : arg_0 . add_message ( \"method-check-failed\" , args = ( arg_1 , arg_2 ) , node = arg_1 ) return arg_5 = arg_4 . instantiate_class ( ) arg_1 = function_to_method ( arg_1 , arg_5 ) arg_2 = function_to_method ( arg_2 , arg_5 ) if arg_1 . args . args is None or arg_2 . args . args is None : return if is_attr_private ( arg_1 . name ) : return if arg_1 . decorators : for arg_6 in arg_1 . decorators . nodes : if ( isinstance ( arg_6 , astroid . Attribute ) and arg_6 . attrname == \"setter\" ) : return if _different_parameters ( arg_2 , arg_1 , dummy_parameter_regex = arg_0 . _dummy_rgx ) : arg_0 . add_message ( \"arguments-differ\" , args = ( arg_3 , arg_1 . name ) , node = arg_1 ) elif len ( arg_1 . args . defaults ) < len ( arg_2 . args . defaults ) : arg_0 . add_message ( \"signature-differs\" , args = ( arg_3 , arg_1 . 
name ) , node = arg_1 )","id_":252836,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/classes.py#L1539-L1581","negative":"Create required links from a sensor region to a classifier region."} {"query":"Generate an intermediate if statement which assigns to a temporary variable which is returned as the expression value at the end of evaluation .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 ) -> GeneratedPyAST : assert arg_2 . op == NodeOp . IF arg_4 = gen_py_ast ( arg_0 , arg_2 . test ) arg_5 = genname ( _IF_RESULT_PREFIX ) arg_6 = __if_body_to_py_ast ( arg_0 , arg_2 . then , arg_5 ) arg_7 = __if_body_to_py_ast ( arg_0 , arg_2 . else_ , arg_5 ) arg_8 = genname ( _IF_TEST_PREFIX ) arg_9 = ast . Assign ( targets = [ ast . Name ( id = arg_8 , arg_0 = ast . Store ( ) ) ] , value = arg_4 . node ) arg_10 = ast . If ( test = ast . BoolOp ( op = ast . Or ( ) , values = [ ast . Compare ( left = ast . NameConstant ( None ) , ops = [ ast . Is ( ) ] , comparators = [ ast . Name ( id = arg_8 , arg_0 = ast . Load ( ) ) ] , ) , ast . Compare ( left = ast . NameConstant ( False ) , ops = [ ast . Is ( ) ] , comparators = [ ast . Name ( id = arg_8 , arg_0 = ast . Load ( ) ) ] , ) , ] , ) , values = [ ] , body = list ( map ( statementize , chain ( arg_7 . dependencies , [ arg_7 . node ] ) ) ) , orelse = list ( map ( statementize , chain ( arg_6 . dependencies , [ arg_6 . node ] ) ) ) , ) return GeneratedPyAST ( arg_2 = ast . Name ( id = arg_5 , arg_0 = ast . Load ( ) ) , dependencies = list ( chain ( arg_4 . dependencies , [ arg_9 , arg_10 ] ) ) , )","id_":252837,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/generator.py#L1265-L1314","negative":"Set the rotation of this body using a rotation matrix.\n\n Parameters\n ----------\n rotation : sequence of 9 floats\n The desired rotation matrix for this body."} {"query":"Calculate the minimum and maximum for expressions possibly on a grid defined by binby .","positive":"def Func ( arg_0 , arg_1 , arg_2 = [ ] , arg_3 = None , arg_4 = arg_5 , arg_6 = False , arg_7 = False , arg_8 = None ) : @ delayed def finish ( * arg_9 ) : arg_10 = vaex . utils . unlistify ( arg_12 , np . array ( arg_9 ) ) arg_10 = arg_10 . astype ( arg_15 ) return arg_10 @ delayed def calculate ( arg_1 , arg_3 ) : arg_11 = tasks . TaskStatistic ( arg_0 , arg_2 , arg_4 , arg_3 , weight = arg_1 , op = tasks . OP_MIN_MAX , arg_6 = arg_6 ) arg_0 . executor . schedule ( arg_11 ) arg_17 . add_task ( arg_11 , \"Func for %s\" % arg_1 ) return arg_11 @ delayed def finish ( * arg_9 ) : arg_10 = vaex . utils . unlistify ( arg_12 , np . array ( arg_9 ) ) arg_10 = arg_10 . astype ( arg_15 ) return arg_10 arg_1 = _ensure_strings_from_expressions ( arg_1 ) arg_2 = _ensure_strings_from_expressions ( arg_2 ) arg_12 , [ arg_13 , ] = vaex . utils . listify ( arg_1 ) arg_14 = [ arg_0 . dtype ( expr ) for expr in arg_13 ] arg_15 = arg_14 [ 0 ] if not all ( [ arg_16 . kind == arg_15 . kind for arg_16 in arg_14 ] ) : raise ValueError ( \"cannot mix datetime and non-datetime expressions\" ) arg_17 = vaex . utils . progressbars ( arg_8 , name = \"Funces\" ) arg_3 = arg_0 . limits ( arg_2 , arg_3 , arg_6 = arg_6 , arg_7 = True ) arg_18 = [ calculate ( arg_1 , arg_3 ) for arg_1 in arg_13 ] arg_19 = finish ( * arg_18 ) return arg_0 . 
_delay ( arg_7 , arg_19 )","id_":252838,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/dataframe.py#L1047-L1104","negative":"Read the current ADS-state and the machine-state.\n\n Read the current ADS-state and the machine-state from the\n ADS-server.\n\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :rtype: (int, int)\n :return: ads_state, device_state"} {"query":"Return a random Pauli on number of qubits .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_2 is not None : np . Func . seed ( arg_2 ) arg_3 = np . Func . randint ( 2 , size = arg_1 ) . astype ( np . bool ) arg_4 = np . Func . randint ( 2 , size = arg_1 ) . astype ( np . bool ) return arg_0 ( arg_3 , arg_4 )","id_":252839,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/pauli.py#L446-L459","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"don t output message of the given id","positive":"def Func ( arg_0 , arg_1 , arg_2 = \"package\" , arg_3 = None , arg_4 = False ) : arg_0 . _set_msg_status ( arg_1 , enable = False , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 ) arg_0 . _register_by_id_managed_msg ( arg_1 , arg_3 )","id_":252840,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/message\/message_handler_mix_in.py#L95-L100","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Test whether a file target is not exists or it exists but allow overwrite .","positive":"def Func ( arg_0 , arg_1 = False ) : if arg_0 . exists ( ) and arg_1 is False : return False else : return True","id_":252841,"task_name":"https:\/\/github.com\/MacHu-GWU\/pathlib_mate-project\/blob\/f9fb99dd7cc9ea05d1bec8b9ce8f659e8d97b0f1\/pathlib_mate\/mate_mutate_methods.py#L91-L99","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Attach a method to a class .","positive":"def Func ( arg_0 ) : def wrapper ( arg_1 ) : setattr ( arg_0 , arg_1 . 
__name__ , arg_1 ) return arg_1 return wrapper","id_":252842,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/meta.py#L124-L132","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Unregisters the block associated with block_type from the registry .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 not in arg_0 . _registry : raise NotRegistered ( 'There is no block registered as \"{}\" with the ' 'RegisteredBlockStreamFieldRegistry registry.' . format ( arg_1 ) ) else : del arg_0 . _registry [ arg_1 ]","id_":252843,"task_name":"https:\/\/github.com\/WGBH\/wagtail-streamfieldtools\/blob\/192f86845532742b0b7d432bef3987357833b8ed\/streamfield_tools\/registry.py#L59-L73","negative":"Export the contents to a file as comma separated values.\n\n Parameters\n ----------\n path : string\n File path where the data should be saved to\n\n Example\n -------\n Export the last ten elements of AME2012 to a new file:\n\n >>> Table('AME2012').tail(10).to_file('last_ten.txt')"} {"query":"Return an adapter factory for ob from registry","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in _get_mro ( getattr ( arg_1 , '__class__' , type ( arg_1 ) ) ) : if arg_2 in arg_0 : return arg_0 [ arg_2 ]","id_":252844,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/pip\/_vendor\/pkg_resources\/__init__.py#L2975-L2979","negative":"Runs the consumer."} {"query":"Convert PythonCard font description to gui2py style","positive":"def Func ( arg_0 ) : if 'faceName' in arg_0 : arg_0 [ 'face' ] = arg_0 . pop ( 'faceName' ) if 'family' in arg_0 and arg_0 [ 'family' ] == 'sansSerif' : arg_0 [ 'family' ] = 'sans serif' return arg_0","id_":252845,"task_name":"https:\/\/github.com\/reingart\/gui2py\/blob\/aca0a05f6fcde55c94ad7cc058671a06608b01a4\/gui\/tools\/migrate.py#L186-L192","negative":"Check if templates directories are setup and issue a warning and help.\n\n Set the environment variable :envvar:`GROMACSWRAPPER_SUPPRESS_SETUP_CHECK`\n skip the check and make it always return ``True``\n\n :return ``True`` if directories were found and ``False`` otherwise\n\n .. 
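Record 252842 ("Attach a method to a class") is a two-line decorator factory once the `arg_n` names are replaced; `attach_method` is an assumed name:

```python
def attach_method(cls):
    """Return a decorator that attaches the decorated function to cls."""
    def wrapper(func):
        setattr(cls, func.__name__, func)
        return func
    return wrapper

# Usage sketch: after this, MyClass().greet() is available.
# @attach_method(MyClass)
# def greet(self):
#     return "hello"
```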
versionchanged:: 0.3.1\n Uses :envvar:`GROMACSWRAPPER_SUPPRESS_SETUP_CHECK` to suppress check\n (useful for scripts run on a server)"} {"query":"Returns a Google web query formatted as a GoogleSearch list object .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = 10 , arg_3 = False , arg_4 = False ) : arg_5 = GOOGLE_SEARCH return GoogleSearch ( arg_0 , arg_1 , arg_5 , \"\" , arg_2 , arg_3 , arg_4 )","id_":252846,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/web\/google.py#L212-L218","negative":"Reassemble a Binder object coming out of the database."} {"query":"Derives a PEP386 - compliant version number from VERSION .","positive":"def Func ( arg_0 = None ) : if arg_0 is None : arg_0 = VERSION assert len ( arg_0 ) == 5 assert arg_0 [ 3 ] in ( \"alpha\" , \"beta\" , \"rc\" , \"final\" ) arg_1 = 2 if arg_0 [ 2 ] == 0 else 3 arg_2 = \".\" . join ( str ( x ) for x in arg_0 [ : arg_1 ] ) arg_3 = \"\" if arg_0 [ 3 ] != \"final\" : arg_4 = { \"alpha\" : \"a\" , \"beta\" : \"b\" , \"rc\" : \"c\" } arg_3 = arg_4 [ arg_0 [ 3 ] ] + str ( arg_0 [ 4 ] ) return arg_2 + arg_3","id_":252847,"task_name":"https:\/\/github.com\/5monkeys\/django-bananas\/blob\/cfd318c737f6c4580036c13d2acf32bca96654bf\/bananas\/__init__.py#L4-L24","negative":"Indicates the start of a new sequence. Clears any predictions and makes sure\n synapses don't grow to the currently active cells in the next time step."} {"query":"Load users .","positive":"def Func ( arg_0 ) : from . tasks . users import load_user loadcommon ( arg_0 , load_user , asynchronous = False )","id_":252848,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-migrator\/blob\/6902c6968a39b747d15e32363f43b7dffe2622c2\/invenio_migrator\/cli.py#L227-L232","negative":"Revoke the token and remove the cookie."} {"query":"Build Sphinx docs and publish to Confluence .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = False , arg_3 = '' ) : arg_4 = config . load ( ) if arg_2 : arg_0 . run ( \"invoke clean --docs\" ) arg_5 = [ 'sphinx-build' , '-b' , 'Func' ] arg_5 . extend ( [ '-E' , '-a' ] ) if arg_3 : arg_5 . append ( arg_3 ) arg_5 . extend ( [ '.' , arg_0 . rituals . docs . build + '_cf' ] ) if arg_1 : arg_5 . extend ( [ '-DFunc_publish=False' ] ) notify . info ( \"Starting Sphinx build...\" ) with pushd ( arg_0 . rituals . docs . sources ) : arg_0 . run ( ' ' . join ( arg_5 ) , pty = True )","id_":252849,"task_name":"https:\/\/github.com\/jhermann\/rituals\/blob\/1534f50d81e19bbbe799e2eba0acdefbce047c06\/src\/rituals\/acts\/documentation.py#L236-L254","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."} {"query":"Decorator that prevents callbacks from calling into link methods that are not reentrant","positive":"def Func ( arg_0 ) : def wrap ( * arg_1 , ** arg_2 ) : arg_3 = arg_1 [ 0 ] if arg_3 . _callback_lock . 
in_callback : arg_4 = \"Link %s cannot be invoked from a callback!\" % arg_0 raise RuntimeError ( arg_4 ) return arg_0 ( * arg_1 , ** arg_2 ) return wrap","id_":252850,"task_name":"https:\/\/github.com\/kgiusti\/pyngus\/blob\/5392392046989f1bb84ba938c30e4d48311075f1\/pyngus\/link.py#L71-L80","negative":"Write the manifest content to the zip file. It must be a predictable\n order."} {"query":"Calls pre and post delete hooks for DelteViews .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_0 . object = arg_0 . get_object ( ) arg_5 = arg_0 . get_success_url ( ) arg_0 . pre_Func ( arg_0 . object ) arg_0 . object . Func ( ) arg_0 . post_Func ( arg_0 . object ) return HttpResponseRedirect ( arg_5 )","id_":252851,"task_name":"https:\/\/github.com\/theduke\/django-baseline\/blob\/7be8b956e53c70b35f34e1783a8fe8f716955afb\/django_baseline\/views.py#L160-L171","negative":"Parse a Supybot IRC log file.\n\n The method parses the Supybot IRC log file and returns an iterator of\n dictionaries. Each one of this, contains a message from the file.\n\n :param filepath: path to the IRC log file\n\n :returns: a generator of parsed messages\n\n :raises ParseError: raised when the format of the Supybot log file\n is invalid\n :raises OSError: raised when an error occurs reading the\n given file"} {"query":"Returns Python array from Js array","positive":"def Func ( arg_0 ) : return [ arg_0 . get ( str ( arg_1 ) ) for arg_1 in xrange ( len ( arg_0 ) ) ]","id_":252852,"task_name":"https:\/\/github.com\/PiotrDabkowski\/Js2Py\/blob\/c0fa43f5679cf91ca8986c5747fcb07a433dc584\/js2py\/prototypes\/jsarray.py#L8-L10","negative":"Add feedback on a specific campaign.\n\n :param campaign_id: The unique id for the campaign.\n :type campaign_id: :py:class:`str`\n :param data: The request body parameters\n :type data: :py:class:`dict`\n data = {\n \"message\": string*\n }\n :param queryparams: The query string parameters\n queryparams['fields'] = []\n queryparams['exclude_fields'] = []"} {"query":"Returns the status of Home Mode","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = arg_0 . _api_info [ 'home_mode' ] arg_3 = dict ( { 'api' : arg_2 [ 'name' ] , 'method' : 'GetInfo' , 'version' : arg_2 [ 'version' ] , '_sid' : arg_0 . _sid } , ** arg_1 ) arg_4 = arg_0 . _get_json_with_retry ( arg_2 [ 'url' ] , arg_3 ) return arg_4 [ 'data' ] [ 'on' ]","id_":252853,"task_name":"https:\/\/github.com\/snjoetw\/py-synology\/blob\/4f7eb0a3a9f86c24ad65993802e6fb11fbaa1f7f\/synology\/api.py#L122-L133","negative":"Decompress and unpickle."} {"query":"Print a list of all rules","positive":"def Func ( arg_0 ) : for arg_1 in sorted ( arg_0 . all_rules , key = lambda arg_1 : arg_1 . name ) : print ( arg_1 ) if arg_0 . args . verbose : for arg_2 in arg_1 . doc . split ( \"\\n\" ) : print ( \" \" , arg_2 )","id_":252854,"task_name":"https:\/\/github.com\/boakley\/robotframework-lint\/blob\/3e3578f4e39af9af9961aa0a715f146b74474091\/rflint\/rflint.py#L178-L184","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. 
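Record 252852 ("Returns Python array from Js array") de-obfuscates to a single comprehension. The sketch assumes a Js2Py-style array object exposing `get(key)` and `len()` with string indices "0", "1", ...; the record's Python 2 `xrange` becomes `range`:

```python
def js_array_to_list(js_arr):
    """Return a Python list built from a JS array object."""
    # JS array elements are stored under string keys "0", "1", ...
    return [js_arr.get(str(i)) for i in range(len(js_arr))]
```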
This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Not accurate false due to spikes are observed","positive":"def Func ( ) : arg_0 = legion_parameters ( ) arg_0 . teta_x = - 1.1 template_dynamic_legion ( 16 , 2000 , 1500 , conn_type = conn_type . GRID_FOUR , params = arg_0 , stimulus = [ 1 , 1 , 1 , 0 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 1 , 0 , 0 , 1 , 1 ] )","id_":252855,"task_name":"https:\/\/github.com\/annoviko\/pyclustering\/blob\/98aa0dd89fd36f701668fb1eb29c8fb5662bf7d0\/pyclustering\/nnet\/examples\/legion_examples.py#L99-L106","negative":"Load a default value for redshift from config and set it as the redshift for source or lens galaxies that have\n falsey redshifts\n\n Parameters\n ----------\n key: str\n\n Returns\n -------\n decorator\n A decorator that wraps the setter function to set defaults"} {"query":"Leave group .","positive":"def Func ( arg_0 ) : arg_1 = Group . query . get_or_404 ( arg_0 ) if arg_1 . can_Func ( current_user ) : try : arg_1 . remove_member ( current_user ) except Exception as e : flash ( str ( e ) , \"error\" ) return redirect ( url_for ( '.index' ) ) flash ( _ ( 'You have successfully left %(group_name)s group.' , group_name = arg_1 . name ) , 'success' ) return redirect ( url_for ( '.index' ) ) flash ( _ ( 'You cannot Func the group %(group_name)s' , group_name = arg_1 . name ) , 'error' ) return redirect ( url_for ( '.index' ) )","id_":252856,"task_name":"https:\/\/github.com\/inveniosoftware-contrib\/invenio-groups\/blob\/109481d6b02701db00b72223dd4a65e167c589a6\/invenio_groups\/views.py#L267-L294","negative":"Revoke the token and remove the cookie."} {"query":"Returns a message that will display a set of attachments in list form .","positive":"def Func ( arg_0 : arg_1 [ arg_2 ] , arg_3 : arg_4 = None , arg_5 : arg_4 = None , arg_6 : arg_7 [ arg_8 , arg_4 ] = None ) -> Activity : return attachment_activity ( AttachmentLayoutTypes . Func , arg_0 , arg_3 , arg_5 , arg_6 )","id_":252857,"task_name":"https:\/\/github.com\/Microsoft\/botbuilder-python\/blob\/274663dd91c811bae6ac4488915ba5880771b0a7\/libraries\/botbuilder-core\/botbuilder\/core\/message_factory.py#L93-L116","negative":"Concert velocities from a cartesian to a spherical coordinate system\n\n TODO: errors\n\n :param x: name of x column (input)\n :param y: y\n :param z: z\n :param vx: vx\n :param vy: vy\n :param vz: vz\n :param vr: name of the column for the radial velocity in the r direction (output)\n :param vlong: name of the column for the velocity component in the longitude direction (output)\n :param vlat: name of the column for the velocity component in the latitude direction, positive points to the north pole (output)\n :param distance: Expression for distance, if not given defaults to sqrt(x**2+y**2+z**2), but if this column already exists, passing this expression may lead to a better performance\n :return:"} {"query":"Connect to port item on subunit","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . direction == DIRECTION . IN : if arg_0 . src is not None : raise HwtSyntaxError ( \"Port %s is already associated with %r\" % ( arg_0 . name , arg_0 . src ) ) arg_0 . src = arg_1 arg_1 . endpoints . 
append ( arg_0 ) elif arg_0 . direction == DIRECTION . OUT : if arg_0 . dst is not None : raise HwtSyntaxError ( \"Port %s is already associated with %r\" % ( arg_0 . name , arg_0 . dst ) ) arg_0 . dst = arg_1 arg_1 . drivers . append ( arg_0 ) else : raise NotImplementedError ( arg_0 ) arg_1 . hidden = False arg_1 . ctx . subUnits . add ( arg_0 . unit )","id_":252858,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/portItem.py#L20-L44","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Report elapsed time .","positive":"def Func ( arg_0 ) : if not arg_0 . end_time : arg_0 . end ( ) print ( \"Time: {} mins\" . format ( ( arg_0 . end_time - arg_0 . start_time ) \/ 60 ) )","id_":252859,"task_name":"https:\/\/github.com\/zomux\/deepy\/blob\/090fbad22a08a809b12951cd0d4984f5bd432698\/deepy\/utils\/timer.py#L21-L27","negative":"Deletes the specified file from the given S3 bucket."} {"query":"Return the database specifier for a database string . This accepts a database name or URL and returns a database specifier in the format accepted by specifier_to_db . It is recommended that you consult the documentation for that function for an explanation of the format .","positive":"def Func ( arg_0 ) : arg_1 = PLAIN_RE . match ( arg_0 ) arg_2 = URL_RE . match ( arg_0 ) if arg_1 : return 'local:' + arg_1 . groupdict ( ) [ 'database' ] elif arg_2 : arg_3 , arg_4 , arg_5 = map ( arg_2 . groupdict ( ) . get , ( 'hostname' , 'portnum' , 'database' ) ) arg_6 = settings . _ ( 'COUCHDB_SERVER' , 'http:\/\/127.0.0.1:5984\/' ) arg_7 , arg_8 = urlparse . urlparse ( arg_6 ) [ 1 ] . split ( ':' ) if ( arg_7 == arg_3 ) and ( arg_8 == arg_4 ) : return 'local:' + arg_5 return 'remote:%s:%s:%s' % ( arg_3 , arg_4 , arg_5 ) raise ValueError ( 'Invalid database string: %r' % ( arg_0 , ) )","id_":252860,"task_name":"https:\/\/github.com\/zvoase\/django-relax\/blob\/10bb37bf3a512b290816856a6877c17fa37e930f\/relax\/couchdb\/__init__.py#L71-L97","negative":"Wrapper function for TUN and serial port monitoring\n\n Wraps the necessary functions to loop over until self._isRunning\n threading.Event() is set(). This checks for data on the TUN\/serial\n interfaces and then sends data over the appropriate interface. This\n function is automatically run when Threading.start() is called on the\n Monitor class."} {"query":"Decorator to log user actions","positive":"def Func ( arg_0 ) : @ functools . wraps ( arg_0 ) def wrapper ( * arg_1 , ** arg_2 ) : with create_session ( ) as session : if g . user . is_anonymous : arg_3 = 'anonymous' else : arg_3 = g . user . username arg_4 = Log ( event = arg_0 . __name__ , task_instance = None , owner = arg_3 , extra = str ( list ( request . args . items ( ) ) ) , task_id = request . args . get ( 'task_id' ) , dag_id = request . args . get ( 'dag_id' ) ) if 'execution_date' in request . args : arg_4 . execution_date = pendulum . parse ( request . args . get ( 'execution_date' ) ) session . 
add ( arg_4 ) return arg_0 ( * arg_1 , ** arg_2 ) return wrapper","id_":252861,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/www\/decorators.py#L29-L58","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Remove padding .","positive":"def Func ( arg_0 , arg_1 = 0 ) : arg_2 = copy . deepcopy ( arg_0 ) for arg_3 , arg_4 in enumerate ( arg_0 ) : for arg_5 in range ( 1 , len ( arg_0 [ arg_3 ] ) ) : if arg_0 [ arg_3 ] [ - arg_5 ] != arg_1 : arg_2 [ arg_3 ] = arg_2 [ arg_3 ] [ 0 : - arg_5 + 1 ] break return arg_2","id_":252862,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/prepro.py#L3365-L3399","negative":"Revoke the token and remove the cookie."} {"query":"Checks if a bundle exists at the provided path","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . _attached_bundles : if arg_1 == arg_2 . path : return True return False","id_":252863,"task_name":"https:\/\/github.com\/rbw\/flask-journey\/blob\/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285\/flask_journey\/journey.py#L100-L111","negative":"Calculate the seconds to reset the token requests, by obtaining the different\n between the current date and the next date when the token is fully regenerated."} {"query":"Much like the built - in function range but accepts floats","positive":"def Func ( arg_0 = 0 , arg_1 = None , arg_2 = 1 ) : arg_0 = float ( arg_0 ) while arg_0 < arg_1 : yield arg_0 arg_0 += arg_2","id_":252864,"task_name":"https:\/\/github.com\/jaraco\/svg.charts\/blob\/23053497b3f1af4e760f355050107ae3bc05909d\/svg\/charts\/util.py#L31-L41","negative":"Returns protobuf mapcontainer. Read from translation file."} {"query":"Save a model to protobuf files so that it can be used in tensorflow inference .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = \"little_endian\" , arg_4 = \"nhwc\" ) : callBigDlFunc ( arg_0 . bigdl_type , \"saveTF\" , arg_0 . value , arg_1 , arg_2 , arg_3 , arg_4 )","id_":252865,"task_name":"https:\/\/github.com\/intel-analytics\/BigDL\/blob\/e9c19788285986ab789a2e2998f9a85d7524779f\/pyspark\/bigdl\/nn\/layer.py#L543-L557","negative":"One way to calibrate the band pass is to take the median value\n for every frequency fine channel, and divide by it."} {"query":"Merges two Reservation s .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = False , arg_5 = True , ** arg_6 ) : arg_7 = arg_0 . _Func_initial ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = True , ** arg_6 ) def get_long_running_output ( arg_8 ) : arg_9 = arg_0 . _deserialize ( '[ReservationResponse]' , arg_8 ) if arg_4 : arg_10 = ClientRawResponse ( arg_9 , arg_8 ) return arg_10 return arg_9 arg_11 = arg_6 . get ( 'long_running_operation_timeout' , arg_0 . config . long_running_operation_timeout ) if arg_5 is True : arg_12 = ARMPolling ( arg_11 , ** arg_6 ) elif arg_5 is False : arg_12 = NoPolling ( ) else : arg_12 = arg_5 return LROPoller ( arg_0 . 
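Record 252864 ("Much like the built-in function range but accepts floats") maps to a short generator; the name `frange` is an assumption:

```python
def frange(start=0, stop=None, step=1):
    """Much like the built-in range(), but accepts floats."""
    start = float(start)
    while start < stop:  # stop must be supplied; the None default mirrors the record
        yield start
        start += step
```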
_client , arg_7 , get_long_running_output , arg_12 )","id_":252866,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-mgmt-reservations\/azure\/mgmt\/reservations\/operations\/reservation_operations.py#L194-L243","negative":"delete a backend, and update the secrets file"} {"query":"Checks the video upload status Newly uploaded videos may be in the processing state","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . authenticated : raise ApiError ( _ ( \"Authentication is required\" ) ) arg_2 = arg_0 . fetch_video ( arg_1 ) arg_3 = Api . yt_service . CheckUploadStatus ( arg_2 ) if arg_3 is not None : arg_4 = arg_3 [ 0 ] arg_5 = arg_3 [ 1 ] return { \"upload_state\" : arg_4 , \"detailed_message\" : arg_5 } else : return True","id_":252867,"task_name":"https:\/\/github.com\/laplacesdemon\/django-youtube\/blob\/8051ef372473eccb053f773c68e2e5e1b2cfb538\/django_youtube\/api.py#L223-L247","negative":"Compute the deflections of a list of galaxies from an input grid, by summing the individual deflections \\\n of each galaxy's mass profile.\n\n If the input grid is a *grids.SubGrid*, the potential is calculated on the sub-grid and binned-up to the \\\n original regular grid by taking the mean value of every set of sub-pixels.\n\n If no galaxies are entered into the function, an array of all zeros is returned.\n\n Parameters\n -----------\n grid : RegularGrid\n The grid (regular or sub) of (y,x) arc-second coordinates at the centre of every unmasked pixel which the \\\n deflections is calculated on.\n galaxies : [galaxy.Galaxy]\n The galaxies whose mass profiles are used to compute the surface densities."} {"query":"Get CAs whose certificates are suggested for client authentication .","positive":"def Func ( arg_0 ) : arg_1 = _lib . SSL_get_client_CA_list ( arg_0 . _ssl ) if arg_1 == _ffi . NULL : return [ ] arg_2 = [ ] for arg_3 in range ( _lib . sk_X509_NAME_num ( arg_1 ) ) : arg_4 = _lib . sk_X509_NAME_value ( arg_1 , arg_3 ) arg_5 = _lib . X509_NAME_dup ( arg_4 ) _openssl_assert ( arg_5 != _ffi . NULL ) arg_6 = X509Name . __new__ ( X509Name ) arg_6 . _name = _ffi . gc ( arg_5 , _lib . X509_NAME_free ) arg_2 . append ( arg_6 ) return arg_2","id_":252868,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/SSL.py#L2022-L2049","negative":"Close this SPK file."} {"query":"alternative to os .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : if arg_2 : arg_3 = 'Func -R %s %s' % ( arg_1 , arg_0 ) else : arg_3 = 'Func %s %s' % ( arg_1 , arg_0 ) return sh ( arg_3 )","id_":252869,"task_name":"https:\/\/github.com\/bmaeser\/pyque\/blob\/856dceab8d89cf3771cf21e682466c29a85ae8eb\/pyque\/sh.py#L82-L91","negative":"Removes the video from youtube and from db\n Requires POST"} {"query":"Returns the transaction as seen by the blockchain after being included into a block","positive":"def Func ( arg_0 , arg_1 , arg_2 = 10 ) : arg_3 = 10 for arg_4 in arg_0 . 
blocks ( ) : arg_3 += 1 for arg_5 in arg_4 [ \"transactions\" ] : if sorted ( arg_5 [ \"signatures\" ] ) == sorted ( arg_1 [ \"signatures\" ] ) : return arg_5 if arg_3 > arg_2 : raise Exception ( \"The operation has not been added after 10 blocks!\" )","id_":252870,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenecommon\/blockchain.py#L250-L273","negative":"The speed limit for a boid.\n \n Boids can momentarily go very fast,\n something that is impossible for real animals."} {"query":"This uploads a file to S3 .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = False ) : if not arg_2 : arg_2 = boto3 . client ( 's3' ) try : arg_2 . upload_file ( arg_0 , arg_1 , os . path . basename ( arg_0 ) ) return 's3:\/\/%s\/%s' % ( arg_1 , os . path . basename ( arg_0 ) ) except Exception as e : LOGEXCEPTION ( 'could not upload %s to bucket: %s' % ( arg_0 , arg_1 ) ) if arg_3 : raise return None","id_":252871,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/awsutils.py#L275-L318","negative":"Returns a dictionary with all the past baking statuses of a single book."} {"query":"Convert batch normalization layer .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 ) : print ( 'Converting batchnorm ...' ) if arg_6 == 'short' : arg_7 = 'BN' + random_string ( 6 ) elif arg_6 == 'keep' : arg_7 = arg_1 else : arg_7 = arg_1 + str ( random . random ( ) ) arg_8 = '{0}.bias' . format ( arg_1 ) arg_9 = '{0}.weight' . format ( arg_1 ) arg_10 = '{0}.running_mean' . format ( arg_1 ) arg_11 = '{0}.running_var' . format ( arg_1 ) if arg_8 in arg_5 : arg_12 = arg_5 [ arg_8 ] . numpy ( ) if arg_9 in arg_5 : arg_13 = arg_5 [ arg_9 ] . numpy ( ) arg_14 = arg_5 [ arg_10 ] . numpy ( ) arg_15 = arg_5 [ arg_11 ] . numpy ( ) arg_16 = arg_0 [ 'epsilon' ] arg_17 = arg_0 [ 'momentum' ] if arg_9 not in arg_5 : arg_18 = keras . layers . BatchNormalization ( axis = 1 , arg_17 = arg_17 , epsilon = arg_16 , center = False , scale = False , arg_5 = [ arg_14 , arg_15 ] , name = arg_7 ) else : arg_18 = keras . layers . BatchNormalization ( axis = 1 , arg_17 = arg_17 , epsilon = arg_16 , arg_5 = [ arg_13 , arg_12 , arg_14 , arg_15 ] , name = arg_7 ) arg_4 [ arg_2 ] = arg_18 ( arg_4 [ arg_3 [ 0 ] ] )","id_":252872,"task_name":"https:\/\/github.com\/nerox8664\/pytorch2keras\/blob\/750eaf747323580e6732d0c5ba9f2f39cb096764\/pytorch2keras\/normalization_layers.py#L9-L61","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Signal the start of the process .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . logger . info ( json . dumps ( [ 'START' , arg_0 . 
name , arg_1 ] ) )","id_":252873,"task_name":"https:\/\/github.com\/dougalsutherland\/skl-groups\/blob\/2584c10a413626c6d5f9078cdbf3dcc84e4e9a5b\/skl_groups\/utils.py#L114-L123","negative":"Initialize the bucket map assuming the given number of maxBuckets."} {"query":"Handles GET requests and instantiates a blank version of the formset .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_4 = arg_0 . construct_formset ( ) return arg_0 . render_to_response ( arg_0 . Func_context_data ( arg_4 = arg_4 ) )","id_":252874,"task_name":"https:\/\/github.com\/AndrewIngram\/django-extra-views\/blob\/188e1bf1f15a44d9a599028d020083af9fb43ea7\/extra_views\/formsets.py#L264-L269","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Validates JSON dict against a schema .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : try : if isinstance ( arg_1 , str ) : arg_3 = arg_1 arg_1 = _SCHEMAS [ arg_3 ] arg_4 = _get_validator ( arg_3 ) arg_4 . validate ( arg_0 ) else : jsonschema . validate ( arg_0 , arg_1 ) except jsonschema . ValidationError as err : if arg_2 is None : arg_2 = \"JSON failed validation. Set Qiskit log level to DEBUG \" \"for further information.\" arg_5 = SchemaValidationError ( arg_2 ) arg_5 . __cause__ = _SummaryValidationError ( err ) logger . debug ( '%s' , _format_causes ( err ) ) raise arg_5","id_":252875,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/validation\/jsonschema\/schema_validation.py#L112-L145","negative":"Sets the player's paused state."} {"query":"Get the bounding box for the mesh","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . attributes . get ( 'POSITION' ) return arg_2 . min , arg_2 . max","id_":252876,"task_name":"https:\/\/github.com\/Contraz\/demosys-py\/blob\/6466128a3029c4d09631420ccce73024025bd5b6\/demosys\/loaders\/scene\/gltf.py#L452-L455","negative":"setting baudrate if supported"} {"query":"Set the default value on a per instance basis .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = type ( arg_1 ) . mro ( ) arg_3 = '_%s_default' % arg_0 . name for arg_4 in arg_2 [ : arg_2 . index ( arg_0 . this_class ) + 1 ] : if arg_3 in arg_4 . __dict__ : break else : arg_5 = arg_0 . get_default_value ( ) arg_6 = arg_0 . _validate ( arg_1 , arg_5 ) arg_1 . _trait_values [ arg_0 . name ] = arg_6 return arg_1 . _trait_dyn_inits [ arg_0 . name ] = arg_4 . __dict__ [ arg_3 ]","id_":252877,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/traitlets.py#L245-L267","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Decode state and return param .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'user_state' ) : if arg_1 and arg_0 . supports_user_state : return json . loads ( base64 . urlsafe_b64decode ( unquote ( str ( arg_1 ) ) ) . 
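Record 252874 ("Handles GET requests and instantiates a blank version of the formset") is a formset-view method; `construct_formset`, `render_to_response`, and `get_context_data` are the mixin hooks the record calls on `self` (the last with its "get" prefix masked as `Func_context_data`):

```python
def get(self, request, *args, **kwargs):
    """Handle GET: render the view around a freshly built, blank formset."""
    formset = self.construct_formset()
    return self.render_to_response(self.get_context_data(formset=formset))
```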
decode ( 'utf-8' ) ) [ arg_2 ] else : return arg_1 if arg_2 == 'csrf' else ''","id_":252878,"task_name":"https:\/\/github.com\/authomatic\/authomatic\/blob\/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e\/authomatic\/providers\/oauth2.py#L262-L284","negative":"Clear out the database"} {"query":"Set a property value or remove a property .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False ) : assert arg_2 is None or xml_tools . is_etree_element ( arg_2 ) if arg_1 in _lockPropertyNames : raise DAVError ( HTTP_FORBIDDEN , err_condition = PRECONDITION_CODE_ProtectedProperty ) arg_4 = arg_0 . environ [ \"wsgidav.config\" ] arg_5 = arg_4 . get ( \"mutable_live_props\" , [ ] ) if ( arg_1 . startswith ( \"{DAV:}\" ) and arg_1 in _standardLivePropNames and arg_1 in arg_5 ) : if arg_1 in ( \"{DAV:}getlastmodified\" , \"{DAV:}last_modified\" ) : try : return arg_0 . set_last_modified ( arg_0 . path , arg_2 . text , arg_3 ) except Exception : _logger . warning ( \"Provider does not support set_last_modified on {}.\" . format ( arg_0 . path ) ) raise DAVError ( HTTP_FORBIDDEN ) if arg_1 . startswith ( \"{urn:schemas-microsoft-com:}\" ) : arg_6 = arg_0 . environ . get ( \"HTTP_USER_AGENT\" , \"None\" ) arg_7 = arg_4 . get ( \"hotfixes\" , { } ) . get ( \"emulate_win32_lastmod\" , False ) if arg_7 and \"MiniRedir\/6.1\" not in arg_6 : if \"Win32LastModifiedTime\" in arg_1 : return arg_0 . set_last_modified ( arg_0 . path , arg_2 . text , arg_3 ) elif \"Win32FileAttributes\" in arg_1 : return True elif \"Win32CreationTime\" in arg_1 : return True elif \"Win32LastAccessTime\" in arg_1 : return True arg_8 = arg_0 . provider . prop_manager if arg_8 and not arg_1 . startswith ( \"{DAV:}\" ) : arg_9 = arg_0 . get_ref_url ( ) if arg_2 is None : return arg_8 . remove_property ( arg_9 , arg_1 , arg_3 , arg_0 . environ ) else : arg_2 = etree . tostring ( arg_2 ) return arg_8 . write_property ( arg_9 , arg_1 , arg_2 , arg_3 , arg_0 . environ ) raise DAVError ( HTTP_FORBIDDEN )","id_":252879,"task_name":"https:\/\/github.com\/mar10\/wsgidav\/blob\/cec0d84222fc24bea01be1cea91729001963f172\/wsgidav\/dav_provider.py#L718-L807","negative":"Write the index.html file for this report."} {"query":"Retrieve item fields or creator types","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_1 + arg_3 arg_5 = arg_2 . format ( i = arg_3 ) if arg_0 . templates . get ( arg_4 ) and not arg_0 . _updated ( arg_5 , arg_0 . templates [ arg_4 ] , arg_4 ) : return arg_0 . templates [ arg_4 ] [ \"tmplt\" ] arg_6 = arg_0 . _retrieve_data ( arg_5 ) return arg_0 . _cache ( arg_6 , arg_4 )","id_":252880,"task_name":"https:\/\/github.com\/urschrei\/pyzotero\/blob\/b378966b30146a952f7953c23202fb5a1ddf81d9\/pyzotero\/zotero.py#L1094-L1106","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Gets a list with a certain size of suggestions for an object","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ContentType . objects . get_for_model ( type ( arg_0 ) ) try : return ObjectViewDictionary . objects . filter ( current_object_id = arg_0 . id , current_content_type = arg_2 ) . extra ( order_by = [ '-visits' ] ) [ : arg_1 ] except : return ObjectViewDictionary . objects . filter ( current_object_id = arg_0 . id , current_content_type = arg_2 ) . 
extra ( order_by = [ '-visits' ] )","id_":252881,"task_name":"https:\/\/github.com\/dreidev\/Suggestions\/blob\/f04c181dc815d32c35b44c6e1c91521e88a9dd6c\/suggestions\/views.py#L100-L111","negative":"Propagate \"clk\" clock and reset \"rst\" signal to all subcomponents"} {"query":"Return the value current bound to the name name_sym in the namespace specified by ns_sym .","positive":"def Func ( arg_0 : arg_1 . Symbol , arg_3 : arg_1 . Symbol ) -> \"Optional[Var]\" : arg_4 = Namespace . get ( arg_0 ) if arg_4 : return arg_4 . find ( arg_3 ) return None","id_":252882,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L251-L257","negative":"Drops the historical sap_success_factors table named herein."} {"query":"To perform the munging operations on a frame specified in steps on the frame fr .","positive":"def Func ( arg_0 , arg_1 ) : assert_is_type ( arg_1 , H2OFrame ) arg_2 = \"[%s]\" % \",\" . join ( quoted ( step [ 1 ] . to_rest ( step [ 0 ] ) . replace ( '\"' , \"'\" ) ) for step in arg_0 . steps ) arg_3 = h2o . api ( \"POST \/99\/Assembly\" , data = { \"steps\" : arg_2 , \"frame\" : arg_1 . frame_id } ) arg_0 . id = arg_3 [ \"assembly\" ] [ \"name\" ] return H2OFrame . get_frame ( arg_3 [ \"result\" ] [ \"name\" ] )","id_":252883,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/assembly.py#L131-L142","negative":"Reset the parameters."} {"query":"Return list of sections for the passed course SIS ID .","positive":"def Func ( arg_0 , arg_1 , arg_2 = { } ) : return arg_0 . get_sections_in_course ( arg_0 . _sis_id ( arg_1 , sis_field = \"course\" ) , arg_2 )","id_":252884,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/sections.py#L39-L44","negative":"Update the estimate.\n\n Parameters\n ----------\n new_val: float\n new observated value of estimated quantity."} {"query":"Double - click primary mouse button .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 0 arg_0 . _queueMouseButton ( arg_1 , Quartz . kCGMouseButtonLeft , arg_2 ) arg_0 . _queueMouseButton ( arg_1 , Quartz . kCGMouseButtonLeft , arg_2 , clickCount = 2 ) arg_0 . _postQueuedEvents ( )","id_":252885,"task_name":"https:\/\/github.com\/alex-kostirin\/pyatomac\/blob\/3f46f6feb4504315eec07abb18bb41be4d257aeb\/atomac\/AXClasses.py#L1159-L1175","negative":"Clean up stats file, if configured to do so."} {"query":"Base method to fetch values and to set defaults in case they don t exist .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : try : arg_4 = arg_0 . get ( arg_1 , arg_2 ) except MissingSetting : arg_0 . set ( arg_1 , arg_2 , arg_3 ) arg_4 = arg_3 return arg_4","id_":252886,"task_name":"https:\/\/github.com\/ossobv\/exactonline\/blob\/f6bee418a9cb1fcf3ef17347ea7ab0dd3b573fde\/exactonline\/storage\/base.py#L61-L72","negative":"Show device heap size"} {"query":"Preprocess the excel file .","positive":"def Func ( arg_0 : arg_1 ) -> pd . DataFrame : if not os . path . exists ( arg_0 ) : raise ValueError ( \"Error: %s file not found\" % arg_0 ) return pd . 
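Record 252886 ("Base method to fetch values and to set defaults in case they don t exist") restores to a get-or-set-default helper. `MissingSetting` is the exception name taken from the record; the standalone `store` argument replaces its `self`:

```python
class MissingSetting(Exception):
    """Raised by store.get() when a setting is absent (name from the record)."""

def get_with_default(store, section, option, default):
    """Fetch a value; if it is missing, persist the default and return it."""
    try:
        value = store.get(section, option)
    except MissingSetting:
        store.set(section, option, default)
        value = default
    return value
```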
read_excel ( arg_0 , sheetname = 0 , header = 0 )","id_":252887,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/neurommsig\/export.py#L98-L108","negative":"iterate through the attributes of every logger's handler\n\n this is used to switch out stderr and stdout in tests when buffer is True\n\n :returns: generator of tuples, each tuple has (name, handler, member_name, member_val)"} {"query":"Create and start a swarm job .","positive":"def Func ( arg_0 , arg_1 = \"\" , arg_2 = \"\" , arg_3 = \"\" , arg_4 = None , arg_5 = None , arg_6 = False ) : if arg_4 is None : arg_4 = Configuration . getInt ( \"nupic.hypersearch.minWorkersPerSwarm\" ) if arg_5 is None : arg_5 = Configuration . getInt ( \"nupic.hypersearch.maxWorkersPerSwarm\" ) return ClientJobsDAO . get ( ) . jobInsert ( arg_0 = arg_0 , cmdLine = \"$HYPERSEARCH\" , arg_1 = arg_1 , arg_2 = arg_2 , arg_6 = arg_6 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , jobType = ClientJobsDAO . JOB_TYPE_HS )","id_":252888,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/api.py#L34-L69","negative":"Return the revocations in this certificate revocation list.\n\n These revocations will be provided by value, not by reference.\n That means it's okay to mutate them: it won't affect this CRL.\n\n :return: The revocations in this CRL.\n :rtype: :class:`tuple` of :class:`Revocation`"} {"query":"poke for a non empty directory","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . hook ( arg_0 . hdfs_conn_id ) . get_conn ( ) arg_3 = [ f for f in arg_2 . ls ( [ arg_0 . filepath ] , include_toplevel = True ) ] arg_3 = arg_0 . filter_for_ignored_ext ( arg_3 , arg_0 . ignored_ext , arg_0 . ignore_copying ) arg_3 = arg_0 . filter_for_filesize ( arg_3 , arg_0 . file_size ) if arg_0 . be_empty : arg_0 . log . info ( 'Poking for filepath %s to a empty directory' , arg_0 . filepath ) return len ( arg_3 ) == 1 and arg_3 [ 0 ] [ 'path' ] == arg_0 . filepath else : arg_0 . log . info ( 'Poking for filepath %s to a non empty directory' , arg_0 . filepath ) arg_3 . pop ( 0 ) return bool ( arg_3 ) and arg_3 [ 0 ] [ 'file_type' ] == 'f'","id_":252889,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/sensors\/hdfs_sensor.py#L57-L74","negative":"Attempts to find the Teradata install directory with the defaults\n for a given platform. Should always return `None` when the defaults\n are not present and the TERADATA_HOME environment variable wasn't\n explicitly set to the correct install location."} {"query":"Count of complete words between two addresses","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_2 ) : assert arg_3 >= arg_1 , ( arg_1 , arg_3 ) arg_4 = max ( 0 , ( arg_3 - arg_1 ) - ( arg_1 % arg_0 . wordWidth ) ) return arg_4 \/\/ arg_0 . wordWidth","id_":252890,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/frameTmplUtils.py#L215-L220","negative":"r\"\"\"Bernoulli likelihood sampling.\n\n Sample according to\n\n .. 
math::\n\n \\mathbf y \\sim \\prod_{i=1}^n\n \\text{Bernoulli}(\\mu_i = \\text{logit}(z_i))\n \\mathcal N(~ o \\mathbf 1 + \\mathbf a^\\intercal \\boldsymbol\\alpha;\n ~ (h^2 - v_c)\\mathrm G^\\intercal\\mathrm G +\n (1-h^2-v_c)\\mathrm I ~)\n\n using the canonical Logit link function to define the conditional Bernoulli\n mean :math:`\\mu_i`.\n\n The causal :math:`\\mathbf a` covariates and the corresponding effect-sizes\n are randomly draw according to the following idea. The ``causal_variants``,\n if given, are first mean-zero and std-one normalized and then having\n its elements divided by the squared-root the the number of variances::\n\n causal_variants = _stdnorm(causal_variants, axis=0)\n causal_variants \/= sqrt(causal_variants.shape[1])\n\n The causal effect-sizes :math:`\\boldsymbol\\alpha` are draw from\n :math:`\\{-1, +1\\}` and subsequently normalized for mean-zero and std-one\"\"\n\n Parameters\n ----------\n random_state : random_state\n Set the initial random state.\n\n Example\n -------\n\n .. doctest::\n\n >>> from glimix_core.random import bernoulli_sample\n >>> from numpy.random import RandomState\n >>> offset = 5\n >>> G = [[1, -1], [2, 1]]\n >>> bernoulli_sample(offset, G, random_state=RandomState(0))\n array([1., 1.])"} {"query":"Create an iterable from the iterables that contains each element once .","positive":"def Func ( arg_0 ) : arg_1 = set ( ) def included ( arg_2 ) : arg_3 = arg_2 in arg_1 arg_1 . add ( arg_2 ) return arg_3 return [ arg_2 for arg_4 in arg_0 for arg_2 in arg_4 if not included ( arg_2 ) ]","id_":252891,"task_name":"https:\/\/github.com\/fossasia\/knittingpattern\/blob\/8e608896b0ab82fea1ca9fbfa2b4ee023d8c8027\/knittingpattern\/utils.py#L8-L22","negative":"Generate a tag for the alignment of the geometry of the bulge and disk of a bulge-disk system, to customize \\ \n phase names based on the bulge-disk model. This adds together the bulge_disk tags generated in the 3 functions\n above"} {"query":"generate a dependencies graph and add some information about it in the report s section","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : _dependencies_graph ( arg_0 , arg_1 ) arg_2 . append ( Paragraph ( \"%simports graph has been written to %s\" % ( arg_3 , arg_0 ) ) )","id_":252892,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/imports.py#L187-L192","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"r This function handles the retrieval of a chemical s refractive index . Lookup is based on CASRNs . Will automatically select a data source to use if no Method is provided ; returns None if the data is not available .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = None , arg_4 = True ) : def list_methods ( ) : arg_5 = [ ] if arg_0 in CRC_RI_organic . index : arg_5 . append ( CRC ) arg_5 . append ( NONE ) return arg_5 if arg_2 : return list_methods ( ) if not arg_3 : arg_3 = list_methods ( ) [ 0 ] if arg_3 == CRC : arg_6 = float ( CRC_RI_organic . at [ arg_0 , 'RI' ] ) if arg_4 : arg_7 = float ( CRC_RI_organic . 
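Record 252891 ("Create an iterable from the iterables that contains each element once") keeps first occurrences while flattening; the closure-in-a-comprehension below is exactly the record's structure, with assumed names:

```python
def unique_everseen(iterables):
    """Flatten iterables, keeping only the first occurrence of each element."""
    seen = set()

    def included(element):
        already = element in seen  # True on every visit after the first
        seen.add(element)
        return already

    return [e for it in iterables for e in it if not included(e)]
```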
at [ arg_0 , 'RIT' ] ) elif arg_3 == NONE : arg_6 , arg_7 = None , None else : raise Exception ( 'Failure in function' ) if arg_4 : return arg_6 , arg_7 else : return arg_6","id_":252893,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/refractivity.py#L44-L116","negative":"Multiply tensor of matrices by vectors assuming values stored are logs."} {"query":"Attempt to load plugins from the path specified .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 , arg_3 , arg_4 in pkgutil . iter_modules ( [ arg_1 ] ) : arg_5 , arg_6 , arg_7 = imp . find_module ( arg_3 , [ arg_1 ] ) arg_8 = "streamlink.plugin.{0}" . format ( arg_3 ) try : arg_0 . load_plugin ( arg_8 , arg_5 , arg_6 , arg_7 ) except Exception : sys . stderr . write ( "Failed to load plugin {0}:\\n" . format ( arg_3 ) ) print_small_exception ( "load_plugin" ) continue","id_":252894,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/session.py#L449-L466","negative":"Save a vectorized image to file."} {"query":"Convert RGB to ANSI 256 color","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 == arg_1 and arg_1 == arg_2 : if arg_0 < 8 : return 16 if arg_0 > 248 : return 231 return round ( ( ( arg_0 - 8 ) \/ 247.0 ) * 24 ) + 232 arg_3 = 36 * round ( arg_0 \/ 255.0 * 5.0 ) arg_4 = 6 * round ( arg_1 \/ 255.0 * 5.0 ) arg_5 = round ( arg_2 \/ 255.0 * 5.0 ) arg_6 = 16 + arg_3 + arg_4 + arg_5 return arg_6","id_":252895,"task_name":"https:\/\/github.com\/timofurrer\/colorful\/blob\/919fa6da17865cc5e01e6b16119193a97d180dc9\/colorful\/ansi.py#L61-L77","negative":"This method obtains the actual features."} {"query":"Uses glob to find all files or folders that match the regex starting from the base_directory .","positive":"def Func ( arg_0 , arg_1 = '' ) : arg_2 = glob ( op . join ( arg_0 , arg_1 ) ) for arg_3 , arg_4 , arg_5 in os . walk ( arg_0 ) : for arg_6 in arg_4 : arg_2 . extend ( glob ( op . join ( arg_3 , arg_6 , arg_1 ) ) ) return arg_2","id_":252896,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/files\/search.py#L253-L274","negative":"Gets status of response."} {"query":"Renew the message lock .","positive":"async def Func ( arg_0 ) : if hasattr ( arg_0 . _receiver , 'locked_until' ) : raise TypeError ( "Session messages cannot be renewed. Please renew the Session lock instead." ) arg_0 . _is_live ( 'renew' ) arg_1 = await arg_0 . _receiver . _Funcs ( arg_0 . lock_token ) arg_0 . _expiry = datetime . datetime . fromtimestamp ( arg_1 [ b'expirations' ] [ 0 ] \/ 1000.0 )","id_":252897,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicebus\/azure\/servicebus\/aio\/async_message.py#L44-L63","negative":"Write the index.html file for this report."} {"query":"Adds secondary inputs to the start of the pipeline .","positive":"def Func ( arg_0 , arg_1 ) : logger . debug ( "Setting secondary inputs: {}" . format ( arg_1 ) ) arg_2 = "\\n" . join ( list ( arg_1 . values ( ) ) ) arg_0 . _context = { ** arg_0 . _context , ** { "secondary_inputs" : arg_2 } }","id_":252898,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/process.py#L739-L755","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. 
{'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"Produce hex dump of all data containing the bits from pos to stream . pos","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 + 7 >> 3 arg_3 = arg_0 . stream . pos + 7 >> 3 return '' . join ( map ( '{:02x} ' . format , arg_0 . stream . data [ arg_2 : arg_3 ] ) )","id_":252899,"task_name":"https:\/\/github.com\/google\/brotli\/blob\/4b2b2d4f83ffeaac7708e44409fe34896a01a278\/research\/brotlidump.py#L1376-L1383","negative":"Set the enthalpy of the package to the specified value, and\n recalculate it's temperature.\n\n :param H: The new enthalpy value. [kWh]"} {"query":"recurs into list for indentation","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 = 1 ) : for arg_4 in arg_0 : if isinstance ( arg_4 , indentable ) : arg_4 . set_indent ( arg_2 ) if isinstance ( arg_4 , arg_1 ) : Func ( arg_4 , arg_2 )","id_":252900,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/fmt.py#L54-L60","negative":"Save a vectorized image to file."} {"query":"Extracts the X . 509 certificates from the server handshake bytes for use when debugging","positive":"def Func ( arg_0 ) : arg_1 = [ ] arg_2 = None for arg_3 , arg_4 , arg_5 in parse_tls_records ( arg_0 ) : if arg_3 != b'\\x16' : continue for arg_6 , arg_7 in parse_handshake_messages ( arg_5 ) : if arg_6 == b'\\x0b' : arg_2 = arg_7 break if arg_2 : break if arg_2 : arg_8 = 3 while arg_8 < len ( arg_2 ) : arg_9 = int_from_bytes ( arg_2 [ arg_8 : arg_8 + 3 ] ) arg_10 = arg_8 + 3 arg_11 = arg_10 + arg_9 arg_8 = arg_11 arg_12 = arg_2 [ arg_10 : arg_11 ] arg_1 . append ( Certificate . load ( arg_12 ) ) return arg_1","id_":252901,"task_name":"https:\/\/github.com\/wbond\/oscrypto\/blob\/af778bf1c88bf6c4a7342f5353b130686a5bbe1c\/oscrypto\/_tls.py#L37-L74","negative":"Delete group."} {"query":"This returns the residual between the model mags and the actual mags .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 , arg_5 , arg_6 , arg_7 , arg_8 = ( fourier_sinusoidal_func ( arg_0 , arg_1 , arg_2 , arg_3 ) ) return ( arg_7 - arg_4 ) \/ arg_8","id_":252902,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/lcmodels\/sinusoidal.py#L77-L114","negative":"Wait until the job finishes.\n\n This method will continuously query the server about the status of the job, until the job reaches a\n completion. During this time we will display (in stdout) a progress bar with % completion status."} {"query":"Coroutine starting point . Produces text stream and forwards to consumers","positive":"def Func ( arg_0 , arg_1 = arg_2 . stdin ) : for arg_4 in arg_1 : while len ( arg_4 ) > 600 : arg_5 , arg_6 , arg_4 = arg_4 . partition ( ' ' ) assert len ( arg_5 ) <= 600 arg_0 . send ( '' . join ( [ arg_5 , arg_6 ] ) ) arg_0 . send ( arg_4 ) arg_1 . close ( ) return arg_0 . close ( )","id_":252903,"task_name":"https:\/\/github.com\/jjangsangy\/py-translate\/blob\/fe6279b2ee353f42ce73333ffae104e646311956\/translate\/coroutines.py#L183-L204","negative":"Decode a CONNACK control packet."} {"query":"Returns True if arg_name might be a valid parameter for fn_or_cls .","positive":"def Func ( arg_0 , arg_1 ) : if inspect . isclass ( arg_0 ) : arg_2 = _find_class_construction_fn ( arg_0 ) else : arg_2 = arg_0 while hasattr ( arg_2 , '__wrapped__' ) : arg_2 = arg_2 . 
__wrapped__ arg_3 = _get_cached_arg_spec ( arg_2 ) if six . PY3 : if arg_3 . varkw : return True return arg_1 in arg_3 . args or arg_1 in arg_3 . kwonlyargs else : if arg_3 . keywords : return True return arg_1 in arg_3 . args","id_":252904,"task_name":"https:\/\/github.com\/google\/gin-config\/blob\/17a170e0a6711005d1c78e67cf493dc44674d44f\/gin\/config.py#L635-L663","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Verifies an RSA DSA or ECDSA signature via CNG","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = False ) : if arg_3 == 'raw' : arg_5 = arg_2 else : arg_6 = { 'md5' : BcryptConst . BCRYPT_MD5_ALGORITHM , 'sha1' : BcryptConst . BCRYPT_SHA1_ALGORITHM , 'sha256' : BcryptConst . BCRYPT_SHA256_ALGORITHM , 'sha384' : BcryptConst . BCRYPT_SHA384_ALGORITHM , 'sha512' : BcryptConst . BCRYPT_SHA512_ALGORITHM } [ arg_3 ] arg_5 = getattr ( hashlib , arg_3 ) ( arg_2 ) . digest ( ) arg_7 = null ( ) arg_8 = 0 if arg_0 . algorithm == 'rsa' : if arg_4 : arg_8 = BcryptConst . BCRYPT_PAD_PSS arg_9 = struct ( bcrypt , 'BCRYPT_PSS_PADDING_INFO' ) arg_10 = unwrap ( arg_9 ) arg_11 = buffer_from_unicode ( arg_6 ) arg_10 . pszAlgId = cast ( bcrypt , 'wchar_t *' , arg_11 ) arg_10 . cbSalt = len ( arg_5 ) else : arg_8 = BcryptConst . BCRYPT_PAD_PKCS1 arg_9 = struct ( bcrypt , 'BCRYPT_PKCS1_PADDING_INFO' ) arg_10 = unwrap ( arg_9 ) if arg_3 == 'raw' : arg_10 . pszAlgId = null ( ) else : arg_11 = buffer_from_unicode ( arg_6 ) arg_10 . pszAlgId = cast ( bcrypt , 'wchar_t *' , arg_11 ) arg_7 = cast ( bcrypt , 'void *' , arg_9 ) else : try : arg_1 = algos . DSASignature . load ( arg_1 ) . to_p1363 ( ) except ( ValueError , OverflowError , TypeError ) : raise SignatureError ( 'Signature is invalid' ) arg_14 = bcrypt . BCryptVerifySignature ( arg_0 . key_handle , arg_7 , arg_5 , len ( arg_5 ) , arg_1 , len ( arg_1 ) , arg_8 ) arg_15 = arg_14 == BcryptConst . STATUS_INVALID_SIGNATURE arg_15 = arg_15 or arg_14 == BcryptConst . STATUS_INVALID_PARAMETER if arg_15 : raise SignatureError ( 'Signature is invalid' ) handle_error ( arg_14 )","id_":252905,"task_name":"https:\/\/github.com\/wbond\/oscrypto\/blob\/af778bf1c88bf6c4a7342f5353b130686a5bbe1c\/oscrypto\/_win\/asymmetric.py#L2415-L2498","negative":"Returns a sorted list of all the mappings for this memory.\n\n :return: a list of mappings.\n :rtype: list"} {"query":"Push latent means and covariances forward through the observation model .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : with tf . name_scope ( \"Func\" ) : arg_3 = build_pushforward_latents_step ( arg_0 . get_observation_matrix_for_timestep , arg_0 . get_observation_noise_for_timestep ) arg_1 = distribution_util . move_dimension ( arg_1 , source_idx = - 2 , dest_idx = 0 ) arg_1 = arg_1 [ ... , tf . newaxis ] arg_2 = distribution_util . move_dimension ( arg_2 , source_idx = - 3 , dest_idx = 0 ) ( arg_4 , arg_5 ) = arg_3 ( _ = None , latent_t_mean_cov = ( arg_0 . initial_step , arg_1 [ arg_0 . initial_step ] , arg_2 [ arg_0 . initial_step ] ) ) arg_6 = tf . range ( arg_0 . initial_step , arg_0 . initial_step + arg_0 . num_timesteps ) arg_7 , arg_8 = tf . scan ( arg_3 , elems = ( arg_6 , arg_1 , arg_2 ) , initializer = ( arg_4 , arg_5 ) , parallel_iterations = 10000 ) arg_7 = distribution_util . move_dimension ( arg_7 [ ... , 0 ] , source_idx = 0 , dest_idx = - 2 ) arg_8 = distribution_util . 
move_dimension ( arg_8 , source_idx = 0 , dest_idx = - 3 ) return arg_7 , arg_8","id_":252906,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/linear_gaussian_ssm.py#L1097-L1145","negative":"This method is called before first step of simulation."} {"query":"Collect all verified variants in a list on institutes and save them to file","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] arg_4 = 0 arg_5 = datetime . datetime . now ( ) . strftime ( '%Y-%m-%d' ) LOG . info ( 'Creating verified variant document..' ) for arg_6 in arg_1 : arg_7 = arg_0 . verified ( institute_id = arg_6 ) LOG . info ( 'Found {} verified variants for customer {}' . format ( len ( arg_7 ) , arg_6 ) ) if not arg_7 : continue arg_8 = set ( ) for arg_9 , arg_10 in CALLERS . items ( ) : for arg_11 in arg_10 : arg_8 . add ( arg_11 . get ( 'id' ) ) arg_12 = export_verified_variants ( arg_7 , arg_8 ) arg_13 = '.' . join ( [ arg_6 , '_verified_variants' , arg_5 ] ) + '.xlsx' arg_14 = Workbook ( os . path . join ( arg_2 , arg_13 ) ) arg_15 = arg_14 . add_worksheet ( ) arg_16 = 0 for arg_17 , arg_18 in enumerate ( VERIFIED_VARIANTS_HEADER + list ( arg_8 ) ) : arg_15 . write ( arg_16 , arg_17 , arg_18 ) for arg_16 , arg_19 in enumerate ( arg_12 , 1 ) : for arg_17 , arg_18 in enumerate ( arg_19 ) : arg_15 . write ( arg_16 , arg_17 , arg_18 ) arg_14 . close ( ) if os . path . exists ( os . path . join ( arg_2 , arg_13 ) ) : arg_4 += 1 return arg_4","id_":252907,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/variants\/controllers.py#L1180-L1226","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Retrieve all pages from a namespace starting from apcontinue .","positive":"def Func ( arg_0 , arg_1 , arg_2 = '' ) : arg_3 = { "action" : "query" , "list" : "allpages" , "aplimit" : arg_0 . limit , "apnamespace" : arg_1 , "format" : "json" } if arg_2 : arg_3 [ 'apcontinue' ] = arg_2 return arg_0 . call ( arg_3 )","id_":252908,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/mediawiki.py#L448-L460","negative":"Set the interval of recording for each indicator.\n\n\n :param tag: tag name. Supported tag names are "LearningRate", "Loss", "Throughput", "Parameters". "Parameters" is an umbrella tag that includes weight, bias, gradWeight, gradBias, and some running status (eg. runningMean and runningVar in BatchNormalization). If you didn't set any triggers, we will by default record Loss and Throughput in each iteration, while *NOT* recording LearningRate and Parameters, as recording parameters may introduce substantial overhead when the model is very big, LearningRate is not a public attribute for all OptimMethod.\n :param trigger: trigger"} {"query":"Get emojis from pagination","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = { 'order_by' : 'updated_at' , 'sort' : 'asc' , 'per_page' : PER_PAGE } arg_4 = urijoin ( arg_1 , str ( arg_2 ) , GitLabClient . EMOJI ) return arg_0 . 
fetch_items ( arg_4 , arg_3 )","id_":252909,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/gitlab.py#L494-L505","negative":"Return an open file-object to the index file"} {"query":"Return the notebook_id for a kernel_id or None .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ k for k , v in arg_0 . _notebook_mapping . iteritems ( ) if v == arg_1 ] if len ( arg_2 ) == 1 : return arg_2 [ 0 ] else : return None","id_":252910,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/html\/notebook\/kernelmanager.py#L247-L253","negative":"Wrapper around json.loads.\n\n Wraps errors in custom exception with a snippet of the data in the message."} {"query":"Tell postgres to encrypt this field with a hashing function .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : if arg_1 is None or arg_1 . startswith ( '\\\\x' ) : return '%s' return arg_0 . get_encrypt_sql ( arg_3 )","id_":252911,"task_name":"https:\/\/github.com\/incuna\/django-pgcrypto-fields\/blob\/406fddf0cbe9091ba71b97206d0f4719c0450ac1\/pgcrypto\/mixins.py#L58-L70","negative":"Downloads all variable star observations by a given observer.\n\n Performs a series of HTTP requests to AAVSO's WebObs search and\n downloads the results page by page. Each page is then passed to\n :py:class:`~pyaavso.parsers.webobs.WebObsResultsParser` and parse results\n are added to the final observation list."} {"query":"TransitionOperator that runs fn repeatedly and traces its outputs .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 , arg_6 : arg_7 [ [ arg_1 , arg_8 ] , arg_8 ] ) -> Tuple [ arg_1 , arg_8 ] : def fn_wrapper ( arg_9 , arg_10 ) : return tf . nest . map_structure ( tf . convert_to_tensor , call_fn ( arg_2 , arg_9 [ 0 ] ) ) def Func_fn_wrapper ( arg_9 ) : return tf . nest . map_structure ( tf . convert_to_tensor , call_fn ( arg_6 , arg_9 ) ) arg_0 = call_fn ( arg_2 , arg_0 ) arg_11 = Func_fn_wrapper ( arg_0 ) arg_0 , arg_12 = mcmc_util . Func_scan ( fn_wrapper , arg_0 , tf . ones ( arg_4 - 1 ) , arg_6 = Func_fn_wrapper ) arg_13 = lambda x , y : tf . concat ( [ tf . convert_to_tensor ( value = x ) [ tf . newaxis ] , y ] , 0 ) return arg_0 , tf . nest . map_structure ( arg_13 , arg_11 , arg_12 )","id_":252912,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/experimental\/fun_mcmc\/fun_mcmc_lib.py#L87-L119","negative":"Generate a tag for the alignment of the geometry of the bulge and disk of a bulge-disk system, to customize \\ \n phase names based on the bulge-disk model. This adds together the bulge_disk tags generated in the 3 functions\n above"} {"query":"Return the raw pickled data from filename .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . debug and arg_0 . debug . should ( 'dataio' ) : arg_0 . debug . write ( \"Reading data from %r\" % ( arg_1 , ) ) arg_2 = open ( arg_1 , 'rb' ) try : arg_3 = pickle . load ( arg_2 ) finally : arg_2 . 
close ( ) return arg_3","id_":252913,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/data.py#L141-L150","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lengths of the boundaries and labels\n match."} {"query":"Return url with updated query parameters .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 , arg_4 , arg_5 , arg_6 = urlsplit ( arg_0 ) arg_7 = parse_qs ( arg_5 ) arg_7 . update ( arg_1 ) return urlunsplit ( ( arg_2 , arg_3 , arg_4 , urlencode ( sorted ( arg_7 . items ( ) ) , doseq = True ) , arg_6 ) , )","id_":252914,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/utils.py#L446-L467","negative":"Read attribute from sysfs and return as string"} {"query":"Checks if a database exists in CosmosDB .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 is None : raise AirflowBadRequest ( "Database name cannot be None." ) arg_2 = list ( arg_0 . get_conn ( ) . QueryDatabases ( { "query" : "SELECT * FROM r WHERE r.id=@id" , "parameters" : [ { "name" : "@id" , "value" : arg_1 } ] } ) ) if len ( arg_2 ) == 0 : return False return True","id_":252915,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/azure_cosmos_hook.py#L124-L140","negative":"Fetch the events pages of a given group."} {"query":"Set asset to fee","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , arg_0 . amount_class ) : arg_0 . fee_asset_id = arg_1 [ "id" ] elif isinstance ( arg_1 , arg_0 . asset_class ) : arg_0 . fee_asset_id = arg_1 [ "id" ] elif arg_1 : arg_0 . fee_asset_id = arg_1 else : arg_0 . fee_asset_id = "1.3.0"","id_":252916,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenecommon\/transactionbuilder.py#L329-L339","negative":"Process data to produce velocity and dropout information."} {"query":"Compute the yticks labels of this grid_stack used for plotting the y - axis ticks when visualizing an image \\","positive":"def Func ( arg_0 ) : return np . linspace ( np . amin ( arg_0 . grid_stack . regular [ : , 0 ] ) , np . amax ( arg_0 . grid_stack . regular [ : , 0 ] ) , 4 )","id_":252917,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/lens\/plane.py#L371-L374","negative":"Stop the profiler."} {"query":"Return the corresponding math mode latex string .","positive":"def Func ( arg_0 , arg_1 = 15 , arg_2 = None ) : if not arg_2 : return "\\textrm{" + arg_0 . name + "}" else : if arg_0 . name not in arg_2 [ - 1 ] : raise NodeException ( "Expected local parameter name: " , "name=%s, " % arg_0 . name , "line=%s, " % arg_0 . line , "file=%s" % arg_0 . file ) else : return arg_2 [ - 1 ] [ arg_0 . name ] . Func ( arg_1 , arg_2 [ 0 : - 1 ] )","id_":252918,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/qasm\/node\/id.py#L42-L54","negative":"Converts py_zipkin's annotations dict to protobuf.\n\n :param annotations: annotations dict.\n :type annotations: dict\n :return: corresponding protobuf's list of annotations.\n :rtype: list"} {"query":"Prepend msg to add some context information","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_0 . err_context = arg_1 arg_0 . 
succ_context = arg_2","id_":252919,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/cli\/src\/python\/result.py#L94-L101","negative":"Compares the given tokens.\n\n :param expected: The expected token.\n :type expected: Union[str, bytes]\n :param actual: The actual token.\n :type actual: Union[str, bytes]\n :return: Do the tokens match?\n :rtype: bool"} {"query":"Parse an ID3v1 tag returning a list of ID3v2 . 4 frames .","positive":"def Func ( arg_0 ) : try : arg_0 = arg_0 [ arg_0 . index ( b'TAG' ) : ] except ValueError : return None if 128 < len ( arg_0 ) or len ( arg_0 ) < 124 : return None arg_1 = \"3s30s30s30s%ds29sBB\" % ( len ( arg_0 ) - 124 ) try : arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 = unpack ( arg_1 , arg_0 ) except StructError : return None if arg_2 != b\"TAG\" : return None def fix ( arg_0 ) : return arg_0 . split ( b'\\x00' ) [ 0 ] . strip ( ) . decode ( 'latin1' ) arg_3 , arg_4 , arg_5 , arg_6 , arg_7 = map ( fix , [ arg_3 , arg_4 , arg_5 , arg_6 , arg_7 ] ) arg_10 = { } if arg_3 : arg_10 [ 'TIT2' ] = TIT2 ( encoding = 0 , text = arg_3 ) if arg_4 : arg_10 [ 'TPE1' ] = TPE1 ( encoding = 0 , text = [ arg_4 ] ) if arg_5 : arg_10 [ 'TALB' ] = TALB ( encoding = 0 , text = arg_5 ) if arg_6 : arg_10 [ 'TDRC' ] = TDRC ( encoding = 0 , text = arg_6 ) if arg_7 : arg_10 [ 'COMM' ] = COMM ( encoding = 0 , lang = 'eng' , desc = \"ID3v1 Comment\" , text = arg_7 ) if arg_8 and ( ( arg_8 != 32 ) or ( arg_0 [ - 3 ] == b'\\x00' [ 0 ] ) ) : arg_10 [ 'TRCK' ] = TRCK ( encoding = 0 , text = str ( arg_8 ) ) if arg_9 != 255 : arg_10 [ 'TCON' ] = TCON ( encoding = 0 , text = str ( arg_9 ) ) return arg_10","id_":252920,"task_name":"https:\/\/github.com\/LordSputnik\/mutagen\/blob\/38e62c8dc35c72b16554f5dbe7c0fde91acc3411\/mutagen\/id3.py#L779-L830","negative":"Returns a sorted list of all the mappings for this memory.\n\n :return: a list of mappings.\n :rtype: list"} {"query":"Returns a pipeline string from a recipe name .","positive":"def Func ( arg_0 ) : arg_1 = \"{}.\" . format ( recipes . __name__ ) for arg_2 , arg_3 , arg_4 in pkgutil . iter_modules ( recipes . __path__ , arg_1 ) : arg_5 = arg_2 . find_module ( arg_3 ) . load_module ( arg_3 ) arg_6 = [ arg_7 for arg_7 in arg_5 . __dict__ . values ( ) if isinstance ( arg_7 , type ) ] for arg_7 in arg_6 : arg_8 = arg_7 ( ) if getattr ( arg_8 , \"name\" , None ) == arg_0 : return arg_8 . brew ( ) logger . error ( colored_print ( \"Recipe name '{}' does not exist.\" . format ( arg_0 ) ) ) sys . exit ( 1 )","id_":252921,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/recipe.py#L649-L688","negative":"Inform the widget about the encoding of the underlying character stream."} {"query":"if maxlen > 0 the message is shortened to maxlen traces .","positive":"def Func ( arg_0 , arg_1 = 6 ) : arg_2 = \"\" for arg_3 , arg_4 in enumerate ( arg_0 ) : arg_2 += \"{0}: {1}\\n\" . format ( arg_3 , get_node_repr ( arg_4 ) ) arg_2 = arg_2 . strip ( \"\\n\" ) arg_5 = arg_2 . split ( \"\\n\" ) if arg_1 and len ( arg_5 ) > arg_1 : arg_3 = int ( arg_1 \/ 2 ) arg_5 = arg_5 [ : arg_3 ] + [ \"...\" ] + arg_5 [ - ( arg_1 - arg_3 ) : ] arg_2 = \"\\n\" . 
join ( arg_5 ) return arg_2","id_":252922,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/core\/system.py#L124-L140","negative":"Handle registration form received.\n\n [client only]\n\n Call self.registration_callback with the registration form received\n as the argument. The value returned by the callback will be used as the\n filled-in form.\n\n :Parameters:\n - `stanza`: the stanza received.\n :Types:\n - `stanza`: `pyxmpp.iq.Iq`"} {"query":"Remove the given object .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , ** arg_3 ) : if not arg_1 : arg_4 = arg_0 . _lookup ( arg_2 = arg_2 , ** arg_3 ) if not arg_4 : return { 'changed' : False } arg_1 = arg_4 [ 'id' ] arg_5 = '%s%s\/' % ( arg_0 . endpoint , arg_1 ) debug . log ( 'DELETE %s' % arg_5 , fg = 'blue' , bold = True ) try : client . Func ( arg_5 ) return { 'changed' : True } except exc . NotFound : if arg_2 : raise return { 'changed' : False }","id_":252923,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/models\/base.py#L432-L470","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Decorator to register a new function with vaex .","positive":"def Func ( arg_0 = None , arg_1 = False , arg_2 = None ) : arg_3 = '' if arg_0 : arg_3 = arg_0 + "_" if arg_0 not in scopes : raise KeyError ( "unknown scope" ) def wrapper ( arg_4 , arg_2 = arg_2 ) : arg_2 = arg_2 or arg_4 . __name__ if arg_2 . startswith ( arg_3 ) : arg_2 = arg_2 [ len ( arg_3 ) : ] arg_5 = arg_3 + arg_2 if arg_0 : def closure ( arg_2 = arg_2 , arg_5 = arg_5 , arg_6 = arg_4 ) : def wrapper ( arg_7 , * arg_8 , ** arg_9 ) : arg_10 = getattr ( arg_7 . expression . ds . func , arg_5 ) arg_8 = ( arg_7 . expression , ) + arg_8 return arg_10 ( * arg_8 , ** arg_9 ) return functools . wraps ( arg_6 ) ( wrapper ) if arg_1 : setattr ( scopes [ arg_0 ] , arg_2 , property ( closure ( ) ) ) else : setattr ( scopes [ arg_0 ] , arg_2 , closure ( ) ) else : def closure ( arg_2 = arg_2 , arg_5 = arg_5 , arg_6 = arg_4 ) : def wrapper ( arg_7 , * arg_8 , ** arg_9 ) : arg_10 = getattr ( arg_7 . ds . func , arg_5 ) arg_8 = ( arg_7 , ) + arg_8 return arg_10 ( * arg_8 , ** arg_9 ) return functools . wraps ( arg_6 ) ( wrapper ) setattr ( arg_11 . expression . Expression , arg_2 , closure ( ) ) arg_11 . expression . expression_namespace [ arg_3 + arg_2 ] = arg_4 return arg_4 return wrapper","id_":252924,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/functions.py#L19-L71","negative":"Get the decryption for col."} {"query":"Get options list with requested prefix","positive":"def Func ( arg_0 , arg_1 , arg_2 = '' ) : arg_3 = [ ] try : for arg_4 in arg_0 . config . options ( arg_1 ) : if not arg_2 or arg_4 . find ( arg_2 ) == 0 : arg_3 += [ ( arg_4 [ len ( arg_2 ) : ] , arg_0 . config . get ( arg_1 , arg_4 ) ) ] except ConfigParser . NoSectionError as ex : logger . warning ( "No section: %s" , ex ) logger . 
debug ( \"Section: [%s] prefix: '%s' options:\\n%s\" , arg_1 , arg_2 , arg_3 ) return arg_3","id_":252925,"task_name":"https:\/\/github.com\/yandex\/yandex-tank\/blob\/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b\/yandextank\/core\/tankcore.py#L637-L650","negative":"Parses a file and returns a document object.\n File, a file like object."} {"query":"Find the lines matching one of a list of regexes .","positive":"def Func ( arg_0 , * arg_1 ) : arg_2 = re . compile ( join_regex ( arg_1 ) ) arg_3 = set ( ) for arg_4 , arg_5 in enumerate ( arg_0 . lines ) : if arg_2 . search ( arg_5 ) : arg_3 . add ( arg_4 + 1 ) return arg_3","id_":252926,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/parser.py#L77-L90","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Load a dictionary into the model .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , arg_3 = True ) : for arg_4 , arg_5 in arg_1 . items ( ) : if arg_4 not in arg_0 . _elements . keys ( ) and not arg_3 : raise AttributeError ( \"Model {} is not loaded\" . format ( arg_4 ) ) elif arg_4 not in arg_0 . _elements . keys ( ) and arg_3 : arg_0 . _load_model ( arg_4 ) arg_6 = getattr ( arg_0 , arg_4 ) _Func ( arg_6 , arg_5 )","id_":252927,"task_name":"https:\/\/github.com\/napalm-automation\/napalm-yang\/blob\/998e8a933171d010b8544bcc5dc448e2b68051e2\/napalm_yang\/base.py#L131-L166","negative":"Thresholds a distance matrix and returns the result.\n\n Parameters\n ----------\n\n dist_matrix: array_like\n Input array or object that can be converted to an array.\n\n perc_thr: float in range of [0,100]\n Percentile to compute which must be between 0 and 100 inclusive.\n\n k: int, optional\n Diagonal above which to zero elements.\n k = 0 (the default) is the main diagonal,\n k < 0 is below it and k > 0 is above.\n\n Returns\n -------\n array_like"} {"query":"Delete all indexes for the database","positive":"def Func ( arg_0 ) : LOG . warning ( \"Dropping all indexe\" ) for arg_1 in INDEXES : LOG . warning ( \"Dropping all indexes for collection name %s\" , arg_1 ) arg_0 . db [ arg_1 ] . Func ( )","id_":252928,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/index.py#L74-L79","negative":"Obtiene los dataframes de los datos de PVPC con resampling diario y mensual."} {"query":"Open a tunneled connection from a 0MQ url .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = 60 ) : arg_6 = select_random_ports ( 1 ) [ 0 ] arg_7 , arg_0 = arg_0 . split ( ':\/\/' ) arg_8 , arg_9 = arg_0 . split ( ':' ) arg_9 = int ( arg_9 ) if arg_4 is None : arg_4 = sys . 
platform == 'win32' if arg_4 : arg_10 = paramiko_tunnel else : arg_10 = openssh_tunnel arg_11 = arg_10 ( arg_6 , arg_9 , arg_1 , remoteip = arg_8 , arg_2 = arg_2 , arg_3 = arg_3 , arg_5 = arg_5 ) return 'tcp:\/\/127.0.0.1:%i' % arg_6 , arg_11","id_":252929,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/external\/ssh\/tunnel.py#L145-L168","negative":"Get course's duration as a timedelta.\n\n Arguments:\n obj (CourseOverview): CourseOverview object\n\n Returns:\n (timedelta): Duration of a course."} {"query":"Allocate the spatial pooler instance .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . _sfdr : return arg_2 = dict ( ( name , getattr ( arg_0 , name ) ) for name in arg_0 . _spatialArgNames ) if ( ( arg_0 . SpatialClass == CPPSpatialPooler ) or ( arg_0 . SpatialClass == PYSpatialPooler ) ) : arg_2 [ 'columnDimensions' ] = [ arg_0 . columnCount ] arg_2 [ 'inputDimensions' ] = [ arg_0 . inputWidth ] arg_2 [ 'potentialRadius' ] = arg_0 . inputWidth arg_0 . _sfdr = arg_0 . SpatialClass ( ** arg_2 )","id_":252930,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/regions\/sp_region.py#L437-L456","negative":"Does this filename match any of the patterns?"} {"query":"Check if the given token is a trailing comma","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 [ arg_1 ] if arg_2 . exact_type != tokenize . COMMA : return False arg_3 = itertools . islice ( arg_0 , arg_1 + 1 , None ) arg_4 = list ( itertools . takewhile ( lambda other_token , _token = arg_2 : other_token . start [ 0 ] == _token . start [ 0 ] , arg_3 , ) ) arg_5 = all ( other_token . type in ( tokenize . NEWLINE , tokenize . COMMENT ) for other_token in arg_4 ) if not arg_4 or not arg_5 : return False def get_curline_index_start ( ) : for arg_6 , arg_2 in enumerate ( reversed ( arg_0 [ : arg_1 ] ) ) : if arg_2 . type in ( tokenize . NEWLINE , tokenize . NL ) : return arg_1 - arg_6 return 0 arg_7 = get_curline_index_start ( ) arg_8 = { \"return\" , \"yield\" } for arg_9 in arg_0 [ arg_7 : arg_1 ] : if \"=\" in arg_9 . string or arg_9 . string in arg_8 : return True return False","id_":252931,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/refactoring.py#L65-L107","negative":"Hashes the data in a file on disk.\n\n Args:\n fpath (PathLike): file path string\n\n blocksize (int): 2 ** 16. Affects speed of reading file\n\n stride (int): strides > 1 skip data to hash, useful for faster\n hashing, but less accurate, also makes hash dependant on\n blocksize.\n\n hasher (HASH): hash algorithm from hashlib, defaults to `sha512`.\n\n hashlen (int): maximum number of symbols in the returned hash. If\n not specified, all are returned.\n\n base (list, str): list of symbols or shorthand key. Valid keys are\n 'abc', 'hex', and 'dec'. 
Defaults to 'hex'.\n\n Notes:\n For better hashes keep stride = 1\n For faster hashes set stride > 1\n blocksize matters when stride > 1\n\n References:\n http:\/\/stackoverflow.com\/questions\/3431825\/md5-checksum-of-a-file\n http:\/\/stackoverflow.com\/questions\/5001893\/when-to-use-sha-1-vs-sha-2\n\n Example:\n >>> import ubelt as ub\n >>> from os.path import join\n >>> fpath = join(ub.ensure_app_cache_dir('ubelt'), 'tmp.txt')\n >>> ub.writeto(fpath, 'foobar')\n >>> print(ub.hash_file(fpath, hasher='sha1', base='hex'))\n 8843d7f92416211de9ebb963ff4ce28125932878\n\n Example:\n >>> import ubelt as ub\n >>> from os.path import join\n >>> fpath = ub.touch(join(ub.ensure_app_cache_dir('ubelt'), 'empty_file'))\n >>> # Test that the output is the same as sha1sum\n >>> if ub.find_exe('sha1sum'):\n >>> want = ub.cmd(['sha1sum', fpath], verbose=2)['out'].split(' ')[0]\n >>> got = ub.hash_file(fpath, hasher='sha1')\n >>> print('want = {!r}'.format(want))\n >>> print('got = {!r}'.format(got))\n >>> assert want.endswith(got)\n >>> # Do the same for sha512 sum and md5sum\n >>> if ub.find_exe('sha512sum'):\n >>> want = ub.cmd(['sha512sum', fpath], verbose=2)['out'].split(' ')[0]\n >>> got = ub.hash_file(fpath, hasher='sha512')\n >>> print('want = {!r}'.format(want))\n >>> print('got = {!r}'.format(got))\n >>> assert want.endswith(got)\n >>> if ub.find_exe('md5sum'):\n >>> want = ub.cmd(['md5sum', fpath], verbose=2)['out'].split(' ')[0]\n >>> got = ub.hash_file(fpath, hasher='md5')\n >>> print('want = {!r}'.format(want))\n >>> print('got = {!r}'.format(got))\n >>> assert want.endswith(got)"} {"query":"filter to keep spouts","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] for arg_3 in arg_0 : if arg_3 [ 0 ] == 'spout' : arg_2 . append ( arg_3 ) return arg_2 , arg_1","id_":252932,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/explorer\/src\/python\/logicalplan.py#L101-L107","negative":"Turn a mongodb-style search dict into an SQL query."} {"query":"Installs and configures RabbitMQ .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 0 , arg_3 = 0 ) : arg_1 = arg_1 or ALL arg_2 = int ( arg_2 ) if arg_2 and not arg_3 : arg_4 = arg_0 . get_satchel ( 'packager' ) arg_4 . install_required ( type = SYSTEM , service = arg_0 . name ) arg_5 = arg_0 . local_renderer arg_6 = arg_0 . get_user_vhosts ( arg_1 = arg_1 ) with settings ( warn_only = True ) : arg_0 . add_admin_user ( ) arg_6 = sorted ( list ( arg_6 ) ) if not arg_3 : for arg_7 , arg_8 , arg_9 in arg_6 : arg_5 . env . broker_user = arg_7 arg_5 . env . broker_password = arg_8 arg_5 . env . broker_vhost = arg_9 with settings ( warn_only = True ) : arg_5 . sudo ( 'rabbitmqctl add_user {broker_user} {broker_password}' ) arg_5 . sudo ( 'rabbitmqctl add_vhost {broker_vhost}' ) arg_5 . sudo ( 'rabbitmqctl set_permissions -p {broker_vhost} {broker_user} \".*\" \".*\" \".*\"' ) arg_5 . sudo ( 'rabbitmqctl set_permissions -p {broker_vhost} {admin_username} \".*\" \".*\" \".*\"' ) return arg_6","id_":252933,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/rabbitmq.py#L281-L313","negative":"Cache a refresh token, ignoring any failure.\n\n Args:\n refresh_token (str): Refresh token to cache."} {"query":"Add the whole list of API parameters into optparse .","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 , arg_3 in BotoClient . EXTRA_CLIENT_PARAMS : arg_0 . 
add_option ( '--API-' + arg_1 , help = arg_3 , type = arg_2 , dest = arg_1 )","id_":252934,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L441-L444","negative":"Returns how the result count compares to the query options.\n\n The return value is negative if too few results were found, zero if enough were found, and\n positive if too many were found.\n\n Returns:\n int: -1, 0, or 1."} {"query":"r Tempo evaluation","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_0 = coerce_annotation ( arg_0 , 'Func' ) arg_1 = coerce_annotation ( arg_1 , 'Func' ) arg_3 = np . asarray ( [ o . value for o in arg_0 ] ) arg_4 = arg_0 . data [ 0 ] . confidence arg_5 = np . asarray ( [ o . value for o in arg_1 ] ) return mir_eval . Func . evaluate ( arg_3 , arg_4 , arg_5 , ** arg_2 )","id_":252935,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/eval.py#L322-L362","negative":"Called when the power state changes."} {"query":"Constructs the ArgumentParser for the CLI","positive":"def Func ( ) : arg_0 = ArgumentParser ( prog = 'pynetgear' ) arg_0 . add_argument ( \"--format\" , choices = [ 'json' , 'prettyjson' , 'py' ] , default = 'prettyjson' ) arg_1 = arg_0 . add_argument_group ( \"router connection config\" ) arg_1 . add_argument ( \"--host\" , help = \"Hostname for the router\" ) arg_1 . add_argument ( \"--user\" , help = \"Account for login\" ) arg_1 . add_argument ( \"--port\" , help = \"Port exposed on the router\" ) arg_1 . add_argument ( \"--login-v2\" , help = \"Force the use of the cookie-based authentication\" , dest = \"force_login_v2\" , default = False , action = \"store_true\" ) arg_1 . add_argument ( \"--password\" , help = \"Not required with a wired connection.\" + \"Optionally, set the PYNETGEAR_PASSWORD environment variable\" ) arg_1 . add_argument ( \"--url\" , help = \"Overrides host:port and ssl with url to router\" ) arg_1 . add_argument ( \"--no-ssl\" , dest = \"ssl\" , default = True , action = \"store_false\" , help = \"Connect with https\" ) arg_2 = arg_0 . add_subparsers ( description = \"Runs subcommand against the specified router\" , dest = \"subcommand\" ) arg_3 = arg_2 . add_parser ( \"block_device\" , help = \"Blocks a device from connecting by mac address\" ) arg_3 . add_argument ( \"--mac-addr\" ) arg_4 = arg_2 . add_parser ( \"allow_device\" , help = \"Allows a device with the mac address to connect\" ) arg_4 . add_argument ( \"--mac-addr\" ) arg_2 . add_parser ( \"login\" , help = \"Attempts to login to router.\" ) arg_5 = arg_2 . add_parser ( \"attached_devices\" , help = \"Outputs all attached devices\" ) arg_5 . add_argument ( \"-v\" , \"--verbose\" , action = \"store_true\" , default = False , help = \"Choose between verbose and slower or terse and fast.\" ) arg_2 . 
add_parser ( \"traffic_meter\" , help = \"Output router's traffic meter data\" ) return arg_0","id_":252936,"task_name":"https:\/\/github.com\/MatMaul\/pynetgear\/blob\/247d6b9524fcee4b2da0e65ca12c52ebdd3676b2\/pynetgear\/__main__.py#L39-L88","negative":"Parse issue and generate single line formatted issue line.\n\n Example output:\n - Add coveralls integration [\\#223](https:\/\/github.com\/skywinder\/github-changelog-generator\/pull\/223) ([skywinder](https:\/\/github.com\/skywinder))\n - Add coveralls integration [\\#223](https:\/\/github.com\/skywinder\/github-changelog-generator\/pull\/223) (@skywinder)\n\n\n :param dict issue: Fetched issue from GitHub.\n :rtype: str\n :return: Markdown-formatted single issue."} {"query":"Send an update request to the given path of the CRUD API with the given data dict which will be converted into json","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : return arg_0 . handleresult ( arg_0 . r . put ( urljoin ( arg_0 . url + CRUD_PATH , arg_1 ) , arg_2 = json . dumps ( arg_2 ) ) )","id_":252937,"task_name":"https:\/\/github.com\/connectordb\/connectordb-python\/blob\/2092b0cb30898139a247176bcf433d5a4abde7cb\/connectordb\/_connection.py#L143-L148","negative":"Verify a certificate in a context.\n\n .. versionadded:: 0.15\n\n :raises X509StoreContextError: If an error occurred when validating a\n certificate in the context. Sets ``certificate`` attribute to\n indicate which certificate caused the error."} {"query":"Check if a file exists on Azure Data Lake .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_0 . connection . glob ( arg_1 , details = False , invalidate_cache = True ) return len ( arg_2 ) == 1 except FileNotFoundError : return False","id_":252938,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/azure_data_lake_hook.py#L55-L68","negative":"Decompress and unpickle."} {"query":"Create a string representation for each item in a dict .","positive":"def Func ( arg_0 , ** arg_1 ) : import ubelt as ub arg_2 = arg_1 . get ( 'explicit' , False ) arg_1 [ 'explicit' ] = _rectify_countdown_or_bool ( arg_2 ) arg_3 = arg_1 . get ( 'precision' , None ) arg_4 = arg_1 . get ( 'kvsep' , ': ' ) if arg_2 : arg_4 = '=' def make_item_str ( arg_5 , arg_6 ) : if arg_2 or arg_1 . get ( 'strkeys' , False ) : arg_7 = six . text_type ( arg_5 ) else : arg_7 = repr2 ( arg_5 , arg_3 = arg_3 , newlines = 0 ) arg_8 = arg_7 + arg_4 arg_1 [ '_return_info' ] = True arg_9 , arg_10 = repr2 ( arg_6 , ** arg_1 ) arg_11 = arg_9 . find ( '\\n' ) arg_12 = arg_9 if arg_11 == - 1 else arg_9 [ : arg_11 ] arg_13 = arg_1 . get ( 'cbr' , arg_1 . get ( 'compact_brace' , False ) ) if arg_13 or not arg_12 . rstrip ( ) . endswith ( tuple ( '([{<' ) ) : arg_14 = '' if arg_11 == - 1 else arg_9 [ arg_11 : ] arg_9 = arg_12 . lstrip ( ) + arg_14 if '\\n' in arg_8 : arg_15 = arg_8 + arg_9 else : arg_15 = ub . hzcat ( [ arg_8 , arg_9 ] ) else : arg_15 = arg_8 + arg_9 return arg_15 , arg_10 arg_16 = list ( six . iteritems ( arg_0 ) ) arg_17 = [ make_item_str ( arg_5 , arg_6 ) for ( arg_5 , arg_6 ) in arg_16 ] arg_18 = [ t [ 0 ] for t in arg_17 ] arg_19 = max ( [ t [ 1 ] [ 'max_height' ] for t in arg_17 ] ) if arg_17 else 0 arg_10 = { 'max_height' : arg_19 + 1 , } arg_20 = arg_1 . get ( 'sort' , None ) if arg_20 is None : arg_20 = True if isinstance ( arg_0 , collections . 
OrderedDict ) : arg_20 = False if arg_20 : arg_18 = _sort_itemstrs ( arg_16 , arg_18 ) return arg_18 , arg_10","id_":252939,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_format.py#L598-L665","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Given the model check that all the metadata role values have valid information in them and any required metadata fields contain values .","positive":"def Func ( arg_0 ) : arg_1 = ( ATTRIBUTED_ROLE_KEYS [ 0 ] , ATTRIBUTED_ROLE_KEYS [ 4 ] , ) for arg_2 in ATTRIBUTED_ROLE_KEYS : try : arg_3 = arg_0 . metadata [ arg_2 ] except KeyError : if arg_2 in arg_1 : raise exceptions . MissingRequiredMetadata ( arg_2 ) else : if arg_2 in arg_1 and len ( arg_3 ) == 0 : raise exceptions . MissingRequiredMetadata ( arg_2 ) for arg_4 in arg_3 : if arg_4 . get ( 'type' ) != 'cnx-id' : raise exceptions . InvalidRole ( arg_2 , arg_4 )","id_":252940,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/db.py#L292-L309","negative":"Convert string into path case.\n Join punctuation with slash.\n\n Args:\n string: String to convert.\n\n Returns:\n string: Path cased string."} {"query":"Return an SVGDumper for this instruction .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : from knittingpattern . convert . InstructionSVGCache import default_svg_cache arg_1 = default_svg_cache ( ) return arg_1 . Func ( arg_0 )","id_":252941,"task_name":"https:\/\/github.com\/fossasia\/knittingpattern\/blob\/8e608896b0ab82fea1ca9fbfa2b4ee023d8c8027\/knittingpattern\/Instruction.py#L217-L231","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lengths of the boundaries and labels\n match."} {"query":"Make OAuth token request .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_0 . post ( OAUTH2_TOKEN_REQUEST_URL , data = arg_1 ) arg_2 . raise_for_status ( ) except requests . RequestException as e : raise GoogleAuthError ( 'Token request failed: {}' . format ( e ) ) else : arg_3 = arg_2 . json ( ) if 'error' in arg_3 : raise GoogleAuthError ( 'Token request error: {!r}' . 
format ( arg_3 [ 'error' ] ) ) return arg_3","id_":252942,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/auth.py#L383-L402","negative":"Convenience function to efficiently construct a MultivariateNormalDiag."} {"query":"Deploy polyaxon .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = read_Funcment_config ( arg_0 ) arg_5 = DeployManager ( arg_4 = arg_4 , filepath = arg_0 , arg_1 = arg_1 , arg_3 = arg_3 ) arg_6 = None if arg_2 : arg_5 . check ( ) Printer . print_success ( 'Polyaxon Funcment file is valid.' ) else : try : arg_5 . install ( ) except Exception as e : Printer . print_error ( 'Polyaxon could not be installed.' ) arg_6 = e if arg_6 : Printer . print_error ( 'Error message `{}`.' . format ( arg_6 ) )","id_":252943,"task_name":"https:\/\/github.com\/polyaxon\/polyaxon-cli\/blob\/a7f5eed74d4d909cad79059f3c21c58606881449\/polyaxon_cli\/cli\/admin.py#L52-L71","negative":"Get list of TensorArrays holding exchanged states, and zeros."} {"query":"docstring for parse_omim_2_line","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = dict ( zip ( arg_1 , arg_0 . split ( '\\t' ) ) ) return arg_2","id_":252944,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/parse\/omim.py#L38-L41","negative":"Play the video and block whilst the video is playing"} {"query":"Disables digital reporting . By turning reporting off for this pin reporting is disabled for all 8 bits in the port -","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 \/\/ 8 arg_3 = [ arg_0 . _command_handler . REPORT_DIGITAL + arg_2 , arg_0 . REPORTING_DISABLE ] arg_0 . _command_handler . send_command ( arg_3 )","id_":252945,"task_name":"https:\/\/github.com\/MrYsLab\/PyMata\/blob\/7e0ec34670b5a0d3d6b74bcbe4f3808c845cc429\/PyMata\/pymata.py#L337-L348","negative":"Gets a list of snapshots for a cluster\n\n :param cluster_identifier: unique identifier of a cluster\n :type cluster_identifier: str"} {"query":"Waits for events on the event queue and calls the registered functions .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : while True : arg_4 = arg_1 . get ( ) if arg_4 == arg_3 : return arg_5 = map ( lambda fm : fm . callback if arg_2 ( arg_4 , fm ) else None , arg_0 ) arg_5 = filter ( lambda f : f is not None , arg_5 ) for arg_6 in arg_5 : arg_6 ( arg_4 )","id_":252946,"task_name":"https:\/\/github.com\/piface\/pifacecommon\/blob\/006bca14c18d43ba2d9eafaa84ef83b512c51cf6\/pifacecommon\/interrupts.py#L309-L341","negative":"Replace target with replacement"} {"query":"Login to pybotvac account using provided email and password .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = requests . post ( urljoin ( arg_0 . ENDPOINT , 'sessions' ) , json = { 'email' : arg_1 , 'password' : arg_2 , 'platform' : 'ios' , 'token' : binascii . hexlify ( os . urandom ( 64 ) ) . decode ( 'utf8' ) } , headers = arg_0 . _headers ) arg_3 . raise_for_status ( ) arg_4 = arg_3 . json ( ) [ 'access_token' ] arg_0 . _headers [ 'Authorization' ] = 'Token token=%s' % arg_4","id_":252947,"task_name":"https:\/\/github.com\/stianaske\/pybotvac\/blob\/e3f655e81070ff209aaa4efb7880016cf2599e6d\/pybotvac\/account.py#L36-L54","negative":"Get a single publication."} {"query":"Run one pass of cx cancellation on the circuit","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . collect_Funcs ( [ \"cx\" ] ) for arg_3 in arg_2 : arg_4 = [ ] arg_5 = [ ] for arg_6 in range ( len ( arg_3 ) - 1 ) : arg_5 . 
append ( arg_3 [ arg_6 ] ) arg_7 = arg_3 [ arg_6 ] . qargs arg_8 = arg_3 [ arg_6 + 1 ] . qargs if arg_7 != arg_8 : arg_4 . append ( arg_5 ) arg_5 = [ ] arg_5 . append ( arg_3 [ - 1 ] ) arg_4 . append ( arg_5 ) for arg_5 in arg_4 : if len ( arg_5 ) % 2 == 0 : for arg_9 in arg_5 : arg_1 . remove_op_node ( arg_9 ) else : for arg_9 in arg_5 [ 1 : ] : arg_1 . remove_op_node ( arg_9 ) return arg_1","id_":252948,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/transpiler\/passes\/cx_cancellation.py#L16-L49","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Climbs up the site tree to mark items of current branch .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 is not None : arg_2 . in_current_branch = True if hasattr ( arg_2 , 'parent' ) and arg_2 . parent is not None : arg_0 . Func ( arg_1 , arg_0 . get_item_by_id ( arg_1 , arg_2 . parent . id ) )","id_":252949,"task_name":"https:\/\/github.com\/idlesign\/django-sitetree\/blob\/61de4608e6e415247c75fe8691027d7c4ed0d1e7\/sitetree\/sitetreeapp.py#L1072-L1081","negative":"Deserialize a dataframe.\n\n Parameters\n ----------\n reader : file\n File-like object to read from. Must be opened in binary mode.\n data_type_id : dict\n Serialization format of the raw data.\n See the azureml.DataTypeIds class for constants.\n\n Returns\n -------\n pandas.DataFrame\n Dataframe object."} {"query":"Returns an SFTP connection object","positive":"def Func ( arg_0 ) : if arg_0 . conn is None : arg_1 = pysftp . CnOpts ( ) if arg_0 . no_host_key_check : arg_1 . hostkeys = None arg_1 . compression = arg_0 . compress arg_4 = { 'host' : arg_0 . remote_host , 'port' : arg_0 . port , 'username' : arg_0 . username , 'cnopts' : arg_1 } if arg_0 . password and arg_0 . password . strip ( ) : arg_4 [ 'password' ] = arg_0 . password if arg_0 . key_file : arg_4 [ 'private_key' ] = arg_0 . key_file if arg_0 . private_key_pass : arg_4 [ 'private_key_pass' ] = arg_0 . private_key_pass arg_0 . conn = pysftp . Connection ( ** arg_4 ) return arg_0 . conn","id_":252950,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/sftp_hook.py#L92-L115","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Multiplying a matrix by its inverse produces the identity matrix .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . matrix arg_2 = arg_1 [ 0 ] * arg_1 [ 4 ] - arg_1 [ 1 ] * arg_1 [ 3 ] arg_0 . 
matrix = [ arg_1 [ 4 ] \/ arg_2 , - arg_1 [ 1 ] \/ arg_2 , 0 , - arg_1 [ 3 ] \/ arg_2 , arg_1 [ 0 ] \/ arg_2 , 0 , ( arg_1 [ 3 ] * arg_1 [ 7 ] - arg_1 [ 4 ] * arg_1 [ 6 ] ) \/ arg_2 , - ( arg_1 [ 0 ] * arg_1 [ 7 ] - arg_1 [ 1 ] * arg_1 [ 6 ] ) \/ arg_2 , 1 ]","id_":252951,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/data\/geometry.py#L223-L234","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Dump args to config file .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = ConfigParser ( ) arg_3 . add_section ( SECTION ) if arg_2 is None : for arg_4 in arg_1 : arg_3 . set ( SECTION , arg_4 , arg_1 . attr ) else : arg_5 = ( '--extra-settings' , '--languages' , '--requirements' , '--template' , '--timezone' ) for arg_6 in arg_2 . _actions : if arg_6 . dest in ( 'help' , 'config_file' , 'config_dump' , 'project_name' ) : continue arg_7 = arg_6 . option_strings [ 0 ] arg_8 = arg_7 . lstrip ( '-' ) arg_9 = getattr ( arg_1 , arg_6 . dest ) if any ( [ arg_10 for arg_10 in arg_5 if arg_10 in arg_6 . option_strings ] ) : if arg_6 . dest == 'languages' : if len ( arg_9 ) == 1 and arg_9 [ 0 ] == 'en' : arg_3 . set ( SECTION , arg_8 , '' ) else : arg_3 . set ( SECTION , arg_8 , ',' . join ( arg_9 ) ) else : arg_3 . set ( SECTION , arg_8 , arg_9 if arg_9 else '' ) elif arg_6 . choices == ( 'yes' , 'no' ) : arg_3 . set ( SECTION , arg_8 , 'yes' if arg_9 else 'no' ) elif arg_6 . dest == 'templates' : arg_3 . set ( SECTION , arg_8 , arg_9 if arg_9 else 'no' ) elif arg_6 . dest == 'cms_version' : arg_11 = ( 'stable' if arg_9 == CMS_VERSION_MATRIX [ 'stable' ] else arg_9 ) arg_3 . set ( SECTION , arg_8 , arg_11 ) elif arg_6 . dest == 'django_version' : arg_11 = ( 'stable' if arg_9 == DJANGO_VERSION_MATRIX [ 'stable' ] else arg_9 ) arg_3 . set ( SECTION , arg_8 , arg_11 ) elif arg_6 . const : arg_3 . set ( SECTION , arg_8 , 'true' if arg_9 else 'false' ) else : arg_3 . set ( SECTION , arg_8 , str ( arg_9 ) ) with open ( arg_0 , 'w' ) as fp : arg_3 . write ( fp )","id_":252952,"task_name":"https:\/\/github.com\/nephila\/djangocms-installer\/blob\/9fec66d5f8b1e9a0f3c0ec66dd777db578fab07e\/djangocms_installer\/config\/ini.py#L49-L93","negative":"ASCII adjust after addition.\n\n Adjusts the sum of two unpacked BCD values to create an unpacked BCD\n result. The AL register is the implied source and destination operand\n for this instruction. The AAA instruction is only useful when it follows\n an ADD instruction that adds (binary addition) two unpacked BCD values\n and stores a byte result in the AL register. The AAA instruction then\n adjusts the contents of the AL register to contain the correct 1-digit\n unpacked BCD result.\n If the addition produces a decimal carry, the AH register is incremented\n by 1, and the CF and AF flags are set. If there was no decimal carry,\n the CF and AF flags are cleared and the AH register is unchanged. 
In either\n case, bits 4 through 7 of the AL register are cleared to 0.\n\n This instruction executes as described in compatibility mode and legacy mode.\n It is not valid in 64-bit mode.\n ::\n IF ((AL AND 0FH) > 9) Operators.OR(AF = 1)\n THEN\n AL = (AL + 6);\n AH = AH + 1;\n AF = 1;\n CF = 1;\n ELSE\n AF = 0;\n CF = 0;\n FI;\n AL = AL AND 0FH;\n :param cpu: current CPU."} {"query":"Merge nested IfContainer from else branch to this IfContainer as elif and else branches","positive":"def Func ( arg_0 , arg_1 : "IfContainer" ) : arg_0 . elIfs . append ( ( arg_1 . cond , arg_1 . ifTrue ) ) arg_0 . elIfs . extend ( arg_1 . elIfs ) arg_0 . ifFalse = arg_1 . ifFalse","id_":252953,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/ifContainter.py#L306-L314","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random "fuzz" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Compose the version predicates for requirement in PEP 345 fashion .","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 , arg_3 in arg_0 . specs : arg_1 . append ( arg_2 + arg_3 ) if not arg_1 : return '' return " (%s)" % ',' . join ( arg_1 )","id_":252954,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/wheel\/metadata.py#L207-L214","negative":"Removes the specified course from the specified organization"} {"query":"Return all installed public keys","positive":"def Func ( arg_0 , arg_1 = False ) : arg_2 = arg_0 . store . Func ( ) if not arg_1 : return arg_2 arg_3 = [ ] for arg_4 in arg_2 : if arg_4 [ : len ( arg_0 . prefix ) ] == arg_0 . prefix : arg_3 . append ( arg_4 ) return arg_3","id_":252955,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenecommon\/wallet.py#L271-L285","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Checks whether the convergence criteria have been met .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = tf . math . abs ( tf . math . reduce_max ( input_tensor = arg_1 ) - tf . math . reduce_min ( input_tensor = arg_1 ) ) arg_5 = arg_4 <= arg_2 arg_6 = arg_3 \/ 2 def part_converged ( arg_7 ) : return tf . math . reduce_max ( input_tensor = tf . math . abs ( arg_7 - arg_7 [ 0 ] ) ) <= arg_6 arg_8 = tf . math . 
reduce_all ( input_tensor = [ part_converged ( arg_7 ) for arg_7 in arg_0 ] ) return arg_5 | arg_8","id_":252956,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/differential_evolution.py#L519-L551","negative":"Resolve the logging path from job and task properties.\n\n Args:\n job_metadata: Job metadata, such as job-id, job-name, and user-id.\n job_resources: Resources specified such as ram, cpu, and logging path.\n task_descriptors: Task metadata, parameters, and resources.\n\n Resolve the logging path, which may have substitution parameters such as\n job-id, task-id, user-id, and job-name."} {"query":"Top Chartbeat template tag .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . split_contents ( ) if len ( arg_2 ) > 1 : raise TemplateSyntaxError ( \"'%s' takes no arguments\" % arg_2 [ 0 ] ) return ChartbeatTopNode ( )","id_":252957,"task_name":"https:\/\/github.com\/jazzband\/django-analytical\/blob\/5487fd677bd47bc63fc2cf39597a0adc5d6c9ab3\/analytical\/templatetags\/chartbeat.py#L46-L55","negative":"Output profiler report."} {"query":"Display the DataFrame from row i1 till i2","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 'html' ) : from IPython import display if arg_3 == 'html' : arg_4 = arg_0 . _as_html_table ( arg_1 , arg_2 ) display . display ( display . HTML ( arg_4 ) ) else : arg_4 = arg_0 . _as_table ( arg_1 , arg_2 , arg_3 = arg_3 ) print ( arg_4 )","id_":252958,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/dataframe.py#L3466-L3481","negative":"Adds all parameters to `traj`"} {"query":"Does a series of FFTs from start_s or start_sample for duration_s or num_samples . Effectively transforms a slice of the AudioSegment into the frequency domain across different time bins .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = 0.5 , arg_8 = ( 'tukey' , 0.25 ) ) : if arg_1 is not None and arg_3 is not None : raise ValueError ( \"Only one of start_s and start_sample may be specified.\" ) if arg_2 is not None and arg_4 is not None : raise ValueError ( \"Only one of duration_s and num_samples may be specified.\" ) if arg_5 is not None and arg_6 is not None : raise ValueError ( \"Only one of window_length_s and window_length_samples may be specified.\" ) if arg_5 is None and arg_6 is None : raise ValueError ( \"You must specify a window length, either in window_length_s or in window_length_samples.\" ) if arg_1 is None and arg_3 is None : arg_3 = 0 elif arg_1 is not None : arg_3 = int ( round ( arg_1 * arg_0 . frame_rate ) ) if arg_2 is None and arg_4 is None : arg_4 = len ( arg_0 . get_array_of_samples ( ) ) - int ( arg_3 ) elif arg_2 is not None : arg_4 = int ( round ( arg_2 * arg_0 . frame_rate ) ) if arg_5 is not None : arg_6 = int ( round ( arg_5 * arg_0 . frame_rate ) ) if arg_3 + arg_4 > len ( arg_0 . get_array_of_samples ( ) ) : raise ValueError ( \"The combination of start and duration will run off the end of the AudioSegment object.\" ) arg_9 = arg_0 . to_numpy_array ( ) [ arg_3 : arg_3 + arg_4 ] arg_10 , arg_11 , arg_12 = signal . Func ( arg_9 , arg_0 . 
frame_rate , scaling = 'spectrum' , nperseg = arg_6 , noverlap = int ( round ( arg_7 * arg_6 ) ) , mode = 'magnitude' , arg_8 = arg_8 ) return arg_10 , arg_11 , arg_12","id_":252959,"task_name":"https:\/\/github.com\/MaxStrange\/AudioSegment\/blob\/1daefb8de626ddff3ff7016697c3ad31d262ecd6\/audiosegment.py#L950-L1042","negative":"Register unit object on interface level object"} {"query":"Unsubscribe to the passed pair s OHLC data channel .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : arg_4 = [ '1m' , '5m' , '15m' , '30m' , '1h' , '3h' , '6h' , '12h' , '1D' , '7D' , '14D' , '1M' ] if arg_2 : if arg_2 not in arg_4 : raise ValueError ( \"timeframe must be any of %s\" % arg_4 ) else : arg_2 = '1m' arg_5 = ( 'candles' , arg_1 , arg_2 ) arg_1 = 't' + arg_1 if not arg_1 . startswith ( 't' ) else arg_1 arg_6 = 'trade:' + arg_2 + ':' + arg_1 arg_0 . _unsubscribe ( 'candles' , arg_5 , arg_6 = arg_6 , ** arg_3 )","id_":252960,"task_name":"https:\/\/github.com\/Crypto-toolbox\/btfxwss\/blob\/16827fa6aacb2c0e289aa852bf61a18df6905835\/btfxwss\/client.py#L303-L323","negative":"canonical cluster statistics for a single run and a single probability\n\n Parameters\n ----------\n\n microcanonical_statistics : ndarray\n Return value of `bond_microcanonical_statistics`\n\n convolution_factors : 1-D array_like\n The coefficients of the convolution for the given probabilty ``p``\n and for each occupation number ``n``.\n\n Returns\n -------\n ret : ndarray of size ``1``\n Structured array with dtype as returned by\n `canonical_statistics_dtype`\n\n ret['percolation_probability'] : ndarray of float\n The \"percolation probability\" of this run at the value of ``p``.\n Only exists if `microcanonical_statistics` argument has the\n ``has_spanning_cluster`` field.\n\n ret['max_cluster_size'] : ndarray of int\n Weighted size of the largest cluster (absolute number of sites)\n\n ret['moments'] : 1-D :py:class:`numpy.ndarray` of float\n Array of size ``5``.\n The ``k``-th entry is the weighted ``k``-th raw moment of the\n (absolute) cluster size distribution, with ``k`` ranging from ``0`` to\n ``4``.\n\n See Also\n --------\n\n bond_microcanonical_statistics\n canonical_statistics_dtype"} {"query":"Uploads a file","positive":"def Func ( arg_0 , arg_1 ) : print ( 'Uploading file %s' % arg_0 ) arg_2 = os . path . join ( os . getcwd ( ) , arg_0 ) arg_3 = 'sftp:\/\/' + ADDRESS + WORKING_DIR arg_4 = saga . filesystem . File ( arg_2 , arg_1 = arg_1 , flags = OVERWRITE ) arg_4 . copy ( arg_3 ) print ( 'Transfer of `%s` to `%s` successful' % ( arg_0 , arg_3 ) )","id_":252961,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/examples\/example_22_saga_python\/start_saga.py#L25-L32","negative":"Perform the stringprep mapping step of SASLprep. Operates in-place on a\n list of unicode characters provided in `chars`."} {"query":"Writes properties to the file in Java properties format .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = True ) : if arg_2 is not None : write_comment ( arg_0 , arg_2 ) if arg_3 : write_comment ( arg_0 , time . 
strftime ( '%a %b %d %H:%M:%S %Z %Y' ) ) if hasattr ( arg_1 , 'keys' ) : for arg_4 in arg_1 : write_property ( arg_0 , arg_4 , arg_1 [ arg_4 ] ) else : for arg_4 , arg_5 in arg_1 : write_property ( arg_0 , arg_4 , arg_5 )","id_":252962,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/ext\/jprops.py#L33-L53","negative":"Initialize the bucket map assuming the given number of maxBuckets."} {"query":"Computes the min_event_ndims associated with the give list of bijectors .","positive":"def Func ( arg_0 , arg_1 = True ) : arg_2 = 0 arg_3 = 0 if arg_1 : arg_0 = reversed ( arg_0 ) for arg_4 in arg_0 : if arg_1 : arg_5 = arg_4 . forward_min_event_ndims arg_6 = arg_4 . inverse_min_event_ndims else : arg_5 = arg_4 . inverse_min_event_ndims arg_6 = arg_4 . forward_min_event_ndims if arg_3 < arg_5 : arg_2 += ( arg_5 - arg_3 ) arg_3 = max ( arg_5 , arg_3 ) arg_7 = ( arg_5 - arg_6 ) arg_3 -= arg_7 return arg_2","id_":252963,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/bijectors\/chain.py#L40-L109","negative":"Checks if file already exists and ask the user if it should\n be overwritten if it does."} {"query":"Get a location information","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_2 = None ) : arg_4 = arg_0 . api . LOCATIONS + '\/' + str ( arg_1 ) return arg_0 . _get_model ( arg_4 , arg_3 = arg_3 )","id_":252964,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/official_api\/client.py#L462-L475","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Add another chem material package to this material package .","positive":"def Func ( arg_0 , arg_1 ) : if type ( arg_1 ) is MaterialPackage : if arg_0 . material == arg_1 . material : arg_0 . compound_masses += arg_1 . compound_masses else : for arg_2 in arg_1 . material . compounds : if arg_2 not in arg_0 . material . compounds : raise Exception ( \"Packages of '\" + arg_1 . material . name + \"' cannot be added to packages of '\" + arg_0 . material . name + \"'. The compound '\" + arg_2 + \"' was not found in '\" + arg_0 . material . name + \"'.\" ) arg_0 . Func ( ( arg_2 , arg_1 . get_compound_mass ( arg_2 ) ) ) elif arg_0 . _is_compound_mass_tuple ( arg_1 ) : arg_2 = arg_1 [ 0 ] arg_3 = arg_0 . material . get_compound_index ( arg_2 ) arg_4 = arg_1 [ 1 ] arg_0 . 
compound_masses [ arg_3 ] += arg_4 else : raise TypeError ( 'Invalid addition argument.' )","id_":252965,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/modelling\/process\/materials\/chem.py#L544-L582","negative":"The factory method to create WebDriverResult from JSON Object.\n\n Args:\n obj(dict): The JSON Object returned by server."} {"query":"Remove the pid file .","positive":"def Func ( arg_0 ) : arg_1 = os . path . join ( arg_0 . profile_dir . pid_dir , arg_0 . name + u'.pid' ) if os . path . isfile ( arg_1 ) : try : arg_0 . log . info ( \"Removing pid file: %s\" % arg_1 ) os . remove ( arg_1 ) except : arg_0 . log . warn ( \"Error removing the pid file: %s\" % arg_1 )","id_":252966,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/apps\/baseapp.py#L220-L233","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Returns a Redis connection .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . Funcection ( arg_0 . redis_conn_id ) arg_0 . host = arg_1 . host arg_0 . port = arg_1 . port arg_0 . password = None if str ( arg_1 . password ) . lower ( ) in [ 'none' , 'false' , '' ] else arg_1 . password arg_0 . db = arg_1 . extra_dejson . get ( 'db' , None ) if not arg_0 . redis : arg_0 . log . debug ( 'Initializing redis object for conn_id \"%s\" on %s:%s:%s' , arg_0 . redis_conn_id , arg_0 . host , arg_0 . port , arg_0 . db ) arg_0 . redis = Redis ( arg_2 = arg_0 . host , arg_3 = arg_0 . port , arg_4 = arg_0 . password , arg_5 = arg_0 . db ) return arg_0 . redis","id_":252967,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/redis_hook.py#L45-L66","negative":"Apply all filters to issues and pull requests.\n\n :param dict older_tag: All issues before this tag's date will be\n excluded. May be special value, if new tag is\n the first tag. (Means **older_tag** is when\n the repo was created.)\n :param dict newer_tag: All issues after this tag's date will be\n excluded. May be title of unreleased section.\n :rtype: list(dict), list(dict)\n :return: Filtered issues and pull requests."} {"query":"Connected to AMP server start listening locally and give the AMP client a reference to the local listening factory .","positive":"def Func ( arg_0 ) : log . msg ( \"Connected to AMP server, starting to listen locally...\" ) arg_1 = multiplexing . ProxyingFactory ( arg_0 , \"hello\" ) return listeningEndpoint . listen ( arg_1 )","id_":252968,"task_name":"https:\/\/github.com\/lvh\/txampext\/blob\/a7d6cb9f1e9200dba597378cd40eb6a2096d4fd9\/docs\/examples\/multiplexing_client.py#L31-L38","negative":"Get the context for this view."} {"query":"Setup a figure for plotting an image .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_1 : arg_2 = plt . figure ( arg_0 = arg_0 ) return arg_2","id_":252969,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/plotters\/plotter_util.py#L35-L48","negative":"Gets back all response headers."} {"query":"Plots the rolling volatility versus date .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = arg_3 * 6 , arg_4 = 'best' , arg_5 = None , ** arg_6 ) : if arg_5 is None : arg_5 = plt . gca ( ) arg_7 = FuncFormatter ( utils . two_dec_places ) arg_5 . yaxis . set_major_formatter ( FuncFormatter ( arg_7 ) ) arg_8 = timeseries . 
rolling_volatility ( arg_0 , arg_2 ) arg_8 . plot ( alpha = .7 , lw = 3 , color = 'orangered' , arg_5 = arg_5 , ** arg_6 ) if arg_1 is not None : arg_9 = timeseries . rolling_volatility ( arg_1 , arg_2 ) arg_9 . plot ( alpha = .7 , lw = 3 , color = 'grey' , arg_5 = arg_5 , ** arg_6 ) arg_5 . set_title ( 'Rolling volatility (6-month)' ) arg_5 . axhline ( arg_8 . mean ( ) , color = 'steelblue' , linestyle = '--' , lw = 3 ) arg_5 . axhline ( 0.0 , color = 'black' , linestyle = '-' , lw = 2 ) arg_5 . set_ylabel ( 'Volatility' ) arg_5 . set_xlabel ( '' ) if arg_1 is None : arg_5 . legend ( [ 'Volatility' , 'Average volatility' ] , loc = arg_4 , frameon = True , framealpha = 0.5 ) else : arg_5 . legend ( [ 'Volatility' , 'Benchmark volatility' , 'Average volatility' ] , loc = arg_4 , frameon = True , framealpha = 0.5 ) return arg_5","id_":252970,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/plotting.py#L891-L954","negative":"Get 6D data."} {"query":"Create a pool .","positive":"def Func ( ) : arg_0 = request . get_json ( force = True ) try : arg_1 = pool_api . Func ( ** arg_0 ) except AirflowException as err : _log . error ( err ) arg_2 = jsonify ( error = \"{}\" . format ( err ) ) arg_2 . status_code = err . status_code return arg_2 else : return jsonify ( arg_1 . to_json ( ) )","id_":252971,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/www\/api\/experimental\/endpoints.py#L325-L336","negative":"This concatenates all text LCs for an objectid with the given aperture.\n\n Does not care about overlaps or duplicates. The light curves must all be\n from the same aperture.\n\n The intended use is to concatenate light curves across CCDs or instrument\n changes for a single object. These can then be normalized later using\n standard astrobase tools to search for variablity and\/or periodicity.\n\n\n lcbasedir is the directory to start searching in.\n\n objectid is the object to search for.\n\n aperture is the aperture postfix to use: (TF1 = aperture 1,\n TF2 = aperture 2,\n TF3 = aperture 3)\n\n sortby is a column to sort the final concatenated light curve by in\n ascending order.\n\n If normalize is True, then each light curve's magnitude columns are\n normalized to zero, and the whole light curve is then normalized to the\n global median magnitude for each magnitude column.\n\n If recursive is True, then the function will search recursively in lcbasedir\n for any light curves matching the specified criteria. This may take a while,\n especially on network filesystems.\n\n The returned lcdict has an extra column: 'lcn' that tracks which measurement\n belongs to which input light curve. This can be used with\n lcdict['concatenated'] which relates input light curve index to input light\n curve filepath. Finally, there is an 'nconcatenated' key in the lcdict that\n contains the total number of concatenated light curves."} {"query":"Returns a QTextCharFormat for token by reading a Pygments style .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = QtGui . QTextCharFormat ( ) for arg_4 , arg_5 in arg_2 . style_for_token ( arg_1 ) . items ( ) : if arg_5 : if arg_4 == 'color' : arg_3 . setForeground ( arg_0 . _get_brush ( arg_5 ) ) elif arg_4 == 'bgcolor' : arg_3 . setBackground ( arg_0 . _get_brush ( arg_5 ) ) elif arg_4 == 'bold' : arg_3 . setFontWeight ( QtGui . QFont . Bold ) elif arg_4 == 'italic' : arg_3 . setFontItalic ( True ) elif arg_4 == 'underline' : arg_3 . 
setUnderlineStyle ( QtGui . QTextCharFormat . SingleUnderline ) elif arg_4 == 'sans' : arg_3 . setFontStyleHint ( QtGui . QFont . SansSerif ) elif arg_4 == 'roman' : arg_3 . setFontStyleHint ( QtGui . QFont . Times ) elif arg_4 == 'mono' : arg_3 . setFontStyleHint ( QtGui . QFont . TypeWriter ) return arg_3","id_":252972,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/pygments_highlighter.py#L181-L204","negative":"Clear the output directory."} {"query":"Load sqlite cookies into a cookiejar","positive":"def Func ( arg_0 ) : arg_1 = sqlite3 . connect ( arg_0 . tmp_cookie_file ) arg_2 = arg_1 . cursor ( ) try : arg_2 . execute ( 'SELECT host_key, path, secure, expires_utc, name, value, encrypted_value ' 'FROM cookies WHERE host_key like \"%{}%\";' . format ( arg_0 . domain_name ) ) except sqlite3 . OperationalError : arg_2 . execute ( 'SELECT host_key, path, is_secure, expires_utc, name, value, encrypted_value ' 'FROM cookies WHERE host_key like \"%{}%\";' . format ( arg_0 . domain_name ) ) arg_3 = http . cookiejar . CookieJar ( ) for arg_4 in arg_2 . fetchall ( ) : arg_5 , arg_6 , arg_7 , arg_8 , arg_9 = arg_4 [ : 5 ] arg_10 = arg_0 . _decrypt ( arg_4 [ 5 ] , arg_4 [ 6 ] ) arg_11 = create_cookie ( arg_5 , arg_6 , arg_7 , arg_8 , arg_9 , arg_10 ) arg_3 . set_cookie ( arg_11 ) arg_1 . close ( ) return arg_3","id_":252973,"task_name":"https:\/\/github.com\/borisbabic\/browser_cookie3\/blob\/e695777c54509c286991c5bb5ca65f043d748f55\/__init__.py#L147-L168","negative":"Checks if a temple project is up to date with the repo\n\n Note that the `temple.constants.TEMPLE_ENV_VAR` is set to 'update' for the duration of this\n function.\n\n Args:\n version (str, optional): Update against this git SHA or branch of the template\n\n Returns:\n boolean: True if up to date with ``version`` (or latest version), False otherwise\n\n Raises:\n `NotInGitRepoError`: When running outside of a git repo\n `InvalidTempleProjectError`: When not inside a valid temple repository"} {"query":"Converts a string of comma delimited values and returns a list .","positive":"def Func ( arg_0 ) : if arg_0 is None : return [ ] elif isinstance ( arg_0 , ( tuple , list ) ) : return arg_0 elif not isinstance ( arg_0 , six . string_types ) : raise NotImplementedError ( 'Unknown type: %s' % type ( arg_0 ) ) return [ arg_1 . strip ( ) . lower ( ) for arg_1 in ( arg_0 or '' ) . split ( ',' ) if arg_1 . strip ( ) ]","id_":252974,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/common.py#L335-L345","negative":"Propagate \"clk\" clock and reset \"rst\" signal to all subcomponents"} {"query":"Creates a running coroutine to receive message instances and send them in a futures executor .","positive":"def Func ( ) : with PoolExecutor ( ) as executor : while True : arg_0 = yield arg_1 = executor . submit ( arg_0 . send ) arg_1 . add_done_callback ( _exception_handler )","id_":252975,"task_name":"https:\/\/github.com\/trp07\/messages\/blob\/7789ebc960335a59ea5d319fceed3dd349023648\/messages\/_eventloop.py#L11-L20","negative":"Unregister an extension code. For testing only."} {"query":"Convert hangouts_pb2 . ParticipantId to UserID .","positive":"def Func ( arg_0 ) : return user . UserID ( chat_id = arg_0 . chat_id , gaia_id = arg_0 . 
gaia_id )","id_":252976,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/parsers.py#L32-L37","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. {'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"Remove a case from MatchMaker","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = store . user ( current_user . email ) if 'mme_submitter' not in arg_2 [ 'roles' ] : flash ( 'unauthorized request' , 'warning' ) return redirect ( request . referrer ) arg_3 , arg_4 = institute_and_case ( store , arg_0 , arg_1 ) arg_5 = current_app . config . get ( 'MME_URL' ) arg_6 = current_app . config . get ( 'MME_TOKEN' ) if not arg_5 or not arg_6 : flash ( 'An error occurred reading matchmaker connection parameters. Please check config file!' , 'danger' ) return redirect ( request . referrer ) arg_7 = controllers . mme_delete ( arg_4 , arg_5 , arg_6 ) arg_8 = 0 arg_9 = 'warning' for arg_10 in arg_7 : if arg_10 [ 'status_code' ] == 200 : arg_8 += 1 else : flash ( arg_10 [ 'message' ] , arg_9 ) if arg_8 : arg_9 = 'success' arg_2 = store . user ( current_user . email ) store . case_mme_delete ( arg_4 = arg_4 , arg_2 = arg_2 ) flash ( 'Number of patients deleted from Matchmaker: {} out of {}' . format ( arg_8 , len ( arg_7 ) ) , arg_9 ) return redirect ( request . referrer )","id_":252977,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/views.py#L277-L311","negative":"Build extra args map"} {"query":"Check if partition is mounted","positive":"def Func ( arg_0 ) : with settings ( hide ( 'running' , 'stdout' ) ) : arg_1 = run_as_root ( 'mount' ) for arg_2 in arg_1 . splitlines ( ) : arg_3 = arg_2 . split ( ) if arg_3 [ 0 ] == arg_0 : return True with settings ( hide ( 'running' , 'stdout' ) ) : arg_1 = run_as_root ( 'swapon -s' ) for arg_2 in arg_1 . splitlines ( ) : arg_3 = arg_2 . split ( ) if arg_3 [ 0 ] == arg_0 : return True return False","id_":252978,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/disk.py#L91-L118","negative":"deleteOne - Delete one object\n\n\t\t\t@param obj - object to delete\n\t\t\t@param conn - Connection to reuse, or None\n\n\t\t\t@return - number of items deleted (0 or 1)"} {"query":"Takes in a classifier that supports multiclass classification and X and a y and returns a cross validation score .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 1 ) : arg_5 = [ ] arg_6 = None for arg_7 in range ( arg_4 ) : arg_6 = arg_3 ( ) . fit_transform ( arg_1 , arg_2 ) arg_6 = StandardScaler ( ) . fit_transform ( arg_6 ) arg_5 . append ( cross_validate ( arg_0 , arg_6 , arg_2 , n_jobs = 1 , cv = 5 ) [ 'test_score' ] ) gc . collect ( ) arg_5 = [ arg_2 for z in [ x for x in arg_5 ] for arg_2 in z ] return float ( np . mean ( arg_5 ) ) , float ( np . std ( arg_5 ) ) , arg_5 , arg_6 . 
shape [ 1 ]","id_":252979,"task_name":"https:\/\/github.com\/scikit-learn-contrib\/categorical-encoding\/blob\/5e9e803c9131b377af305d5302723ba2415001da\/examples\/encoding_examples.py#L27-L49","negative":"Generate the delay in seconds in which the DISCOVER will be sent.\n\n [:rfc:`2131#section-4.4.1`]::\n\n The client SHOULD wait a random time between one and ten seconds to\n desynchronize the use of DHCP at startup."} {"query":"Return the number of hgnc genes in collection","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 : LOG . info ( \"Fetching all genes from build %s\" , arg_1 ) else : LOG . info ( \"Fetching all genes\" ) return arg_0 . hgnc_collection . find ( { 'build' : arg_1 } ) . count ( )","id_":252980,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/hgnc.py#L194-L207","negative":"Delete the associated enterprise learner role assignment record when deleting an EnterpriseCustomerUser record."} {"query":"Return the S - stemmed form of a word .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . lower ( ) if arg_2 [ - 3 : ] == 'ies' and arg_2 [ - 4 : - 3 ] not in { 'e' , 'a' } : return arg_1 [ : - 3 ] + ( 'Y' if arg_1 [ - 1 : ] . isupper ( ) else 'y' ) if arg_2 [ - 2 : ] == 'es' and arg_2 [ - 3 : - 2 ] not in { 'a' , 'e' , 'o' } : return arg_1 [ : - 1 ] if arg_2 [ - 1 : ] == 's' and arg_2 [ - 2 : - 1 ] not in { 'u' , 's' } : return arg_1 [ : - 1 ] return arg_1","id_":252981,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/stemmer\/_s_stemmer.py#L42-L77","negative":"Verifies that `parts` don't broadcast."} {"query":"Clears out the current store and gets a cookie . Set the cross site request forgery token for each subsequent request .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . __get ( '\/Store\/Reset' ) arg_2 = arg_0 . session . cookies [ 'XSRF-TOKEN' ] arg_0 . session . headers . update ( { 'X-XSRF-TOKEN' : arg_2 } ) return arg_1","id_":252982,"task_name":"https:\/\/github.com\/tomasbasham\/dominos\/blob\/59729a8bdca0ae30a84115a0e93e9b1f259faf0e\/dominos\/api.py#L43-L56","negative":"Try to convert channel to a unitary representation Operator."} {"query":"Return a dictionary where the key is the group key file path and the values are sets of unique values of the field name of all DICOM files in the group .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = DefaultOrderedDict ( set ) for arg_4 in arg_0 . dicom_groups : for arg_5 in arg_0 . dicom_groups [ arg_4 ] : arg_6 = DicomFile ( arg_5 ) . get_attributes ( arg_1 ) arg_7 = arg_4 if arg_2 is not None : try : arg_7 = str ( DicomFile ( arg_4 ) . get_attributes ( arg_2 ) ) except KeyError as ke : raise KeyError ( 'Error getting field {} from ' 'file {}' . format ( arg_2 , arg_4 ) ) from ke arg_3 [ arg_7 ] . add ( arg_6 ) return arg_3","id_":252983,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/dicom\/comparison.py#L560-L593","negative":"Stop the timer\n\n Returns:\n The time the timer was stopped"} {"query":"Find a single Node among this Node s descendants .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = operator . methodcaller ( 'Func' , * arg_1 , ** arg_2 ) return arg_0 . 
_wrap_node ( arg_3 )","id_":252984,"task_name":"https:\/\/github.com\/ChrisBeaumont\/soupy\/blob\/795f2f61f711f574d5218fc8a3375d02bda1104f\/soupy.py#L999-L1016","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Check all API tokens defined and choose one with most remaining API points","positive":"def Func ( arg_0 ) : if arg_0 . n_tokens == 0 : return arg_1 = 0 if arg_0 . n_tokens > 1 : arg_2 = arg_0 . _get_tokens_rate_limits ( ) arg_1 = arg_2 . index ( max ( arg_2 ) ) logger . debug ( \"Remaining API points: {}, choosen index: {}\" . format ( arg_2 , arg_1 ) ) arg_0 . current_token = arg_0 . tokens [ arg_1 ] arg_0 . session . headers . update ( { 'Authorization' : 'token ' + arg_0 . current_token } ) arg_0 . _update_current_rate_limit ( )","id_":252985,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/github.py#L802-L820","negative":"Checks if the blocks in the RDD matches the expected types.\n\n Parameters:\n -----------\n rdd: splearn.BlockRDD\n The RDD to check\n expected_dtype: {type, list of types, tuple of types, dict of types}\n Expected type(s). If the RDD is a DictRDD the parameter type is\n restricted to dict.\n\n Returns:\n --------\n accept: bool\n Returns if the types are matched."} {"query":"Get the list of EnterpriseCustomerUsers we want to render .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = arg_5 ) : arg_6 = arg_1 . GET . get ( 'page' , 1 ) arg_7 = EnterpriseCustomerUser . objects . filter ( enterprise_customer__uuid = arg_3 ) arg_8 = arg_7 . values_list ( 'user_id' , flat = True ) arg_9 = User . objects . filter ( pk__in = arg_8 ) if arg_2 is not None : arg_9 = arg_9 . filter ( Q ( email__icontains = arg_2 ) | Q ( username__icontains = arg_2 ) ) arg_10 = arg_9 . values_list ( 'pk' , flat = True ) arg_7 = arg_7 . filter ( user_id__in = arg_10 ) return paginated_list ( arg_7 , arg_6 , arg_4 )","id_":252986,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/admin\/views.py#L246-L267","negative":"Remove `self` from the containing `DiscoItems` object."} {"query":"Get base - 64 encoded data as a string for the given image . Fallback to return fallback_image_file if cannot get the image data or img is None .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 is None : return arg_1 arg_2 = arg_0 . format if arg_2 . lower ( ) in [ 'tif' , 'tiff' ] : arg_2 = 'JPEG' try : arg_3 = io . BytesIO ( ) arg_0 . save ( arg_3 , arg_2 ) arg_4 = arg_3 . getvalue ( ) arg_5 = base64 . b64encode ( arg_4 ) return 'data:image\/%s;base64,%s' % ( arg_2 . lower ( ) , arg_5 ) except IOError as exptn : print ( 'IOError while saving image bytes: %s' % exptn ) return arg_1","id_":252987,"task_name":"https:\/\/github.com\/dade-ai\/snipy\/blob\/408520867179f99b3158b57520e2619f3fecd69b\/snipy\/imageme.py#L306-L335","negative":"Set the frame of the completer."} {"query":"Compute LST for observation","positive":"def Func ( arg_0 ) : if arg_0 . header [ b'telescope_id' ] == 6 : arg_0 . coords = gbt_coords elif arg_0 . header [ b'telescope_id' ] == 4 : arg_0 . coords = parkes_coords else : raise RuntimeError ( \"Currently only Parkes and GBT supported\" ) if HAS_SLALIB : arg_2 = 0.0 arg_3 = arg_0 . 
header [ b'tstart' ] arg_4 = np . deg2rad ( arg_0 . coords [ 1 ] ) arg_5 = s . sla_gmst ( arg_3 ) - arg_4 + s . sla_eqeqx ( arg_3 ) + arg_2 if arg_5 < 0.0 : arg_5 = arg_5 + 2.0 * np . pi return arg_5 else : raise RuntimeError ( \"This method requires pySLALIB\" )","id_":252988,"task_name":"https:\/\/github.com\/UCBerkeleySETI\/blimpy\/blob\/b8822d3e3e911944370d84371a91fa0c29e9772e\/blimpy\/filterbank.py#L328-L346","negative":"Reshape input and output dimensions of operator.\n\n Arg:\n input_dims (tuple): new subsystem input dimensions.\n output_dims (tuple): new subsystem output dimensions.\n\n Returns:\n Operator: returns self with reshaped input and output dimensions.\n\n Raises:\n QiskitError: if combined size of all subsystem input dimension or\n subsystem output dimensions is not constant."} {"query":"Create a venv with these packages in a temp dir and yielf the env .","positive":"def Func ( arg_0 ) : with tempfile . TemporaryDirectory ( ) as tempdir : arg_1 = create ( tempdir , with_pip = True ) arg_2 = [ arg_1 . env_exe , \"-m\" , \"pip\" , \"install\" , ] subprocess . check_call ( arg_2 + [ '-U' , 'pip' ] ) if arg_0 : subprocess . check_call ( arg_2 + arg_0 ) yield arg_1","id_":252989,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-sdk-tools\/packaging_tools\/venvtools.py#L30-L46","negative":"Removes a NIC from the load balancer.\n\n :param datacenter_id: The unique ID of the data center.\n :type datacenter_id: ``str``\n\n :param loadbalancer_id: The unique ID of the load balancer.\n :type loadbalancer_id: ``str``\n\n :param nic_id: The unique ID of the NIC.\n :type nic_id: ``str``"} {"query":"remove first and last lines to get only json","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . text [ arg_0 . text . find ( '\\n' ) + 1 : arg_0 . text . rfind ( '\\n' ) - 2 ] arg_2 = [ ] try : arg_3 = float ( json . loads ( arg_1 ) [ 'conversion' ] [ 'converted-amount' ] ) except : return arg_2 arg_4 = '{0} {1} = {2} {3}, 1 {1} ({5}) = {4} {3} ({6})' . format ( arg_0 . search_params [ 'amount' ] , arg_0 . search_params [ 'from' ] , arg_0 . search_params [ 'amount' ] * arg_3 , arg_0 . search_params [ 'to' ] , arg_3 , arg_0 . search_params [ 'from_name' ] , arg_0 . search_params [ 'to_name' ] , ) arg_5 = 'https:\/\/duckduckgo.com\/js\/spice\/currency\/1\/{0}\/{1}' . format ( arg_0 . search_params [ 'from' ] . upper ( ) , arg_0 . search_params [ 'to' ] ) arg_2 . append ( { 'answer' : arg_4 , 'url' : arg_5 } ) return arg_2","id_":252990,"task_name":"https:\/\/github.com\/asciimoo\/searx\/blob\/a84caa22cf947e973c10aa968d35fb2bdda6d048\/searx\/engines\/currency_convert.py#L64-L87","negative":"Validates API Root information. Raises errors for required\n properties."} {"query":"Bind and activate HTTP server .","positive":"def Func ( arg_0 ) : HTTPServer . __init__ ( arg_0 , ( arg_0 . host , arg_0 . port ) , HTTPRequestHandler ) arg_0 . port = arg_0 . server_port","id_":252991,"task_name":"https:\/\/github.com\/proofit404\/service-factory\/blob\/a09d4e097e5599244564a2a7f0611e58efb4156a\/service_factory\/providers\/basehttp.py#L68-L72","negative":"Get a temp filename for atomic download."} {"query":"Returns list of assertions related to lu_reconstruct assumptions .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] arg_4 = 'Input `lower_upper` must have at least 2 dimensions.' if arg_0 . shape . ndims is not None : if arg_0 . shape . ndims < 2 : raise ValueError ( arg_4 ) elif arg_2 : arg_3 . append ( tf . compat . v1 . 
assert_rank_at_least ( arg_0 , rank = 2 , arg_4 = arg_4 ) ) arg_4 = '`rank(lower_upper)` must equal `rank(perm) + 1`' if arg_0 . shape . ndims is not None and arg_1 . shape . ndims is not None : if arg_0 . shape . ndims != arg_1 . shape . ndims + 1 : raise ValueError ( arg_4 ) elif arg_2 : arg_3 . append ( tf . compat . v1 . assert_rank ( arg_0 , rank = tf . rank ( arg_1 ) + 1 , arg_4 = arg_4 ) ) arg_4 = '`lower_upper` must be square.' if arg_0 . shape [ : - 2 ] . is_fully_defined ( ) : if arg_0 . shape [ - 2 ] != arg_0 . shape [ - 1 ] : raise ValueError ( arg_4 ) elif arg_2 : arg_5 , arg_6 = tf . split ( tf . shape ( input = arg_0 ) [ - 2 : ] , num_or_size_splits = 2 ) arg_3 . append ( tf . compat . v1 . assert_equal ( arg_5 , arg_6 , arg_4 = arg_4 ) ) return arg_3","id_":252992,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/math\/linalg.py#L679-L708","negative":"Downloads a MP4 or WebM file that is associated with the video at the URL passed.\n\n :param str url: URL of the video to be downloaded\n :return str: Filename of the file in local storage"} {"query":"Validate and set all known tags on a port .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : for arg_3 , arg_4 in arg_0 . tags . items ( ) : if arg_3 in arg_2 : arg_5 = arg_2 . pop ( arg_3 ) if arg_5 : try : arg_4 . set ( arg_1 , arg_5 ) except TagValidationError as e : raise n_exc . BadRequest ( resource = \"tags\" , msg = \"%s\" % ( e . message ) )","id_":252993,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/tags.py#L154-L165","negative":"Get events from this conversation.\n\n Makes a request to load historical events if necessary.\n\n Args:\n event_id (str): (optional) If provided, return events preceding\n this event, otherwise return the newest events.\n max_events (int): Maximum number of events to return. Defaults to\n 50.\n\n Returns:\n List of :class:`.ConversationEvent` instances, ordered\n newest-first.\n\n Raises:\n KeyError: If ``event_id`` does not correspond to a known event.\n .NetworkError: If the events could not be requested."} {"query":"Append next object to pipe tail .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 . chained = True if arg_0 . next : arg_0 . next . Func ( arg_1 ) else : arg_0 . next = arg_1","id_":252994,"task_name":"https:\/\/github.com\/GaryLee\/cmdlet\/blob\/5852a63fc2c7dd723a3d7abe18455f8dacb49433\/cmdlet\/cmdlet.py#L115-L125","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Function to recursively upload a folder and all of its descendants .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , arg_3 = False ) : if arg_2 and _has_only_files ( arg_0 ) : print ( 'Creating item from {0}' . format ( arg_0 ) ) _upload_folder_as_item ( arg_0 , arg_1 , arg_3 ) return else : print ( 'Creating folder from {0}' . format ( arg_0 ) ) arg_4 = _create_or_reuse_folder ( arg_0 , arg_1 , arg_3 ) for arg_5 in sorted ( os . listdir ( arg_0 ) ) : arg_6 = os . path . join ( arg_0 , arg_5 ) if os . path . islink ( arg_6 ) : continue elif os . path . 
isdir ( arg_6 ) : Func ( arg_6 , arg_4 , arg_2 , arg_3 ) else : print ( 'Uploading item from {0}' . format ( arg_6 ) ) _upload_as_item ( arg_5 , arg_4 , arg_6 , arg_3 )","id_":252995,"task_name":"https:\/\/github.com\/midasplatform\/pydas\/blob\/e5f9e96e754fb2dc5da187b05e4abc77a9b2affd\/pydas\/api.py#L349-L394","negative":"Return new rrule with same attributes except for those attributes given new\n values by whichever keyword arguments are specified."} {"query":"Flat plane parameterization","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = False ) : arg_3 = '' . join ( [ ' \\n' , ' \\n' , ' \\n' , ' <\/filter>\\n' ] ) util . write_filter ( arg_0 , arg_3 ) return None","id_":252996,"task_name":"https:\/\/github.com\/3DLIRIOUS\/MeshLabXML\/blob\/177cce21e92baca500f56a932d66bd9a33257af8\/meshlabxml\/texture.py#L6-L30","negative":"Stop and remove a worker"} {"query":"Return a data structure evaluated as a reader macro from the input stream .","positive":"def Func ( arg_0 : arg_1 ) -> LispReaderForm : arg_2 = arg_0 . reader . advance ( ) assert arg_2 == \"#\" arg_3 = arg_0 . reader . peek ( ) if arg_3 == \"{\" : return _read_set ( arg_0 ) elif arg_3 == \"(\" : return _read_function ( arg_0 ) elif arg_3 == \"'\" : arg_0 . reader . advance ( ) arg_4 = _read_sym ( arg_0 ) return llist . l ( _VAR , arg_4 ) elif arg_3 == '\"' : return _read_regex ( arg_0 ) elif arg_3 == \"_\" : arg_0 . reader . advance ( ) _read_next ( arg_0 ) return COMMENT elif ns_name_chars . match ( arg_3 ) : arg_4 = _read_sym ( arg_0 ) assert isinstance ( arg_4 , symbol . Symbol ) arg_5 = _read_next_consuming_comment ( arg_0 ) if arg_4 in arg_0 . data_readers : arg_6 = arg_0 . data_readers [ arg_4 ] return arg_6 ( arg_5 ) else : raise SyntaxError ( f\"No data reader found for tag #{s}\" ) raise SyntaxError ( f\"Unexpected token '{token}' in reader macro\" )","id_":252997,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/reader.py#L925-L955","negative":"Infer causal relationships between 2 variables using the RECI statistic\n\n :param a: Input variable 1\n :param b: Input variable 2\n :return: Causation coefficient (Value : 1 if a->b and -1 if b->a)\n :rtype: float"} {"query":"Generate the binary strings for a comma seperated list of commands .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . split ( ',' ) : arg_2 = [ 0 , 0 ] arg_3 , arg_1 = arg_1 . strip ( ) . upper ( ) . split ( None , 1 ) arg_2 [ 0 ] = houseCodes [ arg_3 [ 0 ] ] if len ( arg_3 ) > 1 : arg_4 = deviceNumbers [ arg_3 [ 1 : ] ] arg_2 [ 0 ] |= arg_4 [ 0 ] arg_2 [ 1 ] = arg_4 [ 1 ] arg_2 [ 1 ] |= commandCodes [ arg_1 ] yield ' ' . join ( map ( _strBinary , arg_2 ) )","id_":252998,"task_name":"https:\/\/github.com\/clach04\/x10_any\/blob\/5b90a543b127ab9e6112fd547929b5ef4b8f0cbc\/x10_any\/cm17a.py#L73-L93","negative":"Adjust contrast of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n contrast_factor (float): How much to adjust the contrast. Can be any\n non negative number. 0 gives a solid gray image, 1 gives the\n original image while 2 increases the contrast by a factor of 2.\n\n Returns:\n PIL Image: Contrast adjusted image."} {"query":"Populate array with random quartets sampled from a generator . Holding all sets in memory might take a lot but holding a very large list of random numbers for which ones to sample will fit into memory for most reasonable sized sets . 
So we ll load a list of random numbers in the range of the length of total sets that can be generated then only keep sets from the set generator if they are in the int list . I did several tests to check that random pairs are as likely as 0 & 1 to come up together in a random quartet set .","positive":"def Func ( arg_0 ) : with h5py . File ( arg_0 . database . input , 'a' ) as io5 : arg_1 = io5 [ \"quartets\" ] arg_2 = itertools . combinations ( xrange ( len ( arg_0 . samples ) ) , 4 ) arg_3 = np . arange ( 0 , n_choose_k ( len ( arg_0 . samples ) , 4 ) ) np . random . shuffle ( arg_3 ) arg_4 = arg_3 [ : arg_0 . params . nquartets ] arg_5 = np . sort ( arg_4 ) arg_6 = iter ( arg_5 ) del arg_3 , arg_4 print ( arg_0 . _chunksize ) arg_7 = arg_6 . next ( ) arg_8 = np . zeros ( ( arg_0 . params . nquartets , 4 ) , dtype = np . uint16 ) arg_9 = 0 while 1 : try : for arg_10 , arg_11 in enumerate ( arg_2 ) : if arg_10 == arg_7 : arg_8 [ arg_9 ] = arg_11 arg_9 += 1 arg_7 = arg_6 . next ( ) if not arg_10 % arg_0 . _chunksize : print ( min ( arg_10 , arg_0 . params . nquartets ) ) except StopIteration : break arg_1 [ : ] = arg_8 del arg_8","id_":252999,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/analysis\/tetrad2.py#L1118-L1166","negative":"Calls consume_function for each element of this streamlet. This function returns nothing"} {"query":"Assign MUC stanza handlers to the self . stream .","positive":"def Func ( arg_0 , arg_1 = 10 ) : arg_0 . stream . set_message_handler ( \"groupchat\" , arg_0 . __groupchat_message , None , arg_1 ) arg_0 . stream . set_message_handler ( \"error\" , arg_0 . __error_message , None , arg_1 ) arg_0 . stream . set_presence_handler ( \"available\" , arg_0 . __presence_available , None , arg_1 ) arg_0 . stream . set_presence_handler ( \"unavailable\" , arg_0 . __presence_unavailable , None , arg_1 ) arg_0 . stream . set_presence_handler ( \"error\" , arg_0 . __presence_error , None , arg_1 )","id_":253000,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/muc\/muc.py#L833-L846","negative":"creates a new, empty table in the dataset;\n If the table already exists, update the existing table.\n Since BigQuery does not natively allow table upserts, this is not an\n atomic operation.\n\n :param dataset_id: the dataset to upsert the table into.\n :type dataset_id: str\n :param table_resource: a table resource. see\n https:\/\/cloud.google.com\/bigquery\/docs\/reference\/v2\/tables#resource\n :type table_resource: dict\n :param project_id: the project to upsert the table into. If None,\n project will be self.project_id.\n :return:"} {"query":"Main run method for the noise adaptive layout .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _initialize_backend_prop ( ) arg_2 = arg_0 . _create_program_graph ( arg_1 ) if arg_2 > len ( arg_0 . swap_graph ) : raise TranspilerError ( 'Number of qubits greater than device.' ) for arg_3 , arg_4 , arg_5 in sorted ( arg_0 . prog_graph . edges ( data = True ) , key = lambda x : x [ 2 ] [ 'weight' ] , reverse = True ) : arg_0 . pending_program_edges . append ( ( arg_3 , arg_4 ) ) while arg_0 . pending_program_edges : arg_6 = arg_0 . _select_next_edge ( ) arg_7 = arg_6 [ 0 ] in arg_0 . prog2hw arg_8 = arg_6 [ 1 ] in arg_0 . prog2hw if ( not arg_7 ) and ( not arg_8 ) : arg_9 = arg_0 . _select_best_remaining_cx ( ) arg_0 . prog2hw [ arg_6 [ 0 ] ] = arg_9 [ 0 ] arg_0 . prog2hw [ arg_6 [ 1 ] ] = arg_9 [ 1 ] arg_0 . 
{"query":"This will return a single tr element with all tds already populated .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = etree . Element ( 'tr' ) arg_4 = get_namespace ( arg_0 , 'w' ) arg_5 = [ ] for arg_6 in arg_0 : if arg_6 in arg_5 : continue arg_5 . append ( arg_6 ) if arg_6 . tag == '%stc' % arg_4 : arg_7 = get_v_merge ( arg_6 ) if ( arg_7 is not None and arg_7 . get ( '%sval' % arg_4 ) != 'restart' ) : continue arg_8 = [ ] for arg_9 in arg_6 : if arg_9 in arg_5 : continue if is_li ( arg_9 , arg_1 ) : arg_10 = get_single_list_nodes_data ( arg_9 , arg_1 , ) arg_11 , arg_12 = build_list ( arg_10 , arg_1 , ) arg_5 . extend ( arg_12 ) arg_8 . append ( etree . tostring ( arg_11 ) ) elif arg_9 . tag == '%stbl' % arg_4 : arg_13 , arg_14 = build_table ( arg_9 , arg_1 , ) arg_5 . extend ( arg_14 ) arg_8 . append ( etree . tostring ( arg_13 ) ) elif arg_9 . tag == '%stcPr' % arg_4 : arg_5 . append ( arg_9 ) continue else : arg_15 = get_element_content ( arg_9 , arg_1 , is_td = True , ) arg_8 . append ( arg_15 ) arg_16 = ' ' . join ( t for t in arg_8 if t is not None ) arg_17 = etree . XML ( '<td>%s<\/td>' % arg_16 ) arg_18 = get_grid_span ( arg_6 ) if arg_18 > 1 : arg_17 . set ( 'colspan' , '%d' % arg_18 ) arg_7 = get_v_merge ( arg_6 ) if ( arg_7 is not None and arg_7 . get ( '%sval' % arg_4 ) == 'restart' ) : arg_19 = next ( arg_2 ) arg_17 . set ( 'rowspan' , '%d' % arg_19 ) arg_3 . append ( arg_17 ) return arg_3","id_":253002,"task_name":"https:\/\/github.com\/PolicyStat\/docx2html\/blob\/2dc4afd1e3a3f2f0b357d0bff903eb58bcc94429\/docx2html\/core.py#L1052-L1133","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."}
{"query":"Parse a docstring into ParameterInfo and ReturnInfo objects .","positive":"def Func ( arg_0 ) : arg_0 = inspect . cleandoc ( arg_0 ) arg_1 = arg_0 . split ( '\\n' ) arg_2 = None arg_3 = None arg_4 = { } arg_5 = None for arg_6 in arg_1 : arg_6 = arg_6 . rstrip ( ) if len ( arg_6 ) == 0 : continue elif str ( arg_6 ) == 'Args:' : arg_2 = 'args' arg_3 = None continue elif str ( arg_6 ) == 'Returns:' : arg_2 = 'return' arg_3 = None continue if arg_2 is not None : arg_7 = arg_6 . lstrip ( ) arg_8 = len ( arg_6 ) - len ( arg_7 ) if arg_3 is None : arg_3 = arg_8 if arg_8 != arg_3 : continue if arg_2 == 'args' : arg_9 , arg_10 = parse_param ( arg_7 ) arg_4 [ arg_9 ] = arg_10 elif arg_2 == 'return' : arg_5 = parse_return ( arg_7 ) return arg_4 , arg_5","id_":253003,"task_name":"https:\/\/github.com\/iotile\/typedargs\/blob\/0a5091a664b9b4d836e091e9ba583e944f438fd8\/typedargs\/doc_annotate.py#L7-L50","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"}
{"query":"Function for doing an upload of a file as an item . This should be a building block for user - level functions .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False ) : arg_4 = _create_or_reuse_item ( arg_0 , arg_1 , arg_3 ) _create_bitstream ( arg_2 , arg_0 , arg_4 ) for arg_5 in session . item_upload_callbacks : arg_5 ( session . communicator , session . token , arg_4 )","id_":253004,"task_name":"https:\/\/github.com\/midasplatform\/pydas\/blob\/e5f9e96e754fb2dc5da187b05e4abc77a9b2affd\/pydas\/api.py#L307-L328","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."}
file )","id_":253005,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/qasm\/qasmparser.py#L154-L179","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"Code to load create user module . Copied off django - browserid .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . rfind ( \".\" ) arg_2 , arg_3 = arg_0 [ : arg_1 ] , arg_0 [ arg_1 + 1 : ] try : arg_4 = import_module ( arg_2 ) except ImportError : raise ImproperlyConfigured ( \"Error importing CAN_LOGIN_AS function: {}\" . format ( arg_2 ) ) except ValueError : raise ImproperlyConfigured ( \"Error importing CAN_LOGIN_AS\" \" function. Is CAN_LOGIN_AS a\" \" string?\" ) try : arg_5 = getattr ( arg_4 , arg_3 ) except AttributeError : raise ImproperlyConfigured ( \"Module {0} does not define a {1} \" \"function.\" . format ( arg_2 , arg_3 ) ) return arg_5","id_":253006,"task_name":"https:\/\/github.com\/skorokithakis\/django-loginas\/blob\/6257857b40ed5b59e4c59a3af4b54d4856cacaf0\/loginas\/views.py#L27-L44","negative":"Set renewal, rebinding times."} {"query":"push an image to Globus endpoint . In this case the name is the globus endpoint id and path .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : arg_4 , arg_5 = arg_0 . _parse_endpoint_name ( arg_2 ) arg_1 = os . path . abspath ( arg_1 ) arg_6 = os . path . basename ( arg_1 ) bot . debug ( \"PUSH %s\" % arg_1 ) arg_7 = parse_image_name ( arg_6 ) if not os . path . exists ( arg_1 ) : bot . error ( '%s does not exist.' % arg_1 ) sys . exit ( 1 ) if not hasattr ( arg_0 , 'transfer_client' ) : arg_0 . _init_transfer_client ( ) arg_8 = arg_0 . _get_endpoints ( ) if len ( arg_8 [ 'my-endpoints' ] ) == 0 : bot . error ( 'You must have a personal endpoint to transfer the container' ) sys . exit ( 1 ) arg_9 = None for arg_10 , arg_11 in arg_8 [ 'my-endpoints' ] . items ( ) : if arg_11 [ 'gcp_connected' ] is True : arg_9 = arg_11 break if arg_9 is None : bot . error ( 'No activated local endpoints online! Go online to transfer' ) sys . exit ( 1 ) arg_0 . _create_endpoint_cache ( arg_4 ) arg_12 = arg_0 . add ( image_path = arg_1 , image_uri = arg_7 [ 'uri' ] , copy = True ) arg_13 = \"Singularity Registry Transfer for %s\" % arg_12 . name arg_14 = globus_sdk . TransferData ( arg_0 . transfer_client , arg_9 [ 'id' ] , arg_4 , arg_13 = arg_13 , sync_level = \"checksum\" ) arg_6 = \".singularity\/shub\/%s\" % arg_6 arg_14 . add_item ( arg_12 . image , arg_6 ) bot . info ( 'Requesting transfer from local %s to %s:%s' % ( SREGISTRY_STORAGE , arg_4 , arg_6 ) ) arg_15 = arg_0 . transfer_client . submit_transfer ( arg_14 ) bot . info ( arg_15 [ 'message' ] ) return arg_15","id_":253007,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/globus\/push.py#L24-L97","negative":"Enumerate all possible resonance forms and return them as a list.\n\n :param mol: The input molecule.\n :type mol: rdkit.Chem.rdchem.Mol\n :return: A list of all possible resonance forms of the molecule.\n :rtype: list of rdkit.Chem.rdchem.Mol"} {"query":"r Calculate Parachor for a pure species using its density in the liquid and gas phases surface tension and molecular weight .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_1 , arg_2 = arg_1 * 1000. , arg_2 * 1000. 
return arg_3 ** 0.25 * arg_0 \/ ( arg_1 - arg_2 )","id_":253008,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/utils.py#L164-L222","negative":"delete a backend, and update the secrets file"} {"query":"Serial command to set seasons table .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = \"00000000\" ) : arg_3 = False arg_0 . setContext ( \"Func\" ) if not arg_1 : arg_1 = arg_0 . m_seasons_sched_params try : if not arg_0 . request ( False ) : arg_0 . writeCmdMsg ( \"Bad read CRC on setting\" ) else : if not arg_0 . serialCmdPwdAuth ( arg_2 ) : arg_0 . writeCmdMsg ( \"Password failure\" ) else : arg_4 = \"\" arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_1_Start_Month\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_1_Start_Day\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_1_Schedule\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_2_Start_Month\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_2_Start_Day\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_2_Schedule\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_3_Start_Month\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_3_Start_Day\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_3_Schedule\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_4_Start_Month\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_4_Start_Day\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( arg_1 [ \"Season_4_Schedule\" ] ) . zfill ( 2 ) ) arg_4 += binascii . hexlify ( str ( 0 ) . zfill ( 24 ) ) arg_5 = \"015731023030383028\" + arg_4 + \"2903\" arg_5 += arg_0 . calc_crc16 ( arg_5 [ 2 : ] . decode ( \"hex\" ) ) arg_0 . m_serial_port . write ( arg_5 . decode ( \"hex\" ) ) if arg_0 . m_serial_port . getResponse ( arg_0 . getContext ( ) ) . encode ( \"hex\" ) == \"06\" : arg_0 . writeCmdMsg ( \"Success(Func): 06 returned.\" ) arg_3 = True arg_0 . serialPostEnd ( ) except : ekm_log ( traceback . format_exc ( sys . exc_info ( ) ) ) arg_0 . setContext ( \"\" ) return arg_3","id_":253009,"task_name":"https:\/\/github.com\/ekmmetering\/ekmmeters\/blob\/b3748bdf30263bfa46ea40157bdf8df2522e1904\/ekmmeters.py#L2494-L2544","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."} {"query":"Return a dictionary of distances keyed by the keys in the given dict .","positive":"def Func ( arg_0 : arg_1 [ arg_2 ] , arg_3 : arg_4 [ arg_5 , arg_6 [ arg_2 ] ] ) -> arg_4 [ arg_5 , float ] : arg_7 = set ( arg_0 ) return { arg_8 : tanimoto_set_similarity ( arg_7 , arg_9 ) for arg_8 , arg_9 in arg_3 . 
items ( ) }","id_":253010,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/utils.py#L101-L118","negative":"Export as a ``cryptography`` certificate signing request.\n\n :rtype: ``cryptography.x509.CertificateSigningRequest``\n\n .. versionadded:: 17.1.0"} {"query":"random - fill a buffer with random data","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = 0 if arg_3 != 0 : if arg_3 > Decree . CGC_SSIZE_MAX or arg_3 < 0 : arg_5 = Decree . CGC_EINVAL else : if arg_2 not in arg_1 . memory or ( arg_2 + arg_3 ) not in arg_1 . memory : logger . info ( \"RANDOM: buf points to invalid address. Returning EFAULT\" ) return Decree . CGC_EFAULT with open ( \"\/dev\/urandom\" , \"rb\" ) as f : arg_6 = f . read ( arg_3 ) arg_0 . syscall_trace . append ( ( \"_random\" , - 1 , arg_6 ) ) arg_1 . write_bytes ( arg_2 , arg_6 ) if arg_4 : if arg_4 not in arg_1 . memory : logger . info ( \"RANDOM: Not valid rnd_bytes. Returning EFAULT\" ) return Decree . CGC_EFAULT arg_1 . write_int ( arg_4 , len ( arg_6 ) , 32 ) logger . info ( \"RANDOM(0x%08x, %d, 0x%08x) -> <%s>)\" % ( arg_2 , arg_3 , arg_4 , repr ( arg_6 [ : 10 ] ) ) ) return arg_5","id_":253011,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/decree.py#L447-L488","negative":"Hunt down the settings.py module by going up the FS path"} {"query":"Decodes an encoded 7 - bit ASCII header value into it s actual value .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = decode_header ( arg_0 ) [ 0 ] if arg_2 : return arg_1 . decode ( arg_2 ) else : return arg_1","id_":253012,"task_name":"https:\/\/github.com\/disqus\/django-mailviews\/blob\/9993d5e911d545b3bc038433986c5f6812e7e965\/mailviews\/previews.py#L38-L46","negative":"Shrinks the trajectory and removes all exploration ranges from the parameters.\n Only possible if the trajectory has not been stored to disk before or was loaded as new.\n\n :param force:\n\n Usually you cannot shrink the trajectory if it has been stored to disk,\n because there's no guarantee that it is actually shrunk if there\n still exist explored parameters on disk. In case you are certain that\n you did not store explored parameters to disk set or you deleted all\n of them from disk set `force=True`.\n\n :raises: TypeError if the trajectory was stored before."} {"query":"Construct a TaskInstance from the database based on the primary key","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False ) : arg_3 = airflow . models . TaskInstance arg_4 = arg_1 . query ( arg_3 ) . filter ( arg_3 . dag_id == arg_0 . _dag_id , arg_3 . task_id == arg_0 . _task_id , arg_3 . execution_date == arg_0 . _execution_date ) if arg_2 : arg_5 = arg_4 . with_for_update ( ) . first ( ) else : arg_5 = arg_4 . first ( ) return arg_5","id_":253013,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/utils\/dag_processing.py#L213-L233","negative":"Validate access token.\n\n :param token: A string of random characters\n :param scopes: A list of scopes\n :param request: The Request object passed by oauthlib\n\n The validation validates:\n\n 1) if the token is available\n 2) if the token has expired\n 3) if the scopes are available"} {"query":"Load a lexicon from a JSON file .","positive":"def Func ( arg_0 , arg_1 ) : with open ( arg_1 , 'r' ) as fp : return arg_0 ( json . 
load ( fp ) )","id_":253014,"task_name":"https:\/\/github.com\/agile-geoscience\/striplog\/blob\/8033b673a151f96c29802b43763e863519a3124c\/striplog\/lexicon.py#L72-L80","negative":"Update the estimate.\n\n Parameters\n ----------\n new_val: float\n new observated value of estimated quantity."} {"query":"Removes a NIC from the server .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . _perform_request ( url = '\/datacenters\/%s\/servers\/%s\/nics\/%s' % ( arg_1 , arg_2 , arg_3 ) , method = 'DELETE' ) return arg_4","id_":253015,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1132-L1153","negative":"New project."} {"query":"Add the specific arguments of this CLI","positive":"def Func ( arg_0 ) : MetricCommon . Func ( arg_0 ) arg_0 . parser . add_argument ( '-n' , '--metric-name' , dest = 'metricName' , action = 'store' , required = True , metavar = 'metric_name' , help = 'Metric identifier' ) arg_0 . parser . add_argument ( '-d' , '--display-name' , dest = 'displayName' , action = 'store' , required = True , metavar = 'display_name' , help = 'Metric display name' ) arg_0 . parser . add_argument ( '-s' , '--display-name-short' , dest = 'displayNameShort' , action = 'store' , required = True , metavar = 'display_short_name' , help = 'Metric short display name' ) arg_0 . parser . add_argument ( '-i' , '--description' , dest = 'description' , action = 'store' , required = not arg_0 . update , metavar = 'description' , help = 'Metric description' ) arg_0 . parser . add_argument ( '-g' , '--aggregate' , dest = 'aggregate' , action = 'store' , required = True , choices = [ 'avg' , 'max' , 'min' , 'sum' ] , help = 'Metric default aggregate' ) arg_0 . parser . add_argument ( '-u' , '--unit' , dest = 'unit' , action = 'store' , required = False , choices = [ 'percent' , 'number' , 'bytecount' , 'duration' ] , help = 'Metric unit' ) arg_0 . parser . add_argument ( '-r' , '--resolution' , dest = 'resolution' , action = 'store' , metavar = 'resolution' , required = False , help = 'Metric default resolution' ) arg_0 . parser . add_argument ( '-y' , '--type' , dest = 'type' , action = 'store' , default = None , required = False , metavar = 'type' , help = 'Sets the type metadata field' ) arg_0 . parser . add_argument ( '-x' , '--is-disabled' , dest = 'isDisabled' , action = 'store' , default = None , required = False , choices = [ 'true' , 'false' ] , help = 'Enable or disable the metric definition' )","id_":253016,"task_name":"https:\/\/github.com\/boundary\/pulse-api-cli\/blob\/b01ca65b442eed19faac309c9d62bbc3cb2c098f\/boundary\/metric_modify.py#L42-L67","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Custom loads function with an object_hook and automatic decoding","positive":"def Func ( arg_0 , arg_1 = \"utf-8\" , ** arg_2 ) : if isinstance ( arg_0 , bytes ) : arg_0 = arg_0 . decode ( arg_1 ) return json . Func ( arg_0 , object_hook = JSONData , ** arg_2 )","id_":253017,"task_name":"https:\/\/github.com\/odrling\/peony-twitter\/blob\/967f98e16e1889389540f2e6acbf7cc7a1a80203\/peony\/data_processing.py#L149-L172","negative":"r\"\"\"Generate the mexican hat wavelet\n\n The Mexican wavelet is:\n\n .. math:: w[x] = \\cos{5x} \\exp^{-x^2\/2}\n\n :param lb: lower bound\n :param ub: upper bound\n :param int n: waveform data samples\n :return: the waveform\n\n .. 
plot::\n :include-source:\n :width: 80%\n\n from spectrum import mexican\n from pylab import plot\n plot(mexican(0, 10, 100))"} {"query":"Plotting wrapper for hierarchical segmentations","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 , arg_3 = Func_flatten ( arg_0 ) arg_2 = [ np . asarray ( _ ) for _ in arg_2 ] return mir_eval . display . Func ( arg_2 , arg_3 , ** arg_1 )","id_":253018,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/display.py#L71-L76","negative":"This function handles the retrieval of Short-term Exposure Limit on\n worker exposure to dangerous chemicals.\n\n This API is considered experimental, and is expected to be removed in a\n future release in favor of a more complete object-oriented interface.\n\n >>> STEL('67-64-1')\n (750.0, 'ppm')\n >>> STEL('7664-38-2')\n (0.7489774978301237, 'ppm')\n >>> STEL('55720-99-5')\n (2.0, 'mg\/m^3')\n >>> STEL('86290-81-5', AvailableMethods=True)\n ['Ontario Limits', 'None']"} {"query":"Gets the file including name and payload .","positive":"def Func ( arg_0 ) : return arg_0 . part . Funcname ( ) , arg_0 . part . get_payload ( decode = True )","id_":253019,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/imap_hook.py#L309-L316","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Makes a subfolder for plots .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = os . path . join ( arg_1 . analysis . plot_folder , arg_1 . v_name , arg_1 . v_crun ) arg_2 = os . path . abspath ( arg_2 ) if not os . path . isdir ( arg_2 ) : os . makedirs ( arg_2 ) return arg_2","id_":253020,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/examples\/example_24_large_scale_brian2_simulation\/clusternet.py#L742-L754","negative":"Gets back all response headers."} {"query":"Helper to get optional details about const references","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 if arg_1 is not None : try : arg_2 = arg_1 [ arg_0 ] except IndexError : raise ValidationError ( \"Consts value out of range: {}\" . 
format ( arg_0 ) ) from None return arg_2 , repr ( arg_2 )","id_":253021,"task_name":"https:\/\/github.com\/Fuyukai\/Pyte\/blob\/7ef04938d80f8b646bd73d976ac9787a5b88edd9\/pyte\/util.py#L136-L150","negative":"Extracts the update time from a ReMo item.\n\n The timestamp is extracted from 'end' field.\n This date is converted to a perceval format using a float value.\n\n :param item: item generated by the backend\n\n :returns: a UNIX timestamp"} {"query":"Update boost factors when local inhibition is used","positive":"def Func ( arg_0 ) : arg_1 = numpy . zeros ( arg_0 . _numColumns , dtype = realDType ) for arg_2 in xrange ( arg_0 . _numColumns ) : arg_3 = arg_0 . _getColumnNeighborhood ( arg_2 ) arg_1 [ arg_2 ] = numpy . mean ( arg_0 . _activeDutyCycles [ arg_3 ] ) arg_0 . _boostFactors = numpy . exp ( ( arg_1 - arg_0 . _activeDutyCycles ) * arg_0 . _boostStrength )","id_":253022,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/spatial_pooler.py#L1496-L1509","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Finish computation of prob on one element of the inverse image .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , ** arg_5 ) : arg_2 = arg_0 . _maybe_rotate_dims ( arg_2 , rotate_right = True ) arg_6 = arg_0 . distribution . prob ( arg_2 , ** arg_5 ) if arg_0 . _is_maybe_event_override : arg_6 = tf . reduce_prod ( input_tensor = arg_6 , axis = arg_0 . _reduce_event_indices ) arg_6 *= tf . exp ( tf . cast ( arg_3 , arg_6 . dtype ) ) if arg_0 . _is_maybe_event_override and isinstance ( arg_4 , int ) : tensorshape_util . set_shape ( arg_6 , tf . broadcast_static_shape ( tensorshape_util . with_rank_at_least ( arg_1 . shape , 1 ) [ : - arg_4 ] , arg_0 . batch_shape ) ) return arg_6","id_":253023,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/transformed_distribution.py#L451-L465","negative":"This method is called before first step of simulation."} {"query":"Gets the VPC Network ACLs","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = describe_network_acls ( Filters = [ { \"Name\" : \"vpc-id\" , \"Values\" : [ arg_0 [ \"id\" ] ] } ] , ** arg_1 ) arg_3 = [ ] for arg_4 in arg_2 : arg_3 . append ( arg_4 [ \"NetworkAclId\" ] ) return arg_3","id_":253024,"task_name":"https:\/\/github.com\/Netflix-Skunkworks\/cloudaux\/blob\/c4b0870c3ac68b1c69e71d33cf78b6a8bdf437ea\/cloudaux\/orchestration\/aws\/vpc.py#L114-L122","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. 
This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Should be called when tuple was buffered into in_stream","positive":"def Func ( arg_0 ) : if arg_0 . output_helper . is_out_queue_available ( ) : arg_0 . _read_tuples_and_execute ( ) arg_0 . output_helper . send_out_tuples ( ) else : arg_0 . bolt_metrics . update_out_queue_full_count ( )","id_":253025,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/basics\/bolt_instance.py#L161-L173","negative":"filter for indels"} {"query":"Parse a Telegram JSON messages list .","positive":"def Func ( arg_0 ) : arg_1 = json . loads ( arg_0 ) arg_2 = arg_1 [ 'result' ] for arg_3 in arg_2 : yield arg_3","id_":253026,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/telegram.py#L216-L230","negative":"Write data to HDF5 file in one go.\n\n Args:\n filename_out (str): Name of output file"} {"query":"Partition the network between containers","positive":"def Func ( arg_0 ) : arg_1 = load_config ( arg_0 . config ) arg_2 = get_blockade ( arg_1 , arg_0 ) if arg_0 . random : if arg_0 . partitions : raise BlockadeError ( \"Either specify individual partitions \" \"or --random, but not both\" ) arg_2 . random_partition ( ) else : arg_3 = [ ] for arg_4 in arg_0 . partitions : arg_5 = [ ] for arg_6 in arg_4 . split ( \",\" ) : arg_6 = arg_6 . strip ( ) if arg_6 : arg_5 . append ( arg_6 ) arg_3 . append ( arg_5 ) if not arg_3 : raise BlockadeError ( \"Either specify individual partitions \" \"or random\" ) arg_2 . partition ( arg_3 )","id_":253027,"task_name":"https:\/\/github.com\/worstcase\/blockade\/blob\/3dc6ad803f0b0d56586dec9542a6a06aa06cf569\/blockade\/cli.py#L304-L340","negative":"Sparse matrix roll\n\n This operation is equivalent to ``numpy.roll``, but operates on sparse matrices.\n\n Parameters\n ----------\n x : scipy.sparse.spmatrix or np.ndarray\n The sparse matrix input\n\n shift : int\n The number of positions to roll the specified axis\n\n axis : (0, 1, -1)\n The axis along which to roll.\n\n Returns\n -------\n x_rolled : same type as `x`\n The rolled matrix, with the same format as `x`\n\n See Also\n --------\n numpy.roll\n\n Examples\n --------\n >>> # Generate a random sparse binary matrix\n >>> X = scipy.sparse.lil_matrix(np.random.randint(0, 2, size=(5,5)))\n >>> X_roll = roll_sparse(X, 2, axis=0) # Roll by 2 on the first axis\n >>> X_dense_r = roll_sparse(X.toarray(), 2, axis=0) # Equivalent dense roll\n >>> np.allclose(X_roll, X_dense_r.toarray())\n True"} {"query":"Private swap function used by get_or_create to atomically swap the new namespace map into the global cache .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 . Symbol , arg_5 : arg_6 . ModuleType = None , arg_8 = arg_9 , ) -> lmap . Map : arg_10 = arg_0 . entry ( arg_2 , None ) if arg_10 is not None : return arg_0 arg_11 = Namespace ( arg_2 , arg_5 = arg_5 ) if arg_2 . name != arg_8 : arg_12 = arg_0 . entry ( arg_3 . symbol ( arg_8 ) , None ) assert arg_12 is not None , \"Core namespace not loaded yet!\" arg_11 . 
refer_all ( arg_12 ) return arg_0 . assoc ( arg_2 , arg_11 )","id_":253028,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L537-L553","negative":"Set train summary. A TrainSummary object contains information\n necessary for the optimizer to know how often the logs are recorded,\n where to store the logs and how to retrieve them, etc. For details,\n refer to the docs of TrainSummary.\n\n\n :param summary: a TrainSummary object"} {"query":"Reads the features from a file and stores them in the current object .","positive":"def Func ( arg_0 , arg_1 = 1e-3 ) : try : with open ( arg_0 . file_struct . features_file ) as f : arg_2 = json . load ( f ) if arg_0 . dur is None : arg_0 . dur = float ( arg_2 [ \"globals\" ] [ \"dur\" ] ) assert ( np . isclose ( arg_0 . dur , float ( arg_2 [ \"globals\" ] [ \"dur\" ] ) , rtol = arg_1 ) ) assert ( arg_0 . sr == int ( arg_2 [ \"globals\" ] [ \"sample_rate\" ] ) ) assert ( arg_0 . hop_length == int ( arg_2 [ \"globals\" ] [ \"hop_length\" ] ) ) assert ( os . path . basename ( arg_0 . file_struct . audio_file ) == os . path . basename ( arg_2 [ \"globals\" ] [ \"audio_file\" ] ) ) arg_4 = FeatureParamsError ( \"Couldn't find features for %s id in file %s\" % ( arg_0 . get_id ( ) , arg_0 . file_struct . features_file ) ) if arg_0 . get_id ( ) not in arg_2 . keys ( ) : raise arg_4 for arg_5 in arg_0 . get_param_names ( ) : arg_6 = getattr ( arg_0 , arg_5 ) if hasattr ( arg_6 , '__call__' ) : if arg_6 . __name__ != arg_2 [ arg_0 . get_id ( ) ] [ \"params\" ] [ arg_5 ] : raise arg_4 else : if str ( arg_6 ) != arg_2 [ arg_0 . get_id ( ) ] [ \"params\" ] [ arg_5 ] : raise arg_4 arg_0 . _est_beats_times = np . array ( arg_2 [ \"est_beats\" ] ) arg_0 . _est_beatsync_times = np . array ( arg_2 [ \"est_beatsync_times\" ] ) arg_0 . _est_beats_frames = librosa . core . time_to_frames ( arg_0 . _est_beats_times , sr = arg_0 . sr , hop_length = arg_0 . hop_length ) arg_0 . _framesync_features = np . array ( arg_2 [ arg_0 . get_id ( ) ] [ \"framesync\" ] ) arg_0 . _est_beatsync_features = np . array ( arg_2 [ arg_0 . get_id ( ) ] [ \"est_beatsync\" ] ) if \"ann_beats\" in arg_2 . keys ( ) : arg_0 . _ann_beats_times = np . array ( arg_2 [ \"ann_beats\" ] ) arg_0 . _ann_beatsync_times = np . array ( arg_2 [ \"ann_beatsync_times\" ] ) arg_0 . _ann_beats_frames = librosa . core . time_to_frames ( arg_0 . _ann_beats_times , sr = arg_0 . sr , hop_length = arg_0 . hop_length ) arg_0 . _ann_beatsync_features = np . array ( arg_2 [ arg_0 . get_id ( ) ] [ \"ann_beatsync\" ] ) except KeyError : raise WrongFeaturesFormatError ( \"The features file %s is not correctly formatted\" % arg_0 . file_struct . features_file ) except AssertionError : raise FeaturesNotFound ( \"The features for the given parameters were not found in \" \"features file %s\" % arg_0 . file_struct . features_file ) except IOError : raise NoFeaturesFileError ( \"Could not find features file %s\" , arg_0 . file_struct . features_file )","id_":253029,"task_name":"https:\/\/github.com\/urinieto\/msaf\/blob\/9dbb57d77a1310465a65cc40f1641d083ca74385\/msaf\/base.py#L209-L282","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"A System Model contains top - level packages","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in many ( arg_1 ) . EP_PKG [ 1401 ] ( ) : arg_0 . 
accept ( arg_2 )","id_":253030,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/examples\/print_packageable_elements.py#L34-L39","negative":"Generates a 'code_verifier' as described in section 4.1 of RFC 7636.\n\n This is a 'high-entropy cryptographic random string' that will be\n impractical for an attacker to guess.\n\n Args:\n n_bytes: integer between 31 and 96, inclusive. default: 64\n number of bytes of entropy to include in verifier.\n\n Returns:\n Bytestring, representing urlsafe base64-encoded random data."} {"query":"REST Schedule playing something on a call Helper","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = '\/' + arg_0 . api_version + '\/SchedulePlay\/' arg_3 = 'POST' return arg_0 . request ( arg_2 , arg_3 , arg_1 )","id_":253031,"task_name":"https:\/\/github.com\/plivo\/plivohelper-python\/blob\/a2f706d69e2138fbb973f792041341f662072d26\/plivohelper.py#L244-L249","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Given a list of range specifiers for python ensure compatibility .","positive":"def Func ( arg_0 ) : if not isinstance ( arg_0 , ( list , tuple ) ) : arg_0 = [ arg_0 ] arg_1 = sys . version_info arg_2 = '%s.%s' % ( arg_1 . major , arg_1 . minor ) for arg_3 in arg_0 : if arg_2 == arg_3 : return try : if eval ( arg_2 + arg_3 ) : return except SyntaxError : pass raise ValueError ( 'Python version %s unsupported' % arg_2 )","id_":253032,"task_name":"https:\/\/github.com\/jupyter-widgets\/jupyterlab-sidecar\/blob\/8889d09f1a0933e2cbee06d4874f720b075b29e8\/setupbase.py#L89-L104","negative":"The estimated signal-to-noise_maps mappers of the image."} {"query":"Implements the < = operator with JS - style type coertion .","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : return ( less ( arg_0 , arg_1 ) or soft_equals ( arg_0 , arg_1 ) ) and ( not arg_2 or Func ( arg_1 , * arg_2 ) )","id_":253033,"task_name":"https:\/\/github.com\/nadirizr\/json-logic-py\/blob\/5fda9125eab4178f8f81c7779291940e31e87bab\/json_logic\/__init__.py#L59-L63","negative":"Log what has been captured so far"} {"query":"Saves the specified file to either S3 or the local filesystem depending on the currently enabled storage type .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if not ( arg_0 . storage_type and arg_0 . bucket_name ) : arg_4 = arg_0 . _Func_local ( arg_1 , arg_2 , arg_3 ) else : if arg_0 . storage_type != 's3' : raise ValueError ( 'Storage type \"%s\" is invalid, the only supported storage type (apart from default local storage) is s3.' % arg_0 . storage_type ) arg_4 = arg_0 . _Func_s3 ( arg_1 , arg_2 , arg_3 ) if arg_0 . field_name : setattr ( arg_3 , arg_0 . field_name , arg_4 ) if arg_0 . storage_type == 's3' : if arg_0 . storage_type_field : setattr ( arg_3 , arg_0 . storage_type_field , arg_0 . storage_type ) if arg_0 . bucket_name_field : setattr ( arg_3 , arg_0 . bucket_name_field , arg_0 . bucket_name ) else : if arg_0 . storage_type_field : setattr ( arg_3 , arg_0 . storage_type_field , '' ) if arg_0 . bucket_name_field : setattr ( arg_3 , arg_0 . 
bucket_name_field , '' ) return arg_4","id_":253034,"task_name":"https:\/\/github.com\/Jaza\/s3-saver\/blob\/81dc4447d76c2fc0b0238fb96fa70e879612e355\/s3_saver.py#L128-L153","negative":"This functions returns a list of jobs"} {"query":"The trace function passed to sys . settrace .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_0 . stopped : return if 0 : sys . stderr . write ( \"trace event: %s %r @%d\\n\" % ( arg_2 , arg_1 . f_code . co_filename , arg_1 . f_lineno ) ) if arg_0 . last_exc_back : if arg_1 == arg_0 . last_exc_back : if arg_0 . arcs and arg_0 . cur_file_data : arg_4 = ( arg_0 . last_line , - arg_0 . last_exc_firstlineno ) arg_0 . cur_file_data [ arg_4 ] = None arg_0 . cur_file_data , arg_0 . last_line = arg_0 . data_stack . pop ( ) arg_0 . last_exc_back = None if arg_2 == 'call' : arg_0 . data_stack . append ( ( arg_0 . cur_file_data , arg_0 . last_line ) ) arg_8 = arg_1 . f_code . co_filename if arg_8 not in arg_0 . shouldFunc_cache : arg_9 = arg_0 . shouldFunc ( arg_8 , arg_1 ) arg_0 . shouldFunc_cache [ arg_8 ] = arg_9 else : arg_9 = arg_0 . shouldFunc_cache [ arg_8 ] if arg_9 : if arg_9 not in arg_0 . data : arg_0 . data [ arg_9 ] = { } arg_0 . cur_file_data = arg_0 . data [ arg_9 ] else : arg_0 . cur_file_data = None arg_0 . last_line = - 1 elif arg_2 == 'line' : if arg_0 . cur_file_data is not None : if arg_0 . arcs : arg_0 . cur_file_data [ ( arg_0 . last_line , arg_1 . f_lineno ) ] = None else : arg_0 . cur_file_data [ arg_1 . f_lineno ] = None arg_0 . last_line = arg_1 . f_lineno elif arg_2 == 'return' : if arg_0 . arcs and arg_0 . cur_file_data : arg_13 = arg_1 . f_code . co_firstlineno arg_0 . cur_file_data [ ( arg_0 . last_line , - arg_13 ) ] = None arg_0 . cur_file_data , arg_0 . last_line = arg_0 . data_stack . pop ( ) elif arg_2 == 'exception' : arg_0 . last_exc_back = arg_1 . f_back arg_0 . last_exc_firstlineno = arg_1 . f_code . co_firstlineno return arg_0 . Func","id_":253035,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/collector.py#L57-L119","negative":"Get the context for this view."} {"query":"A function to display sympy expression using inline style LaTeX in PNG .","positive":"def Func ( arg_0 ) : arg_1 = latex ( arg_0 , mode = 'inline' ) arg_1 = arg_1 . replace ( '\\\\operatorname' , '' ) arg_1 = arg_1 . replace ( '\\\\overline' , '\\\\bar' ) arg_2 = latex_to_png ( arg_1 ) return arg_2","id_":253036,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/extensions\/sympyprinting.py#L54-L64","negative":"This returns an array of each sector and performance for the current trading day. Performance is based on each sector ETF.\n\n https:\/\/iexcloud.io\/docs\/api\/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result"} {"query":"This will execute the query returning the key where a ZSET of your results will be stored for pagination further operations etc .","positive":"def Func ( arg_0 , arg_1 ) : if not ( arg_0 . _filters or arg_0 . _order_by ) : raise QueryError ( \"You are missing filter or order criteria\" ) arg_1 = int ( arg_1 ) if arg_1 < 1 : raise QueryError ( \"You must specify a timeout >= 1, you gave %r\" % arg_1 ) return arg_0 . _model . _gindex . search ( _connect ( arg_0 . _model ) , arg_0 . _filters , arg_0 . 
_order_by , arg_1 = arg_1 )","id_":253037,"task_name":"https:\/\/github.com\/josiahcarlson\/rom\/blob\/8b5607a856341df85df33422accc30ba9294dbdb\/rom\/query.py#L648-L675","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Parse the string output from sox's stat function","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . split ( '\\n' ) arg_2 = { } for arg_3 in arg_1 : arg_4 = arg_3 . split ( ':' ) if len ( arg_4 ) == 2 : arg_5 = arg_4 [ 0 ] arg_6 = arg_4 [ 1 ] . strip ( ' ' ) try : arg_6 = float ( arg_6 ) except ValueError : arg_6 = None arg_2 [ arg_5 ] = arg_6 return arg_2","id_":253038,"task_name":"https:\/\/github.com\/rabitt\/pysox\/blob\/eae89bde74567136ec3f723c3e6b369916d9b837\/sox\/file_info.py#L370-L396","negative":"Loads many namespaces and combines their names.\n\n :param iter[str] locations: An iterable of URLs or file paths pointing to BEL namespaces.\n :param bool check_keywords: Should all the keywords be the same? Defaults to ``True``\n :return: A dictionary of {names: labels}\n :rtype: dict[str, str]\n\n Example Usage\n\n >>> from pybel.resources import write_namespace\n >>> from pybel_tools.definition_utils import export_namespace, get_merged_namespace_names\n >>> graph = ...\n >>> original_ns_url = ...\n >>> export_namespace(graph, 'MBS') # Outputs in current directory to MBS.belns\n >>> value_dict = get_merged_namespace_names([original_ns_url, 'MBS.belns'])\n >>> with open('merged_namespace.belns', 'w') as f:\n >>> ... write_namespace('MyBrokenNamespace', 'MBS', 'Other', 'Charles Hoyt', 'PyBEL Citation', value_dict, file=f)"} {"query":"The subparts are separated by a comma . Make sure that commas inside the part themselves are not considered .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = 0 arg_4 = ',' arg_5 = 0 for arg_6 in range ( len ( arg_1 ) ) : if arg_1 [ arg_6 ] == '(' : arg_3 += 1 elif arg_1 [ arg_6 ] == ')' : arg_3 -= 1 elif arg_1 [ arg_6 ] == arg_4 and arg_3 == 0 : arg_2 . append ( arg_1 [ arg_5 : arg_6 ] . strip ( ) ) arg_5 = arg_6 + 1 arg_2 . append ( arg_1 [ arg_5 : ] . 
strip ( ) ) return arg_2","id_":253039,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/query.py#L83-L99","negative":"Asynchronously request a URL and get the encoded text content of the\n body.\n\n Parameters\n ----------\n url : `str`\n URL to download.\n session : `aiohttp.ClientSession`\n An open aiohttp session.\n\n Returns\n -------\n content : `str`\n Content downloaded from the URL."} {"query":"Get the frequencies for Mel bins","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = 11025.0 , ** arg_3 ) : if arg_1 is None : arg_1 = 0 if arg_2 is None : arg_2 = 11025.0 arg_4 = core . mel_frequencies ( arg_0 , arg_1 = arg_1 , arg_2 = arg_2 ) arg_4 [ 1 : ] -= 0.5 * np . diff ( arg_4 ) arg_4 = np . append ( np . maximum ( 0 , arg_4 ) , [ arg_2 ] ) return arg_4","id_":253040,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/display.py#L935-L946","negative":"Create Flask app."} {"query":"Compress a file only if needed .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . get_Funced_filename ( arg_1 ) if not arg_2 : return arg_0 . do_Func ( arg_1 , arg_2 )","id_":253041,"task_name":"https:\/\/github.com\/saimn\/sigal\/blob\/912ca39991355d358dc85fd55c7aeabdd7acc386\/sigal\/plugins\/compress_assets.py#L60-L66","negative":"source record and index must have been set"} {"query":"Creates a new pypet leaf instance .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . _all_get_from_attrs ( arg_3 , HDF5StorageService . CLASS_NAME ) arg_5 = arg_2 . _create_class ( arg_4 ) arg_6 = arg_2 . _construct_instance ( arg_5 , arg_1 ) return arg_6","id_":253042,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/storageservice.py#L2632-L2645","negative":"r'''Method to calculate pressure-dependent liquid molar volume at\n temperature `T` and pressure `P` with a given method.\n\n This method has no exception handling; see `TP_dependent_property`\n for that.\n\n Parameters\n ----------\n T : float\n Temperature at which to calculate molar volume, [K]\n P : float\n Pressure at which to calculate molar volume, [K]\n method : str\n Name of the method to use\n\n Returns\n -------\n Vm : float\n Molar volume of the liquid at T and P, [m^3\/mol]"} {"query":"Creates tuple of str tuple - str pairs representing resolved & sorted DAG .","positive":"def Func ( arg_0 ) : def _explore ( arg_1 ) : if arg_1 . depth < 0 : return if not arg_1 . parents : arg_8 . append ( ( arg_1 . name , arg_1 . parents ) ) arg_1 . depth = - 1 return arg_3 = ( arg_1 . name , [ ] ) arg_8 . append ( arg_3 ) arg_1 . depth = - 1 arg_4 = 0 for arg_5 in sorted ( ( arg_0 . get ( p ) for p in arg_1 . parents ) , key = lambda arg_5 : arg_5 . depth ) : arg_6 = len ( arg_8 ) _explore ( arg_5 ) arg_7 = len ( arg_8 ) arg_3 [ 1 ] . extend ( [ '_' ] * arg_4 + [ arg_5 . name ] ) arg_4 = arg_7 - arg_6 - 1 arg_0 = _depth ( arg_0 ) arg_8 = [ ] for arg_1 in sorted ( arg_0 . values ( ) , key = lambda arg_5 : arg_5 . depth , reverse = True ) : _explore ( arg_1 ) return tuple ( reversed ( arg_8 ) )","id_":253043,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/joint_distribution_named.py#L218-L242","negative":"Add new java messages to ignore from user text file. 
It first reads in the new java ignored messages\n from the user text file and generate a dict structure to out of the new java ignored messages. This\n is achieved by function extract_message_to_dict. Next, new java messages will be added to the original\n ignored java messages dict g_ok_java_messages. Again, this is achieved by function update_message_dict.\n\n :return: none"} {"query":"Make a symmetrical binary tree with","positive":"def Func ( arg_0 ) : arg_1 = nx . DiGraph ( ) arg_2 = '0' arg_1 . add_node ( arg_2 ) add_children ( arg_1 , arg_2 , arg_0 , 2 ) return arg_1","id_":253044,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/share\/doc\/ipython\/examples\/parallel\/dagdeps.py#L48-L54","negative":"Gets the dataframes of the PVPC data with daily and monthly resampling."} {"query":"Start local agent","positive":"def Func ( arg_0 ) : logger . info ( 'Starting agent on localhost' ) arg_1 = arg_0 . python . split ( ) + [ os . path . join ( arg_0 . workdir , arg_0 . AGENT_FILENAME ) , '--telegraf' , arg_0 . path [ 'TELEGRAF_LOCAL_PATH' ] , '--host' , arg_0 . host ] if arg_0 . kill_old : arg_1 . append ( arg_0 . kill_old ) arg_0 . session = arg_0 . popen ( arg_1 ) arg_0 . reader_thread = threading . Thread ( target = arg_0 . read_buffer ) arg_0 . reader_thread . setDaemon ( True ) return arg_0 . session","id_":253045,"task_name":"https:\/\/github.com\/yandex\/yandex-tank\/blob\/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b\/yandextank\/plugins\/Telegraf\/client.py#L105-L121","negative":"Implement a lookup for object level permissions. Basically the same as\n ModelAdmin.has_delete_permission, but also passes the obj parameter in."} {"query":"Validates the model using a series of checks on bits of the data .","positive":"def Func ( arg_0 , arg_1 ) : _validate_license ( arg_1 ) _validate_roles ( arg_1 ) arg_2 = ( 'title' , 'summary' , ) for arg_3 in arg_2 : if arg_1 . metadata . get ( arg_3 ) in [ None , '' , [ ] ] : raise exceptions . MissingRequiredMetadata ( arg_3 ) _validate_derived_from ( arg_0 , arg_1 ) _validate_subjects ( arg_0 , arg_1 )","id_":253046,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/db.py#L360-L379","negative":"Gets back all response headers."} {"query":"Make a TCP connection to the alarm system .","positive":"async def Func ( arg_0 ) : _LOGGER . debug ( \"Connecting...\" ) try : arg_0 . _reader , arg_0 . _writer = await asyncio . open_Funcion ( arg_0 . _host , arg_0 . _port , loop = arg_0 . _loop ) _LOGGER . debug ( \"sucess Funcing...\" ) except Exception as e : _LOGGER . warning ( \"Exception during Funcing: %s.\" , e ) arg_0 . _writer = None arg_0 . _reader = None return False return True","id_":253047,"task_name":"https:\/\/github.com\/c-soft\/satel_integra\/blob\/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c\/satel_integra\/satel_integra.py#L170-L186","negative":"Add members found in prior versions up till the next major release\n\n These members are to be considered deprecated. When a new major\n release is made, these members are removed."} {"query":"overridden from install_lib class","positive":"def Func ( arg_0 ) : install_lib . install_lib . Func ( arg_0 ) if include_dirs : for arg_1 in include_dirs : arg_2 = join ( arg_0 . install_dir , arg_1 ) if sys . version_info >= ( 3 , 0 ) : arg_3 = { \"invalid_encoded_data*\" , \"unknown_encoding*\" } else : arg_3 = set ( ) shutil . 
rmtree ( arg_2 , ignore_errors = True ) shutil . copytree ( arg_1 , arg_2 , ignore = shutil . ignore_patterns ( * arg_3 ) )","id_":253048,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/setup.py#L107-L121","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Attach an observer .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_1 in arg_0 . _observers : arg_0 . _observers . append ( arg_1 ) return arg_0","id_":253049,"task_name":"https:\/\/github.com\/mariano\/pyfire\/blob\/42e3490c138abc8e10f2e9f8f8f3b40240a80412\/pyfire\/stream.py#L52-L63","negative":"Returns ON-OFF for all Stokes parameters given a cross_pols noise diode measurement"} {"query":"Levenberg - Marquardt optimization on a set of particles .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 1.0 , arg_3 = 10. , arg_4 = 4 , arg_5 = False , arg_6 = 2 , ** arg_7 ) : arg_8 = LMParticles ( arg_0 , arg_1 , arg_2 = arg_2 , arg_4 = arg_4 , arg_3 = arg_3 , arg_6 = arg_6 , ** arg_7 ) arg_8 . do_run_2 ( ) if arg_5 : return arg_8 . get_termination_stats ( )","id_":253050,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/opt\/optimize.py#L2352-L2378","negative":"Register interface in implementation phase"} {"query":"Given a filepath and a list of regex patterns this function returns true if filepath matches any one of those patterns","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . only_blame_patterns : return True for arg_2 in arg_0 . only_blame_patterns : if arg_2 . match ( arg_1 ) : return True return False","id_":253051,"task_name":"https:\/\/github.com\/shopkick\/flawless\/blob\/c54b63ca1991c153e6f75080536f6df445aacc64\/flawless\/server\/service.py#L227-L236","negative":"Gets status of response."} {"query":"Sets a custom mock engine replacing the built - in one .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_1 : raise TypeError ( 'engine must be a valid object' ) arg_2 = arg_1 ( arg_0 ) arg_3 = ( 'activate' , 'disable' ) if not all ( [ hasattr ( arg_2 , arg_4 ) for arg_4 in arg_3 ] ) : raise NotImplementedError ( 'engine must implementent the ' 'required methods' ) arg_0 . mock_engine = arg_2 if arg_0 . active : arg_0 . mock_engine . activate ( )","id_":253052,"task_name":"https:\/\/github.com\/h2non\/pook\/blob\/e64094e41e4d89d98d2d29af7608ef27dc50cf19\/pook\/engine.py#L53-L82","negative":"Asynchronously request a URL and get the encoded text content of the\n body.\n\n Parameters\n ----------\n url : `str`\n URL to download.\n session : `aiohttp.ClientSession`\n An open aiohttp session.\n\n Returns\n -------\n content : `str`\n Content downloaded from the URL."} {"query":"It will return the google url to be searched","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = '+' . join ( arg_1 ) arg_4 = 'https:\/\/www.google.co.in\/search?q=' arg_2 = arg_2 . split ( \" \" ) arg_5 = '+' . 
join ( arg_2 ) arg_6 = arg_4 + arg_3 + arg_5 return arg_6","id_":253053,"task_name":"https:\/\/github.com\/ankitmathur3193\/song-cli\/blob\/ca8ccfe547e9d702313ff6d14e81ae4355989a67\/song\/commands\/SearchEngineParser\/GoogleParser.py#L10-L18","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."} {"query":"Write data to Vault . Returns the JSON - decoded response .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = arg_0 . request ( 'PUT' , '\/v1\/' + arg_1 , json = arg_2 ) return arg_3 . addCallback ( arg_0 . _handle_response , check_cas = True )","id_":253054,"task_name":"https:\/\/github.com\/praekeltfoundation\/marathon-acme\/blob\/b1b71e3dde0ba30e575089280658bd32890e3325\/marathon_acme\/clients\/vault.py#L138-L143","negative":"Get the Channel Id from the current Activity on the Turn Context.\n\n Args:\n turn_context (TurnContext): The Turn Context to retrieve the Activity's Channel Id from.\n\n Returns:\n str: The Channel Id from the Turn Context's Activity."} {"query":"Return a datetime object from a string with optional time format .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_2 = du . parser . parse ( arg_0 ) else : arg_2 = dt . datetime . strftime ( arg_0 , arg_1 ) return arg_2","id_":253055,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/latools\/helpers\/helpers.py#L29-L45","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Return the string representation of the job description XML .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . as_element ( ) indent ( arg_1 ) arg_2 = ET . Func ( arg_1 , encoding = \"utf-8\" ) arg_2 = re . sub ( r'_[A-Z]_' , '' , arg_2 ) arg_2 = '\\n' + arg_2 return arg_2","id_":253056,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/apps\/winhpcjob.py#L144-L152","negative":"Save an image to self.storage at `save_path`.\n\n Arguments:\n `imagefile`: Raw image data, typically a BytesIO instance.\n `save_path`: The path within self.storage where the image should\n be saved.\n `file_ext`: The file extension of the image-to-be-saved.\n `mime_type`: A valid image mime type (as found in\n versatileimagefield.utils)"} {"query":"Execute the enrich phase for a given backend section","positive":"def Func ( arg_0 , arg_1 ) : TaskProjects ( arg_0 ) . execute ( ) arg_2 = TaskEnrich ( arg_0 , arg_1 = arg_1 ) try : arg_2 . execute ( ) logging . info ( \"Loading enriched data finished!\" ) except Exception as e : logging . error ( str ( e ) ) sys . exit ( - 1 )","id_":253057,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-sirmordred\/blob\/d6ac94d28d707fae23170064d078f1edf937d13e\/utils\/micro.py#L104-L118","negative":"Delete old prices, leaving just the last."} {"query":"Level - 2 parser for a DX field object .","positive":"def Func ( arg_0 ) : try : arg_1 = arg_0 . __consume ( ) except DXParserNoTokens : return if arg_1 . equals ( 'component' ) : arg_2 = arg_0 . __consume ( ) . 
value ( ) if not arg_0 . __consume ( ) . equals ( 'value' ) : raise DXParseError ( 'field: \"value\" expected' ) arg_3 = arg_0 . __consume ( ) . value ( ) try : arg_0 . currentobject [ 'components' ] [ arg_2 ] = arg_3 except KeyError : arg_0 . currentobject [ 'components' ] = { arg_2 : arg_3 } else : raise DXParseError ( 'field: ' + str ( arg_1 ) + ' not recognized.' )","id_":253058,"task_name":"https:\/\/github.com\/MDAnalysis\/GridDataFormats\/blob\/3eeb0432f8cf856912436e4f3e7aba99d3c916be\/gridData\/OpenDX.py#L924-L948","negative":"Return a standardized canonical tautomer SMILES string given a SMILES string.\n\n Note: This is a convenience function for quickly standardizing and finding the canonical tautomer for a single\n SMILES string. It is more efficient to use the :class:`~molvs.standardize.Standardizer` class directly when working\n with many molecules or when custom options are needed.\n\n :param string smiles: The SMILES for the molecule.\n :returns: The SMILES for the standardize canonical tautomer.\n :rtype: string."} {"query":"Called when a connection is made and used to send out headers","positive":"def Func ( arg_0 ) : arg_1 = [ \"GET %s HTTP\/1.1\" % ( \"\/room\/%s\/live.json\" % arg_0 . factory . get_stream ( ) . get_room_id ( ) ) ] arg_2 = arg_0 . factory . get_stream ( ) . get_connection ( ) . get_headers ( ) for arg_3 in arg_2 : arg_1 . append ( \"%s: %s\" % ( arg_3 , arg_2 [ arg_3 ] ) ) arg_1 . append ( \"Host: streaming.campfirenow.com\" ) arg_0 . transport . write ( \"\\r\\n\" . join ( arg_1 ) + \"\\r\\n\\r\\n\" ) arg_0 . factory . get_stream ( ) . set_protocol ( arg_0 )","id_":253059,"task_name":"https:\/\/github.com\/mariano\/pyfire\/blob\/42e3490c138abc8e10f2e9f8f8f3b40240a80412\/pyfire\/stream.py#L350-L364","negative":"Produces a TidyPy configuration that incorporates the configuration files\n stored in the current user's home directory.\n\n :param project_path: the path to the project that is going to be analyzed\n :type project_path: str\n :param use_cache:\n whether or not to use cached versions of any remote\/referenced TidyPy\n configurations. If not specified, defaults to ``True``.\n :type use_cache: bool\n :rtype: dict"} {"query":"The callable makes it possible to include rpcinterface in a Pyramid application .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . registry . settings if asbool ( arg_1 . get ( 'twitcher.rpcinterface' , True ) ) : LOGGER . debug ( 'Twitcher XML-RPC Interface enabled.' ) arg_0 . include ( 'twitcher.config' ) arg_0 . include ( 'twitcher.basicauth' ) arg_0 . include ( 'pyramid_rpc.xmlrpc' ) arg_0 . include ( 'twitcher.db' ) arg_0 . add_xmlrpc_endpoint ( 'api' , '\/RPC2' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'generate_token' , endpoint = 'api' , method = 'generate_token' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'revoke_token' , endpoint = 'api' , method = 'revoke_token' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'revoke_all_tokens' , endpoint = 'api' , method = 'revoke_all_tokens' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'register_service' , endpoint = 'api' , method = 'register_service' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'unregister_service' , endpoint = 'api' , method = 'unregister_service' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'get_service_by_name' , endpoint = 'api' , method = 'get_service_by_name' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'get_service_by_url' , endpoint = 'api' , method = 'get_service_by_url' ) arg_0 . 
add_xmlrpc_method ( RPCInterface , attr = 'clear_services' , endpoint = 'api' , method = 'clear_services' ) arg_0 . add_xmlrpc_method ( RPCInterface , attr = 'list_services' , endpoint = 'api' , method = 'list_services' )","id_":253060,"task_name":"https:\/\/github.com\/bird-house\/twitcher\/blob\/e6a36b3aeeacf44eec537434b0fb87c09ab54b5f\/twitcher\/rpcinterface.py#L79-L116","negative":"Click the right mouse button without modifiers pressed.\n\n Parameters: coordinates to click on scren (tuple (x, y))\n Returns: None"} {"query":"Gets the IAM Group details .","positive":"def Func ( arg_0 , arg_1 = True , arg_2 = None , ** arg_3 ) : arg_4 = arg_2 . Func ( GroupName = arg_0 , ** arg_3 ) if arg_1 : if arg_4 . get ( 'IsTruncated' ) : arg_5 = { 'GroupName' : arg_0 } arg_5 . update ( arg_3 ) arg_6 = arg_4 [ 'Users' ] arg_5 [ 'Marker' ] = arg_4 [ 'Marker' ] arg_4 [ 'Users' ] = arg_6 + _get_users_for_group ( arg_2 , ** arg_5 ) else : arg_4 . pop ( 'Users' , None ) arg_4 . pop ( 'IsTruncated' , None ) arg_4 . pop ( 'Marker' , None ) return arg_4","id_":253061,"task_name":"https:\/\/github.com\/Netflix-Skunkworks\/cloudaux\/blob\/c4b0870c3ac68b1c69e71d33cf78b6a8bdf437ea\/cloudaux\/aws\/iam.py#L442-L470","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"r Method to calculate heat capacity of a gas mixture at temperature T pressure P mole fractions zs and weight fractions ws with a given method .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : if arg_5 == SIMPLE : arg_6 = [ i ( arg_1 ) for i in arg_0 . HeatCapacityGases ] return mixing_simple ( arg_3 , arg_6 ) else : raise Exception ( 'Method not valid' )","id_":253062,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/heat_capacity.py#L3031-L3062","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Pickle the Dataset instance to the provided file .","positive":"def Func ( arg_0 , arg_1 ) : if hasattr ( arg_0 , 'feature_table' ) : arg_0 . feature_table . _sdf_to_csr ( ) pickle . dump ( arg_0 , open ( arg_1 , 'wb' ) , - 1 ) if hasattr ( arg_0 , 'feature_table' ) : arg_0 . feature_table . _csr_to_sdf ( )","id_":253063,"task_name":"https:\/\/github.com\/neurosynth\/neurosynth\/blob\/948ce7edce15d7df693446e76834e0c23bfe8f11\/neurosynth\/base\/dataset.py#L444-L453","negative":"Return a warning message of code 'code'.\n\n If code = (cd, str) it returns the warning message of code 'cd' and appends\n str at the end"} {"query":"the client will announce itself given that a command is not in a particular predefined list .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is not None : if arg_1 not in [ 'get' ] and arg_0 . quiet is False : arg_0 . 
speak ( )","id_":253064,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/base\/__init__.py#L109-L115","negative":"Gets back all response headers."} {"query":"This function will take in a port spec as specified by the port_spec compiler and will output an nginx web proxy config string . This string can then be written to a file and used running nginx","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = \"\" , \"\" for arg_4 in arg_0 [ 'nginx' ] : if arg_4 [ 'type' ] == 'http' : arg_2 += _nginx_http_spec ( arg_4 , arg_1 ) elif arg_4 [ 'type' ] == 'stream' : arg_3 += _nginx_stream_spec ( arg_4 , arg_1 ) return { 'http' : arg_2 , 'stream' : arg_3 }","id_":253065,"task_name":"https:\/\/github.com\/gamechanger\/dusty\/blob\/dc12de90bb6945023d6f43a8071e984313a1d984\/dusty\/compiler\/nginx\/__init__.py#L57-L67","negative":"Get the modes supported by this device."} {"query":"XHDR command .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = arg_1 if range is not None : arg_3 += \" \" + utils . unparse_msgid_range ( arg_2 ) arg_4 , arg_5 = arg_0 . command ( \"XHDR\" , arg_3 ) if arg_4 != 221 : raise NNTPReplyError ( arg_4 , arg_5 ) return arg_0 . info ( arg_4 , arg_5 )","id_":253066,"task_name":"https:\/\/github.com\/greenbender\/pynntp\/blob\/991a76331cdf5d8f9dbf5b18f6e29adc80749a2f\/nntp\/nntp.py#L1097-L1108","negative":"A factory method which can be overridden in subclasses to create\n specialized LogRecords."} {"query":"Computes the resulting metadata statement from a compounded metadata statement . If something goes wrong during the evaluation an exception is raised","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = dict ( [ ( k , v ) for k , v in arg_1 . items ( ) if k not in IgnoreKeys ] ) arg_4 = [ ] if 'metadata_statements' in arg_1 : for arg_5 , arg_6 in arg_1 [ 'metadata_statements' ] . items ( ) : if isinstance ( arg_6 , str ) : arg_6 = json . loads ( arg_6 ) for arg_7 in arg_0 . Func ( arg_6 ) : if isinstance ( arg_6 , Message ) : arg_8 = LessOrEqual ( sup = arg_7 , ** arg_6 . to_dict ( ) ) else : arg_8 = LessOrEqual ( sup = arg_7 , ** arg_6 ) if arg_8 . is_expired ( ) : logger . error ( 'This metadata statement has expired: {}' . format ( arg_6 ) ) logger . info ( 'My time: {}' . format ( utc_time_sans_frac ( ) ) ) continue arg_8 . eval ( arg_3 ) arg_4 . append ( arg_8 ) return arg_4 else : try : arg_9 = arg_1 [ 'iss' ] except : arg_8 = LessOrEqual ( ) arg_8 . eval ( arg_3 ) else : arg_8 = LessOrEqual ( iss = arg_9 , exp = arg_1 [ 'exp' ] ) arg_8 . eval ( arg_3 ) arg_4 . append ( arg_8 ) return arg_4","id_":253067,"task_name":"https:\/\/github.com\/IdentityPython\/fedoidcmsg\/blob\/d30107be02521fa6cdfe285da3b6b0cdd153c8cc\/src\/fedoidcmsg\/operator.py#L382-L428","negative":"Add an HTTP header to response object.\n\n Arguments:\n name (str): HTTP header field name\n value (str): HTTP header field value"} {"query":"Fix outgoing stanza .","positive":"def Func ( arg_0 , arg_1 ) : StreamBase . Func ( arg_0 , arg_1 ) if arg_0 . initiator : if arg_1 . from_jid : arg_1 . from_jid = None else : if not arg_1 . from_jid : arg_1 . from_jid = arg_0 . me","id_":253068,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/clientstream.py#L84-L95","negative":"Does google-lint on a single file.\n\n Args:\n filename: The name of the file to parse.\n\n vlevel: The level of errors to report. 
Every error of confidence\n >= verbose_level will be reported. 0 is a good default.\n\n extra_check_functions: An array of additional check functions that will be\n run on each source line. Each function takes 4\n arguments: filename, clean_lines, line, error"} {"query":"Get the cast & crew credits for a TV season by season number .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = arg_0 . _get_series_id_season_number_path ( 'Func' ) arg_3 = arg_0 . _GET ( arg_2 , arg_1 ) arg_0 . _set_attrs_to_values ( arg_3 ) return arg_3","id_":253069,"task_name":"https:\/\/github.com\/celiao\/tmdbsimple\/blob\/ff17893110c99771d6398a62c35d36dd9735f4b9\/tmdbsimple\/tv.py#L373-L384","negative":"Bring the interrupt pin on the GPIO into Linux userspace."} {"query":"Returns a Google MLEngine service object .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _authorize ( ) return build ( 'ml' , 'v1' , http = arg_1 , cache_discovery = False )","id_":253070,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_mlengine_hook.py#L53-L58","negative":"Sleep for the time specified in the exception. If not specified, wait\n for 60 seconds."} {"query":"Convert a pattern_jku annotation object to mir_eval format .","positive":"def Func ( arg_0 ) : arg_1 = defaultdict ( lambda : defaultdict ( list ) ) for arg_2 , arg_3 in zip ( * arg_0 . to_event_values ( ) ) : arg_4 = arg_3 [ 'pattern_id' ] arg_5 = arg_3 [ 'occurrence_id' ] arg_6 = ( arg_2 , arg_3 [ 'midi_pitch' ] ) arg_1 [ arg_4 ] [ arg_5 ] . append ( arg_6 ) return [ list ( arg_7 . values ( ) ) for arg_7 in six . itervalues ( arg_1 ) ]","id_":253071,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/eval.py#L415-L452","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"State size of the LSTMStateTuple .","positive":"def Func ( arg_0 ) : return ( LSTMStateTuple ( arg_0 . _num_units , arg_0 . _num_units ) if arg_0 . _state_is_tuple else 2 * arg_0 . 
_num_units )","id_":253072,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/layers\/recurrent.py#L561-L563","negative":"Load Flickr25K dataset.\n\n Returns a list of images by a given tag from Flick25k dataset,\n it will download Flickr25k from `the official website `__\n at the first time you use it.\n\n Parameters\n ------------\n tag : str or None\n What images to return.\n - If you want to get images with tag, use string like 'dog', 'red', see `Flickr Search `__.\n - If you want to get all images, set to ``None``.\n\n path : str\n The path that the data is downloaded to, defaults is ``data\/flickr25k\/``.\n n_threads : int\n The number of thread to read image.\n printable : boolean\n Whether to print infomation when reading images, default is ``False``.\n\n Examples\n -----------\n Get images with tag of sky\n\n >>> images = tl.files.load_flickr25k_dataset(tag='sky')\n\n Get all images\n\n >>> images = tl.files.load_flickr25k_dataset(tag=None, n_threads=100, printable=True)"} {"query":"Returns id of a OneDrive user .","positive":"def Func ( arg_0 ) : if arg_0 . _user_id is None : arg_0 . _user_id = arg_0 . get_user_data ( ) [ 'id' ] return arg_0 . _user_id","id_":253073,"task_name":"https:\/\/github.com\/mk-fg\/python-onedrive\/blob\/74d3f6605b0e8a9031a2aab8092f551293ffb533\/onedrive\/api_v5.py#L409-L413","negative":"Initialize the bucket map assuming the given number of maxBuckets."} {"query":"Loads a node from hdf5 file and if desired recursively everything below","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 ) : if arg_5 is None : arg_5 = float ( 'inf' ) arg_10 = [ ( arg_1 , arg_6 , arg_9 ) ] while arg_10 : arg_1 , arg_6 , arg_9 = arg_10 . pop ( ) if isinstance ( arg_9 , pt . link . SoftLink ) : if arg_3 : arg_0 . _tree_load_link ( arg_1 , arg_2 = arg_2 , traj = arg_7 , arg_8 = arg_8 , hdf5_soft_link = arg_9 ) continue arg_11 = arg_9 . _v_name arg_12 = arg_0 . _all_get_from_attrs ( arg_9 , HDF5StorageService . LEAF ) arg_13 = arg_11 in arg_1 . _children if arg_12 : if arg_13 : arg_14 = arg_1 . _children [ arg_11 ] else : arg_14 = arg_0 . _tree_create_leaf ( arg_11 , arg_7 , arg_9 ) arg_1 . _add_leaf_from_storage ( arg_20 = ( arg_14 , ) , kwargs = { } ) arg_0 . _prm_load_parameter_or_result ( arg_14 , arg_2 = arg_2 , _hdf5_group = arg_9 ) if arg_8 : arg_14 . _stored = False else : if arg_13 : arg_16 = arg_1 . _children [ arg_11 ] if arg_2 == pypetconstants . OVERWRITE_DATA : arg_16 . v_annotations . f_empty ( ) arg_16 . v_comment = '' else : if HDF5StorageService . CLASS_NAME in arg_9 . _v_attrs : arg_18 = arg_0 . _all_get_from_attrs ( arg_9 , HDF5StorageService . CLASS_NAME ) arg_19 = arg_7 . _create_class ( arg_18 ) arg_14 = arg_7 . _construct_instance ( arg_19 , arg_11 ) arg_20 = ( arg_14 , ) else : arg_20 = ( arg_11 , ) arg_16 = arg_1 . _add_group_from_storage ( arg_20 = arg_20 , kwargs = { } ) arg_0 . _grp_load_group ( arg_16 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = False , arg_5 = arg_5 , _traj = arg_7 , _as_new = arg_8 , _hdf5_group = arg_9 ) if arg_4 and arg_6 < arg_5 : arg_21 = arg_6 + 1 for arg_22 in ( arg_9 . _v_groups , arg_9 . _v_links ) : for arg_23 in arg_22 : arg_24 = arg_22 [ arg_23 ] arg_10 . 
append ( ( arg_16 , arg_21 , arg_24 ) )","id_":253074,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/storageservice.py#L2647-L2730","negative":"Called when there is an error in the websocket"} {"query":"Gets the IP from the inet interfaces .","positive":"def Func ( ) : arg_0 = None arg_1 = psutil . net_if_addrs ( ) for arg_2 , arg_3 in arg_1 . items ( ) : for arg_4 in arg_3 : if arg_4 . family == socket . AF_INET : arg_5 = ipaddress . ip_address ( arg_4 . address ) if not ( arg_5 . is_link_local or arg_5 . is_loopback ) : arg_0 = str ( arg_5 ) break return arg_0","id_":253075,"task_name":"https:\/\/github.com\/mwgielen\/jackal\/blob\/7fe62732eb5194b7246215d5277fb37c398097bf\/jackal\/utils.py#L191-L204","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Returns a dictionary of all leaves hanging immediately below this group .","positive":"def Func ( arg_0 , arg_1 = True ) : if arg_1 : return arg_0 . _leaves . copy ( ) else : return arg_0 . _leaves","id_":253076,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L3204-L3218","negative":"initialize the merger model with a coalescent time\n\n Args:\n - Tc: a float or an iterable, if iterable another argument T of same shape is required\n - T: an array like of same shape as Tc that specifies the time pivots corresponding to Tc\n Returns:\n - None"} {"query":"format an options section using as ReST formatted output","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if arg_1 : print ( \"%s\\n%s\" % ( arg_1 , \"'\" * len ( arg_1 ) ) , file = arg_0 ) if arg_3 : print ( normalize_text ( arg_3 , line_len = 79 , indent = \"\" ) , file = arg_0 ) print ( file = arg_0 ) for arg_4 , arg_5 , arg_6 in arg_2 : arg_7 = arg_5 . get ( \"help\" ) print ( \":%s:\" % arg_4 , file = arg_0 ) if arg_7 : arg_7 = normalize_text ( arg_7 , line_len = 79 , indent = \" \" ) print ( arg_7 , file = arg_0 ) if arg_6 : arg_6 = str ( _format_option_value ( arg_5 , arg_6 ) ) print ( file = arg_0 ) print ( \" Default: ``%s``\" % arg_6 . replace ( \"`` \" , \"```` ``\" ) , file = arg_0 )","id_":253077,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/message\/message_handler_mix_in.py#L33-L49","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Returns a normalized house code i . e . upper case . Raises exception X10InvalidHouseCode if house code appears to be invalid","positive":"def Func ( arg_0 ) : if arg_0 is None : raise X10InvalidHouseCode ( '%r is not a valid house code' % arg_0 ) if not isinstance ( arg_0 , basestring ) : raise X10InvalidHouseCode ( '%r is not a valid house code' % arg_0 ) if len ( arg_0 ) != 1 : raise X10InvalidHouseCode ( '%r is not a valid house code' % arg_0 ) arg_0 = arg_0 . 
upper ( ) if not ( 'A' <= arg_0 <= 'P' ) : raise X10InvalidHouseCode ( '%r is not a valid house code' % arg_0 ) return arg_0","id_":253078,"task_name":"https:\/\/github.com\/clach04\/x10_any\/blob\/5b90a543b127ab9e6112fd547929b5ef4b8f0cbc\/x10_any\/__init__.py#L67-L80","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Stop logging to logfile and console .","positive":"def Func ( ) : from . import log arg_0 = logging . getLogger ( \"gromacs\" ) arg_0 . info ( \"GromacsWrapper %s STOPPED logging\" , get_version ( ) ) log . clear_handlers ( arg_0 )","id_":253079,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/__init__.py#L230-L235","negative":"Parses the version, data license, name, SPDX Identifier, namespace,\n and comment."} {"query":"An agent that keeps track of what locations are clean or dirty .","positive":"def Func ( ) : arg_0 = { loc_A : None , loc_B : None } def program ( ( arg_1 , arg_2 ) ) : arg_0 [ arg_1 ] = arg_2 if arg_0 [ loc_A ] == arg_0 [ loc_B ] == 'Clean' : return 'NoOp' elif arg_2 == 'Dirty' : return 'Suck' elif arg_1 == loc_A : return 'Right' elif arg_1 == loc_B : return 'Left' return Agent ( program )","id_":253080,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/agents.py#L181-L191","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."} {"query":"Removes the data center and all its components such as servers NICs load balancers volumes .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _perform_request ( url = '\/datacenters\/%s' % ( arg_1 ) , method = 'DELETE' ) return arg_2","id_":253081,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L277-L290","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Add this manager as an annotation to the graph .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> None : if 'bio2bel' not in arg_1 . annotation_list : arg_1 . annotation_list [ 'bio2bel' ] = set ( ) arg_1 . annotation_list [ 'bio2bel' ] . add ( arg_0 . module_name )","id_":253082,"task_name":"https:\/\/github.com\/bio2bel\/bio2bel\/blob\/d80762d891fa18b248709ff0b0f97ebb65ec64c2\/src\/bio2bel\/manager\/namespace_manager.py#L306-L311","negative":"Build transition noise distribution for a ConstrainedSeasonalSSM."} {"query":"Get the data sharing consent object associated with a certain user of a customer for a program .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = get_enterprise_customer ( arg_2 ) arg_4 = CourseCatalogApiServiceClient ( arg_3 . site ) arg_5 = arg_4 . get_program_course_keys ( arg_1 ) arg_6 = ( get_data_sharing_consent ( arg_0 , arg_2 , course_id = individual_course_id ) for individual_course_id in arg_5 ) return ProxyDataSharingConsent . 
from_children ( arg_1 , * arg_6 )","id_":253083,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/consent\/helpers.py#L52-L68","negative":"Compute the harmonic number from its analytic continuation.\n\n Derivation from [here](\n https:\/\/en.wikipedia.org\/wiki\/Digamma_function#Relation_to_harmonic_numbers)\n and [Euler's constant](\n https:\/\/en.wikipedia.org\/wiki\/Euler%E2%80%93Mascheroni_constant).\n\n Args:\n x: input float.\n\n Returns:\n z: The analytic continuation of the harmonic number for the input."} {"query":"Change of basis of bipartite matrix represenation .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = np . array ( [ [ 1 , 0 , 0 , 1 ] , [ 0 , 1 , 1j , 0 ] , [ 0 , 1 , - 1j , 0 ] , [ 1 , 0j , 0 , - 1 ] ] , dtype = complex ) arg_3 = arg_2 for arg_4 in range ( arg_1 - 1 ) : arg_5 = int ( np . sqrt ( len ( arg_3 ) ) ) arg_3 = np . reshape ( np . transpose ( np . reshape ( np . kron ( arg_2 , arg_3 ) , ( 2 , 2 , arg_5 , arg_5 , 4 , arg_5 * arg_5 ) ) , ( 0 , 2 , 1 , 3 , 4 , 5 ) ) , ( 4 * arg_5 * arg_5 , 4 * arg_5 * arg_5 ) ) return np . dot ( np . dot ( arg_3 , arg_0 ) , arg_3 . conj ( ) . T ) \/ 2 ** arg_1","id_":253084,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/channel\/transformations.py#L400-L416","negative":"Send a completion status payload to the SuccessFactors OCN Completion Status endpoint\n\n Args:\n user_id (str): The sap user id that the completion status is being sent for.\n payload (str): JSON encoded object (serialized from SapSuccessFactorsLearnerDataTransmissionAudit)\n containing completion status fields per SuccessFactors documentation.\n\n Returns:\n The body of the response from SAP SuccessFactors, if successful\n Raises:\n HTTPError: if we received a failure response code from SAP SuccessFactors"} {"query":"solve Tx = Z by a variation of Levinson algorithm where T is a complex hermitian toeplitz matrix","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : assert len ( arg_1 ) > 0 arg_3 = len ( arg_1 ) arg_4 = numpy . zeros ( arg_3 + 1 , dtype = complex ) arg_5 = numpy . zeros ( arg_3 , dtype = complex ) arg_6 = arg_0 if arg_6 == 0 : raise ValueError ( \"P must be different from zero\" ) arg_4 [ 0 ] = arg_2 [ 0 ] \/ arg_0 for arg_7 in range ( 0 , arg_3 ) : arg_8 = arg_1 [ arg_7 ] arg_9 = arg_4 [ 0 ] * arg_1 [ arg_7 ] if arg_7 == 0 : arg_10 = - arg_8 \/ arg_6 else : for arg_11 in range ( 0 , arg_7 ) : arg_8 = arg_8 + arg_5 [ arg_11 ] * arg_1 [ arg_7 - arg_11 - 1 ] arg_9 = arg_9 + arg_4 [ arg_11 + 1 ] * arg_1 [ arg_7 - arg_11 - 1 ] arg_10 = - arg_8 \/ arg_6 arg_6 = arg_6 * ( 1. - ( arg_10 . real ** 2 + arg_10 . imag ** 2 ) ) if arg_6 <= 0 : raise ValueError ( \"singular matrix\" ) arg_5 [ arg_7 ] = arg_10 arg_12 = ( arg_2 [ arg_7 + 1 ] - arg_9 ) \/ arg_6 if arg_7 == 0 : arg_4 [ arg_7 + 1 ] = arg_12 for arg_11 in range ( 0 , arg_7 + 1 ) : arg_4 [ arg_11 ] = arg_4 [ arg_11 ] + arg_12 * arg_5 [ arg_7 - arg_11 ] . conjugate ( ) continue arg_13 = ( arg_7 + 1 ) \/\/ 2 for arg_11 in range ( 0 , arg_13 ) : arg_14 = arg_7 - arg_11 - 1 arg_8 = arg_5 [ arg_11 ] arg_5 [ arg_11 ] = arg_8 + arg_10 * arg_5 [ arg_14 ] . conjugate ( ) if arg_11 != arg_14 : arg_5 [ arg_14 ] = arg_5 [ arg_14 ] + arg_10 * arg_8 . conjugate ( ) arg_4 [ arg_7 + 1 ] = arg_12 for arg_11 in range ( 0 , arg_7 + 1 ) : arg_4 [ arg_11 ] = arg_4 [ arg_11 ] + arg_12 * arg_5 [ arg_7 - arg_11 ] . 
conjugate ( ) return arg_4","id_":253085,"task_name":"https:\/\/github.com\/cokelaer\/spectrum\/blob\/bad6c32e3f10e185098748f67bb421b378b06afe\/src\/spectrum\/toeplitz.py#L84-L134","negative":"Register all messages from a checker.\n\n :param BaseChecker checker:"} {"query":"write_file will open a file filename and write content content and properly close the file","positive":"def Func ( arg_0 , arg_1 = \"r\" , arg_2 = True ) : with open ( arg_0 , arg_1 ) as filey : if arg_2 is True : arg_3 = filey . readlines ( ) else : arg_3 = filey . read ( ) return arg_3","id_":253086,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/utils\/fileio.py#L311-L320","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Returns memory stats for a module .","positive":"def Func ( arg_0 ) : arg_1 = { arg_0 . _run_object } try : with open ( arg_0 . _run_object , 'rb' ) as srcfile , _CodeEventsTracker ( arg_1 ) as prof : arg_2 = compile ( srcfile . read ( ) , arg_0 . _run_object , 'exec' ) prof . compute_mem_overhead ( ) exec ( arg_2 , arg_0 . _globs , None ) except SystemExit : pass return prof , None","id_":253087,"task_name":"https:\/\/github.com\/nvdv\/vprof\/blob\/4c3ff78f8920ab10cb9c00b14143452aa09ff6bb\/vprof\/memory_profiler.py#L167-L178","negative":"Revoke the token and remove the cookie."} {"query":"This endpoint sets the job s stability .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = { \"JobID\" : arg_1 , \"JobVersion\" : arg_2 , \"Stable\" : arg_3 } return arg_0 . request ( arg_1 , \"stable\" , json = arg_4 , method = \"post\" ) . json ( )","id_":253088,"task_name":"https:\/\/github.com\/jrxFive\/python-nomad\/blob\/37df37e4de21e6f8ac41c6154e7f1f44f1800020\/nomad\/api\/job.py#L255-L272","negative":"Delete files in `root_folder` which match `regex` before file ext.\n\n Example values:\n * root_folder = 'foo\/'\n * self.name = 'bar.jpg'\n * regex = re.compile('-baz')\n\n Result:\n * foo\/bar-baz.jpg <- Deleted\n * foo\/bar-biz.jpg <- Not deleted"} {"query":"Resets the iterator to the start .","positive":"def Func ( arg_0 ) : arg_0 . __iterator , arg_0 . __saved = itertools . tee ( arg_0 . __saved )","id_":253089,"task_name":"https:\/\/github.com\/jaraco\/jaraco.itertools\/blob\/0dc47c8924fa3d9ab676c3a6e195f03f728b72c6\/jaraco\/itertools.py#L554-L560","negative":"Produces a TidyPy configuration that incorporates the configuration files\n stored in the current user's home directory.\n\n :param project_path: the path to the project that is going to be analyzed\n :type project_path: str\n :param use_cache:\n whether or not to use cached versions of any remote\/referenced TidyPy\n configurations. If not specified, defaults to ``True``.\n :type use_cache: bool\n :rtype: dict"} {"query":"Given a potentially complex type split it into its base type and specializers","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _canonicalize_type ( arg_1 ) if '(' not in arg_2 : return arg_2 , False , [ ] arg_3 , arg_4 = arg_2 . split ( '(' ) if len ( arg_4 ) == 0 or arg_4 [ - 1 ] != ')' : raise ArgumentError ( \"syntax error in complex type, no matching ) found\" , passed_type = arg_1 , basetype = arg_3 , subtype_string = arg_4 ) arg_4 = arg_4 [ : - 1 ] arg_5 = arg_4 . 
split ( ',' ) return arg_3 , True , arg_5","id_":253090,"task_name":"https:\/\/github.com\/iotile\/typedargs\/blob\/0a5091a664b9b4d836e091e9ba583e944f438fd8\/typedargs\/typeinfo.py#L187-L203","negative":"Check if a route needs ssl, and redirect it if not. Also redirects back to http for non-ssl routes. Static routes\n are served as both http and https\n\n :return: A response to be returned or None"} {"query":"Build an xsd schema from a bridgepoint component .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ET . Element ( 'xs:schema' ) arg_2 . set ( 'xmlns:xs' , 'http:\/\/www.w3.org\/2001\/XMLSchema' ) arg_3 = lambda selected : ooaofooa . is_global ( selected ) for arg_4 in arg_0 . select_many ( 'S_DT' , arg_3 ) : arg_5 = build_type ( arg_4 ) if arg_5 is not None : arg_2 . append ( arg_5 ) arg_6 = lambda selected : ooaofooa . is_contained_in ( selected , arg_1 ) for arg_4 in arg_0 . select_many ( 'S_DT' , arg_6 ) : arg_5 = build_type ( arg_4 ) if arg_5 is not None : arg_2 . append ( arg_5 ) arg_7 = build_component ( arg_0 , arg_1 ) arg_2 . append ( arg_7 ) return arg_2","id_":253091,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/bridgepoint\/gen_xsd_schema.py#L219-L241","negative":"Get distribution version.\n\n This method is enhanced compared to original distutils implementation.\n If the version string is set to a special value then instead of using\n the actual value the real version is obtained by querying versiontools.\n\n If versiontools package is not installed then the version is obtained\n from the standard section of the ``PKG-INFO`` file. This file is\n automatically created by any source distribution. This method is less\n useful as it cannot take advantage of version control information that\n is automatically loaded by versiontools. It has the advantage of not\n requiring versiontools installation and that it does not depend on\n ``setup_requires`` feature of ``setuptools``."} {"query":"Return zoom level as integer or throw error .","positive":"def Func ( arg_0 , arg_1 ) : try : return int ( arg_0 . strip ( arg_1 ) ) except Exception as e : raise MapcheteConfigError ( \"zoom level could not be determined: %s\" % e )","id_":253092,"task_name":"https:\/\/github.com\/ungarj\/mapchete\/blob\/d482918d0e66a5b414dff6aa7cc854e01fc60ee4\/mapchete\/config.py#L914-L919","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Train our network one batch at a time .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = 'rmsprop' , arg_4 = 'rmsprop' , arg_5 = 0 , arg_6 = None , ** arg_7 ) : if 'rng' not in arg_7 : arg_7 [ 'rng' ] = arg_0 . _rng def create_dataset ( arg_8 , ** arg_7 ) : arg_9 = arg_7 . get ( 'name' , 'dataset' ) arg_10 = '{}_batches' . format ( arg_9 ) return downhill . Dataset ( arg_8 , arg_9 = arg_9 , batch_size = arg_7 . get ( 'batch_size' , 32 ) , iteration_size = arg_7 . get ( 'iteration_size' , arg_7 . get ( arg_10 ) ) , axis = arg_7 . get ( 'axis' , 0 ) , rng = arg_7 [ 'rng' ] ) if arg_2 is None : arg_2 = arg_1 if not isinstance ( arg_2 , downhill . Dataset ) : arg_2 = create_dataset ( arg_2 , arg_9 = 'valid' , ** arg_7 ) if not isinstance ( arg_1 , downhill . Dataset ) : arg_1 = create_dataset ( arg_1 , arg_9 = 'train' , ** arg_7 ) if 'algorithm' in arg_7 : warnings . 
warn ( 'please use the \"algo\" keyword arg instead of \"algorithm\"' , DeprecationWarning ) arg_3 = arg_7 . pop ( 'algorithm' ) if isinstance ( arg_3 , ( list , tuple ) ) : arg_3 = arg_3 [ 0 ] if isinstance ( arg_3 , util . basestring ) : arg_3 = arg_3 . lower ( ) if arg_3 == 'sample' : arg_3 = trainer . SampleTrainer ( arg_0 ) elif arg_3 . startswith ( 'layer' ) or arg_3 . startswith ( 'sup' ) : arg_3 = trainer . SupervisedPretrainer ( arg_4 , arg_0 ) elif arg_3 . startswith ( 'pre' ) or arg_3 . startswith ( 'unsup' ) : arg_3 = trainer . UnsupervisedPretrainer ( arg_4 , arg_0 ) else : arg_3 = trainer . DownhillTrainer ( arg_3 , arg_0 ) def needs_saving ( arg_11 , arg_12 ) : if arg_6 is None : return False if isinstance ( arg_5 , float ) : return arg_11 > 60 * arg_5 if isinstance ( arg_5 , int ) : return arg_12 % arg_5 == 0 return False arg_13 = time . time ( ) for arg_14 , arg_15 in enumerate ( arg_3 . Func ( arg_1 , arg_2 , ** arg_7 ) ) : yield arg_15 arg_16 = time . time ( ) if arg_14 and needs_saving ( arg_16 - arg_13 , arg_14 ) : arg_17 = arg_6 if isinstance ( arg_17 , util . basestring ) : arg_17 = arg_6 . format ( int ( arg_16 ) ) arg_0 . save ( arg_17 ) arg_13 = arg_16","id_":253093,"task_name":"https:\/\/github.com\/lmjohns3\/theanets\/blob\/79db9f878ef2071f2f576a1cf5d43a752a55894a\/theanets\/graph.py#L240-L365","negative":"Return a list of not null values from the `col_name` column of `df`."} {"query":"Linear Prediction Coefficients via Burg s method","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , int ) or arg_1 < 1 : raise ParameterError ( \"order must be an integer > 0\" ) util . valid_audio ( arg_0 , mono = True ) return __Func ( arg_0 , arg_1 )","id_":253094,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/core\/audio.py#L536-L607","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"This method obtains the actual features .","positive":"def Func ( arg_0 , arg_1 = [ \"pcp\" , \"tonnetz\" , \"mfcc\" , \"cqt\" , \"tempogram\" ] ) : if arg_0 . feature_str not in arg_1 : raise RuntimeError ( \"Feature %s in not valid for algorithm: %s \" \"(valid features are %s).\" % ( arg_0 . feature_str , __name__ , arg_1 ) ) else : try : arg_2 = arg_0 . features . features except KeyError : raise RuntimeError ( \"Feature %s in not supported by MSAF\" % ( arg_0 . feature_str ) ) return arg_2","id_":253095,"task_name":"https:\/\/github.com\/urinieto\/msaf\/blob\/9dbb57d77a1310465a65cc40f1641d083ca74385\/msaf\/algorithms\/interface.py#L85-L100","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Password step of set commands","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = False try : arg_3 = \"0150310228\" + binascii . hexlify ( arg_1 ) + \"2903\" arg_4 = arg_0 . calc_crc16 ( arg_3 [ 2 : ] . decode ( \"hex\" ) ) arg_5 = arg_3 + arg_4 arg_0 . m_serial_port . write ( arg_5 . decode ( \"hex\" ) ) if arg_0 . m_serial_port . getResponse ( arg_0 . getContext ( ) ) . encode ( \"hex\" ) == \"06\" : ekm_log ( \"Password accepted (\" + arg_0 . 
getContext ( ) + \")\" ) arg_2 = True else : ekm_log ( \"Password call failure no 06(\" + arg_0 . getContext ( ) + \")\" ) except : ekm_log ( \"Password call failure by exception(\" + arg_0 . getContext ( ) + \")\" ) ekm_log ( traceback . format_exc ( sys . exc_info ( ) ) ) return arg_2","id_":253096,"task_name":"https:\/\/github.com\/ekmmetering\/ekmmeters\/blob\/b3748bdf30263bfa46ea40157bdf8df2522e1904\/ekmmeters.py#L2982-L3011","negative":"Returns a dictionary with all the past baking statuses of a single book."} {"query":"Write a notebook to a file in a given format in the current nbformat version .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : return arg_1 . Func ( Funcs ( arg_0 , arg_2 , ** arg_3 ) )","id_":253097,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/nbformat\/current.py#L186-L205","negative":"Return list of GATT descriptors that have been discovered for this\n characteristic."} {"query":"r \\ )","positive":"def Func ( arg_0 , arg_1 ) : arg_1 . endlexpos = arg_1 . lexpos + len ( arg_1 . value ) return arg_1","id_":253098,"task_name":"https:\/\/github.com\/xtuml\/pyxtuml\/blob\/7dd9343b9a0191d1db1887ab9288d0a026608d9a\/xtuml\/load.py#L492-L495","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."} {"query":"Relay messages from the forum on the server to the client represented by this actor .","positive":"def Func ( arg_0 , arg_1 ) : info ( \"relaying message: {message}\" ) if not arg_1 . was_sent_by ( arg_0 . _id_factory ) : arg_0 . pipe . send ( arg_1 ) arg_0 . pipe . deliver ( )","id_":253099,"task_name":"https:\/\/github.com\/kxgames\/kxg\/blob\/a68c01dc4aa1abf6b3780ba2c65a7828282566aa\/kxg\/multiplayer.py#L274-L283","negative":"Creates a service from a constructor and checks which kwargs are not used"} {"query":"Return dict of traffic meter stats .","positive":"def Func ( arg_0 ) : _LOGGER . info ( \"Get traffic meter\" ) def parse_text ( arg_1 ) : def tofloats ( arg_2 ) : return ( float ( arg_3 ) for arg_3 in arg_2 ) try : if \"\/\" in arg_1 : return tuple ( tofloats ( arg_1 . split ( '\/' ) ) ) elif \":\" in arg_1 : arg_4 , arg_5 = tofloats ( arg_1 . split ( ':' ) ) return timedelta ( hours = arg_4 , minutes = arg_5 ) else : return float ( arg_1 ) except ValueError : return None arg_6 , arg_7 = arg_0 . _make_request ( SERVICE_DEVICE_CONFIG , \"GetTrafficMeterStatistics\" ) if not arg_6 : return None arg_6 , arg_8 = _find_node ( arg_7 . text , \".\/\/GetTrafficMeterStatisticsResponse\" ) if not arg_6 : return None return { arg_3 . tag : parse_text ( arg_3 . 
text ) for arg_3 in arg_8 }","id_":253100,"task_name":"https:\/\/github.com\/MatMaul\/pynetgear\/blob\/247d6b9524fcee4b2da0e65ca12c52ebdd3676b2\/pynetgear\/__init__.py#L243-L280","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Applies the settings to the index .","positive":"def Func ( arg_0 ) : if not arg_0 . settings : return try : arg_0 . __index . Func ( arg_0 . settings ) logger . info ( 'APPLY SETTINGS ON %s' , arg_0 . index_name ) except AlgoliaException as e : if DEBUG : raise e else : logger . warning ( 'SETTINGS NOT APPLIED ON %s: %s' , arg_0 . model , e )","id_":253101,"task_name":"https:\/\/github.com\/algolia\/algoliasearch-django\/blob\/ca219db41eb56bdd1c0389cdc1508a41698958d7\/algoliasearch_django\/models.py#L388-L401","negative":"Parse the conservation predictors\n\n Args:\n variant(dict): A variant dictionary\n\n Returns:\n conservations(dict): A dictionary with the conservations"} {"query":"r Akaike Information Criterion","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : from numpy import log , array arg_3 = arg_0 * log ( array ( arg_1 ) ) + 2. * ( array ( arg_2 ) + 1 ) return arg_3","id_":253102,"task_name":"https:\/\/github.com\/cokelaer\/spectrum\/blob\/bad6c32e3f10e185098748f67bb421b378b06afe\/src\/spectrum\/criteria.py#L157-L178","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Replaces all occurrences of old with new","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . seq = arg_0 . seq . replace ( arg_1 , arg_2 )","id_":253103,"task_name":"https:\/\/github.com\/sanger-pathogens\/Fastaq\/blob\/2c775c846d2491678a9637daa320592e02c26c72\/pyfastaq\/sequences.py#L250-L252","negative":"Re-enable the FTDI drivers for the current platform."} {"query":"Walk a directory tree with configurable depth .","positive":"def Func ( arg_0 , arg_1 = arg_2 ( 'inf' ) ) : arg_3 = os . path . abspath ( arg_0 ) . count ( os . path . sep ) for arg_4 in os . walk ( arg_0 ) : arg_5 , arg_6 , arg_7 = arg_4 arg_8 = arg_5 . count ( os . path . sep ) - arg_3 yield arg_4 if arg_8 >= arg_1 : arg_6 [ : ] = [ ]","id_":253104,"task_name":"https:\/\/github.com\/thebigmunch\/gmusicapi-wrapper\/blob\/8708683cd33955def1378fc28319ef37805b851d\/gmusicapi_wrapper\/utils.py#L398-L418","negative":"Regenerates the primary or secondary access key for the specified\n storage account.\n\n service_name:\n Name of the storage service account.\n key_type:\n Specifies which key to regenerate. Valid values are:\n Primary, Secondary"} {"query":"deleteOne - Delete one object","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if not getattr ( arg_1 , '_id' , None ) : return 0 if arg_2 is None : arg_2 = arg_0 . _get_connection ( ) arg_3 = arg_2 . pipeline ( ) arg_4 = True else : arg_3 = arg_2 arg_4 = False arg_3 . delete ( arg_0 . _get_key_for_id ( arg_1 . _id ) ) arg_0 . _rem_id_from_keys ( arg_1 . _id , arg_3 ) for arg_5 in arg_0 . indexedFields : arg_0 . _rem_id_from_index ( arg_5 , arg_1 . _id , arg_1 . 
_origData [ arg_5 ] , arg_3 ) arg_1 . _id = None if arg_4 is True : arg_3 . execute ( ) return 1","id_":253105,"task_name":"https:\/\/github.com\/kata198\/indexedredis\/blob\/f9c85adcf5218dac25acb06eedc63fc2950816fa\/IndexedRedis\/__init__.py#L2294-L2324","negative":"Make preparations before running Tank"} {"query":"Assert that the argument has the specified type .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : assert arg_1 , \"The list of expected types was not provided\" arg_3 = arg_1 [ 0 ] if len ( arg_1 ) == 1 else U ( * arg_1 ) if _check_type ( arg_0 , arg_3 ) : return assert set ( arg_2 ) . issubset ( { \"message\" , \"skip_frames\" } ) , \"Unexpected keyword arguments: %r\" % arg_2 arg_4 = arg_2 . get ( \"message\" , None ) arg_5 = arg_2 . get ( \"skip_frames\" , 1 ) arg_6 = _retrieve_assert_arguments ( ) arg_7 = arg_6 [ 0 ] arg_8 = _get_type_name ( arg_3 , dump = \", \" . join ( arg_6 [ 1 : ] ) ) arg_9 = _get_type_name ( type ( arg_0 ) ) raise H2OTypeError ( var_name = arg_7 , var_value = arg_0 , var_type_name = arg_9 , exp_type_name = arg_8 , arg_4 = arg_4 , arg_5 = arg_5 )","id_":253106,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/utils\/typechecks.py#L429-L457","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Checks if any of expected matches received .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if not any ( map ( arg_2 . matches , arg_1 ) ) : raise errors . StatusCodeError ( arg_1 , arg_2 , arg_3 )","id_":253107,"task_name":"https:\/\/github.com\/aio-libs\/aioftp\/blob\/b45395b1aba41301b898040acade7010e6878a08\/aioftp\/client.py#L198-L215","negative":"Close the file and restore the channel."} {"query":"Permanently erase one or more VM instances from existence .","positive":"def Func ( arg_0 = None , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = 1 , arg_5 = 1 ) : arg_5 = int ( arg_5 ) if env . vm_type == EC2 : arg_6 = get_ec2_connection ( ) arg_7 = list_instances ( arg_0 = arg_0 , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , ) for arg_8 , arg_9 in arg_7 . items ( ) : arg_10 = arg_9 [ 'public_dns_name' ] print ( '\\nDeleting %s (%s)...' % ( arg_8 , arg_9 [ 'id' ] ) ) if not get_dryrun ( ) : arg_6 . terminate_instances ( instance_ids = [ arg_9 [ 'id' ] ] ) arg_11 = os . path . expanduser ( '~\/.ssh\/known_hosts' ) arg_12 = 'ssh-keygen -f \"%s\" -R %s' % ( arg_11 , arg_10 ) local_or_dryrun ( arg_12 ) else : raise NotImplementedError","id_":253108,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/vm.py#L597-L628","negative":"Checks if a Pong message was received.\n\n :return:"} {"query":"Given a mnemonic word string return a string of the computed checksum .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . split ( \" \" ) if len ( arg_2 ) < 12 : raise ValueError ( \"Invalid mnemonic phrase\" ) if len ( arg_2 ) > 13 : arg_1 = arg_2 [ : 24 ] else : arg_1 = arg_2 [ : 12 ] arg_3 = \"\" . join ( word [ : arg_0 . unique_prefix_length ] for word in arg_1 ) arg_3 = bytearray ( arg_3 . 
encode ( 'utf-8' ) ) arg_4 = ( ( crc32 ( arg_3 ) & 0xffffffff ) ^ 0xffffffff ) >> 0 arg_5 = ( ( arg_4 ^ 0xffffffff ) >> 0 ) % len ( arg_1 ) return arg_2 [ arg_5 ]","id_":253109,"task_name":"https:\/\/github.com\/monero-ecosystem\/monero-python\/blob\/64149f6323af57a3924f45ed87997d64387c5ee0\/monero\/wordlists\/wordlist.py#L74-L92","negative":"Set up the OpenDNP3 configuration."} {"query":"Creates a solver from a specification dict .","positive":"def Func ( arg_0 , arg_1 = None ) : return util . get_object ( obj = arg_0 , predefined = tensorforce . core . optimizers . solvers . solvers , arg_1 = arg_1 )","id_":253110,"task_name":"https:\/\/github.com\/tensorforce\/tensorforce\/blob\/520a8d992230e382f08e315ede5fc477f5e26bfb\/tensorforce\/core\/optimizers\/solvers\/solver.py#L48-L56","negative":"Clip input array with a vector list.\n\n Parameters\n ----------\n array : array\n input raster data\n array_affine : Affine\n Affine object describing the raster's geolocation\n geometries : iterable\n iterable of dictionaries, where every entry has a 'geometry' and\n 'properties' key.\n inverted : bool\n invert clip (default: False)\n clip_buffer : integer\n buffer (in pixels) geometries before clipping\n\n Returns\n -------\n clipped array : array"} {"query":"Setting up and running GIES with all arguments .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = True ) : arg_4 = str ( uuid . uuid4 ( ) ) os . makedirs ( '\/tmp\/cdt_gies' + arg_4 + '\/' ) arg_0 . arguments [ '{FOLDER}' ] = '\/tmp\/cdt_gies' + arg_4 + '\/' def retrieve_result ( ) : return read_csv ( '\/tmp\/cdt_gies' + arg_4 + '\/result.csv' , delimiter = ',' ) . values try : arg_1 . to_csv ( '\/tmp\/cdt_gies' + arg_4 + '\/data.csv' , header = False , index = False ) if arg_2 is not None : arg_2 . to_csv ( '\/tmp\/cdt_gies' + arg_4 + '\/fixedgaps.csv' , index = False , header = False ) arg_0 . arguments [ '{SKELETON}' ] = 'TRUE' else : arg_0 . arguments [ '{SKELETON}' ] = 'FALSE' arg_6 = launch_R_script ( \"{}\/R_templates\/gies.R\" . format ( os . path . dirname ( os . path . realpath ( __file__ ) ) ) , arg_0 . arguments , output_function = retrieve_result , arg_3 = arg_3 ) except Exception as e : rmtree ( '\/tmp\/cdt_gies' + arg_4 + '' ) raise e except KeyboardInterrupt : rmtree ( '\/tmp\/cdt_gies' + arg_4 + '\/' ) raise KeyboardInterrupt rmtree ( '\/tmp\/cdt_gies' + arg_4 + '' ) return arg_6","id_":253111,"task_name":"https:\/\/github.com\/Diviyan-Kalainathan\/CausalDiscoveryToolbox\/blob\/be228b078ba9eb76c01b3ccba9a1c0ad9e9e5ed1\/cdt\/causality\/graph\/GIES.py#L146-L174","negative":"Given an email address, check the email_remapping table to see if the email\n should be sent to a different address. This function also handles overriding\n the email domain if ignore_vcs_email_domain is set or the domain was missing"} {"query":"Computes graph and static sample_shape .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ( tf . rank ( arg_1 ) if tensorshape_util . rank ( arg_1 . shape ) is None else tensorshape_util . rank ( arg_1 . shape ) ) arg_3 = ( tf . size ( input = arg_0 . event_shape_tensor ( ) ) if tensorshape_util . rank ( arg_0 . event_shape ) is None else tensorshape_util . rank ( arg_0 . event_shape ) ) arg_4 = ( tf . size ( input = arg_0 . _batch_shape_unexpanded ) if tensorshape_util . rank ( arg_0 . batch_shape ) is None else tensorshape_util . rank ( arg_0 . batch_shape ) ) arg_5 = arg_2 - arg_4 - arg_3 if isinstance ( arg_5 , int ) : arg_6 = arg_1 . shape [ : arg_5 ] else : arg_6 = tf . 
TensorShape ( None ) if tensorshape_util . is_fully_defined ( arg_6 ) : arg_7 = np . int32 ( arg_6 ) else : arg_7 = tf . shape ( input = arg_1 ) [ : arg_5 ] return arg_7 , arg_6","id_":253112,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/batch_reshape.py#L210-L232","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Convert any non - DAVError exception to HTTP_INTERNAL_ERROR .","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , DAVError ) : return arg_0 elif isinstance ( arg_0 , Exception ) : return DAVError ( HTTP_INTERNAL_ERROR , src_exception = arg_0 ) else : return DAVError ( HTTP_INTERNAL_ERROR , \"{}\" . format ( arg_0 ) )","id_":253113,"task_name":"https:\/\/github.com\/mar10\/wsgidav\/blob\/cec0d84222fc24bea01be1cea91729001963f172\/wsgidav\/dav_error.py#L282-L290","negative":"Configure the Python logging module for this file."} {"query":"Grab contents of doc and return it","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . get_start_iter ( ) arg_3 = arg_1 . get_end_iter ( ) arg_4 = arg_1 . get_text ( arg_2 , arg_3 , False ) return arg_4","id_":253114,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/extensions\/gedit\/gedit2-plugin\/shoebotit\/__init__.py#L186-L196","negative":"Handle Error messages and log them accordingly.\n\n :param data:\n :param ts:"} {"query":"docstring for build_clnsig","positive":"def Func ( arg_0 ) : arg_1 = dict ( value = arg_0 [ 'value' ] , accession = arg_0 . get ( 'accession' ) , revstat = arg_0 . get ( 'revstat' ) ) return arg_1","id_":253115,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/build\/variant\/clnsig.py#L2-L10","negative":"Default exchange proposal function, for replica exchange MC.\n\n With probability `prob_exchange` propose combinations of replica for exchange.\n When exchanging, create combinations of adjacent replicas in\n [Replica Exchange Monte Carlo](\n https:\/\/en.wikipedia.org\/wiki\/Parallel_tempering)\n\n ```\n exchange_fn = default_exchange_proposed_fn(prob_exchange=0.5)\n exchange_proposed = exchange_fn(num_replica=3)\n\n exchange_proposed.eval()\n ==> [[0, 1]] # 1 exchange, 0 <--> 1\n\n exchange_proposed.eval()\n ==> [] # 0 exchanges\n ```\n\n Args:\n prob_exchange: Scalar `Tensor` giving probability that any exchanges will\n be generated.\n\n Returns:\n default_exchange_proposed_fn_: Python callable which take a number of\n replicas (a Python integer), and return combinations of replicas for\n exchange as an [n, 2] integer `Tensor`, `0 <= n <= num_replica \/\/ 2`,\n with *unique* values in the set `{0, ..., num_replica}`."} {"query":"Journey route decorator","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_2 [ 'strict_slashes' ] = arg_2 . pop ( 'strict_slashes' , False ) arg_3 = _validate_schema ( arg_2 . pop ( '_body' , None ) ) arg_4 = _validate_schema ( arg_2 . pop ( '_query' , None ) ) arg_5 = _validate_schema ( arg_2 . pop ( 'marshal_with' , None ) ) arg_6 = arg_2 . pop ( 'validate' , True ) def decorator ( arg_7 ) : @ arg_0 . Func ( * arg_1 , ** arg_2 ) @ wraps ( arg_7 ) def wrapper ( * arg_8 , ** arg_9 ) : try : if arg_4 is not None : arg_4 . strict = arg_6 arg_11 = furl ( request . url ) arg_9 [ '_query' ] = arg_4 . load ( arg_13 = arg_11 . args ) if arg_3 is not None : arg_3 . strict = arg_6 arg_12 = request . 
get_json ( ) if arg_12 is None : arg_12 = { } arg_9 [ '_body' ] = arg_3 . load ( arg_13 = arg_12 ) except ValidationError as err : return jsonify ( err . messages ) , 422 if arg_5 : arg_13 = arg_5 . dump ( arg_7 ( * arg_8 , ** arg_9 ) ) return jsonify ( arg_13 [ 0 ] ) return arg_7 ( * arg_8 , ** arg_9 ) return arg_7 return decorator","id_":253116,"task_name":"https:\/\/github.com\/rbw\/flask-journey\/blob\/6181f59a7b5eef6a85b86ce6ed7d03c91f6bd285\/flask_journey\/utils.py#L50-L107","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Check for escape character and return either a handler to handle it or None if there is no escape char .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 . line [ - 1 ] == ESC_HELP and arg_1 . esc != ESC_SHELL and arg_1 . esc != ESC_SH_CAP : return arg_0 . prefilter_manager . get_handler_by_name ( 'help' ) else : if arg_1 . pre : return None return arg_0 . prefilter_manager . get_handler_by_esc ( arg_1 . esc )","id_":253117,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/prefilter.py#L570-L583","negative":"Write the index.html file for this report."} {"query":"Updates the binary annotations for the current span .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . logging_context : arg_0 . binary_annotations . update ( arg_1 ) else : arg_0 . logging_context . tags . update ( arg_1 )","id_":253118,"task_name":"https:\/\/github.com\/Yelp\/py_zipkin\/blob\/0944d9a3fb1f1798dbb276694aeed99f2b4283ba\/py_zipkin\/zipkin.py#L534-L543","negative":"Called when a device is disconnected."} {"query":"Find the location of a dataset on disk downloading if needed .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = os . path . join ( DATASETS , arg_0 ) arg_3 = os . path . dirname ( arg_2 ) if not os . path . exists ( arg_3 ) : print ( 'creating dataset directory: %s' , arg_3 ) os . makedirs ( arg_3 ) if not os . path . exists ( arg_2 ) : if sys . version_info < ( 3 , ) : urllib . urlretrieve ( arg_1 , arg_2 ) else : urllib . request . urlretrieve ( arg_1 , arg_2 ) return arg_2","id_":253119,"task_name":"https:\/\/github.com\/lmjohns3\/theanets\/blob\/79db9f878ef2071f2f576a1cf5d43a752a55894a\/examples\/utils.py#L18-L30","negative":"Reassemble a Binder object coming out of the database."} {"query":"Create coverage reports and open them in the browser .","positive":"def Func ( ) : arg_0 = \"Usage: %prog PATH_TO_PACKAGE\" arg_1 = optparse . OptionParser ( arg_0 = arg_0 ) arg_1 . add_option ( \"-v\" , \"--verbose\" , action = \"store_true\" , dest = \"verbose\" , default = False , help = \"Show debug output\" ) arg_1 . add_option ( \"-d\" , \"--output-dir\" , action = \"store\" , type = \"string\" , dest = \"output_dir\" , default = '' , help = \"\" ) arg_1 . add_option ( \"-t\" , \"--test-args\" , action = \"store\" , type = \"string\" , dest = \"test_args\" , default = '' , help = ( \"Pass argument on to bin\/test. Quote the argument, \" + \"for instance \\\"-t '-m somemodule'\\\".\" ) ) ( arg_2 , arg_3 ) = arg_1 . parse_args ( ) if arg_2 . verbose : arg_4 = logging . DEBUG else : arg_4 = logging . 
INFO logging . basicConfig ( level = arg_4 , format = \"%(levelname)s: %(message)s\" ) arg_5 = os . getcwd ( ) arg_6 = os . path . join ( arg_5 , 'bin' , 'test' ) if not os . path . exists ( arg_6 ) : raise RuntimeError ( \"Test command doesn't exist: %s\" % arg_6 ) arg_7 = os . path . join ( arg_5 , 'bin' , 'coverage' ) if not os . path . exists ( arg_7 ) : logger . debug ( \"Trying globally installed coverage command.\" ) arg_7 = 'coverage' logger . info ( \"Running tests in coverage mode (can take a long time)\" ) arg_8 = [ arg_7 , 'run' , arg_6 ] if arg_2 . test_args : arg_8 . append ( arg_2 . test_args ) system ( \" \" . join ( arg_8 ) ) logger . debug ( \"Creating coverage reports...\" ) if arg_2 . output_dir : arg_9 = arg_2 . output_dir arg_10 = False else : arg_9 = 'htmlcov' arg_10 = True system ( \"%s html --directory=%s\" % ( arg_7 , arg_9 ) ) logger . info ( \"Wrote coverage files to %s\" , arg_9 ) if arg_10 : arg_11 = os . path . abspath ( os . path . join ( arg_9 , 'index.html' ) ) logger . debug ( \"About to open %s in your webbrowser.\" , arg_11 ) webbrowser . open ( 'file:\/\/' + arg_11 ) logger . info ( \"Opened reports in your browser.\" )","id_":253120,"task_name":"https:\/\/github.com\/reinout\/createcoverage\/blob\/8062cf77bcaf74fe902917a2661c3f1e02aac36c\/createcoverage\/script.py#L38-L94","negative":"Get the decryption for col."} {"query":"Create a NamedTemporaryFile instance to be passed to atomic_writer","positive":"def Func ( arg_0 ) : return tempfile . NamedTemporaryFile ( mode = 'w' , dir = os . path . dirname ( arg_0 ) , prefix = os . path . basename ( arg_0 ) , suffix = os . fsencode ( '.tmp' ) , delete = False )","id_":253121,"task_name":"https:\/\/github.com\/hefnawi\/json-storage-manager\/blob\/c7521fc4a576cf23a8c2454106bed6fb8c951b8d\/json_storage_manager\/atomic.py#L9-L17","negative":"translate exception str to http code"} {"query":"Create the role_based_access_control switch if it does not already exist .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . get_model ( 'waffle' , 'Switch' ) arg_2 . objects . update_or_create ( name = ENTERPRISE_ROLE_BASED_ACCESS_CONTROL_SWITCH , defaults = { 'active' : False } )","id_":253122,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/migrations\/0067_add_role_based_access_control_switch.py#L9-L12","negative":"Call the segmenter in order to split text in sentences.\n\n Args:\n text (str): Text to be segmented.\n\n Returns:\n dict, int: A dict containing a list of dicts with the offsets of\n each sentence; an integer representing the response code."} {"query":"Called when transport has been connected .","positive":"def Func ( arg_0 ) : with arg_0 . lock : if arg_0 . initiator : if arg_0 . _output_state is None : arg_0 . _initiate ( )","id_":253123,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/streambase.py#L251-L259","negative":"Generate a path value of type result_type.\n\n result_type can either be bytes or text_type"} {"query":"Returns the Session currently used .","positive":"def Func ( arg_0 ) : arg_1 = _lib . SSL_get1_session ( arg_0 . _ssl ) if arg_1 == _ffi . NULL : return None arg_2 = Session . __new__ ( Session ) arg_2 . _session = _ffi . gc ( arg_1 , _lib . 
SSL_SESSION_free ) return arg_2","id_":253124,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/SSL.py#L2266-L2281","negative":"Wrapper method that calls the appropriate main updating methods of\n the inspection.\n\n It is meant to be used inside a loop (like while), so that it can\n continuously update the class attributes from the trace and log files.\n It already implements checks to parse these files only when they\n change, and they ignore entries that have been previously processes."} {"query":"Render the sourcecode .","positive":"def Func ( arg_0 ) : return SOURCE_TABLE_HTML % u'\\n' . join ( arg_1 . render ( ) for arg_1 in arg_0 . get_annotated_lines ( ) )","id_":253125,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/werkzeug\/debug\/tbtools.py#L433-L436","negative":"Convenience function to efficiently construct a MultivariateNormalDiag."} {"query":"Returns molecular weight of molecule .","positive":"def Func ( arg_0 ) : arg_1 = elements ( ) arg_2 = re . compile ( '\\(([A-z0-9]+)\\)([0-9]+)?' ) arg_3 = re . compile ( '([A-Z][a-z]?)([0-9]+)?' ) arg_4 = arg_2 . findall ( arg_0 ) arg_5 = arg_2 . sub ( '' , arg_0 ) arg_6 = 0 if len ( arg_4 ) > 0 : for arg_7 , arg_8 in arg_4 : arg_9 = 0 for arg_10 , arg_11 in arg_3 . findall ( arg_7 ) : arg_12 = ( arg_1 . loc [ arg_10 , 'atomic_weight' ] * arg_1 . loc [ arg_10 , 'percent' ] \/ 100 ) . sum ( ) if arg_11 == '' : arg_11 = 1 else : arg_11 = int ( arg_11 ) arg_9 += arg_12 * arg_11 if arg_8 == '' : arg_8 = 1 else : arg_8 = int ( arg_8 ) arg_6 += arg_9 * arg_8 for arg_10 , arg_11 in arg_3 . findall ( arg_5 ) : arg_12 = ( arg_1 . loc [ arg_10 , 'atomic_weight' ] * arg_1 . loc [ arg_10 , 'percent' ] \/ 100 ) . sum ( ) if arg_11 == '' : arg_11 = 1 else : arg_11 = int ( arg_11 ) arg_6 += arg_12 * arg_11 return arg_6","id_":253126,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/latools\/helpers\/chemistry.py#L26-L75","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"Return a description of the given bit in the encoded output . This will include the field name and the offset within the field .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : ( arg_3 , arg_4 ) = ( None , None ) arg_5 = arg_0 . getDescription ( ) for arg_6 in xrange ( len ( arg_5 ) ) : ( arg_7 , arg_8 ) = arg_5 [ arg_6 ] if arg_2 : arg_8 = arg_8 + arg_6 if arg_1 == arg_8 - 1 : arg_3 = \"separator\" arg_4 = arg_1 break if arg_1 < arg_8 : break ( arg_3 , arg_4 ) = ( arg_7 , arg_8 ) arg_9 = arg_0 . getDisplayWidth ( ) if arg_2 else arg_0 . getWidth ( ) if arg_4 is None or arg_1 > arg_0 . getWidth ( ) : raise IndexError ( \"Bit is outside of allowable range: [0 - %d]\" % arg_9 ) return ( arg_3 , arg_1 - arg_4 )","id_":253127,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/encoders\/base.py#L419-L453","negative":"Cycles through notifications with latest results from data feeds."} {"query":"Get Spotify catalog information about artists similar to a given artist .","positive":"async def Func ( arg_0 ) -> List [ Artist ] : arg_1 = await arg_0 . __client . http . artist_Func ( arg_0 . id ) return list ( Artist ( arg_0 . 
__client , arg_2 ) for arg_2 in arg_1 [ 'artists' ] )","id_":253128,"task_name":"https:\/\/github.com\/mental32\/spotify.py\/blob\/bb296cac7c3dd289908906b7069bd80f43950515\/spotify\/models\/artist.py#L137-L148","negative":"Pickle the Dataset instance to the provided file."} {"query":"Get crate versions data","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . client . crate_attribute ( arg_1 , \"versions\" ) arg_3 = json . loads ( arg_2 ) return arg_3","id_":253129,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval-mozilla\/blob\/4514f8d3d609d3cb79d83c72d51fcc4b4a7daeb4\/perceval\/backends\/mozilla\/crates.py#L222-L229","negative":"bind user variable to `_wrapped`\n\n .. note::\n\n you don't need call this method by yourself.\n\n program will call it in `cliez.parser.parse`\n\n\n .. expection::\n\n if path is not correct,will cause an `ImportError`\n\n\n :param str mod_path: module path, *use dot style,'mod.mod1'*\n :param str with_path: add path to `sys.path`,\n if path is file,use its parent.\n :return: A instance of `Settings`"} {"query":"Fits a new component to an old component","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = arg_0 . category arg_0 . category = 'ilm' arg_5 = states . ImageState ( Image ( arg_1 . get ( ) . copy ( ) ) , [ arg_0 ] , pad = 0 , mdl = mdl . SmoothFieldModel ( ) ) do_levmarq ( arg_5 , arg_0 . params , ** arg_2 ) arg_0 . category = arg_3","id_":253130,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/opt\/optimize.py#L2730-L2761","negative":"Check if templates directories are setup and issue a warning and help.\n\n Set the environment variable :envvar:`GROMACSWRAPPER_SUPPRESS_SETUP_CHECK`\n skip the check and make it always return ``True``\n\n :return ``True`` if directories were found and ``False`` otherwise\n\n .. versionchanged:: 0.3.1\n Uses :envvar:`GROMACSWRAPPER_SUPPRESS_SETUP_CHECK` to suppress check\n (useful for scripts run on a server)"} {"query":"Return single program by name or None if not found .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _load_data ( arg_0 . PROGRAMS_ENDPOINT , default = [ ] ) arg_3 = [ program for program in arg_2 if program . get ( 'title' ) == arg_1 ] if len ( arg_3 ) > 1 : raise MultipleProgramMatchError ( len ( arg_3 ) ) elif len ( arg_3 ) == 1 : return arg_3 [ 0 ] else : return None","id_":253131,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/api_client\/discovery.py#L294-L312","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Set a url parameter .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 is None or isinstance ( arg_2 , ( int , float , bool ) ) : arg_2 = str ( arg_2 ) if arg_1 . endswith ( '64' ) : arg_2 = urlsafe_b64encode ( arg_2 . encode ( 'utf-8' ) ) arg_2 = arg_2 . replace ( b ( '=' ) , b ( '' ) ) arg_0 . _parameters [ arg_1 ] = arg_2","id_":253132,"task_name":"https:\/\/github.com\/imgix\/imgix-python\/blob\/117e0b169552695232689dd0443be7810263e5c5\/imgix\/urlhelper.py#L75-L92","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. 
All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Add v to the hash recursively if needed .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . md5 . Func ( to_bytes ( str ( type ( arg_1 ) ) ) ) if isinstance ( arg_1 , string_class ) : arg_0 . md5 . Func ( to_bytes ( arg_1 ) ) elif arg_1 is None : pass elif isinstance ( arg_1 , ( int , float ) ) : arg_0 . md5 . Func ( to_bytes ( str ( arg_1 ) ) ) elif isinstance ( arg_1 , ( tuple , list ) ) : for arg_2 in arg_1 : arg_0 . Func ( arg_2 ) elif isinstance ( arg_1 , dict ) : arg_3 = arg_1 . keys ( ) for arg_4 in sorted ( arg_3 ) : arg_0 . Func ( arg_4 ) arg_0 . Func ( arg_1 [ arg_4 ] ) else : for arg_4 in dir ( arg_1 ) : if arg_4 . startswith ( '__' ) : continue arg_5 = getattr ( arg_1 , arg_4 ) if inspect . isroutine ( arg_5 ) : continue arg_0 . Func ( arg_4 ) arg_0 . Func ( arg_5 )","id_":253133,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/misc.py#L113-L138","negative":"Sets the player's paused state."} {"query":"Enable event loop integration with pyglet .","positive":"def Func ( arg_0 , arg_1 = None ) : import pyglet from IPython . lib . inputhookpyglet import inputhook_pyglet arg_0 . set_inputhook ( inputhook_pyglet ) arg_0 . _current_gui = GUI_PYGLET return arg_1","id_":253134,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/inputhook.py#L403-L424","negative":"Dump a certificate revocation list to a buffer.\n\n :param type: The file type (one of ``FILETYPE_PEM``, ``FILETYPE_ASN1``, or\n ``FILETYPE_TEXT``).\n :param CRL crl: The CRL to dump.\n\n :return: The buffer with the CRL.\n :rtype: bytes"} {"query":"Retrieves a list of all NICs bound to the specified server .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 1 ) : arg_4 = arg_0 . _perform_request ( '\/datacenters\/%s\/servers\/%s\/nics?depth=%s' % ( arg_1 , arg_2 , str ( arg_3 ) ) ) return arg_4","id_":253135,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1110-L1130","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Parse a StackExchange API raw response .","positive":"def Func ( arg_0 ) : arg_1 = json . 
loads ( arg_0 ) arg_2 = arg_1 [ 'items' ] for arg_3 in arg_2 : yield arg_3","id_":253136,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/stackexchange.py#L160-L173","negative":"Given the request and response headers, return `True` if an HTTP\n \"Not Modified\" response could be returned instead."} {"query":"Change the number of servers in the queue to n .","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , numbers . Integral ) and arg_1 is not infty : arg_2 = \"n must be an integer or infinity.\\n{0}\" raise TypeError ( arg_2 . format ( str ( arg_0 ) ) ) elif arg_1 <= 0 : arg_2 = \"n must be a positive integer or infinity.\\n{0}\" raise ValueError ( arg_2 . format ( str ( arg_0 ) ) ) else : arg_0 . num_servers = arg_1","id_":253137,"task_name":"https:\/\/github.com\/djordon\/queueing-tool\/blob\/ccd418cf647ac03a54f78ba5e3725903f541b808\/queueing_tool\/queues\/queue_servers.py#L657-L681","negative":"Returns the current ZipkinAttrs and generates new ones if needed.\n\n :returns: (report_root_timestamp, zipkin_attrs)\n :rtype: (bool, ZipkinAttrs)"} {"query":"Get the command to start the checkpoint manager process","positive":"def Func ( arg_0 ) : arg_1 = 'org.apache.heron.ckptmgr.CheckpointManager' arg_2 = arg_0 . checkpoint_manager_ram \/ ( 1024 * 1024 ) arg_3 = [ os . path . join ( arg_0 . heron_java_home , \"bin\/java\" ) , '-Xms%dM' % arg_2 , '-Xmx%dM' % arg_2 , '-XX:+PrintCommandLineFlags' , '-verbosegc' , '-XX:+PrintGCDetails' , '-XX:+PrintGCTimeStamps' , '-XX:+PrintGCDateStamps' , '-XX:+PrintGCCause' , '-XX:+UseGCLogFileRotation' , '-XX:NumberOfGCLogFiles=5' , '-XX:GCLogFileSize=100M' , '-XX:+PrintPromotionFailure' , '-XX:+PrintTenuringDistribution' , '-XX:+PrintHeapAtGC' , '-XX:+HeapDumpOnOutOfMemoryError' , '-XX:+UseConcMarkSweepGC' , '-XX:+UseConcMarkSweepGC' , '-Xloggc:log-files\/gc.ckptmgr.log' , '-Djava.net.preferIPv4Stack=true' , '-cp' , arg_0 . checkpoint_manager_classpath , arg_1 , '-t' + arg_0 . topology_name , '-i' + arg_0 . topology_id , '-c' + arg_0 . ckptmgr_ids [ arg_0 . shard ] , '-p' + arg_0 . checkpoint_manager_port , '-f' + arg_0 . stateful_config_file , '-o' + arg_0 . override_config_file , '-g' + arg_0 . heron_internals_config_file ] arg_4 = { } arg_4 [ arg_0 . ckptmgr_ids [ arg_0 . shard ] ] = Command ( arg_3 , arg_0 . shell_env ) return arg_4","id_":253138,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/executor\/src\/python\/heron_executor.py#L843-L882","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Retrieve Zotero items via the API Combine endpoint and request to access the specific resource Returns a JSON document","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = \"%s%s\" % ( arg_0 . endpoint , arg_1 ) arg_0 . self_link = arg_1 arg_0 . request = requests . get ( url = arg_2 , headers = arg_0 . default_headers ( ) ) arg_0 . request . encoding = \"utf-8\" try : arg_0 . request . raise_for_status ( ) except requests . exceptions . HTTPError : error_handler ( arg_0 . request ) return arg_0 . 
request","id_":253139,"task_name":"https:\/\/github.com\/urschrei\/pyzotero\/blob\/b378966b30146a952f7953c23202fb5a1ddf81d9\/pyzotero\/zotero.py#L320-L335","negative":"hyperpolarization step. Use to calculate tau and stuff."} {"query":"Return a valid docker_path for a GCS bucket .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = directory_fmt ( arg_1 ) arg_2 , arg_3 = _gcs_uri_rewriter ( arg_1 ) arg_4 = os . path . join ( arg_0 . _relative_path , arg_3 ) return arg_4","id_":253140,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/lib\/param_util.py#L260-L266","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"given a path or list of files return ABF IDs .","positive":"def Func ( arg_0 ) : if type ( arg_0 ) is str : arg_0 = glob . glob ( arg_0 + \"\/*.*\" ) arg_1 = [ ] for arg_2 in arg_0 : if arg_2 [ - 4 : ] . lower ( ) == '.abf' : arg_3 = arg_2 . split ( '.' ) [ - 1 ] arg_1 . append ( os . path . basename ( arg_2 ) . replace ( '.' + arg_3 , '' ) ) return sorted ( arg_1 )","id_":253141,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/doc\/oldcode\/swhlab\/core\/common.py#L730-L739","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Add chain to current shelve file","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 not in arg_0 . chains : setattr ( arg_0 . chains , arg_1 , MarkovChain ( arg_2 = arg_2 ) ) else : raise ValueError ( \"Chain with this name already exists\" )","id_":253142,"task_name":"https:\/\/github.com\/fm4d\/PyMarkovTextGenerator\/blob\/4a7e8e2cfe14c9745aba6b9df7d7b402a9029a37\/markov.py#L143-L155","negative":"creates a new, empty table in the dataset;\n If the table already exists, update the existing table.\n Since BigQuery does not natively allow table upserts, this is not an\n atomic operation.\n\n :param dataset_id: the dataset to upsert the table into.\n :type dataset_id: str\n :param table_resource: a table resource. see\n https:\/\/cloud.google.com\/bigquery\/docs\/reference\/v2\/tables#resource\n :type table_resource: dict\n :param project_id: the project to upsert the table into. 
If None,\n project will be self.project_id.\n :return:"} {"query":"See if span tag has underline style and wrap with u tag .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . get ( 'style' ) if arg_2 and 'text-decoration:underline' in arg_2 : arg_1 . wrap ( arg_0 . soup . new_tag ( 'u' ) )","id_":253143,"task_name":"https:\/\/github.com\/nprapps\/copydoc\/blob\/e1ab09b287beb0439748c319cf165cbc06c66624\/copydoc.py#L127-L133","negative":"Sets the main input channels of the pipeline and their forks.\n\n The ``raw_input`` dictionary input should contain one entry for each\n input type (fastq, fasta, etc). The corresponding value should be a\n dictionary\/json with the following key:values:\n\n - ``channel``: Name of the raw input channel (e.g.: channel1)\n - ``channel_str``: The nextflow definition of the channel and\n eventual checks (e.g.: channel1 = Channel.fromPath(param))\n - ``raw_forks``: A list of channels to which the channel name will\n for to.\n\n Each new type of input parameter is automatically added to the\n :attr:`params` attribute, so that they are automatically collected\n for the pipeline description and help.\n\n Parameters\n ----------\n raw_input : dict\n Contains an entry for each input type with the channel name,\n channel string and forks."} {"query":"Update a firewall rule for an Azure SQL Database server .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : _validate_not_none ( 'server_name' , arg_1 ) _validate_not_none ( 'name' , arg_2 ) _validate_not_none ( 'start_ip_address' , arg_3 ) _validate_not_none ( 'end_ip_address' , arg_4 ) return arg_0 . _perform_put ( arg_0 . _get_firewall_rules_path ( arg_1 , arg_2 ) , _SqlManagementXmlSerializer . Func_to_xml ( arg_2 , arg_3 , arg_4 ) )","id_":253144,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/sqldatabasemanagementservice.py#L218-L246","negative":"Sort list of string with number in human order.\n\n Examples\n ----------\n >>> l = ['im1.jpg', 'im31.jpg', 'im11.jpg', 'im21.jpg', 'im03.jpg', 'im05.jpg']\n >>> l.sort(key=tl.files.natural_keys)\n ['im1.jpg', 'im03.jpg', 'im05', 'im11.jpg', 'im21.jpg', 'im31.jpg']\n >>> l.sort() # that is what we dont want\n ['im03.jpg', 'im05', 'im1.jpg', 'im11.jpg', 'im21.jpg', 'im31.jpg']\n\n References\n ----------\n - `link `__"} {"query":"Get one of the backing abdress books by its name","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . _abooks : if arg_2 . name == arg_1 : return arg_2","id_":253145,"task_name":"https:\/\/github.com\/scheibler\/khard\/blob\/0f69430c2680f1ff5f073a977a3c5b753b96cc17\/khard\/address_book.py#L378-L389","negative":"Whether a connection can be established between those two meshes."} {"query":"Generates a URL to the given endpoint with the method provided .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = _app_ctx_stack . top arg_3 = _request_ctx_stack . top if arg_2 is None : raise RuntimeError ( 'Attempted to generate a URL without the ' 'application context being pushed. This has to be ' 'executed when application context is available.' ) if arg_3 is not None : arg_4 = arg_3 . url_adapter arg_5 = request . blueprint if not arg_3 . request . _is_old_module : if arg_0 [ : 1 ] == '.' : if arg_5 is not None : arg_0 = arg_5 + arg_0 else : arg_0 = arg_0 [ 1 : ] else : if '.' not in arg_0 : if arg_5 is not None : arg_0 = arg_5 + '.' + arg_0 elif arg_0 . startswith ( '.' 
) : arg_0 = arg_0 [ 1 : ] arg_6 = arg_1 . pop ( '_external' , False ) else : arg_4 = arg_2 . url_adapter if arg_4 is None : raise RuntimeError ( 'Application was not able to create a URL ' 'adapter for request independent URL generation. ' 'You might be able to fix this by setting ' 'the SERVER_NAME config variable.' ) arg_6 = arg_1 . pop ( '_external' , True ) arg_7 = arg_1 . pop ( '_anchor' , None ) arg_8 = arg_1 . pop ( '_method' , None ) arg_9 = arg_1 . pop ( '_scheme' , None ) arg_2 . app . inject_url_defaults ( arg_0 , arg_1 ) if arg_9 is not None : if not arg_6 : raise ValueError ( 'When specifying _scheme, _external must be True' ) arg_4 . url_scheme = arg_9 try : arg_11 = arg_4 . build ( arg_0 , arg_1 , arg_8 = arg_8 , force_external = arg_6 ) except BuildError as error : arg_1 [ '_external' ] = arg_6 arg_1 [ '_anchor' ] = arg_7 arg_1 [ '_method' ] = arg_8 return arg_2 . app . handle_url_build_error ( error , arg_0 , arg_1 ) if arg_7 is not None : arg_11 += '#' + url_quote ( arg_7 ) return arg_11","id_":253146,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/flask\/helpers.py#L186-L316","negative":"Reads the head of the las file and returns it"} {"query":"Writing the configure file with the attributes in args","positive":"def Func ( arg_0 ) : logging . info ( \"Writing configure file: %s\" % arg_0 . config_file ) if arg_0 . config_file is None : return arg_1 = cparser . ConfigParser ( ) arg_1 . add_section ( \"lrcloud\" ) for arg_2 in [ x for x in dir ( arg_0 ) if not x . startswith ( \"_\" ) ] : if arg_2 in IGNORE_ARGS : continue arg_3 = getattr ( arg_0 , arg_2 ) if arg_3 is not None : arg_1 . set ( 'lrcloud' , arg_2 , str ( arg_3 ) ) with open ( arg_0 . config_file , 'w' ) as f : arg_1 . Func ( f )","id_":253147,"task_name":"https:\/\/github.com\/madsbk\/lrcloud\/blob\/8d99be3e1abdf941642e9a1c86b7d775dc373c0b\/lrcloud\/config_parser.py#L44-L62","negative":"Returns how the result count compares to the query options.\n\n The return value is negative if too few results were found, zero if enough were found, and\n positive if too many were found.\n\n Returns:\n int: -1, 0, or 1."} {"query":"Checks element definitions .","positive":"def Func ( arg_0 ) : arg_1 = set ( arg_0 . elements . type . argiope . values . flatten ( ) ) arg_2 = set ( ELEMENTS . keys ( ) ) if ( arg_1 <= arg_2 ) == False : raise ValueError ( \"Element types {0} not in know elements {1}\" . format ( arg_1 - arg_2 , arg_2 ) ) print ( \"\" )","id_":253148,"task_name":"https:\/\/github.com\/lcharleux\/argiope\/blob\/8170e431362dc760589f7d141090fd133dece259\/argiope\/mesh.py#L307-L317","negative":"Clip input array with a vector list.\n\n Parameters\n ----------\n array : array\n input raster data\n array_affine : Affine\n Affine object describing the raster's geolocation\n geometries : iterable\n iterable of dictionaries, where every entry has a 'geometry' and\n 'properties' key.\n inverted : bool\n invert clip (default: False)\n clip_buffer : integer\n buffer (in pixels) geometries before clipping\n\n Returns\n -------\n clipped array : array"} {"query":"Stackted recurrent neural networks GRU or LSTM","positive":"def Func ( arg_0 : arg_1 . Tensor , arg_3 : arg_4 , arg_5 = 'gru' , arg_6 = None , arg_7 = False , arg_8 = 'RNN_layer' ) : for arg_9 , arg_10 in enumerate ( arg_3 ) : with arg_1 . variable_scope ( arg_8 + '_' + str ( arg_9 ) ) : if arg_5 == 'gru' : arg_11 = arg_1 . nn . rnn_cell . 
GRUCell ( arg_10 ) arg_12 = arg_1 . nn . rnn_cell . GRUCell ( arg_10 ) elif arg_5 == 'lstm' : arg_11 = arg_1 . nn . rnn_cell . LSTMCell ( arg_10 , arg_7 = arg_7 ) arg_12 = arg_1 . nn . rnn_cell . LSTMCell ( arg_10 , arg_7 = arg_7 ) else : raise RuntimeError ( 'cell_type must be either gru or lstm' ) ( arg_13 , arg_14 ) , ( arg_15 , arg_16 ) = arg_1 . nn . bidirectional_dynamic_rnn ( arg_11 , arg_12 , arg_0 , dtype = arg_1 . float32 , sequence_length = arg_6 ) arg_0 = arg_1 . concat ( [ arg_13 , arg_14 ] , axis = 2 ) if arg_5 == 'gru' : arg_17 = arg_1 . concat ( [ arg_15 , arg_16 ] , axis = 1 ) else : ( arg_18 , arg_19 ) , ( arg_20 , arg_21 ) = arg_15 , arg_16 arg_22 = arg_1 . concat ( [ arg_18 , arg_20 ] , axis = 1 ) arg_23 = arg_1 . concat ( [ arg_19 , arg_21 ] , axis = 1 ) arg_17 = ( arg_23 , arg_22 ) return arg_0 , arg_17","id_":253149,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/layers\/tf_layers.py#L184-L234","negative":"filter for indels"} {"query":"Starts command in a subprocess . Prints every line the command prints prefaced with description .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = False ) : arg_4 = shlex . split ( arg_0 ) try : arg_5 = subprocess . Popen ( arg_4 , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , stdin = subprocess . PIPE ) except Exception as e : raise IOError ( 'Encountered error: {0} when running command {1}' . format ( e . message , ' ' . join ( arg_4 ) ) ) if arg_2 is not None : arg_5 . stdin . write ( arg_2 ) arg_5 . stdin . flush ( ) while arg_5 . poll ( ) is None : try : arg_6 = arg_5 . stdout . readline ( ) except KeyboardInterrupt : sys . exit ( 'Keyboard interrupt while running {}' . format ( arg_0 ) ) if len ( arg_6 . strip ( ) ) == 0 and arg_3 is True : continue elif 'killed by signal 1' in decode ( arg_6 ) . lower ( ) : continue elif 'to the list of known hosts' in decode ( arg_6 ) . lower ( ) : continue if arg_1 is not None : arg_6 = arg_1 ( arg_6 ) sys . stdout . write ( arg_6 ) arg_7 = arg_5 . poll ( ) return arg_7","id_":253150,"task_name":"https:\/\/github.com\/NarrativeScience\/lsi\/blob\/7d901b03fdb1a34ef795e5412bfe9685d948e32d\/src\/lsi\/utils\/stream.py#L43-L83","negative":"Removes a NIC from the load balancer.\n\n :param datacenter_id: The unique ID of the data center.\n :type datacenter_id: ``str``\n\n :param loadbalancer_id: The unique ID of the load balancer.\n :type loadbalancer_id: ``str``\n\n :param nic_id: The unique ID of the NIC.\n :type nic_id: ``str``"} {"query":"Write the data encoding the MACSignatureKeyInformation struct to a stream .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_1_0 ) : arg_6 = BytearrayStream ( ) if arg_0 . _unique_identifier : arg_0 . _unique_identifier . Func ( arg_6 , arg_2 = arg_2 ) else : raise ValueError ( \"Invalid struct missing the unique identifier attribute.\" ) if arg_0 . _cryptographic_parameters : arg_0 . _cryptographic_parameters . Func ( arg_6 , arg_2 = arg_2 ) arg_0 . length = arg_6 . length ( ) super ( MACSignatureKeyInformation , arg_0 ) . Func ( arg_1 , arg_2 = arg_2 ) arg_1 . Func ( arg_6 . 
buffer )","id_":253151,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/objects.py#L2313-L2349","negative":"Initialize the bucket map assuming the given number of maxBuckets."} {"query":"Register an extension code .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_2 = int ( arg_2 ) if not 1 <= arg_2 <= 0x7fffffff : raise ValueError , \"code out of range\" arg_3 = ( arg_0 , arg_1 ) if ( arg_4 . get ( arg_3 ) == arg_2 and arg_5 . get ( arg_2 ) == arg_3 ) : return if arg_3 in arg_4 : raise ValueError ( \"key %s is already registered with code %s\" % ( arg_3 , arg_4 [ arg_3 ] ) ) if arg_2 in arg_5 : raise ValueError ( \"code %s is already in use for key %s\" % ( arg_2 , arg_5 [ arg_2 ] ) ) arg_4 [ arg_3 ] = arg_2 arg_5 [ arg_2 ] = arg_3","id_":253152,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/copy_reg.py#L157-L173","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Returns a new AudioSegment whose data is the same as this one but which has been resampled to the specified characteristics . Any parameter left None will be unchanged .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = False ) : if arg_1 is None : arg_1 = arg_0 . frame_rate if arg_2 is None : arg_2 = arg_0 . sample_width if arg_3 is None : arg_3 = arg_0 . channels arg_5 = \"sox {inputfile} -b \" + str ( arg_2 * 8 ) + \" -r \" + str ( arg_1 ) + \" -t wav {outputfile} channels \" + str ( arg_3 ) return arg_0 . _execute_sox_cmd ( arg_5 , arg_4 = arg_4 )","id_":253153,"task_name":"https:\/\/github.com\/MaxStrange\/AudioSegment\/blob\/1daefb8de626ddff3ff7016697c3ad31d262ecd6\/audiosegment.py#L893-L920","negative":"Delete this droplet\n\n Parameters\n ----------\n wait: bool, default True\n Whether to block until the pending action is completed"} {"query":"Loads a class from a string naming the module and class name .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . split ( \".\" ) arg_2 = \".\" . join ( arg_1 [ : - 1 ] ) arg_3 = arg_1 [ - 1 ] arg_4 = importlib . 
import_module ( arg_2 ) return getattr ( arg_4 , arg_3 )","id_":253154,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/utils\/dynamicimports.py#L19-L34","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Shrinks the simplex around the best vertex .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = arg_1 [ arg_2 ] arg_6 = arg_5 + arg_3 * ( arg_1 - arg_5 ) arg_7 , arg_8 = _evaluate_objective_multiple ( arg_0 , arg_6 , arg_4 ) return ( False , arg_6 , arg_7 , arg_8 )","id_":253155,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/nelder_mead.py#L581-L599","negative":"Handle a request whose date doesn't match the signing key scope date.\n\n This AWS4Auth class implementation regenerates the signing key. See\n StrictAWS4Auth class if you would prefer an exception to be raised.\n\n req -- a requests prepared request object"} {"query":"Fully describes an ELB .","positive":"def Func ( arg_0 , arg_1 = arg_2 . ALL ^ arg_2 . POLICY_TYPES , ** arg_5 ) : try : arg_6 except NameError as _ : arg_6 = str if isinstance ( arg_0 , arg_6 ) : arg_0 = dict ( LoadBalancerName = arg_0 ) return registry . build_out ( arg_1 , start_with = arg_0 , pass_datastructure = True , ** arg_5 )","id_":253156,"task_name":"https:\/\/github.com\/Netflix-Skunkworks\/cloudaux\/blob\/c4b0870c3ac68b1c69e71d33cf78b6a8bdf437ea\/cloudaux\/orchestration\/aws\/elb.py#L167-L184","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Retrieves all virtual machines instances in the current environment .","positive":"def Func ( arg_0 = 1 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None ) : from burlap . common import shelf , OrderedDict , get_verbose arg_5 = get_verbose ( ) require ( 'vm_type' , 'vm_group' ) assert arg_6 . vm_type , 'No VM type specified.' arg_6 . vm_type = ( arg_6 . vm_type or '' ) . lower ( ) arg_8 = arg_1 arg_9 = arg_2 arg_10 = arg_3 if arg_5 : print ( 'name=%s, group=%s, release=%s' % ( arg_8 , arg_9 , arg_10 ) ) arg_6 . vm_elastic_ip_mappings = shelf . get ( 'vm_elastic_ip_mappings' ) arg_12 = type ( arg_6 ) ( ) if arg_6 . vm_type == EC2 : if arg_5 : print ( 'Checking EC2...' ) for arg_13 in get_all_running_ec2_instances ( ) : arg_1 = arg_13 . tags . get ( arg_6 . vm_name_tag ) arg_2 = arg_13 . tags . get ( arg_6 . vm_group_tag ) arg_3 = arg_13 . tags . get ( arg_6 . vm_release_tag ) if arg_6 . vm_group and arg_6 . vm_group != arg_2 : if arg_5 : print ( ( 'Skipping instance %s because its group \"%s\" ' 'does not match env.vm_group \"%s\".' ) % ( arg_13 . 
public_dns_name , arg_2 , arg_6 . vm_group ) ) continue if arg_9 and arg_2 != arg_9 : if arg_5 : print ( ( 'Skipping instance %s because its group \"%s\" ' 'does not match local group \"%s\".' ) % ( arg_13 . public_dns_name , arg_2 , arg_9 ) ) continue if arg_8 and arg_1 != arg_8 : if arg_5 : print ( ( 'Skipping instance %s because its name \"%s\" ' 'does not match name \"%s\".' ) % ( arg_13 . public_dns_name , arg_1 , arg_8 ) ) continue if arg_10 and arg_3 != arg_10 : if arg_5 : print ( ( 'Skipping instance %s because its release \"%s\" ' 'does not match release \"%s\".' ) % ( arg_13 . public_dns_name , arg_3 , arg_10 ) ) continue if arg_4 and arg_3 == arg_4 : continue if arg_5 : print ( 'Adding instance %s (%s).' % ( arg_1 , arg_13 . public_dns_name ) ) arg_12 . setdefault ( arg_1 , type ( arg_6 ) ( ) ) arg_12 [ arg_1 ] [ 'id' ] = arg_13 . id arg_12 [ arg_1 ] [ 'public_dns_name' ] = arg_13 . public_dns_name if arg_5 : print ( 'Public DNS: %s' % arg_13 . public_dns_name ) if arg_6 . vm_elastic_ip_mappings and arg_1 in arg_6 . vm_elastic_ip_mappings : arg_12 [ arg_1 ] [ 'ip' ] = arg_6 . vm_elastic_ip_mappings [ arg_1 ] else : arg_12 [ arg_1 ] [ 'ip' ] = socket . gethostbyname ( arg_13 . public_dns_name ) if int ( arg_0 ) : pprint ( arg_12 , indent = 4 ) return arg_12 elif arg_6 . vm_type == KVM : pass else : raise NotImplementedError","id_":253157,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/vm.py#L139-L212","negative":"Checks if a Pong message was received.\n\n :return:"} {"query":"Return a generator that GETs and yields individual JSON items .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : arg_4 = arg_0 . get_pages ( arg_1 , arg_2 = arg_2 , ** arg_3 ) for arg_5 in arg_4 : assert isinstance ( arg_5 , dict ) arg_6 = arg_5 . get ( 'items' ) if arg_6 is None : arg_7 = \"'items' key not found in JSON data: \" \"{!r}\" . format ( arg_5 ) raise MalformedResponse ( arg_7 ) else : for arg_8 in arg_6 : yield arg_8","id_":253158,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/webexteamssdk\/restsession.py#L332-L369","negative":"Unregister an extension code. For testing only."} {"query":"Return if self is mergeable with timeslots .","positive":"def Func ( arg_0 , arg_1 : 'TimeslotCollection' ) -> bool : for arg_2 in arg_1 . timeslots : for arg_3 in arg_0 . _table [ arg_2 . channel ] : if arg_2 . interval . has_overlap ( arg_3 ) : return False return True","id_":253159,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/timeslots.py#L211-L221","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Perform a rachted step calculating a new shared secret from the public key and deriving new chain keys from this secret .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . triggersStep ( arg_1 ) : arg_0 . __wrapOtherPub ( arg_1 ) arg_0 . __newRootKey ( \"receiving\" ) arg_0 . __newRatchetKey ( ) arg_0 . 
__newRootKey ( \"sending\" )","id_":253160,"task_name":"https:\/\/github.com\/Syndace\/python-doubleratchet\/blob\/d4497af73044e0084efa3e447276ee9d6a6eb66a\/doubleratchet\/ratchets\/dhratchet.py#L76-L94","negative":"r''' Return a datetime.tzinfo implementation for the given timezone \n\n >>> from datetime import datetime, timedelta\n >>> utc = timezone('UTC')\n >>> eastern = timezone('US\/Eastern')\n >>> eastern.zone\n 'US\/Eastern'\n >>> timezone(unicode('US\/Eastern')) is eastern\n True\n >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)\n >>> loc_dt = utc_dt.astimezone(eastern)\n >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'\n >>> loc_dt.strftime(fmt)\n '2002-10-27 01:00:00 EST (-0500)'\n >>> (loc_dt - timedelta(minutes=10)).strftime(fmt)\n '2002-10-27 00:50:00 EST (-0500)'\n >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt)\n '2002-10-27 01:50:00 EDT (-0400)'\n >>> (loc_dt + timedelta(minutes=10)).strftime(fmt)\n '2002-10-27 01:10:00 EST (-0500)'\n\n Raises UnknownTimeZoneError if passed an unknown zone.\n\n >>> try:\n ... timezone('Asia\/Shangri-La')\n ... except UnknownTimeZoneError:\n ... print('Unknown')\n Unknown\n\n >>> try:\n ... timezone(unicode('\\N{TRADE MARK SIGN}'))\n ... except UnknownTimeZoneError:\n ... print('Unknown')\n Unknown"} {"query":"Return the number of combinations for n choose k .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 == 0 : return 0 return reduce ( lambda x , y : x * y [ 0 ] \/ y [ 1 ] , zip ( range ( arg_0 - arg_1 + 1 , arg_0 + 1 ) , range ( 1 , arg_1 + 1 ) ) , 1 )","id_":253161,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/visualization\/interactive\/iplot_qsphere.py#L158-L172","negative":"Returns uptime in seconds or None, on MINIX."} {"query":"Returns the configuration for KEY","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_1 in arg_0 . config : return arg_0 . config . get ( arg_1 ) else : return arg_2","id_":253162,"task_name":"https:\/\/github.com\/yaz\/yaz\/blob\/48c842fe053bf9cd6446c4b33fb081c65339aa48\/yaz\/task.py#L130-L135","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Useful utility ; prints the string in hexadecimal","positive":"def Func ( arg_0 ) : for arg_1 in range ( len ( arg_0 ) ) : sys . stdout . write ( \"%2x \" % ( ord ( arg_0 [ arg_1 ] ) ) ) if ( arg_1 + 1 ) % 8 == 0 : print repr ( arg_0 [ arg_1 - 7 : arg_1 + 1 ] ) if ( len ( arg_0 ) % 8 != 0 ) : print string . rjust ( \"\" , 11 ) , repr ( arg_0 [ arg_1 - len ( arg_0 ) % 8 : arg_1 + 1 ] )","id_":253163,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/tuio\/OSC.py#L38-L46","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. 
This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Verify BOARD variables and construct exported variables","positive":"def Func ( ) : if cij . ssh . Func ( ) : cij . err ( \"board.Func: invalid SSH Funcironment\" ) return 1 arg_0 = cij . Func_to_dict ( PREFIX , REQUIRED ) if arg_0 is None : cij . err ( \"board.Func: invalid BOARD Funcironment\" ) return 1 arg_0 [ \"CLASS\" ] = \"_\" . join ( [ arg_0 [ r ] for r in REQUIRED [ : - 1 ] ] ) arg_0 [ \"IDENT\" ] = \"-\" . join ( [ arg_0 [ \"CLASS\" ] , arg_0 [ \"ALIAS\" ] ] ) cij . Func_export ( PREFIX , EXPORTED , arg_0 ) return 0","id_":253164,"task_name":"https:\/\/github.com\/refenv\/cijoe\/blob\/21d7b2ed4ff68e0a1457e7df2db27f6334f1a379\/modules\/cij\/board.py#L12-L29","negative":"This turns the pickled lightcurve file back into an `lcdict`.\n\n Parameters\n ----------\n\n picklefile : str\n The path to a previously written Kepler LC picklefile generated by\n `kepler_lcdict_to_pkl` above.\n\n Returns\n -------\n\n lcdict\n Returns an `lcdict` (this is useable by most astrobase functions for LC\n processing)."} {"query":"Return a temporary file with the given suffix within dirpath . If dirpath is None will look for a temporary folder in your system .","positive":"def Func ( arg_0 = '.txt' , arg_1 = None ) : if arg_1 is None : arg_1 = get_temp_dir ( ) return tempfile . NamedTemporaryFile ( arg_0 = arg_0 , dir = arg_1 )","id_":253165,"task_name":"https:\/\/github.com\/PythonSanSebastian\/docstamp\/blob\/b43808f2e15351b0b2f0b7eade9c7ef319c9e646\/docstamp\/file_utils.py#L95-L115","negative":"Whether a connection can be established between those two meshes."} {"query":"Return True if QuantumChannel is a unitary channel .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : try : arg_3 = arg_0 . to_operator ( ) return arg_3 . Func ( arg_1 = arg_1 , arg_2 = arg_2 ) except QiskitError : return False","id_":253166,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/channel\/quantum_channel.py#L44-L50","negative":"Copy a path from inside a Dusty container to a path on the\n local filesystem. The path on the local filesystem must be\n wrist-accessible by the user specified in mac_username."} {"query":"Handles static files requests .","positive":"def Func ( arg_0 ) : arg_1 = os . path . join ( os . path . dirname ( __file__ ) , _STATIC_DIR , arg_0 . path [ 1 : ] ) with io . open ( arg_1 , 'rb' ) as res_file : arg_2 = res_file . read ( ) arg_3 , arg_4 = os . path . splitext ( arg_0 . path ) return arg_2 , 'text\/%s' % arg_4 [ 1 : ]","id_":253167,"task_name":"https:\/\/github.com\/nvdv\/vprof\/blob\/4c3ff78f8920ab10cb9c00b14143452aa09ff6bb\/vprof\/stats_server.py#L48-L55","negative":"Returns a new random state.\n\n Parameters\n ----------\n seed : None or int, optional\n Optional seed value to use.\n The same datatypes are allowed as for ``numpy.random.RandomState(seed)``.\n\n fully_random : bool, optional\n Whether to use numpy's random initialization for the\n RandomState (used if set to True). 
If False, a seed is sampled from\n the global random state, which is a bit faster and hence the default.\n\n Returns\n -------\n numpy.random.RandomState\n The new random state."} {"query":"Gets username course_id and enterprise_customer_uuid which are the relevant query parameters for this API endpoint .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = get_request_value ( arg_1 , arg_0 . REQUIRED_PARAM_USERNAME , '' ) arg_3 = get_request_value ( arg_1 , arg_0 . REQUIRED_PARAM_COURSE_ID , '' ) arg_4 = get_request_value ( arg_1 , arg_0 . REQUIRED_PARAM_PROGRAM_UUID , '' ) arg_5 = get_request_value ( arg_1 , arg_0 . REQUIRED_PARAM_ENTERPRISE_CUSTOMER ) if not ( arg_2 and ( arg_3 or arg_4 ) and arg_5 ) : raise ConsentAPIRequestError ( arg_0 . get_missing_params_message ( [ ( \"'username'\" , bool ( arg_2 ) ) , ( \"'enterprise_customer_uuid'\" , bool ( arg_5 ) ) , ( \"one of 'course_id' or 'program_uuid'\" , bool ( arg_3 or arg_4 ) ) , ] ) ) return arg_2 , arg_3 , arg_4 , arg_5","id_":253168,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/consent\/api\/v1\/views.py#L95-L115","negative":"Returns a weak reference to the given method or function.\n If the callback argument is not None, it is called as soon\n as the referenced function is garbage deleted.\n\n :type function: callable\n :param function: The function to reference.\n :type callback: callable\n :param callback: Called when the function dies."} {"query":"Create the task on the server","positive":"def Func ( arg_0 , arg_1 ) : if len ( arg_0 . geometries ) == 0 : raise Exception ( 'no geometries' ) return arg_1 . post ( 'task_admin' , arg_0 . as_payload ( ) , replacements = { 'slug' : arg_0 . __challenge__ . slug , 'identifier' : arg_0 . identifier } )","id_":253169,"task_name":"https:\/\/github.com\/mvexel\/maproulette-api-wrapper\/blob\/835278111afefed2beecf9716a033529304c548f\/maproulette\/task.py#L45-L54","negative":"Inform the widget about the encoding of the underlying character stream."} {"query":"Return a client with same settings of the batch client","positive":"def Func ( arg_0 ) : arg_1 = Client ( arg_0 . host , arg_0 . port , arg_0 . prefix ) arg_0 . _configure_client ( arg_1 ) return arg_1","id_":253170,"task_name":"https:\/\/github.com\/farzadghanei\/statsd-metrics\/blob\/153ff37b79777f208e49bb9d3fb737ba52b99f98\/statsdmetrics\/client\/__init__.py#L352-L358","negative":"Helper method to clean up DAG file processors to avoid leaving orphan processes."} {"query":"prop is a sugar for property .","positive":"def Func ( arg_0 = None , * , arg_1 = arg_2 , arg_3 : arg_4 = True , arg_5 : arg_4 = True , arg_6 : arg_4 = False , arg_7 = arg_2 , arg_8 : arg_9 = arg_2 ) : def wrap ( arg_0 ) : if not callable ( arg_0 ) : raise TypeError arg_10 = arg_0 . __name__ arg_11 = arg_1 if arg_11 is arg_2 : arg_11 = '_' + arg_10 arg_12 , arg_13 , arg_14 = None , None , None if arg_3 : def arg_12 ( arg_15 ) : try : return arg_15 . __dict__ [ arg_11 ] except KeyError : if arg_7 is not arg_2 : return arg_7 raise AttributeError ( f\"'{type(self).__name__}' object has no attribute '{key}'\" ) if arg_5 : def arg_13 ( arg_15 , arg_16 ) : if arg_8 is not arg_2 and not isinstance ( arg_16 , arg_8 ) : if isinstance ( arg_8 , arg_9 ) : arg_17 = arg_9 ( x . __name__ for x in arg_8 ) else : arg_17 = arg_8 . __name__ raise TypeError ( f'type of {type(self).__name__}.{Func_name} must be {types_name}; ' f'got {type(val).__name__} instead' ) arg_15 . 
__dict__ [ arg_11 ] = arg_16 if arg_6 : def arg_14 ( arg_15 ) : del arg_15 . __dict__ [ arg_11 ] return Funcerty ( arg_12 , arg_13 , arg_14 , arg_0 . __doc__ ) return wrap ( arg_0 ) if arg_0 else wrap","id_":253171,"task_name":"https:\/\/github.com\/Jasily\/jasily-python\/blob\/1c821a120ebbbbc3c5761f5f1e8a73588059242a\/jasily\/lang\/props.py#L11-L73","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Set the time zone data of this object from a _tzfile object","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in _tzfile . attrs : setattr ( arg_0 , '_' + arg_2 , getattr ( arg_1 , arg_2 ) )","id_":253172,"task_name":"https:\/\/github.com\/MacHu-GWU\/superjson-project\/blob\/782ca4b2edbd4b4018b8cedee42eeae7c921b917\/superjson\/pkg\/dateutil\/tz\/tz.py#L383-L387","negative":"Delete group."} {"query":"Returns an etree HTML node with a document describing the process . This is only supported if the editor provided an SVG representation .","positive":"def Func ( arg_0 ) : arg_1 = ET . Element ( 'html' ) arg_2 = ET . SubElement ( arg_1 , 'head' ) arg_3 = ET . SubElement ( arg_2 , 'title' ) arg_3 . text = arg_0 . description arg_5 = ET . SubElement ( arg_1 , 'body' ) arg_6 = ET . SubElement ( arg_5 , 'h1' ) arg_6 . text = arg_0 . description arg_7 = ET . SubElement ( arg_5 , 'span' ) arg_7 . text = '___CONTENT___' arg_8 = ET . tostring ( arg_1 ) arg_9 = '' arg_10 = set ( ) for arg_11 in arg_0 . get_specs_depth_first ( ) : if arg_11 . svg and arg_11 . svg not in arg_10 : arg_9 += '
<p>
' + arg_11 . svg + \"<\/p>\" arg_10 . add ( arg_11 . svg ) return arg_8 . replace ( '___CONTENT___' , arg_9 )","id_":253173,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/bpmn\/specs\/BpmnProcessSpec.py#L141-L164","negative":"Reset the parameters."} {"query":"receives a UUID via the request and returns either a fresh or an existing dropbox for it","positive":"def Func ( arg_0 ) : try : arg_1 = int ( arg_0 . registry . settings . get ( 'post_token_max_age_seconds' ) ) except Exception : arg_1 = 300 try : arg_2 = parse_post_token ( token = arg_0 . matchdict [ 'token' ] , secret = arg_0 . registry . settings [ 'post_secret' ] , arg_1 = arg_1 ) except SignatureExpired : raise HTTPGone ( 'dropbox expired' ) except Exception : raise HTTPNotFound ( 'no such dropbox' ) arg_3 = arg_0 . registry . settings [ 'dropbox_container' ] . get_dropbox ( arg_2 ) if arg_3 . status_int >= 20 : raise HTTPGone ( 'dropbox already in processing, no longer accepts data' ) return arg_3","id_":253174,"task_name":"https:\/\/github.com\/ZeitOnline\/briefkasten\/blob\/ce6b6eeb89196014fe21d68614c20059d02daa11\/application\/briefkasten\/__init__.py#L20-L40","negative":"Convert segmentation map to heatmaps object.\n\n Each segmentation map class will be represented as a single heatmap channel.\n\n Parameters\n ----------\n only_nonempty : bool, optional\n If True, then only heatmaps for classes that appear in the segmentation map will be\n generated. Additionally, a list of these class ids will be returned.\n\n not_none_if_no_nonempty : bool, optional\n If `only_nonempty` is True and for a segmentation map no channel was non-empty,\n this function usually returns None as the heatmaps object. If however this parameter\n is set to True, a heatmaps object with one channel (representing class 0)\n will be returned as a fallback in these cases.\n\n Returns\n -------\n imgaug.HeatmapsOnImage or None\n Segmentation map as a heatmaps object.\n If `only_nonempty` was set to True and no class appeared in the segmentation map,\n then this is None.\n\n class_indices : list of int\n Class ids (0 to C-1) of the classes that were actually added to the heatmaps.\n Only returned if `only_nonempty` was set to True."} {"query":"Parses option_settings as they are defined in the configuration file","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 , arg_3 in list ( arg_0 . items ( ) ) : for arg_4 , arg_5 in list ( arg_3 . items ( ) ) : arg_1 . append ( ( arg_2 , arg_4 , arg_5 ) ) return arg_1","id_":253175,"task_name":"https:\/\/github.com\/briandilley\/ebs-deploy\/blob\/4178c9c1282a9025fb987dab3470bea28c202e10\/ebs_deploy\/__init__.py#L58-L66","negative":"Finds embedded player url in HTTP response.\n\n :param response: Response object.\n :returns: Player url (str)."} {"query":"Construct a circle .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 50 , ** arg_4 ) : return arg_0 . regular_polygon ( arg_1 , arg_2 , arg_3 , ** arg_4 )","id_":253176,"task_name":"https:\/\/github.com\/hsharrison\/pyglet2d\/blob\/46f610b3c76221bff19e5c0cf3d35d7875ce37a0\/src\/pyglet2d.py#L108-L122","negative":"Wrap a reader function in a decorator to supply line and column\n information along with relevant forms."} {"query":"Looks for files in the extra locations as defined in MEDIA_FIXTURES_FILES_DIRS .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = [ ] for arg_4 , arg_5 in arg_0 . locations : if arg_5 not in searched_locations : searched_locations . 
append ( arg_5 ) arg_6 = arg_0 . Func_location ( arg_5 , arg_1 , arg_4 ) if arg_6 : if not arg_2 : return arg_6 arg_3 . append ( arg_6 ) return arg_3","id_":253177,"task_name":"https:\/\/github.com\/adrianoveiga\/django-media-fixtures\/blob\/a3f0d9ac84e73d491eeb0c881b23cc47ccca1b54\/django_media_fixtures\/finders.py#L52-L66","negative":"Make a call to the meter via JSON RPC"} {"query":"Adds an empty result group under the current node .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : return arg_0 . _nn_interface . _add_generic ( arg_0 , type_name = RESULT_GROUP , group_type_name = RESULT_GROUP , arg_1 = arg_1 , arg_2 = arg_2 )","id_":253178,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L3493-L3508","negative":"Read attribute from sysfs and return as string"} {"query":"Converts py_zipkin s annotations dict to protobuf .","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 , arg_3 in arg_0 . items ( ) : arg_1 . append ( zipkin_pb2 . Annotation ( timestamp = int ( arg_3 * 1000 * 1000 ) , arg_2 = arg_2 , ) ) return arg_1","id_":253179,"task_name":"https:\/\/github.com\/Yelp\/py_zipkin\/blob\/0944d9a3fb1f1798dbb276694aeed99f2b4283ba\/py_zipkin\/encoding\/protobuf\/__init__.py#L156-L170","negative":"Format the exception part of a traceback.\n\n The arguments are the exception type and value such as given by\n sys.last_type and sys.last_value. The return value is a list of\n strings, each ending in a newline.\n\n Normally, the list contains a single string; however, for\n SyntaxError exceptions, it contains several lines that (when\n printed) display detailed information about where the syntax\n error occurred.\n\n The message indicating which exception occurred is always the last\n string in the list."} {"query":"Invalidate an authorization code after use .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , * arg_4 , ** arg_5 ) : log . debug ( 'Destroy grant token for client %r, %r' , arg_1 , arg_2 ) arg_6 = arg_0 . _grantgetter ( arg_1 = arg_1 , arg_2 = arg_2 ) if arg_6 : arg_6 . delete ( )","id_":253180,"task_name":"https:\/\/github.com\/lepture\/flask-oauthlib\/blob\/9e6f152a5bb360e7496210da21561c3e6d41b0e1\/flask_oauthlib\/provider\/oauth2.py#L808-L818","negative":"Returns opened file object for writing dialog logs.\n\n Returns:\n log_file: opened Python file object."} {"query":"Return TypingStatusMessage from hangouts_pb2 . SetTypingNotification .","positive":"def Func ( arg_0 ) : return TypingStatusMessage ( conv_id = arg_0 . conversation_id . id , user_id = from_participantid ( arg_0 . sender_id ) , timestamp = from_timestamp ( arg_0 . timestamp ) , status = arg_0 . type , )","id_":253181,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/parsers.py#L68-L79","negative":"Add one or more files or URLs to the manifest.\n If files contains a glob, it is expanded.\n\n All files are uploaded to SolveBio. The Upload\n object is used to fill the manifest."} {"query":"make messages type report","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_1 [ \"by_msg\" ] : raise exceptions . EmptyReportError ( ) arg_3 = sorted ( [ ( arg_5 , arg_6 ) for arg_6 , arg_5 in arg_1 [ \"by_msg\" ] . items ( ) if not arg_6 . startswith ( \"I\" ) ] ) arg_3 . reverse ( ) arg_4 = ( \"message id\" , \"occurrences\" ) for arg_5 , arg_6 in arg_3 : arg_4 += ( arg_6 , str ( arg_5 ) ) arg_0 . append ( report_nodes . 
Table ( children = arg_4 , cols = 2 , rheaders = 1 ) )","id_":253182,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/lint.py#L1293-L1309","negative":"Prepare the actors, the world, and the messaging system to begin \n playing the game.\n \n This method is guaranteed to be called exactly once upon entering the \n game stage."} {"query":"use the GUI to ask YES or NO .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = tkinter . Tk ( ) arg_2 . attributes ( \"-topmost\" , True ) arg_2 . withdraw ( ) arg_3 = tkinter . messagebox . askyesno ( arg_0 , arg_1 ) arg_2 . destroy ( ) return arg_3","id_":253183,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/doc\/oldcode\/swhlab\/core\/common.py#L884-L891","negative":"Runs statistical profiler on a function."} {"query":"Export the graph to a SPIA Excel sheet .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_3 ) : if not arg_2 and not arg_4 : click . secho ( 'Specify at least one option --xlsx or --tsvs' , fg = 'red' ) sys . exit ( 1 ) arg_5 = bel_to_spia_matrices ( arg_0 ) if arg_2 : spia_matrices_to_excel ( arg_5 , arg_2 ) if arg_4 : spia_matrices_to_tsvs ( arg_5 , arg_4 )","id_":253184,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/spia.py#L214-L226","negative":"Converts all of the non-numeric fields from spatialOutput and temporalOutput\n into their scalar equivalents and records them in the output dictionary.\n\n :param spatialOutput: The results of topDownCompute() for the spatial input.\n :param temporalOutput: The results of topDownCompute() for the temporal\n input.\n :param output: The main dictionary of outputs passed to compute(). It is\n expected to have keys 'spatialTopDownOut' and 'temporalTopDownOut' that\n are mapped to numpy arrays."} {"query":"Return the R1 region as defined in the Porter2 specification .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = False if hasattr ( arg_2 , '__iter__' ) : for arg_4 in arg_2 : if arg_1 [ : len ( arg_4 ) ] == arg_4 : return len ( arg_4 ) for arg_5 in range ( len ( arg_1 ) ) : if not arg_3 and arg_1 [ arg_5 ] in arg_0 . _vowels : arg_3 = True elif arg_3 and arg_1 [ arg_5 ] not in arg_0 . _vowels : return arg_5 + 1 return len ( arg_1 )","id_":253185,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/stemmer\/_snowball.py#L42-L69","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Read data synchronous from an ADS - device from data name .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = False ) : arg_5 = adsSyncReadWriteReqEx2 ( arg_0 , arg_1 , ADSIGRP_SYM_HNDBYNAME , 0x0 , PLCTYPE_UDINT , arg_2 , PLCTYPE_STRING , ) arg_6 = adsSyncReadReqEx2 ( arg_0 , arg_1 , ADSIGRP_SYM_VALBYHND , arg_5 , arg_3 , arg_4 ) adsSyncWriteReqEx ( arg_0 , arg_1 , ADSIGRP_SYM_RELEASEHND , 0 , arg_5 , PLCTYPE_UDINT ) return arg_6","id_":253186,"task_name":"https:\/\/github.com\/stlehmann\/pyads\/blob\/44bd84394db2785332ac44b2948373916bea0f02\/pyads\/pyads_ex.py#L558-L592","negative":"Validates API Root information. Raises errors for required\n properties."} {"query":"Return stripped HTML keeping only MathML .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 ( ) arg_2 . feed ( arg_1 ) arg_3 = arg_2 . 
unescape ( arg_2 . get_data ( ) ) return escape_for_xml ( arg_3 , tags_to_keep = arg_2 . mathml_elements )","id_":253187,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/html_utils.py#L90-L95","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"A generator yielding all protobuf object data in the file . It is the main parser of the stream encoding .","positive":"def Func ( arg_0 ) : while True : arg_1 = arg_0 . _read_varint ( ) if arg_1 == 0 : break for arg_2 in range ( arg_1 ) : arg_3 = arg_0 . _read_varint ( ) if arg_3 == 0 : raise EOFError ( 'unexpected EOF.' ) yield arg_0 . _fd . read ( arg_3 ) if arg_0 . _group_delim : yield arg_0 . _delimiter ( ) if arg_0 . _delimiter is not None else None","id_":253188,"task_name":"https:\/\/github.com\/cartoonist\/pystream-protobuf\/blob\/40e70b932436887b748905e5e0a82839e4c559f0\/stream\/stream.py#L165-L183","negative":"Waits for the Job to reach a terminal state.\n\n This method will periodically check the job state until the job reach\n a terminal state.\n\n Raises:\n googleapiclient.errors.HttpError: if HTTP error is returned when getting\n the job"} {"query":"Internal utility function for Unified Job Templates . Returns data about the last job run off of that UJT","positive":"def Func ( arg_0 , arg_1 = None , ** arg_2 ) : arg_3 = arg_0 . get ( arg_1 , include_debug_header = True , ** arg_2 ) if 'current_update' in arg_3 [ 'related' ] : debug . log ( 'A current job; retrieving it.' , header = 'details' ) return client . get ( arg_3 [ 'related' ] [ 'current_update' ] [ 7 : ] ) . json ( ) elif arg_3 [ 'related' ] . get ( 'last_update' , None ) : debug . log ( 'No current job or update exists; retrieving the most recent.' , header = 'details' ) return client . get ( arg_3 [ 'related' ] [ 'last_update' ] [ 7 : ] ) . json ( ) else : raise exc . NotFound ( 'No related jobs or updates exist.' )","id_":253189,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/models\/base.py#L742-L756","negative":"Associates a cobra.Gene object with a cobra.Reaction.\n\n Parameters\n ----------\n cobra_gene : cobra.core.Gene.Gene"} {"query":"Token can be only a single char . Returns position after token if found . Otherwise raises syntax error if throw otherwise returns None","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = True ) : arg_1 = pass_white ( arg_0 , arg_1 ) if arg_1 < len ( arg_0 ) and arg_0 [ arg_1 ] == arg_2 : return arg_1 + 1 if arg_3 : raise SyntaxError ( 'Missing token. Expected %s' % arg_2 ) return None","id_":253190,"task_name":"https:\/\/github.com\/PiotrDabkowski\/Js2Py\/blob\/c0fa43f5679cf91ca8986c5747fcb07a433dc584\/js2py\/legecy_translators\/jsparser.py#L176-L184","negative":"Adds headers to the request\n\n Args:\n headers (dict): The headers to add the request headers\n\n Returns:\n The request builder instance in order to chain calls"} {"query":"Load a public key from a buffer .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , _text_type ) : arg_1 = arg_1 . encode ( \"ascii\" ) arg_2 = _new_mem_buf ( arg_1 ) if arg_0 == FILETYPE_PEM : arg_3 = _lib . PEM_read_bio_PUBKEY ( arg_2 , _ffi . NULL , _ffi . NULL , _ffi . NULL ) elif arg_0 == FILETYPE_ASN1 : arg_3 = _lib . d2i_PUBKEY_bio ( arg_2 , _ffi . 
NULL ) else : raise ValueError ( \"type argument must be FILETYPE_PEM or FILETYPE_ASN1\" ) if arg_3 == _ffi . NULL : _raise_current_error ( ) arg_4 = PKey . __new__ ( PKey ) arg_4 . _pkey = _ffi . gc ( arg_3 , _lib . EVP_PKEY_free ) arg_4 . _only_public = True return arg_4","id_":253191,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/crypto.py#L2677-L2707","negative":"Build extra args map"} {"query":"List all events occurring at or after a timestamp .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = hangouts_pb2 . SyncAllNewEventsResponse ( ) await arg_0 . _pb_request ( 'conversations\/syncallnewevents' , arg_1 , arg_2 ) return arg_2","id_":253192,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/client.py#L660-L665","negative":"write lines, one by one, separated by \\n to device"} {"query":"Print a stack trace from its invocation point .","positive":"def Func ( arg_0 = None , arg_1 = None , arg_2 = None ) : if arg_0 is None : try : raise ZeroDivisionError except ZeroDivisionError : arg_0 = sys . exc_info ( ) [ 2 ] . tb_frame . f_back print_list ( extract_stack ( arg_0 , arg_1 ) , arg_2 )","id_":253193,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/traceback.py#L262-L274","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"check that the argument to reversed is a sequence","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = utils . safe_infer ( utils . get_argument_from_call ( arg_1 , position = 0 ) ) except utils . NoSuchArgumentError : pass else : if arg_2 is astroid . Uninferable : return if arg_2 is None : if isinstance ( arg_1 . args [ 0 ] , astroid . Call ) : try : arg_3 = next ( arg_1 . args [ 0 ] . func . infer ( ) ) except astroid . InferenceError : return if getattr ( arg_3 , \"name\" , None ) == \"iter\" and utils . is_builtin_object ( arg_3 ) : arg_0 . add_message ( \"bad-reversed-sequence\" , arg_1 = arg_1 ) return if isinstance ( arg_2 , ( astroid . List , astroid . Tuple ) ) : return if isinstance ( arg_2 , astroid . Instance ) : if arg_2 . _proxied . name == \"dict\" and utils . is_builtin_object ( arg_2 . _proxied ) : arg_0 . add_message ( \"bad-reversed-sequence\" , arg_1 = arg_1 ) return if any ( arg_4 . name == \"dict\" and utils . is_builtin_object ( arg_4 ) for arg_4 in arg_2 . _proxied . ancestors ( ) ) : try : arg_2 . locals [ REVERSED_PROTOCOL_METHOD ] except KeyError : arg_0 . add_message ( \"bad-reversed-sequence\" , arg_1 = arg_1 ) return if hasattr ( arg_2 , \"getattr\" ) : for arg_5 in REVERSED_METHODS : for meth in arg_5 : try : arg_2 . getattr ( meth ) except astroid . NotFoundError : break else : break else : arg_0 . add_message ( \"bad-reversed-sequence\" , arg_1 = arg_1 ) else : arg_0 . add_message ( \"bad-reversed-sequence\" , arg_1 = arg_1 )","id_":253194,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/base.py#L1397-L1454","negative":"Bookmark group.\n\n Uses [Caching](\/references\/polyaxon-cli\/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon group bookmark\n ```\n\n \\b\n ```bash\n $ polyaxon group -g 2 bookmark\n ```"} {"query":"Build destination URL .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : arg_3 = arg_1 or arg_0 . 
_settings [ \"url\" ] if arg_1 and arg_0 . _settings [ \"baseFunc\" ] : arg_3 = \"%s\/%s\" % ( arg_0 . _settings [ \"baseFunc\" ] , arg_1 ) arg_3 += \".json\" if arg_2 : arg_3 += \"?%s\" % urllib . urlencode ( arg_2 ) return arg_3","id_":253195,"task_name":"https:\/\/github.com\/mariano\/pyfire\/blob\/42e3490c138abc8e10f2e9f8f8f3b40240a80412\/pyfire\/connection.py#L396-L413","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Receive instructions from Guacamole guacd server .","positive":"def Func ( arg_0 ) : arg_1 = 0 while True : arg_2 = arg_0 . _buffer . find ( INST_TERM . encode ( ) , arg_1 ) if arg_2 != - 1 : arg_3 = arg_0 . _buffer [ : arg_2 + 1 ] . decode ( ) arg_0 . _buffer = arg_0 . _buffer [ arg_2 + 1 : ] arg_0 . logger . debug ( 'Received instruction: %s' % arg_3 ) return arg_3 else : arg_1 = len ( arg_0 . _buffer ) arg_5 = arg_0 . client . recv ( BUF_LEN ) if not arg_5 : arg_0 . close ( ) arg_0 . logger . debug ( 'Failed to Func instruction. Closing.' ) return None arg_0 . _buffer . extend ( arg_5 )","id_":253196,"task_name":"https:\/\/github.com\/mohabusama\/pyguacamole\/blob\/344dccc6cb3a9a045afeaf337677e5d0001aa83a\/guacamole\/client.py#L91-L115","negative":"Writes int to memory\n\n :param int where: address to write to\n :param expr: value to write\n :type expr: int or BitVec\n :param size: bit size of `expr`\n :param force: whether to ignore memory permissions"} {"query":"Select all the faces and vertexes within the specified vertex quality range .","positive":"def Func ( arg_0 , arg_1 = 0.0 , arg_2 = 0.05 , arg_3 = True ) : arg_4 = '' . join ( [ ' \\n' , ' \\n' , ' \\n' , ' \\n' , ' <\/filter>\\n' ] ) util . write_filter ( arg_0 , arg_4 ) return None","id_":253197,"task_name":"https:\/\/github.com\/3DLIRIOUS\/MeshLabXML\/blob\/177cce21e92baca500f56a932d66bd9a33257af8\/meshlabxml\/select.py#L268-L311","negative":"Removes the specfied course from the specified organization"} {"query":"Returns initializer configuration as a JSON - serializable dict .","positive":"def Func ( arg_0 ) : return { 'initializers' : [ tf . compat . v2 . initializers . serialize ( tf . keras . initializers . get ( arg_1 ) ) for arg_1 in arg_0 . initializers ] , 'sizes' : arg_0 . sizes , 'validate_args' : arg_0 . validate_args , }","id_":253198,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/layers\/initializers.py#L106-L116","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Add options to a parser .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = ( 'store_true' , 'store_false' ) for arg_4 , arg_5 in arg_1 . items ( ) : arg_6 = arg_0 . _conf [ arg_5 ] . def_ [ arg_4 ] arg_7 = copy . deepcopy ( arg_6 . cmd_kwargs ) arg_8 = arg_7 . get ( 'action' ) if arg_8 is internal . Switch : arg_7 . update ( nargs = 0 ) elif arg_6 . default is not None and arg_8 not in arg_3 : arg_7 . setdefault ( 'type' , type ( arg_6 . default ) ) arg_7 . update ( help = arg_6 . help ) arg_7 . setdefault ( 'default' , arg_0 . _conf [ arg_5 ] [ arg_4 ] ) arg_2 . add_argument ( * _names ( arg_0 . 
_conf [ arg_5 ] , arg_4 ) , ** arg_7 )","id_":253199,"task_name":"https:\/\/github.com\/amorison\/loam\/blob\/a566c943a75e068a4510099331a1ddfe5bbbdd94\/loam\/cli.py#L141-L154","negative":"Seek and return the region information.\n Returns dict containing country_code and region_code.\n\n :arg ipnum: Result of ip2long conversion"} {"query":"Return a default OpenQASM string for the instruction .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . name if arg_0 . params : arg_1 = \"%s(%s)\" % ( arg_1 , \",\" . join ( [ str ( i ) for i in arg_0 . params ] ) ) return arg_0 . _Funcif ( arg_1 )","id_":253200,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/circuit\/instruction.py#L268-L279","negative":"List all events occurring at or after a timestamp."} {"query":"Adds N interpolated points with uniform spacing to each edge .","positive":"def Func ( arg_0 , arg_1 ) : if len ( arg_0 . coords ) <= 1 or arg_1 < 1 : return arg_0 . deepcopy ( ) arg_2 = interpolate_points ( arg_0 . coords , nb_steps = arg_1 , closed = False ) return arg_0 . deepcopy ( arg_2 = arg_2 )","id_":253201,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/lines.py#L1163-L1190","negative":"Build a mongo query across multiple cases.\n Translate query options from a form into a complete mongo query dictionary.\n\n Beware that unindexed queries against a large variant collection will\n be extremely slow.\n\n Currently indexed query options:\n hgnc_symbols\n rank_score\n variant_type\n category\n\n Args:\n query(dict): A query dictionary for the database, from a query form.\n category(str): 'snv', 'sv', 'str' or 'cancer'\n variant_type(str): 'clinical' or 'research'\n\n Returns:\n mongo_query : A dictionary in the mongo query format."} {"query":"Returns a list of the names of the dimensions that will be lost when converting from point_fmt_in to point_fmt_out","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = PointFormat ( arg_0 ) . dtype arg_3 = PointFormat ( arg_1 ) . dtype arg_4 = arg_3 . fields arg_5 = [ ] for arg_6 in arg_2 . names : if arg_6 not in arg_4 : arg_5 . append ( arg_6 ) return arg_5","id_":253202,"task_name":"https:\/\/github.com\/tmontaigu\/pylas\/blob\/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06\/pylas\/point\/format.py#L150-L163","negative":"Jumps short if RCX register is 0.\n\n :param cpu: current CPU.\n :param target: destination operand."} {"query":"Encrypt data using asymmetric decryption .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None ) : if arg_1 == enums . CryptographicAlgorithm . RSA : if arg_4 == enums . PaddingMethod . OAEP : arg_6 = arg_0 . _encryption_hash_algorithms . get ( arg_5 ) if arg_6 is None : raise exceptions . InvalidField ( \"The hashing algorithm '{0}' is not supported for \" \"asymmetric decryption.\" . format ( arg_5 ) ) arg_4 = asymmetric_padding . OAEP ( mgf = asymmetric_padding . MGF1 ( algorithm = arg_6 ( ) ) , algorithm = arg_6 ( ) , label = None ) elif arg_4 == enums . PaddingMethod . PKCS1v15 : arg_4 = asymmetric_padding . PKCS1v15 ( ) else : raise exceptions . InvalidField ( \"The padding method '{0}' is not supported for asymmetric \" \"decryption.\" . format ( arg_4 ) ) arg_7 = default_backend ( ) try : arg_8 = arg_7 . load_der_private_key ( arg_2 , None ) except Exception : try : arg_8 = arg_7 . load_pem_private_key ( arg_2 , None ) except Exception : raise exceptions . 
CryptographicFailure ( \"The private key bytes could not be loaded.\" ) arg_9 = arg_8 . decrypt ( arg_3 , arg_4 ) return arg_9 else : raise exceptions . InvalidField ( \"The cryptographic algorithm '{0}' is not supported for \" \"asymmetric decryption.\" . format ( arg_1 ) )","id_":253203,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/services\/server\/crypto\/engine.py#L792-L880","negative":"Updates a player's state when a payload with opcode ``playerUpdate`` is received."} {"query":"Generate a coincidence matrix . This is used to generate random inputs to the temporal learner and to compare the predicted output against .","positive":"def Func ( arg_0 = 10 , arg_1 = 500 , arg_2 = 50 ) : arg_3 = SM32 ( int ( arg_0 ) , int ( arg_1 ) ) arg_4 = numpy . array ( [ 1.0 ] * arg_2 , dtype = numpy . float32 ) for arg_5 in xrange ( arg_0 ) : arg_6 = numpy . array ( random . sample ( xrange ( arg_1 ) , arg_2 ) , dtype = numpy . uint32 ) arg_6 . sort ( ) arg_3 . setRowFromSparse ( arg_5 , arg_6 , arg_4 ) arg_7 = SM32 ( int ( arg_0 ) , int ( arg_1 ) ) arg_7 . initializeWithFixedNNZR ( arg_2 ) return arg_3","id_":253204,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/fdrutilities.py#L65-L94","negative":"Return True if we should retry, False otherwise."} {"query":"tries to deserialize a message might fail if data is missing","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = arg_4 , arg_5 = False , arg_6 = arg_7 , arg_8 = arg_9 , arg_10 = arg_11 , arg_12 = arg_13 , arg_14 = False ) : if len ( arg_1 ) < arg_0 . MIN_MESSAGE_SIZE : raise ValueError ( 'not enough data' ) if arg_2 is None : arg_2 = arg_0 . detect_protocol ( arg_1 , arg_3 ) arg_15 = TTransport . TMemoryBuffer ( arg_1 ) arg_16 = arg_2 ( arg_15 ) arg_17 = None if arg_5 : try : arg_17 = ThriftStruct . Func ( arg_16 , arg_6 , arg_8 , arg_10 , arg_12 , arg_14 ) except : arg_15 = TTransport . TMemoryBuffer ( arg_1 ) arg_16 = arg_2 ( arg_15 ) arg_18 , arg_19 , arg_20 = arg_16 . FuncMessageBegin ( ) arg_19 = arg_0 . message_type_to_str ( arg_19 ) if len ( arg_18 ) == 0 or arg_18 . isspace ( ) or arg_18 . startswith ( ' ' ) : raise ValueError ( 'no method name' ) if len ( arg_18 ) > arg_0 . MAX_METHOD_LENGTH : raise ValueError ( 'method name too long' ) arg_21 = range ( 33 , 127 ) if any ( ord ( arg_22 ) not in arg_21 for arg_22 in arg_18 ) : raise ValueError ( 'invalid method name' % arg_18 ) arg_23 = ThriftStruct . Func ( arg_16 , arg_6 , arg_8 , arg_10 , arg_12 , arg_14 ) arg_16 . FuncMessageEnd ( ) arg_24 = arg_15 . _buffer . tell ( ) return arg_0 ( arg_18 , arg_19 , arg_20 , arg_23 , arg_17 , arg_24 ) , arg_24","id_":253205,"task_name":"https:\/\/github.com\/pinterest\/thrift-tools\/blob\/64e74aec89e2491c781fc62d1c45944dc15aba28\/thrift_tools\/thrift_message.py#L82-L150","negative":"Set the rotation state of the camera\n\n :param x: viewport x pos\n :param y: viewport y pos"} {"query":"Parses the API response and raises appropriate errors if raise_errors was set to True","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . _raise_errors : return arg_1 arg_2 = str ( arg_1 . status_code ) [ 0 ] == '4' arg_3 = str ( arg_1 . status_code ) [ 0 ] == '5' arg_4 = arg_1 . content if arg_1 . 
status_code == 403 : raise AuthenticationError ( arg_4 ) elif arg_2 : raise APIError ( arg_4 ) elif arg_3 : raise ServerError ( arg_4 ) return arg_1","id_":253206,"task_name":"https:\/\/github.com\/sendwithus\/sendwithus_python\/blob\/8ae50d514febd44f7d9be3c838b4d92f99412832\/sendwithus\/__init__.py#L142-L160","negative":"Configure the Outstation's database of input point definitions.\n\n Configure two Analog points (group\/variation 30.1) at indexes 1 and 2.\n Configure two Binary points (group\/variation 1.2) at indexes 1 and 2."} {"query":"Try to find the container ID with the specified name","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _containers . get ( arg_1 , None ) if not arg_2 is None : return arg_2 . get ( 'id' , None ) return None","id_":253207,"task_name":"https:\/\/github.com\/worstcase\/blockade\/blob\/3dc6ad803f0b0d56586dec9542a6a06aa06cf569\/blockade\/state.py#L84-L89","negative":"Attempt to re-establish a connection using previously acquired tokens.\n\n If the Skype token is valid but the registration token is invalid, a new endpoint will be registered.\n\n Raises:\n .SkypeAuthException: if the token file cannot be used to authenticate"} {"query":"Temporal distance probability density function .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = arg_0 . _temporal_distance_cdf ( ) arg_3 = { } arg_4 = [ arg_1 [ 0 ] ] arg_5 = [ ] for arg_6 in range ( 0 , len ( arg_1 ) - 1 ) : arg_7 = arg_1 [ arg_6 ] arg_8 = arg_1 [ arg_6 + 1 ] arg_9 = arg_8 - arg_7 arg_10 = arg_2 [ arg_6 + 1 ] - arg_2 [ arg_6 ] if arg_9 == 0.0 : arg_3 [ arg_7 ] = arg_10 else : arg_4 . append ( arg_8 ) arg_5 . append ( arg_10 \/ float ( arg_9 ) ) assert ( len ( arg_5 ) == len ( arg_4 ) - 1 ) return numpy . array ( arg_4 ) , numpy . array ( arg_5 ) , arg_3","id_":253208,"task_name":"https:\/\/github.com\/CxAalto\/gtfspy\/blob\/bddba4b74faae6c1b91202f19184811e326547e5\/gtfspy\/routing\/profile_block_analyzer.py#L185-L213","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Pop a column from the H2OFrame at index i .","positive":"def Func ( arg_0 , arg_1 ) : if is_type ( arg_1 , str ) : arg_1 = arg_0 . names . index ( arg_1 ) arg_2 = H2OFrame . _expr ( expr = ExprNode ( \"cols\" , arg_0 , arg_1 ) ) arg_3 = arg_0 . _ex . _cache arg_0 . _ex = ExprNode ( \"cols\" , arg_0 , - ( arg_1 + 1 ) ) arg_0 . _ex . _cache . ncols -= 1 arg_0 . _ex . _cache . names = arg_3 . names [ : arg_1 ] + arg_3 . names [ arg_1 + 1 : ] arg_0 . _ex . _cache . types = { name : arg_3 . types [ name ] for name in arg_0 . _ex . _cache . names } arg_0 . _ex . _cache . _data = None arg_2 . _ex . _cache . ncols = 1 arg_2 . _ex . _cache . names = [ arg_3 . names [ arg_1 ] ] return arg_2","id_":253209,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/frame.py#L1629-L1647","negative":"Flush incomming socket messages."} {"query":"find the SHA256 hash string of a file","positive":"def Func ( arg_0 ) : arg_1 = hashlib . sha256 ( ) with open ( arg_0 , \"rb\" ) as f : for arg_2 in iter ( lambda : f . read ( 4096 ) , b\"\" ) : arg_1 . update ( arg_2 ) return arg_1 . 
hexdigest ( )","id_":253210,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/utils\/fileio.py#L244-L251","negative":"Sends a json GCM message"} {"query":"If n2 is a perfect square return its square root else raise error .","positive":"def Func ( arg_0 ) : arg_1 = int ( math . sqrt ( arg_0 ) ) assert arg_1 * arg_1 == arg_0 return arg_1","id_":253211,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/search.py#L648-L652","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."} {"query":"Assert model solver status is optimal .","positive":"def Func ( arg_0 , arg_1 = 'optimization failed' ) : arg_2 = arg_0 . solver . status if arg_2 != OPTIMAL : arg_3 = OPTLANG_TO_EXCEPTIONS_DICT . get ( arg_2 , OptimizationError ) raise arg_3 ( \"{} ({})\" . format ( arg_1 , arg_2 ) )","id_":253212,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/util\/solver.py#L424-L441","negative":"Convert this unnormalized batch to an instance of Batch.\n\n As this method is intended to be called before augmentation, it\n assumes that none of the ``*_aug`` attributes is yet set.\n It will produce an AssertionError otherwise.\n\n The newly created Batch's ``*_unaug`` attributes will match the ones\n in this batch, just in normalized form.\n\n Returns\n -------\n imgaug.augmentables.batches.Batch\n The batch, with ``*_unaug`` attributes being normalized."} {"query":"Run the script","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = False , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = True ) : arg_9 = False arg_10 = False if arg_0 . __no_file_in : arg_11 = tempfile . NamedTemporaryFile ( delete = False , suffix = '.xyz' , dir = os . getcwd ( ) ) arg_11 . write ( b'0 0 0' ) arg_11 . close ( ) arg_0 . file_in = [ arg_11 . name ] if not arg_0 . filters : arg_7 = None elif arg_7 is None : arg_9 = True arg_13 = tempfile . NamedTemporaryFile ( delete = False , suffix = '.mlx' ) arg_13 . close ( ) arg_0 . save_to_file ( arg_13 . name ) arg_7 = arg_13 . name if ( arg_0 . parse_geometry or arg_0 . parse_topology or arg_0 . parse_hausdorff ) and ( arg_2 is None ) : arg_10 = True arg_14 = tempfile . NamedTemporaryFile ( delete = False , suffix = '.txt' ) arg_14 . close ( ) arg_2 = arg_14 . name if arg_5 is None : arg_5 = arg_0 . file_out run ( script = arg_7 , arg_1 = arg_1 , arg_2 = arg_2 , mlp_in = arg_0 . mlp_in , arg_3 = arg_3 , arg_4 = arg_4 , arg_12 = arg_0 . file_in , arg_5 = arg_5 , arg_6 = arg_6 , ml_version = arg_0 . ml_version , arg_8 = arg_8 ) if arg_0 . parse_geometry : arg_0 . geometry = compute . parse_geometry ( arg_2 , arg_1 , print_output = arg_8 ) if arg_0 . parse_topology : arg_0 . topology = compute . parse_topology ( arg_2 , arg_1 , print_output = arg_8 ) if arg_0 . parse_hausdorff : arg_0 . hausdorff_distance = compute . parse_hausdorff ( arg_2 , arg_1 , print_output = arg_8 ) if arg_0 . __no_file_in : os . remove ( arg_11 . name ) if arg_9 : os . remove ( arg_13 . name ) if arg_10 : os . remove ( arg_14 . 
name )","id_":253213,"task_name":"https:\/\/github.com\/3DLIRIOUS\/MeshLabXML\/blob\/177cce21e92baca500f56a932d66bd9a33257af8\/meshlabxml\/mlx.py#L210-L267","negative":"Fetch the events pages of a given group."} {"query":"Reformat binary annotations dict to return list of zipkin_core objects . The value of the binary annotations MUST be in string format .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = zipkin_core . AnnotationType . STRING return [ create_binary_annotation ( arg_3 , str ( arg_4 ) , arg_2 , arg_1 ) for arg_3 , arg_4 in arg_0 . items ( ) ]","id_":253214,"task_name":"https:\/\/github.com\/Yelp\/py_zipkin\/blob\/0944d9a3fb1f1798dbb276694aeed99f2b4283ba\/py_zipkin\/thrift\/__init__.py#L119-L135","negative":"Query the Enrollment API for the specific course modes that are available for the given course_id.\n\n Arguments:\n course_id (str): The string value of the course's unique identifier\n\n Returns:\n list: A list of course mode dictionaries."} {"query":"Add an item to this checklist . Returns a dictionary of values of new item .","positive":"def Func ( arg_0 , arg_1 = None ) : return arg_0 . fetch_json ( uri_path = arg_0 . base_uri + '\/checkItems' , http_method = 'POST' , arg_1 = arg_1 or { } )","id_":253215,"task_name":"https:\/\/github.com\/its-rigs\/Trolly\/blob\/483dc94c352df40dc05ead31820b059b2545cf82\/trolly\/checklist.py#L71-L80","negative":"Parse value from database."} {"query":"Read elements from the file .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False ) : arg_4 , arg_5 , arg_6 = read_element_tag ( arg_0 , arg_1 ) if arg_2 and arg_4 not in [ etypes [ arg_7 ] [ 'n' ] for arg_7 in arg_2 ] : raise ParseError ( 'Got type {}, expected {}' . format ( arg_4 , ' \/ ' . join ( '{} ({})' . format ( etypes [ arg_7 ] [ 'n' ] , arg_7 ) for arg_7 in arg_2 ) ) ) if not arg_6 : arg_6 = arg_0 . read ( arg_5 ) arg_8 = arg_5 % 8 if arg_8 : arg_0 . seek ( 8 - arg_8 , 1 ) if arg_3 : arg_9 = 's' arg_10 = [ unpack ( arg_1 , arg_9 , s ) for s in arg_6 . split ( b'\\0' ) if s ] if len ( arg_10 ) == 0 : arg_10 = '' elif len ( arg_10 ) == 1 : arg_10 = asstr ( arg_10 [ 0 ] ) else : arg_10 = [ asstr ( s ) for s in arg_10 ] else : arg_9 = etypes [ inv_etypes [ arg_4 ] ] [ 'fmt' ] arg_10 = unpack ( arg_1 , arg_9 , arg_6 ) return arg_10","id_":253216,"task_name":"https:\/\/github.com\/nephics\/mat4py\/blob\/6c1a2ad903937437cc5f24f3c3f5aa2c5a77a1c1\/mat4py\/loadmat.py#L170-L204","negative":"Parse a hub key into a dictionary of component parts\n\n :param key: str, a hub key\n :returns: dict, hub key split into parts\n :raises: ValueError"} {"query":"attrs pipe can extract attribute values of object .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 : arg_3 = [ ] for arg_4 in arg_1 : if hasattr ( arg_2 , arg_4 ) : arg_3 . append ( getattr ( arg_2 , arg_4 ) ) yield arg_3","id_":253217,"task_name":"https:\/\/github.com\/GaryLee\/cmdlet\/blob\/5852a63fc2c7dd723a3d7abe18455f8dacb49433\/cmdlet\/cmds.py#L108-L125","negative":"Trim all the annotations inside the jam and return as a new `JAMS`\n object.\n\n See `Annotation.trim` for details about how the annotations\n are trimmed.\n\n This operation is also documented in the jam-level sandbox\n with a list keyed by ``JAMS.sandbox.trim`` containing a tuple for each\n jam-level trim of the form ``(start_time, end_time)``.\n\n This function also copies over all of the file metadata from the\n original jam.\n\n Note: trimming does not affect the duration of the jam, i.e. 
the value\n of ``JAMS.file_metadata.duration`` will be the same for the original\n and trimmed jams.\n\n Parameters\n ----------\n start_time : float\n The desired start time for the trimmed annotations in seconds.\n end_time\n The desired end time for trimmed annotations in seconds. Must be\n greater than ``start_time``.\n strict : bool\n When ``False`` (default) observations that lie at the boundaries of\n the trimming range (see `Annotation.trim` for details), will have\n their time and\/or duration adjusted such that only the part of the\n observation that lies within the trim range is kept. When ``True``\n such observations are discarded and not included in the trimmed\n annotation.\n\n Returns\n -------\n jam_trimmed : JAMS\n The trimmed jam with trimmed annotations, returned as a new JAMS\n object."} {"query":"Shows some stats about the currently playing song .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . bot . lavalink . players . get ( arg_1 . guild . id ) arg_3 = 'Nothing' if arg_2 . current : arg_4 = lavalink . Utils . format_time ( arg_2 . position ) if arg_2 . current . stream : arg_5 = '\ud83d\udd34 LIVE'\r else : arg_5 = lavalink . Utils . format_time ( arg_2 . current . duration ) arg_3 = f'**[{player.current.title}]({player.current.uri})**\\n({position}\/{duration})' arg_6 = discord . Embed ( color = discord . Color . blurple ( ) , title = 'Now Playing' , description = arg_3 ) await arg_1 . send ( arg_6 = arg_6 )","id_":253218,"task_name":"https:\/\/github.com\/Devoxin\/Lavalink.py\/blob\/63f55c3d726d24c4cfd3674d3cd6aab6f5be110d\/examples\/music-v2.py#L138-L152","negative":"Create Flask app."} {"query":"A local version of NumPy s PSD function that returns the plot arrays .","positive":"def Func ( arg_0 , arg_1 = 2 ** 10 , arg_2 = 1 ) : arg_3 , arg_4 = pylab . mlab . psd ( arg_0 , arg_1 , arg_2 ) return arg_3 . flatten ( ) , arg_4","id_":253219,"task_name":"https:\/\/github.com\/mwickert\/scikit-dsp-comm\/blob\/5c1353412a4d81a8d7da169057564ecf940f8b5b\/sk_dsp_comm\/digitalcom.py#L946-L981","negative":"Set renewal, rebinding times."} {"query":"Return motion settings matching camera_id .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 = arg_0 . _api_info [ 'camera_event' ] arg_4 = dict ( { '_sid' : arg_0 . _sid , 'api' : arg_3 [ 'name' ] , 'method' : 'MotionEnum' , 'version' : arg_3 [ 'version' ] , 'camId' : arg_1 , } , ** arg_2 ) arg_5 = arg_0 . _get_json_with_retry ( arg_3 [ 'url' ] , arg_4 ) return MotionSetting ( arg_1 , arg_5 [ 'data' ] [ 'MDParam' ] )","id_":253220,"task_name":"https:\/\/github.com\/snjoetw\/py-synology\/blob\/4f7eb0a3a9f86c24ad65993802e6fb11fbaa1f7f\/synology\/api.py#L216-L228","negative":"Map the price entity"} {"query":"Shannon entropy in nats .","positive":"def Func ( arg_0 ) : if any ( arg_0 . _dist_fn_args ) : raise ValueError ( 'Can only compute entropy when all distributions are independent.' ) return sum ( joint_distribution_lib . maybe_check_wont_broadcast ( ( arg_1 ( ) . entropy ( ) for arg_1 in arg_0 . _dist_fn_wrapped ) , arg_0 . 
validate_args ) )","id_":253221,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/joint_distribution_sequential.py#L333-L340","negative":"This method determines if we should apply our namespace indentation check.\n\n Args:\n nesting_state: The current nesting state.\n is_namespace_indent_item: If we just put a new class on the stack, True.\n If the top of the stack is not a class, or we did not recently\n add the class, False.\n raw_lines_no_comments: The lines without the comments.\n linenum: The current line number we are processing.\n\n Returns:\n True if we should apply our namespace indentation check. Currently, it\n only works for classes and namespaces inside of a namespace."} {"query":"Defines a set of URL query params to match .","positive":"def Func ( arg_0 , Func ) : arg_2 = furl ( arg_0 . _request . rawurl ) arg_2 = arg_2 . add ( Func ) arg_0 . _request . url = arg_2 . url arg_0 . add_matcher ( matcher ( 'QueryMatcher' , Func ) )","id_":253222,"task_name":"https:\/\/github.com\/h2non\/pook\/blob\/e64094e41e4d89d98d2d29af7608ef27dc50cf19\/pook\/mock.py#L347-L360","negative":"Initialize all ephemerals used by derived classes."} {"query":"r Return a new schedule with by appending child to parent at the last time of the parent schedule s channels over the intersection of the parent and child schedule s channels .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_1 , arg_3 : arg_4 = None ) -> Schedule : arg_5 = set ( arg_0 . channels ) & set ( arg_2 . channels ) arg_6 = arg_0 . ch_stop_time ( * arg_5 ) return insert ( arg_0 , arg_6 , arg_2 , arg_3 = arg_3 )","id_":253223,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/ops.py#L80-L95","negative":"turns off debugging by removing hidden tmp file"} {"query":"Generate a random density matrix rho .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 'Hilbert-Schmidt' , arg_3 = None ) : if arg_2 == 'Hilbert-Schmidt' : return __random_density_hs ( arg_0 , arg_1 , arg_3 ) elif arg_2 == 'Bures' : return __random_density_bures ( arg_0 , arg_1 , arg_3 ) else : raise QiskitError ( 'Error: unrecognized method {}' . format ( arg_2 ) )","id_":253224,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/random\/utils.py#L82-L104","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"Return disk partitions .","positive":"def Func ( arg_0 ) : arg_1 = _psutil_mswindows . get_Func ( arg_0 ) return [ nt_partition ( * arg_2 ) for arg_2 in arg_1 ]","id_":253225,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/psutil\/_psmswindows.py#L101-L104","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"This function should return unicode representation of the value","positive":"def Func ( arg_0 ) : arg_1 = id ( arg_0 ) if arg_1 in recursion_breaker . processed : return u'' recursion_breaker . processed . add ( arg_1 ) try : if isinstance ( arg_0 , six . binary_type ) : return u\"'{0}'\" . format ( arg_0 . decode ( 'utf-8' ) ) elif isinstance ( arg_0 , six . text_type ) : return u\"u'{0}'\" . 
format ( arg_0 ) elif isinstance ( arg_0 , ( list , tuple ) ) : arg_2 = list ( map ( Func , arg_0 ) ) arg_3 = serialize_list ( u'[' , arg_2 , delimiter = u',' ) + u']' return force_unicode ( arg_3 ) elif isinstance ( arg_0 , dict ) : arg_4 = six . iteritems ( arg_0 ) arg_4 = ( tuple ( map ( Func , item ) ) for item in arg_4 ) arg_4 = list ( arg_4 ) arg_4 . sort ( ) arg_4 = [ serialize_text ( u'{0}: ' . format ( key ) , item_value ) for key , item_value in arg_4 ] arg_3 = serialize_list ( u'{' , arg_4 , delimiter = u',' ) + u'}' return force_unicode ( arg_3 ) return force_unicode ( repr ( arg_0 ) ) finally : recursion_breaker . processed . remove ( arg_1 )","id_":253226,"task_name":"https:\/\/github.com\/svetlyak40wt\/python-repr\/blob\/49e358e77b97d74f29f4977ea009ab2d64c254e8\/src\/magic_repr\/__init__.py#L125-L177","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Convert the munging operations performed on H2OFrame into a POJO .","positive":"def Func ( arg_0 , arg_1 = \"\" , arg_2 = \"\" , arg_3 = True ) : assert_is_type ( arg_1 , str ) assert_is_type ( arg_2 , str ) assert_is_type ( arg_3 , bool ) if arg_1 == \"\" : arg_1 = \"AssemblyPOJO_\" + str ( uuid . uuid4 ( ) ) arg_4 = h2o . api ( \"GET \/99\/Assembly.java\/%s\/%s\" % ( arg_0 . id , arg_1 ) ) arg_5 = arg_2 + \"\/\" + arg_1 + \".java\" if arg_2 == \"\" : print ( arg_4 ) else : with open ( arg_5 , 'w' , encoding = \"utf-8\" ) as f : f . write ( arg_4 ) if arg_3 and arg_2 != \"\" : h2o . api ( \"GET \/3\/h2o-genmodel.jar\" , save_to = os . path . join ( arg_2 , \"h2o-genmodel.jar\" ) )","id_":253227,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/assembly.py#L95-L117","negative":"Run stochastic volatility model.\n\n This model estimates the volatility of a returns series over time.\n Returns are assumed to be T-distributed. lambda (width of\n T-distributed) is assumed to follow a random-walk.\n\n Parameters\n ----------\n data : pandas.Series\n Return series to model.\n samples : int, optional\n Posterior samples to draw.\n\n Returns\n -------\n model : pymc.Model object\n PyMC3 model containing all random variables.\n trace : pymc3.sampling.BaseTrace object\n A PyMC3 trace object that contains samples for each parameter\n of the posterior.\n\n See Also\n --------\n plot_stoch_vol : plotting of tochastic volatility model"} {"query":"Verify that the condition is valid .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 is not None and arg_2 [ 0 ] . name not in arg_0 . cregs : raise DAGCircuitError ( \"invalid creg in condition for %s\" % arg_1 )","id_":253228,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/dagcircuit\/dagcircuit.py#L243-L255","negative":"A factory method which can be overridden in subclasses to create\n specialized LogRecords."} {"query":"visit an astroid . ImportFrom node","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . modname arg_3 = arg_1 . root ( ) . file if arg_3 is not None : arg_4 = modutils . is_relative ( arg_2 , arg_3 ) else : arg_4 = False for arg_5 in arg_1 . names : if arg_5 [ 0 ] == \"*\" : continue arg_6 = \"%s.%s\" % ( arg_2 , arg_5 [ 0 ] ) if arg_6 . find ( \".\" ) > - 1 : try : arg_6 = modutils . get_module_part ( arg_6 , arg_3 ) except ImportError : continue if arg_6 != arg_2 : arg_0 . 
_imported_module ( arg_1 , arg_6 , arg_4 )","id_":253229,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/pyreverse\/inspector.py#L250-L274","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Returns a stdin - suitable file - like object based on the optional os_path and optionally skipping any configured sub - command .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False ) : arg_3 = None if arg_2 else arg_0 . stdin_sub_command arg_4 , arg_5 = arg_0 . _get_in_and_path ( arg_0 . stdin , arg_0 . stdin_root , arg_3 , arg_1 ) if hasattr ( arg_4 , 'stdout' ) : return arg_4 . stdout return arg_4","id_":253230,"task_name":"https:\/\/github.com\/gholt\/swiftly\/blob\/5bcc1c65323b1caf1f85adbefd9fc4988c072149\/swiftly\/cli\/iomanager.py#L149-L160","negative":"Registers `block` to `block_type` in the registry."} {"query":"Implementation of UNIX nslookup .","positive":"def Func ( arg_0 ) : try : if \"current_test_data\" in arg_3 . INTERN : if not Check ( ) . is_ip_valid ( ) : arg_1 = arg_3 . socket . getaddrinfo ( arg_3 . INTERN [ \"to_test\" ] , 80 , 0 , 0 , arg_3 . socket . IPPROTO_TCP , ) for arg_2 in arg_1 : arg_3 . INTERN [ \"current_test_data\" ] [ \"Func\" ] . append ( arg_2 [ - 1 ] [ 0 ] ) else : arg_1 = arg_3 . socket . gethostbyaddr ( arg_3 . INTERN [ \"to_test\" ] ) arg_3 . INTERN [ \"current_test_data\" ] [ \"Func\" ] [ \"hostname\" ] = arg_1 [ 0 ] arg_3 . INTERN [ \"current_test_data\" ] [ \"Func\" ] [ \"aliases\" ] = arg_1 [ 1 ] arg_3 . INTERN [ \"current_test_data\" ] [ \"Func\" ] [ \"ips\" ] = arg_1 [ 2 ] else : if not Check ( ) . is_ip_valid ( ) : arg_3 . socket . getaddrinfo ( arg_3 . INTERN [ \"to_test\" ] , 80 , 0 , 0 , arg_3 . socket . IPPROTO_TCP , ) else : arg_3 . socket . gethostbyaddr ( arg_3 . INTERN [ \"to_test\" ] ) return True except ( OSError , arg_3 . socket . herror , arg_3 . socket . gaierror ) : return False","id_":253231,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/lookup.py#L75-L145","negative":"Given the request and response headers, return `True` if an HTTP\n \"Not Modified\" response could be returned instead."} {"query":"Setup a handler to be called when a trait changes .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = False ) : if arg_3 : arg_4 = parse_notifier_name ( arg_2 ) for arg_5 in arg_4 : arg_0 . _remove_notifiers ( arg_1 , arg_5 ) else : arg_4 = parse_notifier_name ( arg_2 ) for arg_5 in arg_4 : arg_0 . _add_notifiers ( arg_1 , arg_5 )","id_":253232,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/traitlets.py#L486-L518","negative":"Adds all parameters to `traj`"} {"query":"Returns the min and max possible values for x within given constraints","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 10000 ) : if issymbolic ( arg_2 ) : arg_4 = arg_0 . min ( arg_1 , arg_2 , arg_3 ) arg_5 = arg_0 . 
max ( arg_1 , arg_2 , arg_3 ) return arg_4 , arg_5 else : return arg_2 , arg_2","id_":253233,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/core\/smtlib\/solver.py#L104-L111","negative":"Set the rotation of this body using a rotation matrix.\n\n Parameters\n ----------\n rotation : sequence of 9 floats\n The desired rotation matrix for this body."} {"query":"Return a folder list in a folder by given a folder path .","positive":"def Func ( arg_0 = \"\" ) : return [ os . path . join ( arg_0 , arg_1 ) for arg_1 in os . listdir ( arg_0 ) if os . path . isdir ( os . path . join ( arg_0 , arg_1 ) ) ]","id_":253234,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/files\/utils.py#L2185-L2194","negative":"Return True if we should retry, False otherwise."} {"query":"If the header key does not exist then set it to value . Returns the header value .","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_2 ) -> arg_2 : arg_4 = arg_1 . lower ( ) . encode ( \"latin-1\" ) arg_5 = arg_3 . encode ( \"latin-1\" ) for arg_6 , ( arg_7 , arg_8 ) in enumerate ( arg_0 . _list ) : if arg_7 == arg_4 : return arg_8 . decode ( \"latin-1\" ) arg_0 . _list . append ( ( arg_4 , arg_5 ) ) return arg_3","id_":253235,"task_name":"https:\/\/github.com\/encode\/starlette\/blob\/d23bfd0d8ff68d535d0283aa4099e5055da88bb9\/starlette\/datastructures.py#L577-L589","negative":"Propagate \"clk\" clock and reset \"rst\" signal to all subcomponents"} {"query":"Helper to iterate over the files in a directory putting those in the passed StringIO in ini format .","positive":"def Func ( arg_0 , arg_1 , arg_2 = '' , arg_3 = None , arg_4 = None ) : import fnmatch import os arg_5 = [ ( os . path . join ( arg_0 , i ) , i ) for i in os . listdir ( arg_0 ) ] arg_5 = [ i for i in arg_5 if os . path . isfile ( i [ 0 ] ) ] for arg_6 , arg_7 in arg_5 : if arg_4 is not None : if not fnmatch . fnmatch ( arg_6 , arg_4 ) : continue if arg_3 is not None : if fnmatch . fnmatch ( arg_6 , arg_3 ) : continue arg_8 = Md5Hex ( arg_6 ) if arg_2 : arg_1 . write ( '%s\/%s=%s\\n' % ( arg_2 , arg_7 , arg_8 ) ) else : arg_1 . write ( '%s=%s\\n' % ( arg_7 , arg_8 ) )","id_":253236,"task_name":"https:\/\/github.com\/zerotk\/easyfs\/blob\/140923db51fb91d5a5847ad17412e8bce51ba3da\/zerotk\/easyfs\/_easyfs.py#L1804-L1842","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Long explanation of the value from the numeric value with optional extra bits Used by Layout . verboseRead when printing the value","positive":"def Func ( arg_0 , arg_1 = None ) : if isinstance ( arg_0 . code , WithExtra ) : return arg_0 . code . callback ( arg_0 , arg_1 ) return arg_0 . code . callback ( arg_0 )","id_":253237,"task_name":"https:\/\/github.com\/google\/brotli\/blob\/4b2b2d4f83ffeaac7708e44409fe34896a01a278\/research\/brotlidump.py#L156-L163","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"Semver tag triggered deployment helper","positive":"def Func ( arg_0 , arg_1 ) : check_environment ( arg_0 , arg_1 ) click . 
secho ( 'Fetching tags from the upstream ...' ) arg_2 = TagHandler ( git . list_tags ( ) ) print_information ( arg_2 , arg_1 ) arg_3 = arg_2 . yield_tag ( arg_0 , arg_1 ) confirm ( arg_3 )","id_":253238,"task_name":"https:\/\/github.com\/ewilazarus\/yld\/blob\/157e474d1055f14ffdfd7e99da6c77d5f17d4307\/yld\/__main__.py#L26-L38","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Return the first element of an array","positive":"def Func ( arg_0 ) : from bolt . local . array import BoltArrayLocal arg_1 = arg_0 . _rdd if arg_0 . _ordered else arg_0 . _rdd . sortByKey ( ) return BoltArrayLocal ( arg_1 . values ( ) . Func ( ) )","id_":253239,"task_name":"https:\/\/github.com\/bolt-project\/bolt\/blob\/9cd7104aa085498da3097b72696184b9d3651c51\/bolt\/spark\/array.py#L117-L123","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Groups line reference together","positive":"def Func ( arg_0 , arg_1 , arg_2 = 30 ) : arg_3 = len ( arg_0 . citation ) arg_4 = [ reff . split ( \":\" ) [ - 1 ] for reff in arg_1 ( arg_3 = arg_3 ) ] arg_5 = [ ] arg_6 = 0 while arg_6 + arg_2 - 1 < len ( arg_4 ) : arg_5 . append ( tuple ( [ arg_4 [ arg_6 ] + \"-\" + arg_4 [ arg_6 + arg_2 - 1 ] , arg_4 [ arg_6 ] ] ) ) arg_6 += arg_2 if arg_6 < len ( arg_4 ) : arg_5 . append ( tuple ( [ arg_4 [ arg_6 ] + \"-\" + arg_4 [ len ( arg_4 ) - 1 ] , arg_4 [ arg_6 ] ] ) ) return arg_5","id_":253240,"task_name":"https:\/\/github.com\/Capitains\/flask-capitains-nemo\/blob\/8d91f2c05b925a6c8ea8c997baf698c87257bc58\/flask_nemo\/chunker.py#L40-L61","negative":"Serial command to set seasons table.\n\n If no dictionary is passed, the meter object buffer is used.\n\n Args:\n cmd_dict (dict): Optional dictionary of season schedules.\n password (str): Optional password\n\n Returns:\n bool: True on completion and ACK."} {"query":"Return absolute distance .","positive":"def Func ( arg_0 , arg_1 , arg_2 , * arg_3 , ** arg_4 ) : return arg_0 . 
dist ( arg_1 , arg_2 , * arg_3 , ** arg_4 )","id_":253241,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/distance\/_distance.py#L79-L99","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Perform a quick check on a BAM via samtools quickcheck . This will detect obvious BAM errors such as truncation .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = os . path . split ( arg_0 ) arg_3 = subprocess . call ( [ 'docker' , 'run' , '-v' , arg_1 + ':\/data' , 'quay.io\/ucsc_cgl\/samtools:1.3--256539928ea162949d8a65ca5c79a72ef557ce7c' , 'quickcheck' , '-vv' , '\/data\/' + arg_2 ] ) if arg_3 != 0 : return False return True","id_":253242,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/validators.py#L8-L24","negative":"Get the decryption for col."} {"query":"Parse arguments to mixin . Add them to scope as variables . Sets upp special variable","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = list ( zip ( arg_1 , [ ' ' ] * len ( arg_1 ) ) ) if arg_1 and arg_1 [ 0 ] else None arg_4 = itertools . zip_longest if sys . version_info [ 0 ] == 3 else itertools . izip_longest if arg_0 . args : arg_5 = [ v if hasattr ( v , 'parse' ) else v for v in copy . copy ( arg_0 . args ) ] arg_1 = arg_1 if isinstance ( arg_1 , list ) else [ arg_1 ] arg_6 = [ arg_0 . _parse_arg ( arg_7 , arg , arg_2 ) for arg , arg_7 in arg_4 ( [ a for a in arg_1 ] , arg_5 ) ] for arg_7 in arg_6 : if arg_7 : arg_7 . parse ( arg_2 ) if not arg_3 : arg_3 = [ v . value for v in arg_6 if v ] if not arg_3 : arg_3 = '' Variable ( [ '@arguments' , None , arg_3 ] ) . parse ( arg_2 )","id_":253243,"task_name":"https:\/\/github.com\/lesscpy\/lesscpy\/blob\/51e392fb4a3cd4ccfb6175e0e42ce7d2f6b78126\/lesscpy\/plib\/mixin.py#L49-L79","negative":"Remove all binary files in the adslib directory."} {"query":"Get a list of all available fields for an object .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . get_conn ( ) arg_2 = arg_0 . describe_object ( arg_1 ) return [ arg_3 [ 'name' ] for arg_3 in arg_2 [ 'fields' ] ]","id_":253244,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/salesforce_hook.py#L110-L123","negative":"Produces a TidyPy configuration that incorporates the configuration files\n stored in the current user's home directory.\n\n :param project_path: the path to the project that is going to be analyzed\n :type project_path: str\n :param use_cache:\n whether or not to use cached versions of any remote\/referenced TidyPy\n configurations. If not specified, defaults to ``True``.\n :type use_cache: bool\n :rtype: dict"} {"query":"Performs a step to establish the context as an acceptor .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ffi . new ( 'OM_uint32[1]' ) arg_3 = ffi . new ( 'gss_buffer_desc[1]' ) arg_3 [ 0 ] . length = len ( arg_1 ) arg_5 = ffi . new ( 'char[]' , arg_1 ) arg_3 [ 0 ] . value = arg_5 arg_7 = ffi . new ( 'gss_OID[1]' ) arg_8 = ffi . new ( 'gss_buffer_desc[1]' ) arg_9 = ffi . new ( 'gss_name_t[1]' ) arg_10 = ffi . new ( 'OM_uint32[1]' ) arg_11 = ffi . new ( 'OM_uint32[1]' ) arg_12 = ffi . new ( 'gss_cred_id_t[1]' ) if arg_0 . _cred_object is not None : arg_13 = arg_0 . _cred_object . _cred [ 0 ] else : arg_13 = ffi . cast ( 'gss_cred_id_t' , C . GSS_C_NO_CREDENTIAL ) arg_14 = C . gss_accept_sec_context ( arg_2 , arg_0 . 
_ctx , arg_13 , arg_3 , arg_0 . _channel_bindings , arg_9 , arg_7 , arg_8 , arg_10 , arg_11 , arg_12 ) if arg_9 [ 0 ] : arg_15 = MechName ( arg_9 , arg_7 [ 0 ] ) try : if arg_8 [ 0 ] . length != 0 : arg_16 = _buf_to_str ( arg_8 [ 0 ] ) else : arg_16 = None if GSS_ERROR ( arg_14 ) : if arg_2 [ 0 ] and arg_7 [ 0 ] : raise _exception_for_status ( arg_14 , arg_2 [ 0 ] , arg_7 [ 0 ] , arg_16 ) else : raise _exception_for_status ( arg_14 , arg_2 [ 0 ] , None , arg_16 ) arg_0 . established = not ( arg_14 & C . GSS_S_CONTINUE_NEEDED ) arg_0 . flags = arg_10 [ 0 ] if ( arg_0 . flags & C . GSS_C_DELEG_FLAG ) : arg_0 . delegated_cred = Credential ( arg_12 ) if arg_7 [ 0 ] : arg_0 . mech_type = OID ( arg_7 [ 0 ] [ 0 ] ) if arg_9 [ 0 ] : arg_15 . _mech_type = arg_0 . mech_type arg_0 . peer_name = arg_15 return arg_16 except : if arg_0 . _ctx : C . gss_delete_sec_context ( arg_2 , arg_0 . _ctx , ffi . cast ( 'gss_buffer_t' , C . GSS_C_NO_BUFFER ) ) arg_0 . _reset_flags ( ) raise finally : if arg_8 [ 0 ] . length != 0 : C . gss_release_buffer ( arg_2 , arg_8 ) if arg_12 [ 0 ] and not arg_0 . delegated_cred : C . gss_release_cred ( arg_2 , arg_12 )","id_":253245,"task_name":"https:\/\/github.com\/sigmaris\/python-gssapi\/blob\/a8ca577b3ccf9d9fa48f16f4954a1eddd5896236\/gssapi\/ctx.py#L865-L951","negative":"Set the rotation of this body using a rotation matrix.\n\n Parameters\n ----------\n rotation : sequence of 9 floats\n The desired rotation matrix for this body."} {"query":"Remove standard padding .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'pkcs7' ) : arg_3 = len ( arg_0 ) if arg_3 % arg_1 : raise ValueError ( \"Input data is not padded\" ) if arg_2 in ( 'pkcs7' , 'x923' ) : arg_4 = bord ( arg_0 [ - 1 ] ) if arg_4 < 1 or arg_4 > min ( arg_1 , arg_3 ) : raise ValueError ( \"Padding is incorrect.\" ) if arg_2 == 'pkcs7' : if arg_0 [ - arg_4 : ] != bchr ( arg_4 ) * arg_4 : raise ValueError ( \"PKCS#7 padding is incorrect.\" ) else : if arg_0 [ - arg_4 : - 1 ] != bchr ( 0 ) * ( arg_4 - 1 ) : raise ValueError ( \"ANSI X.923 padding is incorrect.\" ) elif arg_2 == 'iso7816' : arg_4 = arg_3 - arg_0 . rfind ( bchr ( 128 ) ) if arg_4 < 1 or arg_4 > min ( arg_1 , arg_3 ) : raise ValueError ( \"Padding is incorrect.\" ) if arg_4 > 1 and arg_0 [ 1 - arg_4 : ] != bchr ( 0 ) * ( arg_4 - 1 ) : raise ValueError ( \"ISO 7816-4 padding is incorrect.\" ) else : raise ValueError ( \"Unknown padding style\" ) return arg_0 [ : - arg_4 ]","id_":253246,"task_name":"https:\/\/github.com\/seiferma\/deterministic_encryption_utils\/blob\/a747da3cd6daf39b0c26d4d497725e8863af1dd1\/deterministic_encryption_utils\/encryption\/Padding.py#L64-L102","negative":"Removes the video from youtube and from db\n Requires POST"} {"query":"Register unit object on interface level object","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : nameAvailabilityCheck ( arg_0 , arg_1 , arg_2 ) assert arg_2 . _parent is None arg_2 . _parent = arg_0 arg_2 . _name = arg_1 arg_0 . _units . append ( arg_2 )","id_":253247,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/synthesizer\/interfaceLevel\/propDeclrCollector.py#L247-L255","negative":"pass in a word string that you\n would like to see probable matches for."} {"query":"Download and return a path to a sample that is curated by the PyAV developers .","positive":"def Func ( arg_0 ) : return cached_download ( 'https:\/\/docs.mikeboers.com\/pyav\/samples\/' + arg_0 , os . path . join ( 'pyav-Func' , arg_0 . replace ( '\/' , os . path . 
sep ) ) )","id_":253248,"task_name":"https:\/\/github.com\/mikeboers\/PyAV\/blob\/9414187088b9b8dbaa180cfe1db6ceba243184ea\/av\/datasets.py#L120-L127","negative":"Associate an existing reservedIP to a deployment.\n\n name:\n Required. Name of the reserved IP address.\n\n service_name:\n Required. Name of the hosted service.\n\n deployment_name:\n Required. Name of the deployment.\n\n virtual_ip_name:\n Optional. Name of the VirtualIP in case of multi Vip tenant.\n If this value is not specified default virtualIP is used\n for this operation."} {"query":"Writes a report summarizing coverage statistics per module .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_0 . find_code_units ( arg_1 ) arg_3 = max ( [ len ( arg_11 . name ) for arg_11 in arg_0 . code_units ] + [ 5 ] ) arg_4 = \"%%- %ds \" % arg_3 arg_5 = \"%s %s: %s\\n\" arg_6 = ( arg_4 % \"Name\" ) + \" Stmts Miss\" arg_7 = arg_4 + \"%6d %6d\" if arg_0 . branches : arg_6 += \" Branch BrMiss\" arg_7 += \" %6d %6d\" arg_8 = Numbers . pc_str_width ( ) arg_6 += \"%*s\" % ( arg_8 + 4 , \"Cover\" ) arg_7 += \"%%%ds%%%%\" % ( arg_8 + 3 , ) if arg_0 . config . show_missing : arg_6 += \" Missing\" arg_7 += \" %s\" arg_9 = \"-\" * len ( arg_6 ) + \"\\n\" arg_6 += \"\\n\" arg_7 += \"\\n\" if not arg_2 : arg_2 = sys . stdout arg_2 . write ( arg_6 ) arg_2 . write ( arg_9 ) arg_10 = Numbers ( ) for arg_11 in arg_0 . code_units : try : arg_12 = arg_0 . coverage . _analyze ( arg_11 ) arg_13 = arg_12 . numbers arg_14 = ( arg_11 . name , arg_13 . n_statements , arg_13 . n_missing ) if arg_0 . branches : arg_14 += ( arg_13 . n_branches , arg_13 . n_missing_branches ) arg_14 += ( arg_13 . pc_covered_str , ) if arg_0 . config . show_missing : arg_14 += ( arg_12 . missing_formatted ( ) , ) arg_2 . write ( arg_7 % arg_14 ) arg_10 += arg_13 except KeyboardInterrupt : raise except : arg_15 = not arg_0 . config . ignore_errors if arg_15 : arg_16 , arg_17 = sys . exc_info ( ) [ : 2 ] if arg_16 is NotPython and not arg_11 . should_be_python ( ) : arg_15 = False if arg_15 : arg_2 . write ( arg_5 % ( arg_11 . name , arg_16 . __name__ , arg_17 ) ) if arg_10 . n_files > 1 : arg_2 . write ( arg_9 ) arg_14 = ( \"TOTAL\" , arg_10 . n_statements , arg_10 . n_missing ) if arg_0 . branches : arg_14 += ( arg_10 . n_branches , arg_10 . n_missing_branches ) arg_14 += ( arg_10 . pc_covered_str , ) if arg_0 . config . show_missing : arg_14 += ( \"\" , ) arg_2 . write ( arg_7 % arg_14 ) return arg_10 . pc_covered","id_":253249,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/summary.py#L17-L86","negative":"Converts all of the non-numeric fields from spatialOutput and temporalOutput\n into their scalar equivalents and records them in the output dictionary.\n\n :param spatialOutput: The results of topDownCompute() for the spatial input.\n :param temporalOutput: The results of topDownCompute() for the temporal\n input.\n :param output: The main dictionary of outputs passed to compute(). It is\n expected to have keys 'spatialTopDownOut' and 'temporalTopDownOut' that\n are mapped to numpy arrays."} {"query":"Find requested version in POST request .","positive":"def Func ( arg_0 ) : if \"version\" in arg_0 . document . attrib : arg_1 = arg_0 . document . attrib [ \"version\" ] . lower ( ) if arg_1 in allowed_versions [ arg_0 . params [ 'service' ] ] : arg_0 . 
params [ \"version\" ] = arg_1 else : raise OWSInvalidParameterValue ( \"Version %s is not supported\" % arg_1 , arg_1 = \"version\" ) elif arg_0 . _get_request_type ( ) == \"getcapabilities\" : arg_0 . params [ \"version\" ] = None else : raise OWSMissingParameterValue ( 'Parameter \"version\" is missing' , arg_1 = \"version\" ) return arg_0 . params [ \"version\" ]","id_":253250,"task_name":"https:\/\/github.com\/bird-house\/twitcher\/blob\/e6a36b3aeeacf44eec537434b0fb87c09ab54b5f\/twitcher\/owsrequest.py#L167-L179","negative":"Returns protobuf mapcontainer. Read from translation file."} {"query":"Set the ERP values for this object s degrees of freedom .","positive":"def Func ( arg_0 , Func ) : _set_params ( arg_0 . ode_obj , 'ERP' , Func , arg_0 . ADOF + arg_0 . LDOF )","id_":253251,"task_name":"https:\/\/github.com\/EmbodiedCognition\/pagoda\/blob\/8892f847026d98aba8646ecbc4589397e6dec7bd\/pagoda\/physics.py#L648-L657","negative":"pass in a word string that you\n would like to see probable matches for."} {"query":"Validate the internal representation of the instance .","positive":"def Func ( arg_0 ) : try : arg_1 = arg_0 . schema . validate ( arg_0 . to_dict ( ) ) except ValidationError as ex : raise ModelValidationError ( ex . messages , ex . field_names , ex . fields , ex . data , ** ex . kwargs )","id_":253252,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/validation\/base.py#L227-L233","negative":"Decode the data passed in and potentially flush the decoder."} {"query":"Creates a transfer job that runs periodically .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = arg_0 . _inject_project_id ( arg_1 , BODY , PROJECT_ID ) return arg_0 . get_conn ( ) . transferJobs ( ) . create ( arg_1 = arg_1 ) . execute ( num_retries = arg_0 . num_retries )","id_":253253,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_transfer_hook.py#L119-L132","negative":"Construct SimBitsT with cache"} {"query":"Detect infinite recursion and prevent it .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = os . path . abspath ( os . path . normcase ( arg_2 ) ) if arg_4 in arg_0 : raise NamelistRecursionError ( arg_4 ) arg_0 . add ( arg_4 ) try : arg_5 = _Func ( arg_1 , arg_4 , arg_3 ) finally : arg_0 . remove ( arg_4 ) return arg_5","id_":253254,"task_name":"https:\/\/github.com\/davelab6\/pyfontaine\/blob\/e9af7f2667e85803a7f5ea2b1f0d9a34931f3b95\/fontaine\/charsets\/internals\/gfonts_utils.py#L160-L176","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."} {"query":"Set instance variables based on an options dict","positive":"def Func ( arg_0 , ** arg_1 ) : arg_0 . interactive = arg_1 [ 'interactive' ] arg_0 . verbosity = arg_1 [ 'verbosity' ] arg_0 . symlink = arg_1 [ 'link' ] arg_0 . clear = arg_1 [ 'clear' ] arg_0 . dry_run = arg_1 [ 'dry_run' ] arg_7 = arg_1 [ 'ignore_patterns' ] if arg_1 [ 'use_default_ignore_patterns' ] : arg_7 += [ 'CVS' , '.*' , '*~' ] arg_0 . ignore_patterns = list ( set ( arg_7 ) ) arg_0 . 
post_process = arg_1 [ 'post_process' ]","id_":253255,"task_name":"https:\/\/github.com\/adrianoveiga\/django-media-fixtures\/blob\/a3f0d9ac84e73d491eeb0c881b23cc47ccca1b54\/django_media_fixtures\/management\/commands\/collectmedia.py#L70-L83","negative":"Check that a binary operator is surrounded by exactly one space."}
{"query":"Parse a string of space - separated numbers returning a Python list .","positive":"def Func ( arg_0 ) : assert isinstance ( arg_0 , basestring ) return [ int ( arg_1 ) for arg_1 in arg_0 . split ( ) ]","id_":253256,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/utils.py#L207-L215","negative":"Adjust contrast of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n contrast_factor (float): How much to adjust the contrast. Can be any\n non negative number. 0 gives a solid gray image, 1 gives the\n original image while 2 increases the contrast by a factor of 2.\n\n Returns:\n PIL Image: Contrast adjusted image."}
{"query":"Initializes sitetree in memory .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_2 . get ( 'request' , None ) if arg_3 is None : raise SiteTreeError ( 'Sitetree requires \"django.core.context_processors.request\" template context processor to be active. ' 'If it is, check that your view pushes request data into the template.' ) if id ( arg_3 ) != id ( arg_0 . current_request ) : arg_0 . init ( arg_2 ) arg_1 = arg_0 . resolve_var ( arg_1 ) arg_1 , arg_4 = arg_0 . get_sitetree ( arg_1 ) if not arg_4 : return None , None return arg_1 , arg_4","id_":253257,"task_name":"https:\/\/github.com\/idlesign\/django-sitetree\/blob\/61de4608e6e415247c75fe8691027d7c4ed0d1e7\/sitetree\/sitetreeapp.py#L709-L737","negative":"TARGET power button"}
{"query":"Delete a space .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 not in arg_0 . spaces : raise ValueError ( \"Space '%s' does not exist\" % arg_1 ) if arg_1 in arg_0 . static_spaces : arg_2 = arg_0 . static_spaces [ arg_1 ] if arg_2 . is_derived : raise ValueError ( \"%s has derived spaces\" % repr ( arg_2 . interface ) ) else : arg_0 . static_spaces . del_item ( arg_1 ) arg_0 . model . spacegraph . remove_node ( arg_2 ) arg_0 . inherit ( ) arg_0 . model . spacegraph . update_subspaces ( arg_0 ) elif arg_1 in arg_0 . dynamic_spaces : arg_0 . dynamic_spaces . del_item ( arg_1 ) else : raise ValueError ( \"Derived cells cannot be deleted\" )","id_":253258,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/core\/space.py#L1316-L1339","negative":"Gets the CRC32c checksum of an object in Google Cloud Storage.\n\n :param bucket_name: The Google cloud storage bucket where the blob_name is.\n :type bucket_name: str\n :param object_name: The name of the object to check in the Google cloud\n storage bucket_name.\n :type object_name: str"}
{"query":"Convert Monero decimal to atomic integer of piconero .","positive":"def Func ( arg_0 ) : if not isinstance ( arg_0 , ( Decimal , float ) + _integer_types ) : raise ValueError ( \"Amount '{}' doesn't have numeric type. Only Decimal, int, long and \" \"float (not recommended) are accepted as amounts.\" ) return int ( arg_0 * 10 ** 12 )","id_":253259,"task_name":"https:\/\/github.com\/monero-ecosystem\/monero-python\/blob\/64149f6323af57a3924f45ed87997d64387c5ee0\/monero\/numbers.py#L15-L20","negative":"Fill missing rates of a currency with the closest available ones."}
{"query":"Get a list of top clans by trophy","positive":"def Func ( arg_0 , arg_1 = 'global' , ** arg_2 : arg_3 ) : arg_4 = arg_0 . api . LOCATIONS + '\/' + str ( arg_1 ) + '\/rankings\/clans' return arg_0 . _get_model ( arg_4 , PartialClan , ** arg_2 )","id_":253260,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/official_api\/client.py#L478-L493","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"}
{"query":"Tags the current version .","positive":"def Func ( arg_0 ) : print ( 'Tagging \"{}\"' . format ( arg_0 ) ) arg_1 = '\"Released version {}\"' . format ( arg_0 ) Popen ( [ 'git' , 'tag' , '-s' , '-m' , arg_1 , arg_0 ] ) . wait ( )","id_":253261,"task_name":"https:\/\/github.com\/jfinkels\/birkhoff\/blob\/86fff692c9cfb7217e51e25868230f4e0b53caa0\/make-release.py#L176-L180","negative":"Remove rows with NAs from the H2OFrame.\n\n :returns: new H2OFrame with all rows from the original frame containing any NAs removed."}
{"query":"Execute JavaScript Asynchronously in current context .","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : return arg_0 . _execute ( Command . EXECUTE_ASYNC_SCRIPT , { 'script' : arg_1 , 'args' : list ( arg_2 ) } )","id_":253262,"task_name":"https:\/\/github.com\/macacajs\/wd.py\/blob\/6d3c52060013e01a67cd52b68b5230b387427bad\/macaca\/webdriver.py#L572-L587","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."}
{"query":"iterator for JSON - per - line in a file pattern","positive":"def Func ( arg_0 ) : with open ( arg_0 , 'r' ) as f : for arg_1 in f . readlines ( ) : yield json . loads ( arg_1 )","id_":253263,"task_name":"https:\/\/github.com\/DerwenAI\/pytextrank\/blob\/181ea41375d29922eb96768cf6550e57a77a0c95\/pytextrank\/pytextrank.py#L783-L789","negative":"Verify a certificate in a context.\n\n .. versionadded:: 0.15\n\n :raises X509StoreContextError: If an error occurred when validating a\n certificate in the context. Sets ``certificate`` attribute to\n indicate which certificate caused the error."}
{"query":"Add or change list of logbooks .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = [ ] , arg_3 = \"\" ) : if arg_1 is not None and len ( arg_2 ) != 0 : if arg_1 in arg_0 . logList : for arg_4 in arg_2 : if arg_4 not in arg_0 . logList . get ( arg_1 ) [ 0 ] : arg_0 . logList . get ( arg_1 ) [ 0 ] . append ( arg_4 ) else : arg_0 . logList [ arg_1 ] = [ ] arg_0 . logList [ arg_1 ] . append ( arg_2 ) if len ( arg_0 . logList [ arg_1 ] ) > 1 and arg_3 != \"\" : arg_0 . logList . get ( arg_1 ) [ 1 ] == arg_3 else : arg_0 . logList . get ( arg_1 ) . append ( arg_3 ) arg_0 . logType . clear ( ) arg_0 . logType . addItems ( list ( arg_0 . logList . keys ( ) ) ) arg_0 . changeLogType ( )","id_":253264,"task_name":"https:\/\/github.com\/joelfrederico\/SciSalt\/blob\/7bf57c49c7dde0a8b0aa337fbd2fbd527ce7a67f\/scisalt\/facettools\/logbookForm.py#L538-L559","negative":"Parses package fields."}
{"query":"Register classes that could be initialized from JSON configuration file . If name is not passed the class name is converted to snake - case .","positive":"def Func ( arg_0 : arg_1 = None ) -> arg_3 : def decorate ( arg_2 : arg_3 , arg_4 : arg_1 = None ) -> arg_3 : arg_5 = arg_4 or short_name ( arg_2 ) global arg_7 arg_6 = arg_2 . __module__ + ':' + arg_2 . __name__ if arg_5 in arg_7 and arg_7 [ arg_5 ] != arg_6 : logger . warning ( 'Registry name \"{}\" has been already Funced and will be overwritten.' . format ( arg_5 ) ) arg_7 [ arg_5 ] = arg_6 return arg_2 return lambda model_cls_name : decorate ( model_cls_name , arg_0 )","id_":253265,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/common\/registry.py#L43-L57","negative":"Checks if a data is csr, csc, bsr, or dia Scipy sparse matrix"}
{"query":"Construct a validation schema for a given namespace .","positive":"def Func ( arg_0 ) : if arg_0 not in __NAMESPACE__ : raise NamespaceError ( 'Unknown Func: {:s}' . format ( arg_0 ) ) arg_1 = copy . deepcopy ( JAMS_SCHEMA [ 'definitions' ] [ 'SparseObservation' ] ) for arg_2 in [ 'value' , 'confidence' ] : try : arg_1 [ 'properties' ] [ arg_2 ] = __NAMESPACE__ [ arg_0 ] [ arg_2 ] except KeyError : pass return arg_1","id_":253266,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/schema.py#L50-L75","negative":"Return an open file-object to the index file"}
{"query":"Reduce dicts of dicts to dot separated keys .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = { } for arg_5 , arg_6 , arg_3 in iterate_schema ( arg_1 , arg_2 , arg_3 ) : arg_7 = arg_5 [ 'name' ] arg_8 = arg_5 [ 'type' ] arg_9 = arg_5 [ 'label' ] arg_10 = arg_6 [ arg_7 ] if arg_7 in arg_6 else None arg_4 [ arg_3 ] = { 'name' : arg_7 , 'value' : arg_10 , 'type' : arg_8 , 'label' : arg_9 } return arg_4","id_":253267,"task_name":"https:\/\/github.com\/genialis\/genesis-pyapi\/blob\/dfe9bcc8b332a8b9873db4ab9994b0cc10eb209a\/genesis\/data.py#L49-L59","negative":"r\"\"\"Bernoulli likelihood sampling.\n\n Sample according to\n\n .. math::\n\n \\mathbf y \\sim \\prod_{i=1}^n\n \\text{Bernoulli}(\\mu_i = \\text{logit}(z_i))\n \\mathcal N(~ o \\mathbf 1 + \\mathbf a^\\intercal \\boldsymbol\\alpha;\n ~ (h^2 - v_c)\\mathrm G^\\intercal\\mathrm G +\n (1-h^2-v_c)\\mathrm I ~)\n\n using the canonical Logit link function to define the conditional Bernoulli\n mean :math:`\\mu_i`.\n\n The causal :math:`\\mathbf a` covariates and the corresponding effect-sizes\n are randomly draw according to the following idea. The ``causal_variants``,\n if given, are first mean-zero and std-one normalized and then having\n its elements divided by the squared-root the the number of variances::\n\n causal_variants = _stdnorm(causal_variants, axis=0)\n causal_variants \/= sqrt(causal_variants.shape[1])\n\n The causal effect-sizes :math:`\\boldsymbol\\alpha` are draw from\n :math:`\\{-1, +1\\}` and subsequently normalized for mean-zero and std-one\"\"\n\n Parameters\n ----------\n random_state : random_state\n Set the initial random state.\n\n Example\n -------\n\n .. doctest::\n\n >>> from glimix_core.random import bernoulli_sample\n >>> from numpy.random import RandomState\n >>> offset = 5\n >>> G = [[1, -1], [2, 1]]\n >>> bernoulli_sample(offset, G, random_state=RandomState(0))\n array([1., 1.])"}
{"query":"Population parameter vals == average member parameter vals","positive":"def Func ( arg_0 ) : if len ( arg_0 . __members ) != 0 : if arg_0 . __num_processes > 1 : arg_1 = [ m . get ( ) for m in arg_0 . __members ] else : arg_1 = arg_0 . __members arg_2 = { } for arg_3 in arg_0 . __Func : arg_2 [ arg_3 . name ] = sum ( m . Func [ arg_3 . name ] for m in arg_1 ) \/ len ( arg_1 ) return arg_2 else : return None","id_":253268,"task_name":"https:\/\/github.com\/tjkessler\/PyGenetics\/blob\/b78ee6393605d6e85d2279fb05f3983f5833df40\/pygenetics\/ga_core.py#L176-L191","negative":"Using the record length and appropriate start points, seek to the\n country that corresponds to the converted IP address integer.\n Return offset of record.\n\n :arg ipnum: Result of ip2long conversion"}
{"query":"Creates a connection based upon the given configuration object .","positive":"def Func ( arg_0 ) : arg_1 = { } arg_1 [ 'hosts' ] = [ arg_0 . get ( 'jackal' , 'host' ) ] if int ( arg_0 . get ( 'jackal' , 'use_ssl' ) ) : arg_1 [ 'use_ssl' ] = True if arg_0 . get ( 'jackal' , 'ca_certs' ) : arg_1 [ 'ca_certs' ] = arg_0 . get ( 'jackal' , 'ca_certs' ) if int ( arg_0 . get ( 'jackal' , 'client_certs' ) ) : arg_1 [ 'client_cert' ] = arg_0 . get ( 'jackal' , 'client_cert' ) arg_1 [ 'client_key' ] = arg_0 . get ( 'jackal' , 'client_key' ) arg_1 [ 'ssl_assert_hostname' ] = False connections . Func ( ** arg_1 )","id_":253269,"task_name":"https:\/\/github.com\/mwgielen\/jackal\/blob\/7fe62732eb5194b7246215d5277fb37c398097bf\/jackal\/core.py#L21-L38","negative":"Builds fake MNIST-style data for unit testing."}
{"query":"A free - text query resolver by Wolfram|Alpha . Returns the first result if available .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_0 = wolframalpha . Client ( pmxbot . config [ 'Wolfram|Alpha API key' ] ) arg_5 = arg_0 . query ( arg_4 ) return next ( arg_5 . results ) . text","id_":253270,"task_name":"https:\/\/github.com\/jaraco\/wolframalpha\/blob\/50bf2e047b698e308a9a88770a23e7e210aa5bcb\/wolframalpha\/pmxbot.py#L11-L18","negative":"Calls consume_function for each element of this streamlet. This function returns nothing"}
{"query":"Sends commands to QTM","positive":"def Func ( arg_0 , arg_1 , arg_2 = True , arg_3 = arg_4 . PacketCommand ) : if arg_0 . transport is not None : arg_6 = len ( arg_1 ) LOG . debug ( \"S: %s\" , arg_1 ) arg_0 . transport . write ( struct . pack ( RTCommand % arg_6 , RTheader . size + arg_6 + 1 , arg_3 . value , arg_1 . encode ( ) , b\"\\0\" , ) ) arg_7 = arg_0 . loop . create_future ( ) if arg_2 : arg_0 . request_queue . append ( arg_7 ) else : arg_7 . set_result ( None ) return arg_7 raise QRTCommandException ( \"Not connected!\" )","id_":253271,"task_name":"https:\/\/github.com\/qualisys\/qualisys_python_sdk\/blob\/127d7eeebc2b38b5cafdfa5d1d0198437fedd274\/qtm\/protocol.py#L89-L113","negative":"Return a list of all enrollments for the passed section_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/enrollments.html#method.enrollments_api.index"}
{"query":"Delete a model on the h2o cluster given its key .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True , arg_3 = 60 , ** arg_4 ) : assert arg_1 is not None , '\"key\" parameter is null' arg_5 = arg_0 . do_json_request ( '\/3\/Models.json\/' + arg_1 , cmd = 'delete' , timeout = arg_3 ) if not arg_2 and 'f00b4r' in arg_5 : raise ValueError ( 'Model key not found: ' + arg_1 ) verboseprint ( \"Func result:\" , dump_json ( arg_5 ) ) return arg_5","id_":253272,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/py2\/h2o_ray.py#L628-L641","negative":"Decode the data passed in and potentially flush the decoder."}
{"query":"Get a rate for a given currency and date .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 == arg_0 . ref_currency : return 1.0 if arg_2 not in arg_0 . _rates [ arg_1 ] : arg_3 , arg_4 = arg_0 . bounds [ arg_1 ] if not arg_0 . fallback_on_wrong_date : raise RateNotFoundError ( '{0} not in {1} bounds {2}\/{3}' . format ( arg_2 , arg_1 , arg_3 , arg_4 ) ) if arg_2 < arg_3 : arg_5 = arg_3 elif arg_2 > arg_4 : arg_5 = arg_4 else : raise AssertionError ( 'Should never happen, bug in the code!' ) if arg_0 . verbose : print ( r'\/!\\ {0} not in {1} bounds {2}\/{3}, falling back to {4}' . format ( arg_2 , arg_1 , arg_3 , arg_4 , arg_5 ) ) arg_2 = arg_5 arg_6 = arg_0 . _rates [ arg_1 ] [ arg_2 ] if arg_6 is None : raise RateNotFoundError ( '{0} has no rate for {1}' . format ( arg_1 , arg_2 ) ) return arg_6","id_":253273,"task_name":"https:\/\/github.com\/alexprengere\/currencyconverter\/blob\/e3cb0d693819c0c824214225b23a47e9380f71df\/currency_converter\/currency_converter.py#L245-L284","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."}
{"query":"Provide the run IDs of failed jobs","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_1 : try : arg_0 . clusterprocids_finished . remove ( arg_2 ) except ValueError : pass","id_":253274,"task_name":"https:\/\/github.com\/alphatwirl\/alphatwirl\/blob\/5138eeba6cd8a334ba52d6c2c022b33c61e3ba38\/alphatwirl\/concurrently\/condor\/submitter.py#L208-L225","negative":"initialize the merger model with a coalescent time\n\n Args:\n - Tc: a float or an iterable, if iterable another argument T of same shape is required\n - T: an array like of same shape as Tc that specifies the time pivots corresponding to Tc\n Returns:\n - None"}
{"query":"Tokenize a document and add an annotation attribute to each token","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = tokenize ( arg_0 , include_hrefs = False ) for arg_3 in arg_2 : arg_3 . annotation = arg_1 return arg_2","id_":253275,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/lxml\/html\/diff.py#L71-L77","negative":"Destroy the SQLStepQueue tables in the database"}
{"query":"Generator that yields one by one the return value for self . read_dcm for each file within this set","positive":"def Func ( arg_0 ) : try : for arg_1 in arg_0 . items : yield arg_0 . read_dcm ( arg_1 ) except IOError as ioe : raise IOError ( 'Error reading DICOM file: {}.' . format ( arg_1 ) ) from ioe","id_":253276,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/dicom\/sets.py#L173-L182","negative":"Called when there is an error in the websocket"}
{"query":"Return binding energies instead of mass excesses","positive":"def Func ( arg_0 ) : arg_1 = 938.2723 arg_2 = 0.5110 arg_3 = 939.5656 arg_4 = 931.494028 arg_5 = arg_0 . Z * ( arg_1 + arg_2 ) + ( arg_0 . A - arg_0 . Z ) * arg_3 - ( arg_0 . df + arg_0 . A * arg_4 ) return Table ( arg_5 = arg_5 , name = 'BE' + '(' + arg_0 . name + ')' )","id_":253277,"task_name":"https:\/\/github.com\/elyase\/masstable\/blob\/3eb72b22cd3337bc5c6bb95bb7bb73fdbe6ae9e2\/masstable\/masstable.py#L418-L431","negative":"Returns Hugo Larochelle's binary static MNIST tf.data.Dataset."}
{"query":"Debugging method to print out frames in hex .","positive":"def Func ( arg_0 ) : arg_1 = \"\" for arg_2 in arg_0 : arg_1 += \"\\\\x\" + format ( arg_2 , \"02x\" ) _LOGGER . debug ( arg_1 )","id_":253278,"task_name":"https:\/\/github.com\/c-soft\/satel_integra\/blob\/3b6d2020d1e10dc5aa40f30ee4ecc0f3a053eb3c\/satel_integra\/satel_integra.py#L23-L28","negative":"Returns an aggregator connection."}
{"query":"Get the name of the item .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . xmlnode . prop ( \"name\" ) if not arg_1 : arg_1 = \"\" return arg_1 . decode ( \"utf-8\" )","id_":253279,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/disco.py#L316-L324","negative":"Convert a 2D array of items into a Markdown table.\n\n padding: the number of padding spaces on either side of each divider\n divider: the vertical divider to place between columns\n header_div: the horizontal divider to place between the header row and\n body cells"}
{"query":"Break out a date from Omnimeter read .","positive":"def Func ( arg_0 ) : arg_1 = str ( arg_0 ) arg_2 = namedtuple ( 'EkmDate' , [ 'yy' , 'mm' , 'dd' , 'weekday' , 'hh' , 'minutes' , 'ss' ] ) if len ( arg_1 ) != 14 : arg_2 . yy = arg_2 . mm = arg_2 . dd = arg_2 . weekday = arg_2 . hh = arg_2 . minutes = arg_2 . ss = 0 return arg_2 arg_2 . yy = int ( arg_1 [ 0 : 2 ] ) arg_2 . mm = int ( arg_1 [ 2 : 4 ] ) arg_2 . dd = int ( arg_1 [ 4 : 6 ] ) arg_2 . weekday = int ( arg_1 [ 6 : 8 ] ) arg_2 . hh = int ( arg_1 [ 8 : 10 ] ) arg_2 . minutes = int ( arg_1 [ 10 : 12 ] ) arg_2 . ss = int ( arg_1 [ 12 : 14 ] ) return arg_2","id_":253280,"task_name":"https:\/\/github.com\/ekmmetering\/ekmmeters\/blob\/b3748bdf30263bfa46ea40157bdf8df2522e1904\/ekmmeters.py#L1875-L1912","negative":"Check the spacing of a single equals sign."}
{"query":"Extract metadata entries from an xml node","positive":"def Func ( arg_0 ) : arg_1 = None arg_2 = None if 'key' in arg_0 . attrib : arg_1 = arg_0 . attrib [ 'key' ] else : arg_1 = None if arg_1 in [ 'time' , 'elevation' ] or arg_1 . startswith ( 'custom_dimension' ) : arg_2 = md_dimension_info ( arg_1 , arg_0 . find ( \"dimensionInfo\" ) ) elif arg_1 == 'DynamicDefaultValues' : arg_2 = md_dynamic_default_values_info ( arg_1 , arg_0 . find ( \"DynamicDefaultValues\" ) ) elif arg_1 == 'JDBC_VIRTUAL_TABLE' : arg_2 = md_jdbc_virtual_table ( arg_1 , arg_0 . find ( \"virtualTable\" ) ) else : arg_2 = arg_0 . text if None in [ arg_1 , arg_2 ] : return None else : return ( arg_1 , arg_2 )","id_":253281,"task_name":"https:\/\/github.com\/boundlessgeo\/gsconfig\/blob\/532f561f32b91ea8debea0573c503dd20988bf40\/src\/geoserver\/support.py#L585-L606","negative":"Setup coverage related extensions."}
{"query":"Alias for _assemble_with_columns","positive":"def Func ( arg_0 , arg_1 , arg_2 , * arg_3 , ** arg_4 ) : warnings . warn ( \"Func has been depreciated for _assemble_with_columns. It will be removed in a future version.\" , DeprecationWarning ) return arg_0 . _assemble_with_columns ( arg_1 , arg_2 , * arg_3 , ** arg_4 )","id_":253282,"task_name":"https:\/\/github.com\/mikeshultz\/rawl\/blob\/818ebeabba5e051627d444c4849fde55947f94be\/rawl\/__init__.py#L268-L272","negative":"Implementation of the Context Likelihood or Relatedness Network algorithm.\n\n Args:\n mat (numpy.ndarray): matrix, if it is a square matrix, the program assumes\n it is a relevance matrix where mat(i,j) represents the similarity content\n between nodes i and j. Elements of matrix should be\n non-negative.\n\n Returns:\n mat_nd (numpy.ndarray): Output deconvolved matrix (direct dependency matrix). Its components\n represent direct edge weights of observed interactions.\n\n .. note::\n Ref:Jeremiah J. Faith, Boris Hayete, Joshua T. Thaden, Ilaria Mogno, Jamey\n Wierzbowski, Guillaume Cottarel, Simon Kasif, James J. Collins, and Timothy\n S. Gardner. Large-scale mapping and validation of escherichia coli\n transcriptional regulation from a compendium of expression profiles.\n PLoS Biology, 2007"}
{"query":"Open authorize page in a browser print the url if it didn t work","positive":"async def Func ( arg_0 ) : arg_1 = \"https:\/\/api.twitter.com\/oauth\/authorize?oauth_token=\" + arg_0 try : arg_2 = webbrowser . open ( arg_1 ) await asyncio . sleep ( 2 ) if not arg_2 : raise RuntimeError except RuntimeError : print ( \"could not open a browser\\ngo here to enter your PIN: \" + arg_1 ) arg_3 = input ( \"\\nEnter your PIN: \" ) return arg_3","id_":253283,"task_name":"https:\/\/github.com\/odrling\/peony-twitter\/blob\/967f98e16e1889389540f2e6acbf7cc7a1a80203\/peony\/oauth_dance.py#L42-L69","negative":"Set's the package's description.\n Raises CardinalityError if description already set.\n Raises OrderError if no package previously defined."}
{"query":"Adds a memory to an event .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = get_object_or_404 ( Event , arg_1 = arg_1 ) arg_3 = MemoryForm ( arg_0 . POST or None , arg_0 . FILES or None ) if arg_3 . is_valid ( ) : arg_4 = arg_3 . save ( commit = False ) arg_4 . user = arg_0 . user arg_4 . event = arg_2 arg_4 . save ( ) arg_6 = \"Your thoughts were added. \" if arg_0 . FILES : arg_7 = arg_0 . FILES . getlist ( 'photos' ) arg_8 = len ( arg_7 ) for arg_9 in arg_7 : process_upload ( arg_9 , arg_4 , arg_3 , arg_2 , arg_0 ) if arg_8 > 1 : arg_6 += \"{} images were added and should appear soon.\" . format ( arg_8 ) else : arg_6 += \"{} image was added and should appear soon.\" . format ( arg_8 ) messages . success ( arg_0 , arg_6 ) return HttpResponseRedirect ( '..\/' ) return render ( arg_0 , 'happenings\/add_memories.html' , { 'form' : arg_3 , 'event' : arg_2 } )","id_":253284,"task_name":"https:\/\/github.com\/tBaxter\/tango-happenings\/blob\/cb3c49ea39e0a6cef9c6ffb534c2fbf401139ba2\/build\/lib\/happenings\/views.py#L266-L288","negative":"Attempts to fetch streams repeatedly\n until some are returned or limit hit."}
{"query":"Compare generated mechanisms to actual ones .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 = 'Subgraph' ) -> Mapping [ arg_3 , Mapping [ arg_3 , float ] ] : arg_4 = get_subgraphs_by_annotation ( arg_0 , arg_2 ) arg_5 = _transform_graph_dict_to_node_dict ( arg_4 ) arg_6 = generate_bioprocess_mechanisms ( arg_0 ) arg_7 = _transform_graph_dict_to_node_dict ( arg_6 ) arg_8 : Dict [ arg_3 , Dict [ arg_3 , float ] ] = defaultdict ( dict ) arg_9 = itt . product ( arg_5 . items ( ) , arg_7 . items ( ) ) for ( arg_10 , arg_11 ) , ( arg_12 , arg_13 ) in arg_9 : arg_14 = tanimoto_set_similarity ( arg_7 , arg_5 ) arg_8 [ arg_10 ] [ arg_12 ] = arg_14 return dict ( arg_8 )","id_":253285,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/mechanisms.py#L19-L41","negative":"Print STDOUT resulting from a Bash shell command formatted in reStructuredText.\n\n :param command: Bash shell command\n :type command: string\n\n :param nindent: Indentation level\n :type nindent: integer\n\n :param env: Environment variable replacement dictionary. The Bash\n command is pre-processed and any environment variable\n represented in the full notation (:bash:`${...}`) is replaced.\n The dictionary key is the environment variable name and the\n dictionary value is the replacement value. For example, if\n **command** is :code:`'${PYTHON_CMD} -m \"x=5\"'` and **env**\n is :code:`{'PYTHON_CMD':'python3'}` the actual command issued\n is :code:`'python3 -m \"x=5\"'`\n :type env: dictionary\n\n :param fpointer: Output function pointer. Normally is :code:`cog.out` but\n :code:`print` or other functions can be used for\n debugging\n :type fpointer: function object\n\n :param cols: Number of columns of output\n :type cols: integer"}
{"query":"Execute a BigQuery query multiple times with different parameters .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 in arg_2 : arg_0 . execute ( arg_1 , arg_3 )","id_":253286,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/bigquery_hook.py#L1832-L1843","negative":"Manage the response when the server rejects a message.\n\n An undo is when required this client sends a message that the server \n refuses to pass on to the other clients playing the game. When this \n happens, the client must undo the changes that the message made to the \n world before being sent or crash. Note that unlike sync requests, undo \n requests are only reported to the client that sent the offending \n message."}
{"query":"r Method to set the T P and composition dependent property methods desired for consideration by the user . Can be used to exclude certain methods which might have unacceptable accuracy .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if isinstance ( arg_1 , str ) : arg_1 = [ arg_1 ] arg_0 . user_methods = arg_1 arg_0 . forced = arg_2 if set ( arg_0 . user_methods ) . difference ( arg_0 . all_methods ) : raise Exception ( \"One of the given methods is not available for this mixture\" ) if not arg_0 . user_methods and arg_0 . forced : raise Exception ( 'Only user specified methods are considered when forced is True, but no methods were provided' ) arg_0 . method = None arg_0 . sorted_valid_methods = [ ] arg_0 . TP_zs_ws_cached = ( None , None , None , None )","id_":253287,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/utils.py#L3160-L3197","negative":"This function adds the given stream to the logger, but does not check with a ConnectorDB database\n to make sure that the stream exists. Use at your own risk."}
{"query":"Run all abort tasks then all exit tasks then exit with error return status","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . log . info ( 'Signal handler received Func request' ) arg_0 . _Func ( arg_1 ) arg_0 . _exit ( arg_1 ) os . _exit ( 1 )","id_":253288,"task_name":"https:\/\/github.com\/antevens\/listen\/blob\/d3ddff8e7fbfb672c5bd7f6f4febeb5e921d8c67\/listen\/signal_handler.py#L119-L125","negative":"This will output the nginx HTTP config string for specific port spec"}
{"query":"Build change - of - basis matrices for constrained seasonal effects .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = np . eye ( arg_0 ) - 1. \/ arg_0 arg_2 [ - 1 , : ] = 1. \/ arg_0 arg_3 = np . linalg . inv ( arg_2 ) arg_4 = arg_2 [ : - 1 , : ] arg_5 = arg_3 [ : , : - 1 ] arg_4 = tf . cast ( arg_4 , arg_1 = arg_1 , name = 'effects_to_residuals' ) arg_5 = tf . cast ( arg_5 , arg_1 = arg_1 , name = 'residuals_to_effects' ) return arg_4 , arg_5","id_":253289,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/sts\/seasonal.py#L529-L570","negative":"DDP ping handler."}
{"query":"Return the PhoneticSpanish coding of word .","positive":"def Func ( arg_0 , arg_1 , arg_2 = - 1 ) : arg_1 = unicode_normalize ( 'NFKD' , text_type ( arg_1 . upper ( ) ) ) arg_1 = '' . join ( c for c in arg_1 if c in arg_0 . _uc_set ) arg_1 = arg_1 . replace ( 'LL' , 'L' ) arg_1 = arg_1 . replace ( 'R' , 'R' ) arg_3 = arg_1 . translate ( arg_0 . _trans ) if arg_2 > 0 : arg_3 = ( arg_3 + ( '0' * arg_2 ) ) [ : arg_2 ] return arg_3","id_":253290,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/phonetic\/_phonetic_spanish.py#L53-L97","negative":"Get object properties.\n\n @param window_name: Window name to look for, either full name,\n LDTP's name convention, or a Unix glob.\n @type window_name: string\n @param object_name: Object name to look for, either full name,\n LDTP's name convention, or a Unix glob.\n @type object_name: string\n\n @return: list of properties\n @rtype: list"}
{"query":"Create a map of command names and handlers","positive":"def Func ( ) : return { 'activate' : activate , 'config' : hconfig , 'deactivate' : deactivate , 'help' : cli_help , 'kill' : kill , 'restart' : restart , 'submit' : submit , 'update' : update , 'version' : version }","id_":253291,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/cli\/src\/python\/main.py#L78-L92","negative":"Generates and writes the media pages for all media in the gallery"}
{"query":"Increase the processed task counter and show progress message","positive":"def Func ( arg_0 ) : arg_0 . Func_tasks += 1 arg_1 = arg_0 . tasks . qsize ( ) if arg_1 > 0 : progress ( '[%d task(s) completed, %d remaining, %d thread(s)]' , arg_0 . Func_tasks , arg_1 , len ( arg_0 . workers ) ) else : progress ( '[%d task(s) completed, %d thread(s)]' , arg_0 . Func_tasks , len ( arg_0 . workers ) )","id_":253292,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L606-L613","negative":"Configure the Outstation's database of input point definitions.\n\n Configure two Analog points (group\/variation 30.1) at indexes 1 and 2.\n Configure two Binary points (group\/variation 1.2) at indexes 1 and 2."}
{"query":"Return a map from the input stream .","positive":"def Func ( arg_0 : arg_1 ) -> lmap . Map : arg_2 = arg_0 . reader arg_3 = arg_2 . advance ( ) assert arg_3 == \"{\" arg_4 : MutableMapping [ Any , Any ] = { } while True : if arg_2 . peek ( ) == \"}\" : arg_2 . next_token ( ) break arg_5 = _read_next ( arg_0 ) if arg_5 is COMMENT : continue while True : if arg_2 . peek ( ) == \"}\" : raise SyntaxError ( \"Unexpected token '}'; expected map value\" ) arg_6 = _read_next ( arg_0 ) if arg_6 is COMMENT : continue if arg_5 in arg_4 : raise SyntaxError ( f\"Duplicate key '{k}' in map literal\" ) break arg_4 [ arg_5 ] = arg_6 return lmap . map ( arg_4 )","id_":253293,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/reader.py#L431-L455","negative":"Create a tuning job\n\n :param config: the config for tuning\n :type config: dict\n :param wait_for_completion: if the program should keep running until job finishes\n :type wait_for_completion: bool\n :param check_interval: the time interval in seconds which the operator\n will check the status of any SageMaker job\n :type check_interval: int\n :param max_ingestion_time: the maximum ingestion time in seconds. Any\n SageMaker jobs that run longer than this will fail. Setting this to\n None implies no timeout for any SageMaker job.\n :type max_ingestion_time: int\n :return: A response to tuning job creation"}
{"query":"Convert types of task fields .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ 'last-update' , 'create-time' , 'start-time' , 'end-time' ] arg_3 = [ 'task-attempt' ] for arg_4 in arg_2 : if arg_4 in arg_1 : arg_1 [ arg_4 ] = arg_0 . default_format_date ( arg_1 [ arg_4 ] ) for arg_4 in arg_3 : if arg_4 in arg_1 and arg_1 [ arg_4 ] is not None : arg_1 [ arg_4 ] = int ( arg_1 [ arg_4 ] ) return arg_1","id_":253294,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/commands\/dstat.py#L74-L87","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."}
{"query":"Exchange an Authorization Code for an Access Token .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : check_type ( arg_1 , basestring , may_be_none = False ) check_type ( arg_2 , basestring , may_be_none = False ) check_type ( arg_3 , basestring , may_be_none = False ) check_type ( arg_4 , basestring , may_be_none = False ) arg_5 = dict_from_items_with_values ( grant_type = \"authorization_code\" , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , ) arg_6 = requests . post ( arg_0 . _endpoint_url , data = arg_5 , ** arg_0 . _request_kwargs ) check_response_code ( arg_6 , EXPECTED_RESPONSE_CODE [ 'POST' ] ) arg_7 = extract_and_parse_json ( arg_6 ) return arg_0 . _object_factory ( OBJECT_TYPE , arg_7 )","id_":253295,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/webexteamssdk\/api\/access_tokens.py#L104-L148","negative":"Does this filename match any of the patterns?"}
{"query":"Retrieve a list of actions supported by the object .","positive":"def Func ( arg_0 ) : arg_1 = _a11y . AXUIElement . Func ( arg_0 ) return [ arg_2 [ 2 : ] for arg_2 in arg_1 ]","id_":253296,"task_name":"https:\/\/github.com\/alex-kostirin\/pyatomac\/blob\/3f46f6feb4504315eec07abb18bb41be4d257aeb\/atomac\/AXClasses.py#L673-L677","negative":"Returns the Session currently used.\n\n :return: An instance of :class:`OpenSSL.SSL.Session` or\n :obj:`None` if no session exists.\n\n .. versionadded:: 0.14"}
{"query":"Flip the shape in the x direction in - place .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_0 . poly . flip ( ) else : arg_0 . poly . flip ( arg_1 [ 0 ] )","id_":253297,"task_name":"https:\/\/github.com\/hsharrison\/pyglet2d\/blob\/46f610b3c76221bff19e5c0cf3d35d7875ce37a0\/src\/pyglet2d.py#L282-L295","negative":"Stop streaming samples from device and delete samples buffer"}
{"query":"Assign parent statement and propagate dependency flags if necessary","positive":"def Func ( arg_0 , arg_1 : \"HdlStatement\" ) : arg_2 = arg_0 . parentStm is None arg_0 . parentStm = arg_1 if not arg_0 . _now_is_event_dependent and arg_1 . _now_is_event_dependent : arg_0 . _on_parent_event_dependent ( ) arg_3 = arg_1 while arg_3 . parentStm is not None : arg_3 = arg_3 . parentStm arg_4 = arg_3 . _outputs . append arg_5 = arg_3 . _inputs . append if arg_2 : for arg_6 in arg_0 . _inputs : arg_6 . endpoints . discard ( arg_0 ) arg_6 . endpoints . append ( arg_3 ) arg_5 ( arg_6 ) for arg_7 in arg_0 . _outputs : arg_7 . drivers . discard ( arg_0 ) arg_7 . drivers . append ( arg_3 ) arg_4 ( arg_7 ) arg_8 = arg_0 . _get_rtl_context ( ) arg_8 . statements . discard ( arg_0 ) arg_1 . rank += arg_0 . rank","id_":253298,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/statements.py#L456-L487","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."}
{"query":"Saves a model instance to the database .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = True ) : if arg_4 : arg_3 . save ( ) return arg_3","id_":253299,"task_name":"https:\/\/github.com\/ricobl\/django-importer\/blob\/6967adfa7a286be7aaf59d3f33c6637270bd9df6\/django_importer\/importers\/base.py#L127-L133","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"}
{"query":"Stops running proxy .","positive":"def Func ( arg_0 ) : if not arg_0 . sql_proxy_process : raise AirflowException ( \"The sql proxy is not started yet\" ) else : arg_0 . log . info ( \"Stopping the cloud_sql_proxy pid: %s\" , arg_0 . sql_proxy_process . pid ) arg_0 . sql_proxy_process . kill ( ) arg_0 . sql_proxy_process = None arg_0 . log . info ( \"Removing the socket directory: %s\" , arg_0 . cloud_sql_proxy_socket_directory ) shutil . rmtree ( arg_0 . cloud_sql_proxy_socket_directory , ignore_errors = True ) if arg_0 . sql_proxy_was_downloaded : arg_0 . log . info ( \"Removing downloaded proxy: %s\" , arg_0 . sql_proxy_path ) try : os . remove ( arg_0 . sql_proxy_path ) except OSError as e : if not e . errno == errno . ENOENT : raise else : arg_0 . log . info ( \"Skipped removing proxy - it was not downloaded: %s\" , arg_0 . sql_proxy_path ) if os . path . isfile ( arg_0 . credentials_path ) : arg_0 . log . info ( \"Removing generated credentials file %s\" , arg_0 . credentials_path ) os . remove ( arg_0 . credentials_path )","id_":253300,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_sql_hook.py#L567-L599","negative":"Returns a list of dicts representing issues from a remote service."}
{"query":"Extract the base of the given element .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , list ) : return [ arg_0 . Func ( arg_2 ) for arg_2 in arg_1 ] arg_3 = arg_0 . checker . is_url_valid ( url = arg_1 , return_base = True ) if arg_3 : return arg_3 if \"\/\" in arg_1 : return arg_1 . split ( \"\/\" ) [ 0 ] return arg_1","id_":253301,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/adblock.py#L178-L214","negative":"issue a command to read the archive records after a known time stamp."}
{"query":"Return list of choices s keys","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 in arg_0 : if isinstance ( arg_2 , ( list , tuple ) ) : for arg_1 , arg_3 in arg_2 : yield arg_1 else : yield arg_1","id_":253302,"task_name":"https:\/\/github.com\/kmmbvnr\/django-any\/blob\/6f64ebd05476e2149e2e71deeefbb10f8edfc412\/django_any\/functions.py#L6-L15","negative":"Produces a TidyPy configuration that incorporates the configuration files\n stored in the current user's home directory.\n\n :param project_path: the path to the project that is going to be analyzed\n :type project_path: str\n :param use_cache:\n whether or not to use cached versions of any remote\/referenced TidyPy\n configurations. If not specified, defaults to ``True``.\n :type use_cache: bool\n :rtype: dict"}
{"query":"Apply U to q .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : return arg_0 . append ( UBase ( arg_1 , arg_2 , arg_3 ) , [ arg_4 ] , [ ] )","id_":253303,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/extensions\/standard\/ubase.py#L50-L52","negative":"Handle GET request - render linked learners list and \"Link learner\" form.\n\n Arguments:\n request (django.http.request.HttpRequest): Request instance\n customer_uuid (str): Enterprise Customer UUID\n\n Returns:\n django.http.response.HttpResponse: HttpResponse"}
{"query":"Return the composition channel .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False ) : arg_4 = list ( arg_0 . input_dims ( ) ) arg_5 = list ( arg_0 . output_dims ( ) ) if arg_3 : arg_6 = len ( arg_0 . input_dims ( ) ) arg_7 = 2 * len ( arg_0 . output_dims ( ) ) arg_8 = True for arg_9 , arg_10 in enumerate ( arg_2 ) : arg_4 [ arg_10 ] = arg_1 . _input_dims [ arg_9 ] else : arg_6 = len ( arg_0 . output_dims ( ) ) arg_7 = 0 arg_8 = False for arg_9 , arg_10 in enumerate ( arg_2 ) : arg_5 [ arg_10 ] = arg_1 . _output_dims [ arg_9 ] arg_11 = np . reshape ( arg_0 . data , arg_0 . _shape ) arg_12 = np . reshape ( arg_1 . data , arg_1 . _shape ) arg_13 = [ 2 * arg_6 - 1 - arg_10 for arg_10 in arg_2 ] + [ arg_6 - 1 - arg_10 for arg_10 in arg_2 ] arg_14 = [ np . product ( arg_5 ) ** 2 , np . product ( arg_4 ) ** 2 ] arg_15 = np . reshape ( arg_0 . _einsum_matmul ( arg_11 , arg_12 , arg_13 , arg_7 , arg_8 ) , arg_14 ) return SuperOp ( arg_15 , arg_4 , arg_5 )","id_":253304,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/channel\/superop.py#L316-L346","negative":"This function creates the command list from available information"}
{"query":"Read the contents of a file located relative to setup . py","positive":"def Func ( * arg_0 ) : with open ( join ( abspath ( dirname ( __file__ ) ) , * arg_0 ) ) as thefile : return thefile . Func ( )","id_":253305,"task_name":"https:\/\/github.com\/mrsarm\/mongotail\/blob\/82ba74e32eff92faa320833a8d19c58555f9cd49\/setup.py#L30-L33","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."}
{"query":"Specify a callback function that will be called when a server offers Next Protocol Negotiation options .","positive":"def Func ( arg_0 , arg_1 ) : _warn_npn ( ) arg_0 . _npn_select_helper = _NpnSelectHelper ( arg_1 ) arg_0 . _npn_select_callback = arg_0 . _npn_select_helper . callback _lib . SSL_CTX_set_next_proto_select_cb ( arg_0 . _context , arg_0 . _npn_select_callback , _ffi . NULL )","id_":253306,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/SSL.py#L1425-L1441","negative":"Return the number of combinations for n choose k.\n\n Args:\n n (int): the total number of options .\n k (int): The number of elements.\n\n Returns:\n int: returns the binomial coefficient"}
{"query":"Store a training sample and associated category label","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 0 ) : if arg_0 . _samples is None : arg_0 . _samples = numpy . zeros ( ( 0 , len ( arg_1 ) ) , dtype = RealNumpyDType ) assert arg_0 . _labels is None arg_0 . _labels = [ ] arg_0 . _samples = numpy . concatenate ( ( arg_0 . _samples , numpy . atleast_2d ( arg_1 ) ) , axis = 0 ) arg_0 . _labels += [ arg_2 ] if arg_0 . _partitions is None : arg_0 . _partitions = [ ] if arg_3 is None : arg_3 = 0 arg_0 . _partitions += [ arg_3 ]","id_":253307,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/regions\/knn_classifier_region.py#L858-L879","negative":"Checks if an blob_name is updated in Google Cloud Storage.\n\n :param bucket_name: The Google cloud storage bucket where the object is.\n :type bucket_name: str\n :param object_name: The name of the object to check in the Google cloud\n storage bucket.\n :type object_name: str\n :param ts: The timestamp to check against.\n :type ts: datetime.datetime"}
{"query":"Extract required fields from an array","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { } for arg_3 in arg_1 : arg_4 = arg_0 . extract_fields ( arg_3 ) arg_2 [ arg_3 [ 'name' ] ] = arg_4 return arg_2","id_":253308,"task_name":"https:\/\/github.com\/boundary\/pulse-api-cli\/blob\/b01ca65b442eed19faac309c9d62bbc3cb2c098f\/boundary\/metric_export.py#L59-L67","negative":"Adds all parameters to `traj`"}
{"query":"Compare against another Jwt .","positive":"def Func ( arg_0 , arg_1 : 'Jwt' , arg_2 : arg_3 = False ) -> arg_3 : if arg_0 . secret != arg_1 . secret : return False if arg_0 . payload != arg_1 . payload : return False if arg_0 . alg != arg_1 . alg : return False if arg_0 . header != arg_1 . header : return False arg_4 = arg_0 . registered_claims arg_5 = arg_1 . registered_claims if not arg_2 : arg_6 = [ 'exp' , 'nbf' , 'iat' ] arg_4 = { k : { v if k not in arg_6 else None } for k , v in arg_4 . items ( ) } arg_5 = { k : { v if k not in arg_6 else None } for k , v in arg_5 . items ( ) } if arg_4 != arg_5 : return False return True","id_":253309,"task_name":"https:\/\/github.com\/jmwri\/simplejwt\/blob\/0828eaace0846918d2d202f5a60167a003e88b71\/simplejwt\/jwt.py#L333-L362","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."}
{"query":"Determines all modules that script transitively depends upon .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = set ( ) def calc ( arg_0 , arg_1 ) : if arg_0 in arg_3 : return arg_3 . add ( arg_0 ) for arg_4 in collect_imports ( arg_0 , arg_1 , arg_2 ) : if arg_4 . is_native : arg_3 . add ( arg_4 . name ) continue arg_5 = arg_4 . name . split ( '.' ) calc ( arg_4 . name , arg_4 . script ) if len ( arg_5 ) == 1 : continue arg_6 , arg_7 = os . path . split ( arg_4 . script ) if arg_7 == '__init__.py' : arg_6 = os . path . dirname ( arg_6 ) for arg_8 in xrange ( len ( arg_5 ) - 1 , 0 , - 1 ) : arg_0 = '.' . join ( arg_5 [ : arg_8 ] ) arg_1 = os . path . join ( arg_6 , '__init__.py' ) calc ( arg_0 , arg_1 ) arg_6 = os . path . dirname ( arg_6 ) calc ( arg_0 , arg_1 ) arg_3 . remove ( arg_0 ) return arg_3","id_":253310,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/compiler\/imputil.py#L207-L233","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."}
{"query":"Get all variants for an institute having Sanger validations ordered but still not evaluated","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . sanger_ordered ( arg_1 , arg_2 ) arg_4 = [ ] for arg_5 in arg_3 : arg_6 = arg_5 [ '_id' ] arg_7 = arg_0 . case ( arg_6 = arg_6 ) if not arg_7 : continue arg_8 = arg_7 . get ( 'display_name' ) arg_9 = arg_5 [ 'vars' ] arg_10 = { } arg_10 [ arg_8 ] = [ ] for arg_11 in arg_9 : arg_12 = arg_0 . variant ( document_id = arg_11 , arg_6 = arg_6 ) if arg_12 is None or arg_12 . get ( 'sanger_ordered' ) is None or arg_12 . get ( 'sanger_ordered' ) is False : continue arg_13 = arg_12 . get ( 'validation' , 'not_evaluated' ) if arg_13 in [ 'True positive' , 'False positive' ] : continue arg_10 [ arg_8 ] . append ( arg_12 [ '_id' ] ) if len ( arg_10 [ arg_8 ] ) > 0 : arg_4 . append ( arg_10 ) return arg_4","id_":253311,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/controllers.py#L553-L608","negative":"The 1D index mappings between the masked sparse-grid and unmasked sparse grid."}
{"query":"Wait for the termination of a process and log its stdout & stderr","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : proc . stream_process_stdout ( arg_2 , stdout_log_fn ( arg_1 ) ) arg_2 . wait ( )","id_":253312,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/executor\/src\/python\/heron_executor.py#L922-L925","negative":"Test whether FILENAME matches PATTERN, including case.\n\n This is a version of fnmatch() which doesn't case-normalize\n its arguments."}
{"query":"returns a random tuple representing person information","positive":"def Func ( arg_0 = None , arg_1 = None ) : arg_0 = arg_0 or [ 'en' ] arg_1 = arg_1 or ( GENDER_FEMALE , GENDER_MALE ) arg_2 = random . choice ( arg_0 ) arg_3 = random . choice ( arg_1 ) arg_4 = title ( [ arg_2 ] , [ arg_3 ] ) return first_name ( [ arg_2 ] , [ arg_3 ] ) , last_name ( [ arg_2 ] ) , arg_4 , arg_3","id_":253313,"task_name":"https:\/\/github.com\/saxix\/sample-data-utils\/blob\/769f1b46e60def2675a14bd5872047af6d1ea398\/sample_data_utils\/people.py#L64-L87","negative":"Open dashboard in browser."}
{"query":"Prints all currently defined configurations .","positive":"def Func ( ) : arg_0 , arg_1 = read_latoolscfg ( ) arg_2 = arg_1 [ 'DEFAULT' ] [ 'config' ] arg_3 = '\\nCurrently defined LAtools configurations:\\n\\n' for arg_4 in arg_1 . sections ( ) : if arg_4 == arg_2 : arg_3 += arg_4 + ' [DEFAULT]\\n' elif arg_4 == 'REPRODUCE' : arg_3 += arg_4 + ' [DO NOT ALTER]\\n' else : arg_3 += arg_4 + '\\n' for arg_5 , arg_6 in arg_1 [ arg_4 ] . items ( ) : if arg_5 != 'config' : if arg_6 [ : 9 ] == 'resources' : arg_6 = pkgrs . resource_filename ( 'latools' , arg_6 ) arg_3 += '   ' + arg_5 + ': ' + arg_6 + '\\n' arg_3 += '\\n' print ( arg_3 ) return","id_":253314,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/latools\/helpers\/config.py#L50-L76","negative":"Revoke the token and remove the cookie."}
{"query":"Convert a figure to svg or png for inline display .","positive":"def Func ( arg_0 , arg_1 = 'png' ) : if not arg_0 . axes and not arg_0 . lines : return arg_2 = arg_0 . get_facecolor ( ) arg_3 = arg_0 . get_edgecolor ( ) arg_0 . set_facecolor ( 'white' ) arg_0 . set_edgecolor ( 'white' ) try : arg_4 = BytesIO ( ) arg_0 . canvas . Func ( arg_4 , format = arg_1 , bbox_inches = 'tight' ) arg_5 = arg_4 . getvalue ( ) finally : arg_0 . set_facecolor ( arg_2 ) arg_0 . set_edgecolor ( arg_3 ) return arg_5","id_":253315,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/pylabtools.py#L91-L109","negative":"Sets the package verification code, if not already set.\n code - A string.\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.\n Raises Value error if doesn't match verifcode form"}
{"query":"Convert the table and column descriptions of a TableGroup into specifications for the DB schema .","positive":"def Func ( arg_0 ) : arg_1 = { } for arg_2 , arg_3 in arg_0 . tabledict . items ( ) : arg_4 = TableSpec . from_table_metadata ( arg_3 ) arg_1 [ arg_4 . name ] = arg_4 for arg_6 in arg_4 . many_to_many . values ( ) : arg_1 [ arg_6 . name ] = arg_6 arg_7 = OrderedDict ( ) arg_8 = 0 while arg_1 and arg_8 < 100 : arg_8 += 1 for arg_3 in list ( arg_1 . keys ( ) ) : if all ( ( arg_9 [ 1 ] in arg_7 ) or arg_9 [ 1 ] == arg_3 for arg_9 in arg_1 [ arg_3 ] . foreign_keys ) : arg_7 [ arg_3 ] = arg_1 . pop ( arg_3 ) break if arg_1 : raise ValueError ( 'there seem to be cyclic dependencies between the tables' ) return list ( arg_7 . values ( ) )","id_":253316,"task_name":"https:\/\/github.com\/cldf\/csvw\/blob\/181c94b6c599575945e52d370a415f12f3433eab\/src\/csvw\/db.py#L235-L266","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."}
{"query":"Verifies the signature for the given value .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . derive_key ( ) try : arg_2 = base64_decode ( arg_2 ) except Exception : return False return arg_0 . algorithm . Func ( arg_3 , arg_1 , arg_2 )","id_":253317,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/itsdangerous.py#L355-L362","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."}
{"query":"Displays a dialog for exporting HTML generated by Qt s rich text system .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . control . window ( ) arg_2 = QtGui . QFileDialog ( arg_1 , 'Save as...' ) arg_2 . setAcceptMode ( QtGui . QFileDialog . AcceptSave ) arg_3 = [ 'HTML with PNG figures (*.html *.htm)' , 'XHTML with inline SVG figures (*.xhtml *.xml)' ] arg_2 . setNameFilters ( arg_3 ) if arg_0 . filename : arg_2 . selectFile ( arg_0 . filename ) arg_4 , arg_5 = os . path . splitext ( arg_0 . filename ) if arg_5 . lower ( ) in ( '.xml' , '.xhtml' ) : arg_2 . selectNameFilter ( arg_3 [ - 1 ] ) if arg_2 . exec_ ( ) : arg_0 . filename = arg_2 . selectedFiles ( ) [ 0 ] arg_7 = arg_2 . selectedNameFilter ( ) arg_8 = arg_0 . control . document ( ) . toHtml ( ) . encode ( 'utf-8' ) if arg_7 . startswith ( 'XHTML' ) : arg_9 = Func_xhtml else : arg_10 = arg_0 . inline_png if arg_10 is None and IMG_RE . search ( arg_8 ) : arg_2 = QtGui . QDialog ( arg_1 ) arg_2 . setWindowTitle ( 'Save as...' ) arg_11 = QtGui . QVBoxLayout ( arg_2 ) arg_12 = \"Exporting HTML with PNGs\" arg_13 = \"Would you like inline PNGs (single large html \" \"file) or external image files?\" arg_14 = QtGui . QCheckBox ( \"&Don't ask again\" ) arg_14 . setShortcut ( 'D' ) arg_15 = QtGui . QPushButton ( \"&Inline\" ) arg_15 . setShortcut ( 'I' ) arg_16 = QtGui . QPushButton ( \"&External\" ) arg_16 . setShortcut ( 'E' ) arg_17 = QtGui . QMessageBox ( QtGui . QMessageBox . Question , arg_2 . windowTitle ( ) , arg_12 ) arg_17 . setInformativeText ( arg_13 ) arg_17 . addButton ( arg_15 , QtGui . QMessageBox . NoRole ) arg_17 . addButton ( arg_16 , QtGui . QMessageBox . YesRole ) arg_11 . setSpacing ( 0 ) arg_11 . addWidget ( arg_17 ) arg_11 . addWidget ( arg_14 ) arg_2 . setLayout ( arg_11 ) arg_2 . show ( ) arg_18 = arg_17 . exec_ ( ) arg_2 . hide ( ) arg_10 = ( arg_18 == 0 ) if arg_14 . checkState ( ) : arg_0 . inline_png = arg_10 arg_9 = lambda h , f , i : Func_html ( h , f , i , arg_10 ) try : return arg_9 ( arg_8 , arg_0 . filename , arg_0 . image_tag ) except Exception , e : arg_12 = \"Error Funcing HTML to %s\\n\" % arg_0 . filename + str ( e ) arg_18 = QtGui . QMessageBox . warning ( arg_1 , 'Error' , arg_12 , QtGui . QMessageBox . Ok , QtGui . QMessageBox . Ok ) return None","id_":253318,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/rich_text.py#L47-L119","negative":"Returns assignment data for the given course_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/analytics.html#method.analytics_api.course_assignments"}
{"query":"Encode list of messages . Expects messages to be unicode .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_1 or arg_1 [ 0 ] is None : return '' if len ( arg_1 ) == 1 : return arg_1 [ 0 ] . encode ( 'utf-8' ) arg_2 = u'' . join ( [ ( u'\\ufffd%d\\ufffd%s' % ( len ( p ) , p ) ) for p in arg_1 if p is not None ] ) return arg_2 . encode ( 'utf-8' )","id_":253319,"task_name":"https:\/\/github.com\/abourget\/gevent-socketio\/blob\/1cdb1594a315326987a17ce0924ea448a82fab01\/socketio\/transports.py#L95-L112","negative":"Internal ``RUN_TASK`` consumer to run the task's callable"}
{"query":"Like os . path . join but acts relative to this packages bin path .","positive":"def Func ( * arg_0 ) : arg_1 = os . path . dirname ( __file__ ) return os . path . normpath ( os . path . join ( arg_1 , 'bin' , * arg_0 ) )","id_":253320,"task_name":"https:\/\/github.com\/cpenv\/cpenv\/blob\/afbb569ae04002743db041d3629a5be8c290bd89\/cpenv\/utils.py#L83-L87","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."}
{"query":"Write the text to the stream and flush immediately .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . stream . write ( arg_1 ) arg_0 . stream . flush ( )","id_":253321,"task_name":"https:\/\/github.com\/Julian\/Ivoire\/blob\/5b8218cffa409ed733cf850a6fde16fafb8fc2af\/ivoire\/result.py#L165-L172","negative":"validate source directory names in components"}
{"query":"Given some error text it will log the text if self . log_errors is True","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> None : if arg_0 . Funcs : with arg_0 . _log_fp . open ( 'a+' ) as log_file : log_file . write ( f'{text}\\n' )","id_":253322,"task_name":"https:\/\/github.com\/apmoore1\/tweebo_parser_python_api\/blob\/224be2570b8b2508d29771f5e5abe06e1889fd89\/tweebo_parser\/api.py#L47-L55","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"}
{"query":"Build fake CIFAR10 - style data for unit testing .","positive":"def Func ( ) : arg_0 = 10 arg_1 = np . random . rand ( arg_0 , * IMAGE_SHAPE ) . astype ( np . float32 ) arg_2 = np . random . permutation ( np . arange ( arg_0 ) ) . astype ( np . int32 ) arg_3 = np . random . rand ( arg_0 , * IMAGE_SHAPE ) . astype ( np . float32 ) arg_4 = np . random . permutation ( np . arange ( arg_0 ) ) . astype ( np . int32 ) return ( arg_1 , arg_2 ) , ( arg_3 , arg_4 )","id_":253323,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/examples\/cifar10_bnn.py#L155-L162","negative":"Start listening for events from Marathon, running a sync when we first\n successfully subscribe and triggering a sync on API request events."}
{"query":"Print top_n big dir and top_n big file in each dir .","positive":"def Func ( arg_0 , arg_1 = 5 ) : arg_0 . assert_is_dir_and_exists ( ) arg_2 = sorted ( [ ( p , p . dirsize ) for p in arg_0 . select_dir ( recursive = False ) ] , key = lambda x : x [ 1 ] , reverse = True , ) for arg_3 , arg_4 in arg_2 [ : arg_1 ] : print ( \"{:<9} {:<9}\" . format ( repr_data_size ( arg_4 ) , arg_3 . abspath ) ) arg_5 = sorted ( [ ( p , p . size ) for p in arg_3 . select_file ( recursive = True ) ] , key = lambda x : x [ 1 ] , reverse = True , ) for arg_6 , arg_7 in arg_5 [ : arg_1 ] : print ( \"    {:<9} {:<9}\" . format ( repr_data_size ( arg_7 ) , arg_6 . abspath ) )","id_":253324,"task_name":"https:\/\/github.com\/MacHu-GWU\/pathlib_mate-project\/blob\/f9fb99dd7cc9ea05d1bec8b9ce8f659e8d97b0f1\/pathlib_mate\/mate_tool_box.py#L118-L137","negative":"Return True if we should retry. False otherwise.\n\n Args:\n exception: An exception to test for transience.\n\n Returns:\n True if we should retry. False otherwise."}
{"query":"The complete content of an info response .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False ) : return \"\" . join ( [ arg_4 for arg_4 in arg_0 . Func_gen ( arg_1 , arg_2 , arg_3 ) ] )","id_":253325,"task_name":"https:\/\/github.com\/greenbender\/pynntp\/blob\/991a76331cdf5d8f9dbf5b18f6e29adc80749a2f\/nntp\/nntp.py#L384-L393","negative":"Returns all of the items from queryset where the user has a\n product invoking that item's condition in one of their carts."}
{"query":"Get the list of movies for a particular genre by id . By default only movies with 10 or more votes are included .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = arg_0 . _get_id_path ( 'Func' ) arg_3 = arg_0 . _GET ( arg_2 , arg_1 ) arg_0 . _set_attrs_to_values ( arg_3 ) return arg_3","id_":253326,"task_name":"https:\/\/github.com\/celiao\/tmdbsimple\/blob\/ff17893110c99771d6398a62c35d36dd9735f4b9\/tmdbsimple\/genres.py#L66-L87","negative":"Replace target with replacement"}
{"query":"Add a new watching rule .","positive":"def Func ( arg_0 , arg_1 , arg_2 , * , arg_3 = None ) : if arg_3 is None : arg_3 = arg_1 if arg_3 in arg_0 . requests : raise ValueError ( \"A Func request is already scheduled for alias %s\" % arg_3 ) arg_0 . requests [ arg_3 ] = ( arg_1 , arg_2 ) if arg_0 . _fd is not None : arg_0 . _setup_Func ( arg_3 , arg_1 , arg_2 )","id_":253327,"task_name":"https:\/\/github.com\/rbarrois\/aionotify\/blob\/6cfa35b26a2660f77f29a92d3efb7d1dde685b43\/aionotify\/base.py#L50-L59","negative":"Given a single spinn3r feed entry, produce a single StreamItem.\n\n Returns 'None' if a complete item can't be constructed."}
{"query":"Confirm a build upload is complete .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { 'uploaded' : True } arg_3 = requests . patch ( arg_0 , auth = ( arg_1 , '' ) , json = arg_2 ) if arg_3 . status_code != 200 : raise KeeperError ( arg_3 )","id_":253328,"task_name":"https:\/\/github.com\/lsst-sqre\/ltd-conveyor\/blob\/c492937c4c1e050ccc4a0b9dcc38f9980d57e305\/ltdconveyor\/keeper\/build.py#L60-L87","negative":"r'''Method to calculate heat capacity of a solid at temperature `T`\n with a given method.\n\n This method has no exception handling; see `T_dependent_property`\n for that.\n\n Parameters\n ----------\n T : float\n Temperature at which to calculate heat capacity, [K]\n method : str\n Name of the method to use\n\n Returns\n -------\n Cp : float\n Heat capacity of the solid at T, [J\/mol\/K]"}
{"query":"Declare an environment variable as a special variable . This can be used even if the environment variable is not present .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_1 in arg_0 . _special : arg_4 = arg_0 . _special [ arg_1 ] if not isinstance ( arg_4 , arg_3 ) or arg_2 != arg_4 . _sep : raise ValueError ( 'variable %s already declared as %s ' 'with separator \"%s\"' % ( arg_1 , arg_4 . __class__ . __name__ , arg_4 . _sep ) ) else : arg_0 . _special [ arg_1 ] = arg_3 ( arg_0 , arg_1 , arg_2 )","id_":253329,"task_name":"https:\/\/github.com\/rackerlabs\/timid\/blob\/b1c6aa159ab380a033740f4aa392cf0d125e0ac6\/timid\/environment.py#L374-L397","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."}
{"query":"Converts the input format to a regular expression as well as extracting fields","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = arg_1 . strip ( ) arg_1 = re . sub ( '[ \\t]+' , ' ' , arg_1 ) arg_2 = [ ] arg_3 = re . compile ( r'^\\\\\"' ) arg_4 = re . compile ( 'Referer|User-Agent' ) arg_5 = re . compile ( '^%.*t$' ) arg_6 = re . compile ( r'^\\\\\"' ) arg_7 = re . compile ( r'\\\\\"$' ) arg_8 = re . compile ( r'.*%\\{([^\\}]+)\\}i' ) for arg_9 in arg_1 . split ( ' ' ) : arg_10 = 0 if arg_3 . search ( arg_9 ) : arg_10 = 1 if arg_10 : arg_9 = arg_6 . sub ( '' , arg_9 ) arg_9 = arg_7 . sub ( '' , arg_9 ) arg_11 = arg_8 . match ( arg_9 ) if arg_11 : arg_0 . _names . append ( arg_11 . groups ( ) [ 0 ] . lower ( ) ) arg_0 . _types . append ( str ) else : arg_0 . _names . append ( arg_0 . alias ( arg_9 ) ) arg_0 . _types . append ( arg_0 . types . get ( arg_9 , [ None , str ] ) [ 1 ] ) arg_12 = '(\\S*)' if arg_10 : if arg_9 == '%r' or arg_4 . search ( arg_9 ) : arg_12 = r'\\\"([^\"\\\\]*(?:\\\\.[^\"\\\\]*)*)\\\"' else : arg_12 = r'\\\"([^\\\"]*)\\\"' elif arg_5 . search ( arg_9 ) : arg_12 = r'(\\[[^\\]]+\\])' elif arg_9 == '%U' : arg_12 = '(.+?)' arg_2 . append ( arg_12 ) arg_0 . _pattern = '^' + ' ' . join ( arg_2 ) + '$' try : arg_0 . _regex = re . compile ( arg_0 . _pattern ) except Exception as e : raise ApacheLogParserError ( e )","id_":253330,"task_name":"https:\/\/github.com\/calston\/tensor\/blob\/7c0c99708b5dbff97f3895f705e11996b608549d\/tensor\/logs\/parsers.py#L65-L122","negative":"Sets the player's paused state."}
{"query":"Make a protobuf Descriptor given a DescriptorProto protobuf .","positive":"def Func ( arg_0 , arg_1 = '' , arg_2 = True , arg_3 = None ) : if api_implementation . Type ( ) == 'cpp' and arg_2 : from typy . google . protobuf import descriptor_pb2 arg_4 = descriptor_pb2 . FileDescriptorProto ( ) arg_4 . message_type . add ( ) . MergeFrom ( arg_0 ) arg_5 = str ( uuid . uuid4 ( ) ) if arg_1 : arg_4 . name = os . path . join ( arg_1 . replace ( '.' , '\/' ) , arg_5 + '.proto' ) arg_4 . package = arg_1 else : arg_4 . name = arg_5 + '.proto' _message . default_pool . Add ( arg_4 ) arg_7 = _message . default_pool . FindFileByName ( arg_4 . name ) if _USE_C_DESCRIPTORS : return arg_7 . message_types_by_name [ arg_0 . name ] arg_8 = [ arg_0 . name ] if arg_1 : arg_8 . insert ( 0 , arg_1 ) arg_9 = { } for arg_10 in arg_0 . enum_type : arg_11 = '.' . join ( arg_8 + [ arg_10 . name ] ) arg_12 = EnumDescriptor ( arg_10 . name , arg_11 , None , [ EnumValueDescriptor ( enum_val . name , ii , enum_val . number ) for ii , enum_val in enumerate ( arg_10 . value ) ] ) arg_9 [ arg_11 ] = arg_12 arg_13 = { } for arg_14 in arg_0 . nested_type : arg_11 = '.' . join ( arg_8 + [ arg_14 . name ] ) arg_15 = Func ( arg_14 , arg_1 = '.' . join ( arg_8 ) , arg_2 = False , arg_3 = arg_3 ) arg_13 [ arg_11 ] = arg_15 arg_16 = [ ] for arg_17 in arg_0 . field : arg_11 = '.' . join ( arg_8 + [ arg_17 . name ] ) arg_12 = None arg_15 = None if arg_17 . HasField ( 'type_name' ) : arg_18 = arg_17 . type_name arg_19 = '.' . join ( arg_8 + [ arg_18 [ arg_18 . rfind ( '.' ) + 1 : ] ] ) if arg_19 in arg_13 : arg_15 = arg_13 [ arg_19 ] elif arg_19 in arg_9 : arg_12 = arg_9 [ arg_19 ] arg_20 = FieldDescriptor ( arg_17 . name , arg_11 , arg_17 . number - 1 , arg_17 . number , arg_17 . type , FieldDescriptor . ProtoTypeToCppProtoType ( arg_17 . type ) , arg_17 . label , None , arg_15 , arg_12 , None , False , None , options = arg_17 . options , has_default_value = False ) arg_16 . append ( arg_20 ) arg_21 = '.' . join ( arg_8 ) return Descriptor ( arg_0 . name , arg_21 , None , None , arg_16 , list ( arg_13 . values ( ) ) , list ( arg_9 . values ( ) ) , [ ] , options = arg_0 . options )","id_":253331,"task_name":"https:\/\/github.com\/ibelie\/typy\/blob\/3616845fb91459aacd8df6bf82c5d91f4542bee7\/typy\/google\/protobuf\/descriptor.py#L875-L971","negative":"Utility for creating continuous palette from the cubehelix system.\n\n This produces a colormap with linearly-decreasing (or increasing)\n brightness. That means that information will be preserved if printed to\n black and white or viewed by someone who is colorblind.\n\n Parameters\n ----------\n start : float (0 <= start <= 3)\n The hue at the start of the helix.\n rot : float\n Rotations around the hue wheel over the range of the palette.\n gamma : float (0 <= gamma)\n Gamma factor to emphasize darker (gamma < 1) or lighter (gamma > 1)\n colors.\n hue : float (0 <= hue <= 1)\n Saturation of the colors.\n dark : float (0 <= dark <= 1)\n Intensity of the darkest color in the palette.\n light : float (0 <= light <= 1)\n Intensity of the lightest color in the palette.\n reverse : bool\n If True, the palette will go from dark to light.\n\n Returns\n -------\n out : function\n Continuous color palette that takes a single\n :class:`int` parameter ``n`` and returns ``n``\n equally spaced colors.\n\n\n References\n ----------\n Green, D. A. (2011). \"A colour scheme for the display of astronomical\n intensity images\". Bulletin of the Astromical Society of India, Vol. 39,\n p. 289-295.\n\n Examples\n --------\n >>> palette = cubehelix_pal()\n >>> palette(5)\n ['#edd1cb', '#d499a7', '#aa688f', '#6e4071', '#2d1e3e']"}
{"query":"Add a tier . When no linguistic type is given and the default linguistic type is unavailable then the assigned linguistic type will be the first in the list .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'default-lt' , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = None ) : if not arg_1 : raise ValueError ( 'Tier id is empty...' ) if arg_2 not in arg_0 . linguistic_types : arg_2 = sorted ( arg_0 . linguistic_types . keys ( ) ) [ 0 ] if arg_4 and arg_4 not in arg_0 . locales : arg_4 = None if arg_7 and arg_7 not in arg_0 . languages : arg_7 = None if arg_8 is None : arg_0 . tiers [ arg_1 ] = ( { } , { } , { 'TIER_ID' : arg_1 , 'LINGUISTIC_TYPE_REF' : arg_2 , 'PARENT_REF' : arg_3 , 'PARTICIPANT' : arg_5 , 'DEFAULT_LOCALE' : arg_4 , 'LANG_REF' : arg_7 , 'ANNOTATOR' : arg_6 } , len ( arg_0 . tiers ) ) else : arg_0 . tiers [ arg_1 ] = ( { } , { } , arg_8 , len ( arg_0 .
tiers ) )","id_":253332,"task_name":"https:\/\/github.com\/dopefishh\/pympi\/blob\/79c747cde45b5ba203ed93154d8c123ac9c3ef56\/pympi\/Elan.py#L360-L400","negative":"Transforms predictions into probability values.\n\n Parameters\n ----------\n logits : array_like\n The logits predicted by the model.\n\n Returns\n -------\n `numpy.ndarray`\n Probability values corresponding to the logits."} {"query":"Attempt to import tensorflow and ensure its version is sufficient .","positive":"def Func ( ) : try : import tensorflow as tf except ImportError : print ( \"\\n\\nFailed to import TensorFlow. Please note that TensorFlow is not \" \"installed by default when you install TensorFlow Probability. This \" \"is so that users can decide whether to install the GPU-enabled \" \"TensorFlow package. To use TensorFlow Probability, please install \" \"the most recent version of TensorFlow, by following instructions at \" \"https:\/\/tensorflow.org\/install.\\n\\n\" ) raise import distutils . version arg_0 = \"1.13\" if ( distutils . version . LooseVersion ( tf . __version__ ) < distutils . version . LooseVersion ( arg_0 ) ) : raise ImportError ( \"This version of TensorFlow Probability requires TensorFlow \" \"version >= {required}; Detected an installation of version {present}. \" \"Please upgrade TensorFlow to proceed.\" . format ( required = arg_0 , present = tf . __version__ ) )","id_":253333,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/__init__.py#L32-L65","negative":"write lines, one by one, separated by \\n to device"} {"query":"Finds and returns a model architecture and its parameters from the database which matches the requirement .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = 'model' , ** arg_4 ) : arg_4 . update ( { 'model_name' : arg_3 } ) arg_0 . _fill_project_info ( arg_4 ) arg_5 = time . time ( ) arg_6 = arg_0 . db . Model . find_one ( filter = arg_4 , arg_2 = arg_2 ) arg_7 = '_find_one_model_ztemp_file' if arg_6 is not None : arg_8 = arg_6 [ 'params_id' ] arg_9 = arg_6 [ 'architecture' ] arg_10 = arg_6 [ 'time' ] exists_or_mkdir ( arg_7 , False ) with open ( os . path . join ( arg_7 , 'graph.pkl' ) , 'wb' ) as file : pickle . dump ( arg_9 , file , protocol = pickle . HIGHEST_PROTOCOL ) else : print ( \"[Database] FAIL! Cannot find model: {}\" . format ( arg_4 ) ) return False try : arg_11 = arg_0 . _deserialization ( arg_0 . model_fs . get ( arg_8 ) . read ( ) ) np . savez ( os . path . join ( arg_7 , 'params.npz' ) , arg_11 = arg_11 ) arg_12 = load_graph_and_params ( name = arg_7 , arg_1 = arg_1 ) del_folder ( arg_7 ) arg_13 = arg_0 . db . Model . find ( arg_4 ) print ( \"[Database] Find one model SUCCESS. kwargs:{} sort:{} save time:{} took: {}s\" . format ( arg_4 , arg_2 , arg_10 , round ( time . time ( ) - arg_5 , 2 ) ) ) for arg_14 in arg_6 : arg_12 . __dict__ . update ( { \"_%s\" % arg_14 : arg_6 [ arg_14 ] } ) arg_15 = arg_13 . distinct ( 'params_id' ) arg_16 = len ( arg_15 ) if arg_16 != 1 : print ( \" Note that there are {} models match the kwargs\" . format ( arg_16 ) ) return arg_12 except Exception as e : arg_17 , arg_18 , arg_19 = sys . exc_info ( ) arg_20 = os . path . split ( arg_19 . tb_frame . f_code . co_filename ) [ 1 ] logging . info ( \"{} {} {} {} {}\" . format ( arg_17 , arg_18 , arg_20 , arg_19 . 
tb_lineno , e ) ) return False","id_":253334,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/db.py#L171-L240","negative":"Return a datetime with the same value as ``dt``, to a\n resolution of days."} {"query":"Decide whether to trace execution in filename .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 , arg_4 = arg_0 . Func_with_reason ( arg_1 , arg_2 ) if arg_0 . debug . should ( 'trace' ) : if not arg_3 : arg_5 = \"Not tracing %r: %s\" % ( arg_1 , arg_4 ) else : arg_5 = \"Tracing %r\" % ( arg_1 , ) arg_0 . debug . write ( arg_5 ) return arg_3","id_":253335,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/control.py#L290-L303","negative":"Returns an aggregator connection."} {"query":"Get the clipboard s text on OS X .","positive":"def Func ( ) : arg_0 = subprocess . Popen ( [ 'pbpaste' , '-Prefer' , 'ascii' ] , stdout = subprocess . PIPE ) arg_1 , arg_2 = arg_0 . communicate ( ) arg_1 = arg_1 . replace ( '\\r' , '\\n' ) return arg_1","id_":253336,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/clipboard.py#L26-L34","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Store the given text contents so that they are later retrievable by the given key .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . _blobservice . create_blob_from_text ( arg_0 . uuid , arg_1 , arg_2 )","id_":253337,"task_name":"https:\/\/github.com\/jic-dtool\/dtool-azure\/blob\/5f5f1faa040e047e619380faf437a74cdfa09737\/dtool_azure\/storagebroker.py#L340-L348","negative":"Create an iterable from the iterables that contains each element once.\n\n :return: an iterable over the iterables. Each element of the result\n appeared only once in the result. They are ordered by the first\n occurrence in the iterables."} {"query":"Get the count of an n - gram in the corpus .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if not arg_2 : arg_2 = arg_0 . ngcorpus if not arg_1 : return arg_2 [ None ] if isinstance ( arg_1 , ( text_type , str ) ) : arg_1 = text_type ( arg_1 ) . split ( ) if arg_1 [ 0 ] in arg_2 : return arg_0 . Func ( arg_1 [ 1 : ] , arg_2 [ arg_1 [ 0 ] ] ) return 0","id_":253338,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/corpus\/_ngram_corpus.py#L141-L183","negative":"Enables GPIO interrupts."} {"query":"Run WsgiDAV using wsgiref . simple_server on Python 2 . 5 + .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : from wsgiref . simple_server import make_server , software_version arg_3 = \"WsgiDAV\/{} {}\" . format ( __version__ , software_version ) _logger . info ( \"Running {}...\" . format ( arg_3 ) ) _logger . warning ( \"WARNING: This single threaded server (wsgiref) is not meant for production.\" ) arg_4 = make_server ( arg_1 [ \"host\" ] , arg_1 [ \"port\" ] , arg_0 ) try : arg_4 . serve_forever ( ) except KeyboardInterrupt : _logger . 
warning ( \"Caught Ctrl-C, shutting down...\" ) return","id_":253339,"task_name":"https:\/\/github.com\/mar10\/wsgidav\/blob\/cec0d84222fc24bea01be1cea91729001963f172\/wsgidav\/server\/server_cli.py#L715-L730","negative":"Stops a timer if it hasn't fired yet\n\n * func - the function passed in start_timer"} {"query":"Retrieve all of the relevant key wrapping data fields and return them as a dictionary .","positive":"def Func ( arg_0 ) : Func = { } arg_2 = { 'unique_identifier' : arg_0 . _kdw_eki_unique_identifier , 'cryptographic_parameters' : { 'block_cipher_mode' : arg_0 . _kdw_eki_cp_block_cipher_mode , 'padding_method' : arg_0 . _kdw_eki_cp_padding_method , 'hashing_algorithm' : arg_0 . _kdw_eki_cp_hashing_algorithm , 'key_role_type' : arg_0 . _kdw_eki_cp_key_role_type , 'digital_signature_algorithm' : arg_0 . _kdw_eki_cp_digital_signature_algorithm , 'cryptographic_algorithm' : arg_0 . _kdw_eki_cp_cryptographic_algorithm , 'random_iv' : arg_0 . _kdw_eki_cp_random_iv , 'iv_length' : arg_0 . _kdw_eki_cp_iv_length , 'tag_length' : arg_0 . _kdw_eki_cp_tag_length , 'fixed_field_length' : arg_0 . _kdw_eki_cp_fixed_field_length , 'invocation_field_length' : arg_0 . _kdw_eki_cp_invocation_field_length , 'counter_length' : arg_0 . _kdw_eki_cp_counter_length , 'initial_counter_value' : arg_0 . _kdw_eki_cp_initial_counter_value } } if not any ( arg_2 [ 'cryptographic_parameters' ] . values ( ) ) : arg_2 [ 'cryptographic_parameters' ] = { } if not any ( arg_2 . values ( ) ) : arg_2 = { } arg_3 = { 'unique_identifier' : arg_0 . _kdw_mski_unique_identifier , 'cryptographic_parameters' : { 'block_cipher_mode' : arg_0 . _kdw_mski_cp_block_cipher_mode , 'padding_method' : arg_0 . _kdw_mski_cp_padding_method , 'hashing_algorithm' : arg_0 . _kdw_mski_cp_hashing_algorithm , 'key_role_type' : arg_0 . _kdw_mski_cp_key_role_type , 'digital_signature_algorithm' : arg_0 . _kdw_mski_cp_digital_signature_algorithm , 'cryptographic_algorithm' : arg_0 . _kdw_mski_cp_cryptographic_algorithm , 'random_iv' : arg_0 . _kdw_mski_cp_random_iv , 'iv_length' : arg_0 . _kdw_mski_cp_iv_length , 'tag_length' : arg_0 . _kdw_mski_cp_tag_length , 'fixed_field_length' : arg_0 . _kdw_mski_cp_fixed_field_length , 'invocation_field_length' : arg_0 . _kdw_mski_cp_invocation_field_length , 'counter_length' : arg_0 . _kdw_mski_cp_counter_length , 'initial_counter_value' : arg_0 . _kdw_mski_cp_initial_counter_value } } if not any ( arg_3 [ 'cryptographic_parameters' ] . values ( ) ) : arg_3 [ 'cryptographic_parameters' ] = { } if not any ( arg_3 . values ( ) ) : arg_3 = { } Func [ 'wrapping_method' ] = arg_0 . _kdw_wrapping_method Func [ 'encryption_key_information' ] = arg_2 Func [ 'mac_signature_key_information' ] = arg_3 Func [ 'mac_signature' ] = arg_0 . _kdw_mac_signature Func [ 'iv_counter_nonce' ] = arg_0 . _kdw_iv_counter_nonce Func [ 'encoding_option' ] = arg_0 . _kdw_encoding_option if not any ( Func . 
values ( ) ) : Func = { } return Func","id_":253340,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/pie\/objects.py#L424-L493","negative":"Updates a player's state when a payload with opcode ``playerUpdate`` is received."} {"query":"Match a parser zero or more times repeatedly .","positive":"def Func ( arg_0 : arg_1 [ arg_2 , arg_3 [ arg_4 ] ] ) -> RepeatedParser : if isinstance ( arg_0 , str ) : arg_0 = lit ( arg_0 ) return RepeatedParser ( arg_0 )","id_":253341,"task_name":"https:\/\/github.com\/drhagen\/parsita\/blob\/d97414a05541f48231381f607d1d2e6b50781d39\/parsita\/parsers.py#L491-L503","negative":"Populate self._thumbnails."} {"query":"Return pairs of run ids and results .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . communicationChannel . Func_all ( ) arg_0 . nruns -= len ( arg_1 ) if arg_0 . nruns > 0 : import logging arg_2 = logging . getLogger ( __name__ ) arg_2 . warning ( 'too few results Funcd: {} results Funcd, {} more expected' . format ( len ( arg_1 ) , arg_0 . nruns ) ) elif arg_0 . nruns < 0 : import logging arg_2 = logging . getLogger ( __name__ ) arg_2 . warning ( 'too many results Funcd: {} results Funcd, {} too many' . format ( len ( arg_1 ) , - arg_0 . nruns ) ) return arg_1","id_":253342,"task_name":"https:\/\/github.com\/alphatwirl\/alphatwirl\/blob\/5138eeba6cd8a334ba52d6c2c022b33c61e3ba38\/alphatwirl\/loop\/MPEventLoopRunner.py#L115-L134","negative":"Decorator for methods accepting old_path and new_path."} {"query":"Reorder levels of an H2O factor for one single column of a H2O frame","positive":"def Func ( arg_0 , arg_1 ) : return H2OFrame . _expr ( expr = ExprNode ( \"Func\" , arg_0 , quote ( arg_1 ) ) )","id_":253343,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/frame.py#L1978-L1988","negative":"Get the changeset using the OSM API and return the content as a XML\n ElementTree.\n\n Args:\n changeset: the id of the changeset."} {"query":"Convert the specification into an instruction","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _instruction_class ( arg_1 ) arg_3 = arg_2 . type if arg_3 in arg_0 . _type_to_instruction : arg_2 . inherit_from ( arg_0 . _type_to_instruction [ arg_3 ] ) return arg_2","id_":253344,"task_name":"https:\/\/github.com\/fossasia\/knittingpattern\/blob\/8e608896b0ab82fea1ca9fbfa2b4ee023d8c8027\/knittingpattern\/InstructionLibrary.py#L82-L96","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"Creates a sequence .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 8 , arg_4 = 0 ) : arg_5 = ( arg_1 [ 0 ] + arg_2 ) * FRAME_SIZE arg_6 = ( arg_1 [ 0 ] + arg_2 + 1 ) * FRAME_SIZE arg_7 = arg_0 [ arg_5 : arg_6 , ... ] arg_8 = tf . stack ( tf . split ( arg_7 , 13 , axis = 1 ) ) arg_8 = arg_8 [ 0 : arg_1 [ 1 ] ] arg_8 = tf . roll ( arg_8 , shift = - arg_4 , axis = 0 ) arg_8 = tf . tile ( arg_8 , [ 2 , 1 , 1 , 1 ] ) arg_8 = arg_8 [ : arg_3 ] arg_8 = tf . cast ( arg_8 , dtype = tf . float32 ) arg_8 . 
set_shape ( [ arg_3 , FRAME_SIZE , FRAME_SIZE , CHANNELS ] ) return arg_8","id_":253345,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/examples\/sprites_dataset.py#L152-L185","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lengths of the boundaries and labels\n match."} {"query":"Pre - process the data frame . New filtering methods will be implemented here .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_0 . data , pd . DataFrame ) : arg_2 = arg_0 . data . copy ( ) if arg_2 . index . dtype == 'O' : arg_2 = arg_2 . reset_index ( ) elif os . path . isfile ( arg_0 . data ) : if arg_0 . data . endswith ( \"gct\" ) : arg_2 = pd . read_csv ( arg_0 . data , skiprows = 1 , comment = '#' , sep = \"\\t\" ) else : arg_2 = pd . read_csv ( arg_0 . data , comment = '#' , sep = \"\\t\" ) else : raise Exception ( 'Error parsing gene expression DataFrame!' ) if arg_2 . iloc [ : , 0 ] . duplicated ( ) . sum ( ) > 0 : arg_0 . _logger . warning ( \"Warning: dropping duplicated gene names, only keep the first values\" ) arg_2 . drop_duplicates ( subset = arg_2 . columns [ 0 ] , inplace = True ) if arg_2 . isnull ( ) . any ( ) . sum ( ) > 0 : arg_0 . _logger . warning ( \"Warning: Input data contains NA, filled NA with 0\" ) arg_2 . dropna ( how = 'all' , inplace = True ) arg_2 = arg_2 . fillna ( 0 ) arg_2 . set_index ( keys = arg_2 . columns [ 0 ] , inplace = True ) arg_3 = arg_2 . select_dtypes ( include = [ np . number ] ) arg_4 = arg_3 . groupby ( by = arg_1 , axis = 1 ) . std ( ) arg_3 = arg_3 [ ~ arg_4 . isin ( [ 0 ] ) . any ( axis = 1 ) ] arg_3 = arg_3 + 0.00001 return arg_3","id_":253346,"task_name":"https:\/\/github.com\/zqfang\/GSEApy\/blob\/673e9ec1391e3b14d3e8a4353117151fd2cb9345\/gseapy\/gsea.py#L348-L382","negative":"Returns location data.\n\n http:\/\/dev.wheniwork.com\/#get-existing-location"} {"query":"Manually add a subscriber to a workflow bypassing the default trigger settings . You can also use this endpoint to trigger a series of automated emails in an API 3 . 0 workflow type or add subscribers to an automated email queue that uses the API request delay type .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_0 . workflow_id = arg_1 arg_0 . email_id = arg_2 if 'email_address' not in arg_3 : raise KeyError ( 'The automation email queue must have an email_address' ) check_email ( arg_3 [ 'email_address' ] ) arg_4 = arg_0 . _mc_client . _post ( url = arg_0 . _build_path ( arg_1 , 'emails' , arg_2 , 'queue' ) , arg_3 = arg_3 ) if arg_4 is not None : arg_0 . subscriber_hash = arg_4 [ 'id' ] else : arg_0 . subscriber_hash = None return arg_4","id_":253347,"task_name":"https:\/\/github.com\/VingtCinq\/python-mailchimp\/blob\/1b472f1b64fdde974732ac4b7ed48908bb707260\/mailchimp3\/entities\/automationemailqueues.py#L31-L61","negative":"Returns the value specified in the XDG_CONFIG_HOME environment variable\n or the appropriate default."} {"query":"For bold italics and underline . Simply checking to see if the various tags are present will not suffice . If the tag is present and set to False then the style should not be present .","positive":"def Func ( arg_0 ) : if arg_0 is None : return False arg_1 = get_namespace ( arg_0 , 'w' ) return arg_0 . 
get ( '%sval' % arg_1 ) != 'false'","id_":253348,"task_name":"https:\/\/github.com\/PolicyStat\/docx2html\/blob\/2dc4afd1e3a3f2f0b357d0bff903eb58bcc94429\/docx2html\/core.py#L494-L503","negative":"Extract Packed Floating-Point Values\n\n Extracts 128-bits of packed floating-point values from the source\n operand (second operand) at an 128-bit offset from imm8[0] into the\n destination operand (first operand). The destination may be either an\n XMM register or an 128-bit memory location."} {"query":"The speed limit for a boid . Boids can momentarily go very fast something that is impossible for real animals .","positive":"def Func ( arg_0 , arg_1 = 30 ) : if abs ( arg_0 . vx ) > arg_1 : arg_0 . vx = arg_0 . vx \/ abs ( arg_0 . vx ) * arg_1 if abs ( arg_0 . vy ) > arg_1 : arg_0 . vy = arg_0 . vy \/ abs ( arg_0 . vy ) * arg_1 if abs ( arg_0 . vz ) > arg_1 : arg_0 . vz = arg_0 . vz \/ abs ( arg_0 . vz ) * arg_1","id_":253349,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/lib\/boids\/__init__.py#L93-L107","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Customized version of get_data to directly get the data without using the authentication method .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 ( ) , arg_4 = arg_3 ( ) , arg_5 = True ) : arg_1 = urljoin ( arg_0 . end_point , arg_1 ) arg_6 = requests . get ( arg_1 , arg_2 = arg_2 , arg_4 = arg_4 , timeout = arg_0 . get_timeout ( ) ) if arg_5 : return arg_6 . json ( ) return arg_6 . content","id_":253350,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Metadata.py#L23-L35","negative":"Initializes profiler with a package."} {"query":"Most of this method is from docutils . parser . rst . Directive .","positive":"def Func ( arg_0 ) : if not arg_0 . state . document . settings . file_insertion_enabled : raise arg_0 . warning ( '\"%s\" directive disabled.' % arg_0 . name ) arg_1 = arg_0 . state_machine . input_lines . source ( arg_0 . lineno - arg_0 . state_machine . input_offset - 1 ) arg_2 = os . path . dirname ( os . path . abspath ( arg_1 ) ) arg_3 = rst . directives . path ( arg_0 . arguments [ 0 ] ) arg_3 = os . path . normpath ( os . path . join ( arg_2 , arg_3 ) ) arg_3 = utils . relative_path ( None , arg_3 ) arg_3 = nodes . repFuncicode ( arg_3 ) arg_4 = arg_0 . options . get ( 'encoding' , arg_0 . state . document . settings . input_encoding ) arg_5 = arg_0 . state . document . settings . input_encoding_error_handler arg_6 = arg_0 . options . get ( 'tab-width' , arg_0 . state . document . settings . tab_width ) try : arg_0 . state . document . settings . record_dependencies . add ( arg_3 ) arg_7 = io . FileInput ( source_path = arg_3 , arg_4 = arg_4 , error_handler = arg_5 ) except UnicodeEncodeError as error : raise arg_0 . severe ( 'Problems with \"%s\" directive path:\\n' 'Cannot encode input file path \"%s\" ' '(wrong locale?).' % ( arg_0 . name , SafeString ( arg_3 ) ) ) except IOError as error : raise arg_0 . severe ( 'Problems with \"%s\" directive path:\\n%s.' % ( arg_0 . name , ErrorString ( error ) ) ) try : arg_8 = arg_7 . read ( ) except UnicodeError as error : raise arg_0 . severe ( 'Problem with \"%s\" directive:\\n%s' % ( arg_0 . name , ErrorString ( error ) ) ) arg_9 = arg_0 . state . document . 
settings . env . config arg_10 = M2R ( no_underscore_emphasis = arg_9 . no_underscore_emphasis ) arg_11 = statemachine . string2lines ( arg_10 ( arg_8 ) , arg_6 , convert_whitespace = True ) arg_0 . state_machine . insert_input ( arg_11 , arg_3 ) return [ ]","id_":253351,"task_name":"https:\/\/github.com\/tensorforce\/tensorforce\/blob\/520a8d992230e382f08e315ede5fc477f5e26bfb\/docs\/m2r.py#L486-L536","negative":"Augment a sample shape to broadcast batch dimensions.\n\n Computes an augmented sample shape, so that any batch dimensions not\n part of the distribution `partial_batch_dist` are treated as identical\n distributions.\n\n # partial_batch_dist.batch_shape = [ 7]\n # full_sample_and_batch_shape = [3, 4, 7]\n # => return an augmented sample shape of [3, 4] so that\n # partial_batch_dist.sample(augmented_sample_shape) has combined\n # sample and batch shape of [3, 4, 7].\n\n Args:\n partial_batch_dist: `tfd.Distribution` instance with batch shape a\n prefix of `full_sample_and_batch_shape`.\n full_sample_and_batch_shape: a Tensor or Tensor-like shape.\n validate_args: if True, check for shape errors at runtime.\n Returns:\n augmented_sample_shape: sample shape such that\n `partial_batch_dist.sample(augmented_sample_shape)` has combined\n sample and batch shape of `full_sample_and_batch_shape`.\n\n Raises:\n ValueError: if `partial_batch_dist.batch_shape` has more dimensions than\n `full_sample_and_batch_shape`.\n NotImplementedError: if broadcasting would be required to make\n `partial_batch_dist.batch_shape` into a prefix of\n `full_sample_and_batch_shape` ."} {"query":"A more flexible str function which intelligently handles stringifying strings lists and other iterables . The results are lexicographically sorted to ensure generated responses are consistent when iterables such as Set are used .","positive":"def Func ( arg_0 ) : if arg_0 is None : return None elif ( not isinstance ( arg_0 , str ) and isinstance ( arg_0 , collections . abc . Iterable ) ) : return ', ' . join ( str ( arg_1 ) for arg_1 in sorted ( arg_0 ) ) else : return str ( arg_0 )","id_":253352,"task_name":"https:\/\/github.com\/ashleysommer\/sanic-cors\/blob\/f3d68def8cf859398b3c83e4109d815f1f038ea2\/sanic_cors\/core.py#L329-L342","negative":"Extract Packed Floating-Point Values\n\n Extracts 128-bits of packed floating-point values from the source\n operand (second operand) at an 128-bit offset from imm8[0] into the\n destination operand (first operand). The destination may be either an\n XMM register or an 128-bit memory location."} {"query":"On Windows returns a list of mapped network drives","positive":"def Func ( ) : if sys . platform != 'win32' : raise NotImplementedError arg_0 = [ ] arg_1 = _CallWindowsNetCommand ( [ 'use' ] ) for arg_2 in arg_1 . split ( EOL_STYLE_WINDOWS ) : arg_3 = re . match ( \"(\\w*)\\s+(\\w:)\\s+(.+)\" , arg_2 . rstrip ( ) ) if arg_3 : arg_0 . append ( ( arg_3 . group ( 2 ) , arg_3 . group ( 3 ) , arg_3 . group ( 1 ) == 'OK' ) ) return arg_0","id_":253353,"task_name":"https:\/\/github.com\/zerotk\/easyfs\/blob\/140923db51fb91d5a5847ad17412e8bce51ba3da\/zerotk\/easyfs\/_easyfs.py#L1411-L1429","negative":"It will poll the URL to grab the latest status resource in a given\n timeout and time interval.\n\n Args:\n poll_interval (int): how often to poll the status service.\n timeout (int): how long to poll the URL until giving up. Use <= 0\n to wait forever"} {"query":"Get the items contained in self .","positive":"def Func ( arg_0 ) : arg_1 = [ ] arg_2 = arg_0 . xpath_ctxt . 
xpathEval ( \"d:item\" ) if arg_2 is not None : for arg_3 in arg_2 : arg_1 . append ( DiscoItem ( arg_0 , arg_3 ) ) return arg_1","id_":253354,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/disco.py#L464-L474","negative":"issue a command to read the archive records after a known time stamp."} {"query":"This method takes a list of labels and returns a unique category number . This enables this class to store a list of categories for each point since the KNN classifier only stores a single number category for each record .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 0 for arg_3 in arg_1 : arg_2 += arg_0 . _labelToCategoryNumber ( arg_3 ) return arg_2","id_":253355,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/regions\/knn_anomaly_classifier_region.py#L589-L598","negative":"Computes the light curve model"} {"query":"Fetch the location of the form in the original filename from the input form if it has metadata .","positive":"def Func ( arg_0 : arg_1 [ arg_2 , arg_3 ] ) -> Optional [ Tuple [ int , int ] ] : try : arg_4 = arg_0 . meta arg_5 = arg_4 . get ( reader . READER_LINE_KW ) arg_6 = arg_4 . get ( reader . READER_COL_KW ) except AttributeError : return None else : assert isinstance ( arg_5 , int ) and isinstance ( arg_6 , int ) return arg_5 , arg_6","id_":253356,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/parser.py#L432-L443","negative":"Clear all matching our user_id."} {"query":"Mark all message instances for a user as read .","positive":"def Func ( arg_0 ) : arg_1 = stored_messages_settings . STORAGE_BACKEND arg_2 = arg_1 ( ) arg_2 . inbox_purge ( arg_0 )","id_":253357,"task_name":"https:\/\/github.com\/evonove\/django-stored-messages\/blob\/23b71f952d5d3fd03285f5e700879d05796ef7ba\/stored_messages\/api.py#L61-L69","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Consume reader and return Success only on complete consumption .","positive":"def Func ( arg_0 : arg_1 [ arg_2 , arg_3 ] , arg_4 : arg_5 [ arg_2 ] ) -> Result [ arg_3 ] : arg_6 = ( arg_0 << eof ) . consume ( arg_4 ) if isinstance ( arg_6 , Continue ) : return Success ( arg_6 . value ) else : arg_7 = set ( ) arg_8 = [ ] for arg_9 in arg_6 . expected : arg_10 = arg_9 ( ) if arg_10 not in arg_7 : arg_7 . add ( arg_10 ) arg_8 . append ( arg_10 ) return Failure ( arg_6 . farthest . expected_error ( ' or ' . join ( arg_8 ) ) )","id_":253358,"task_name":"https:\/\/github.com\/drhagen\/parsita\/blob\/d97414a05541f48231381f607d1d2e6b50781d39\/parsita\/parsers.py#L155-L182","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"Binarizes the values of x .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = 'upper' ) : arg_4 , arg_5 = arg_1 if arg_2 is None : arg_2 = ( arg_4 + arg_5 ) \/ 2. arg_0 = arg_0 . 
copy ( ) if arg_3 == 'lower' : arg_0 [ arg_0 <= arg_2 ] = arg_4 arg_0 [ arg_0 > arg_2 ] = arg_5 elif arg_3 == 'upper' : arg_0 [ arg_0 < arg_2 ] = arg_4 arg_0 [ arg_0 >= arg_2 ] = arg_5 else : raise ValueError ( 'included_in must be \"lower\" or \"upper\"' ) return arg_0","id_":253359,"task_name":"https:\/\/github.com\/bethgelab\/foolbox\/blob\/8ab54248c70e45d8580a7d9ee44c9c0fb5755c4a\/foolbox\/utils.py#L89-L117","negative":"Create Flask app."} {"query":"Given two templates and their respective versions return True if a new cookiecutter config needs to be obtained from the user","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_0 != arg_2 : return True else : return _cookiecutter_configs_have_changed ( arg_2 , arg_1 , arg_3 )","id_":253360,"task_name":"https:\/\/github.com\/CloverHealth\/temple\/blob\/d7b75da2459f72ba74d6f3b6e1ab95c3d1b92ccd\/temple\/update.py#L152-L162","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Serialize data to a frozen tuple .","positive":"def Func ( arg_0 , arg_1 ) : if hasattr ( arg_1 , \"__hash__\" ) and callable ( arg_1 . __hash__ ) : return arg_1 elif isinstance ( arg_1 , list ) : return tuple ( ( arg_0 . Func ( arg_2 ) for arg_2 in arg_1 ) ) elif isinstance ( arg_1 , dict ) : arg_3 = [ ( key , arg_0 . Func ( value ) ) for key , value in arg_1 . items ( ) ] arg_3 . sort ( ) return tuple ( arg_3 ) else : raise TypeError ( \"Unable to freeze {} data type.\" . format ( type ( arg_1 ) ) )","id_":253361,"task_name":"https:\/\/github.com\/CiscoDevNet\/webexteamssdk\/blob\/6fc2cc3557e080ba4b2a380664cb2a0532ae45cd\/webexteamssdk\/models\/immutable.py#L121-L141","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"If self is a MUC room join request return the information contained .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . get_muc_child ( ) if not arg_1 : return None if not isinstance ( arg_1 , MucX ) : return None return arg_1","id_":253362,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/muc\/muccore.py#L742-L753","negative":"Function to create an overview of the services.\n Will print a list of ports found and the number of times the port was seen."} {"query":"Put I2C lines into idle state .","positive":"def Func ( arg_0 ) : arg_0 . _ft232h . setup_pins ( { 0 : GPIO . OUT , 1 : GPIO . OUT , 2 : GPIO . IN } , { 0 : GPIO . HIGH , 1 : GPIO . HIGH } )","id_":253363,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_GPIO\/blob\/a92a23d6b5869663b2bc1ccf78bb11585076a9c4\/Adafruit_GPIO\/FT232H.py#L648-L652","negative":"Return True if file matches exclude pattern."} {"query":"Fetch the hostname using the callable from the config or using socket . getfqdn as a fallback .","positive":"def Func ( ) : try : arg_0 = conf . get ( 'core' , 'hostname_callable' ) except AirflowConfigException : arg_0 = None if not arg_0 : return socket . getfqdn ( ) arg_1 , arg_2 = arg_0 . split ( ':' ) arg_3 = importlib . 
import_module ( arg_1 ) arg_4 = getattr ( arg_3 , arg_2 ) return arg_4 ( )","id_":253364,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/utils\/net.py#L25-L45","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Setup frequency axis","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : arg_3 = arg_0 . header [ b'fch1' ] arg_4 = arg_0 . header [ b'foff' ] arg_5 , arg_6 = 0 , arg_0 . header [ b'nchans' ] if arg_1 : arg_5 = int ( ( arg_1 - arg_3 ) \/ arg_4 ) if arg_2 : arg_6 = int ( ( arg_2 - arg_3 ) \/ arg_4 ) arg_7 = np . int ( arg_5 ) arg_8 = np . int ( arg_6 ) if arg_5 < arg_6 : arg_9 = np . arange ( arg_7 , arg_8 ) else : arg_9 = np . arange ( arg_8 , arg_7 ) arg_0 . freqs = arg_4 * arg_9 + arg_3 if arg_8 < arg_7 : arg_8 , arg_7 = arg_7 , arg_8 return arg_5 , arg_6 , arg_7 , arg_8","id_":253365,"task_name":"https:\/\/github.com\/UCBerkeleySETI\/blimpy\/blob\/b8822d3e3e911944370d84371a91fa0c29e9772e\/blimpy\/filterbank.py#L186-L216","negative":"Prune the cache"} {"query":"The base handler for the display_data message .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . log . debug ( \"display: %s\" , arg_1 . get ( 'content' , '' ) ) if not arg_0 . _hidden and arg_0 . _is_from_this_session ( arg_1 ) : arg_2 = arg_1 [ 'content' ] [ 'source' ] arg_3 = arg_1 [ 'content' ] [ 'data' ] arg_4 = arg_1 [ 'content' ] [ 'metadata' ] if arg_3 . has_key ( 'text\/html' ) : arg_5 = arg_3 [ 'text\/html' ] arg_0 . _append_html ( arg_5 , True ) elif arg_3 . has_key ( 'text\/plain' ) : arg_6 = arg_3 [ 'text\/plain' ] arg_0 . _append_plain_text ( arg_6 , True ) arg_0 . _append_plain_text ( u'\\n' , True )","id_":253366,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/ipython_widget.py#L235-L255","negative":"Enumerate all possible resonance forms and return them as a list.\n\n :param mol: The input molecule.\n :type mol: rdkit.Chem.rdchem.Mol\n :return: A list of all possible resonance forms of the molecule.\n :rtype: list of rdkit.Chem.rdchem.Mol"} {"query":"Delete the specified cgroup .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = trees . Tree ( ) . root arg_3 = arg_1 . split ( \"\/\" ) for arg_4 in arg_3 : arg_5 = { x . name : x for x in arg_2 . children } if arg_4 not in arg_5 : arg_0 . log . warning ( \"Cgroup does not exist: %s\" , arg_1 ) return else : arg_2 = arg_5 [ arg_4 ] arg_6 = arg_2 . parent arg_0 . log . debug ( \"Deleting cgroup %s\/%s\" , arg_6 , arg_2 . name ) arg_6 . delete_cgroup ( arg_2 . 
name )","id_":253367,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/task_runner\/cgroup_task_runner.py#L90-L109","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Build a filter that fails on nodes in the given list .","positive":"def Func ( arg_0 : arg_1 [ arg_2 ] ) -> NodePredicate : arg_3 = set ( arg_0 ) def exclusion_filter ( arg_4 : arg_5 , arg_6 : arg_2 ) -> bool : return arg_6 not in arg_3 return exclusion_filter","id_":253368,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/filters\/node_filters.py#L68-L79","negative":"Creates and connects the underlying text widget."} {"query":"Asserts that val is equal to one of the given items .","positive":"def Func ( arg_0 , * arg_1 ) : if len ( arg_1 ) == 0 : raise ValueError ( 'one or more args must be given' ) else : for arg_2 in arg_1 : if arg_0 . val == arg_2 : return arg_0 arg_0 . _err ( 'Expected <%s> to be in %s, but was not.' % ( arg_0 . val , arg_0 . _fmt_items ( arg_1 ) ) )","id_":253369,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L366-L374","negative":"Decode the data passed in and potentially flush the decoder."} {"query":"r Return Heronian mean .","positive":"def Func ( arg_0 ) : arg_1 = len ( arg_0 ) arg_2 = 0 for arg_3 in range ( arg_1 ) : for arg_4 in range ( arg_3 , arg_1 ) : if arg_0 [ arg_3 ] == arg_0 [ arg_4 ] : arg_2 += arg_0 [ arg_3 ] else : arg_2 += ( arg_0 [ arg_3 ] * arg_0 [ arg_4 ] ) ** 0.5 return arg_2 * 2 \/ ( arg_1 * ( arg_1 + 1 ) )","id_":253370,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/stats\/_mean.py#L415-L453","negative":"Run the excel_to_html function from the\n command-line.\n\n Args:\n -p path to file\n -s name of the sheet to convert\n -css classes to apply\n -m attempt to combine merged cells\n -c caption for accessibility\n -su summary for accessibility\n -d details for accessibility\n\n Example use:\n\n excel_to_html -p myfile.xlsx -s SheetName -css diablo-python -m true"} {"query":"Adds a link to an existing node .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if isinstance ( arg_1 , str ) : arg_3 = arg_1 if isinstance ( arg_2 , str ) : arg_4 = arg_0 . v_root . f_get ( arg_2 ) else : arg_4 = arg_2 else : arg_4 = arg_1 arg_3 = arg_4 . v_name return arg_0 . _nn_interface . _add_generic ( arg_0 , type_name = LINK , group_type_name = GROUP , args = ( arg_3 , arg_4 ) , kwargs = { } , add_prefix = False )","id_":253371,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L2680-L2707","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"Calculates the cross - entropy for a batch of logits .","positive":"def Func ( arg_0 , arg_1 ) : assert arg_1 . ndim == 2 arg_1 = arg_1 - np . max ( arg_1 , axis = 1 , keepdims = True ) arg_2 = np . exp ( arg_1 ) arg_3 = np . sum ( arg_2 , axis = 1 ) arg_4 = np . 
log ( arg_3 ) - arg_1 [ : , arg_0 ] return arg_4","id_":253372,"task_name":"https:\/\/github.com\/bethgelab\/foolbox\/blob\/8ab54248c70e45d8580a7d9ee44c9c0fb5755c4a\/foolbox\/utils.py#L59-L86","negative":"This method is called before first step of simulation."} {"query":"Generates a dictionary representation of the list field . Document should be the document the list_field comes from .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = { \"_document\" : arg_1 } if isinstance ( arg_2 . field , EmbeddedDocumentField ) : arg_4 . update ( arg_0 . create_document_dictionary ( arg_1 = arg_2 . field . document_type_obj , owner_document = arg_1 ) ) arg_4 . update ( { \"_document_field\" : arg_2 . field , \"_key\" : arg_3 , \"_field_type\" : ListField , \"_widget\" : get_widget ( arg_2 . field ) , \"_value\" : getattr ( arg_1 , arg_3 , None ) } ) return arg_4","id_":253373,"task_name":"https:\/\/github.com\/jazzband\/django-mongonaut\/blob\/5485b2e029dff8ae267a4cb39c92d0a72cb5b144\/mongonaut\/forms\/forms.py#L101-L121","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"Identify whether the user is requesting unit validation against astropy . units pint or quantities .","positive":"def Func ( arg_0 ) : if HAS_ASTROPY : from astropy . units import UnitBase if isinstance ( arg_0 , UnitBase ) : return ASTROPY if HAS_PINT : from pint . unit import UnitsContainer if hasattr ( arg_0 , 'dimensionality' ) and isinstance ( arg_0 . dimensionality , UnitsContainer ) : return PINT if HAS_QUANTITIES : from quantities . unitquantity import IrreducibleUnit from quantities import Quantity if isinstance ( arg_0 , IrreducibleUnit ) or isinstance ( arg_0 , Quantity ) : return QUANTITIES raise TraitError ( \"Could not identify unit framework for target unit of type {0}\" . format ( type ( arg_0 ) . __name__ ) )","id_":253374,"task_name":"https:\/\/github.com\/astrofrog\/numtraits\/blob\/d0afadc946e9d81d1d5b6c851530899d6b0e1d7c\/numtraits.py#L167-L198","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lengths of the boundaries and labels\n match."} {"query":"Computes the log - normalizer of the distribution .","positive":"def Func ( arg_0 ) : arg_1 = tf . compat . dimension_value ( arg_0 . event_shape [ 0 ] ) if arg_1 is None : raise ValueError ( 'vMF _log_normalizer currently only supports ' 'statically known event shape' ) arg_2 = tf . where ( arg_0 . concentration > 0 , arg_0 . concentration , tf . ones_like ( arg_0 . concentration ) ) arg_3 = ( ( arg_1 \/ 2 - 1 ) * tf . math . log ( arg_2 ) - ( arg_1 \/ 2 ) * np . log ( 2 * np . pi ) - tf . math . log ( _bessel_ive ( arg_1 \/ 2 - 1 , arg_2 ) ) - tf . abs ( arg_2 ) ) arg_4 = ( np . log ( 2. ) + ( arg_1 \/ 2 ) * np . log ( np . pi ) - tf . math . lgamma ( tf . cast ( arg_1 \/ 2 , arg_0 . dtype ) ) ) return tf . where ( arg_0 . concentration > 0 , - arg_3 , arg_4 * tf . ones_like ( arg_3 ) )","id_":253375,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/von_mises_fisher.py#L262-L280","negative":"Run complete analysis and return results."} {"query":"Cancel operations .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] arg_4 = [ ] arg_5 = 256 arg_6 = len ( arg_2 ) for arg_7 in range ( 0 , arg_6 , arg_5 ) : arg_8 , arg_9 = _Func_batch ( arg_0 , arg_1 , arg_2 [ arg_7 : arg_7 + arg_5 ] ) arg_3 . extend ( arg_8 ) arg_4 . 
extend ( arg_9 ) return arg_3 , arg_4","id_":253376,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/providers\/google_base.py#L439-L466","negative":"Revoke the token and remove the cookie."} {"query":"Constructor for Linux binary analysis .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = '' , arg_7 = False , arg_8 = None , ** arg_9 ) : if arg_8 is None : arg_8 = consts . stdin_size try : return arg_0 ( _make_Func ( arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 ) , ** arg_9 ) except elftools . common . exceptions . ELFError : raise Exception ( f'Invalid binary: {path}' )","id_":253377,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/manticore.py#L42-L67","negative":"Get merge notes"} {"query":"Convert binary data to type type .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : arg_4 = arg_0 . get_type_size ( arg_2 ) if arg_4 > 0 and len ( arg_1 ) != arg_4 : raise ArgumentError ( \"Could not convert type from binary since the data was not the correct size\" , required_size = arg_4 , actual_size = len ( arg_1 ) , arg_2 = arg_2 ) arg_5 = arg_0 . get_type ( arg_2 ) if not hasattr ( arg_5 , 'convert_binary' ) : raise ArgumentError ( \"Type does not support conversion from binary\" , arg_2 = arg_2 ) return arg_5 . convert_binary ( arg_1 , ** arg_3 )","id_":253378,"task_name":"https:\/\/github.com\/iotile\/typedargs\/blob\/0a5091a664b9b4d836e091e9ba583e944f438fd8\/typedargs\/typeinfo.py#L95-L113","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."} {"query":"Get the list of network interfaces . Will return all datalinks on SmartOS .","positive":"def Func ( ) : with settings ( hide ( 'running' , 'stdout' ) ) : if is_file ( '\/usr\/sbin\/dladm' ) : arg_0 = run ( '\/usr\/sbin\/dladm show-link' ) else : arg_0 = sudo ( '\/sbin\/ifconfig -s' ) return [ arg_1 . split ( ' ' ) [ 0 ] for arg_1 in arg_0 . splitlines ( ) [ 1 : ] ]","id_":253379,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/network.py#L13-L22","negative":"Revoke the token and remove the cookie."} {"query":"Recursively downloads a folder in a vault to a local directory . Only downloads files not datasets .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , arg_3 = False ) : arg_1 = os . path . normpath ( os . path . expanduser ( arg_1 ) ) if not os . access ( arg_1 , os . W_OK ) : raise Exception ( 'Write access to local path ({}) is required' . format ( arg_1 ) ) arg_4 , arg_5 = solvebio . Object . 
validate_full_path ( arg_0 ) arg_6 = solvebio . Vault . get_by_full_path ( arg_5 [ 'vault' ] ) print ( 'Downloading all files from {} to {}' . format ( arg_4 , arg_1 ) ) if arg_5 [ 'path' ] == '\/' : arg_7 = None else : arg_8 = solvebio . Object . get_by_full_path ( arg_0 , assert_type = 'folder' ) arg_7 = arg_8 . id print ( 'Creating local directory structure at: {}' . format ( arg_1 ) ) if not os . path . exists ( arg_1 ) : if not arg_2 : os . makedirs ( arg_1 ) arg_9 = arg_6 . folders ( arg_7 = arg_7 ) for arg_10 in arg_9 : arg_11 = os . path . normpath ( arg_1 + arg_10 . path ) if not os . path . exists ( arg_11 ) : print ( 'Creating folder: {}' . format ( arg_11 ) ) if not arg_2 : os . makedirs ( arg_11 ) arg_12 = arg_6 . files ( arg_7 = arg_7 ) for arg_10 in arg_12 : arg_11 = os . path . normpath ( arg_1 + arg_10 . path ) if os . path . exists ( arg_11 ) : if arg_3 : print ( 'Deleting local file (force download): {}' . format ( arg_11 ) ) if not arg_2 : os . remove ( arg_11 ) else : print ( 'Skipping file (already exists): {}' . format ( arg_11 ) ) continue print ( 'Downloading file: {}' . format ( arg_11 ) ) if not arg_2 : arg_10 . download ( arg_11 )","id_":253380,"task_name":"https:\/\/github.com\/solvebio\/solvebio-python\/blob\/b29614643043afd19c1d8074e8f25c6700d51a73\/examples\/download_vault_folder.py#L6-L56","negative":"Save the state of hooks in the sys module.\n\n This has to be called after self.user_module is created."} {"query":"Iterate over all pages for the given url . Feed in the result of self . _build_path as the url .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : if 'fields' in arg_2 : if 'total_items' not in arg_2 [ 'fields' ] . split ( ',' ) : arg_2 [ 'fields' ] += ',total_items' arg_2 . pop ( \"offset\" , None ) arg_2 . pop ( \"count\" , None ) arg_3 = arg_0 . _mc_client . _get ( arg_1 = arg_1 , arg_5 = 0 , count = 1000 , ** arg_2 ) arg_4 = arg_3 [ 'total_items' ] if arg_4 > 1000 : for arg_5 in range ( 1 , int ( arg_4 \/ 1000 ) + 1 ) : arg_3 = merge_results ( arg_3 , arg_0 . _mc_client . _get ( arg_1 = arg_1 , arg_5 = int ( arg_5 * 1000 ) , count = 1000 , ** arg_2 ) ) return arg_3 else : return arg_3","id_":253381,"task_name":"https:\/\/github.com\/VingtCinq\/python-mailchimp\/blob\/1b472f1b64fdde974732ac4b7ed48908bb707260\/mailchimp3\/baseapi.py#L36-L73","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."} {"query":"returns the list of states obtained by adding the given inputs to the current state one by one .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = arg_0 for arg_4 in arg_1 : arg_3 = arg_3 . add_input ( arg_4 ) arg_2 . 
append ( arg_3 ) return arg_2","id_":253382,"task_name":"https:\/\/github.com\/clab\/dynet\/blob\/21cc62606b74f81bb4b11a9989a6c2bd0caa09c5\/python\/dynet_viz.py#L681-L691","negative":"Call the disambiguation service in order to process a pdf file .\n\n Args:\n pdf (file): PDF file to be disambiguated.\n language (str): language of text (if known)\n\n Returns:\n dict, int: API response and API status."} {"query":"Traverse the bone hierarchy and create physics joints .","positive":"def Func ( arg_0 ) : arg_1 = [ 'root' ] while arg_1 : arg_2 = arg_1 . pop ( ) for arg_3 in arg_0 . hierarchy . get ( arg_2 , ( ) ) : arg_1 . append ( arg_3 ) if arg_2 not in arg_0 . bones : continue arg_4 = arg_0 . bones [ arg_2 ] arg_5 = [ b for b in arg_0 . bodies if b . name == arg_2 ] [ 0 ] for arg_3 in arg_0 . hierarchy . get ( arg_2 , ( ) ) : arg_6 = arg_0 . bones [ arg_3 ] arg_7 = [ b for b in arg_0 . bodies if b . name == arg_3 ] [ 0 ] arg_8 = ( '' , 'hinge' , 'universal' , 'ball' ) [ len ( arg_6 . dof ) ] arg_0 . joints . append ( arg_0 . world . join ( arg_8 , arg_5 , arg_7 ) )","id_":253383,"task_name":"https:\/\/github.com\/EmbodiedCognition\/pagoda\/blob\/8892f847026d98aba8646ecbc4589397e6dec7bd\/pagoda\/parser.py#L468-L483","negative":"Convert a JSON representation into ListValue message."} {"query":"Return a list of strings formatted as HTML bibliography entries","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] for arg_3 in arg_1 . entries : arg_2 . append ( arg_3 [ \"content\" ] [ 0 ] [ \"value\" ] ) arg_0 . url_params = None return arg_2","id_":253384,"task_name":"https:\/\/github.com\/urschrei\/pyzotero\/blob\/b378966b30146a952f7953c23202fb5a1ddf81d9\/pyzotero\/zotero.py#L870-L877","negative":"Awake one process waiting to receive data on fd"} {"query":"Start the controller .","positive":"def Func ( arg_0 ) : if arg_0 . mode == \"manual\" : return if arg_0 . ipython_dir != '~\/.ipython' : arg_0 . ipython_dir = os . path . abspath ( os . path . expanduser ( arg_0 . ipython_dir ) ) if arg_0 . log : arg_2 = open ( os . path . join ( arg_0 . ipython_dir , \"{0}.controller.out\" . format ( arg_0 . profile ) ) , 'w' ) arg_3 = open ( os . path . join ( arg_0 . ipython_dir , \"{0}.controller.err\" . format ( arg_0 . profile ) ) , 'w' ) else : arg_2 = open ( os . devnull , 'w' ) arg_3 = open ( os . devnull , 'w' ) try : arg_4 = [ 'ipcontroller' , '' if arg_0 . ipython_dir == '~\/.ipython' else '--ipython-dir={}' . format ( arg_0 . ipython_dir ) , arg_0 . interfaces if arg_0 . interfaces is not None else '--ip=*' , '' if arg_0 . profile == 'default' else '--profile={0}' . format ( arg_0 . profile ) , '--reuse' if arg_0 . reuse else '' , '--location={}' . format ( arg_0 . public_ip ) if arg_0 . public_ip else '' , '--port={}' . format ( arg_0 . port ) if arg_0 . port is not None else '' ] if arg_0 . port_range is not None : arg_4 += [ '--HubFactory.hb={0},{1}' . format ( arg_0 . hb_ping , arg_0 . hb_pong ) , '--HubFactory.control={0},{1}' . format ( arg_0 . control_client , arg_0 . control_engine ) , '--HubFactory.mux={0},{1}' . format ( arg_0 . mux_client , arg_0 . mux_engine ) , '--HubFactory.task={0},{1}' . format ( arg_0 . task_client , arg_0 . task_engine ) ] logger . debug ( \"Starting ipcontroller with '{}'\" . format ( ' ' . join ( [ str ( arg_5 ) for arg_5 in arg_4 ] ) ) ) arg_0 . proc = subprocess . Popen ( arg_4 , arg_2 = arg_2 , arg_3 = arg_3 , preexec_fn = os . setsid ) except FileNotFoundError : arg_7 = \"Could not find ipcontroller. 
Please make sure that ipyparallel is installed and available in your env\" logger . error ( arg_7 ) raise ControllerError ( arg_7 ) except Exception as e : arg_7 = \"IPPController failed to Func: {0}\" . format ( e ) logger . error ( arg_7 ) raise ControllerError ( arg_7 )","id_":253385,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/executors\/ipp_controller.py#L66-L108","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"Returns the maximum representable value in this data type .","positive":"def Func ( arg_0 ) : arg_0 = tf . as_dtype ( arg_0 ) if hasattr ( arg_0 , 'Func' ) : return arg_0 . Func arg_1 = is_floating ( arg_0 ) or is_complex ( arg_0 ) return np . finfo ( arg_0 ) . Func if arg_1 else np . iinfo ( arg_0 ) . Func","id_":253386,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/internal\/dtype_util.py#L114-L120","negative":"Remove all binary files in the adslib directory."} {"query":"Returns the smallest delimited version of field_key that is an attribute on document .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = True arg_3 = [ ] arg_4 = arg_1 while arg_2 and arg_4 : if hasattr ( arg_0 , arg_4 ) : arg_2 = False else : arg_5 = arg_4 . split ( \"_\" ) arg_3 . append ( arg_5 . pop ( ) ) arg_4 = u\"_\" . join ( arg_5 ) arg_3 . reverse ( ) return arg_4 , arg_3","id_":253387,"task_name":"https:\/\/github.com\/jazzband\/django-mongonaut\/blob\/5485b2e029dff8ae267a4cb39c92d0a72cb5b144\/mongonaut\/utils.py#L32-L51","negative":"Dumps a database table into a tab-delimited file"} {"query":"Augmenter to alpha - blend two image sources using simplex noise alpha masks .","positive":"def Func ( arg_0 = None , arg_1 = None , arg_2 = False , arg_3 = ( 2 , 16 ) , arg_4 = None , arg_5 = ( 1 , 3 ) , arg_6 = \"max\" , arg_7 = True , arg_8 = None , arg_9 = None , arg_10 = False , arg_11 = None ) : arg_12 = iap . Choice ( [ \"nearest\" , \"linear\" , \"cubic\" ] , p = [ 0.05 , 0.6 , 0.35 ] ) arg_13 = iap . Normal ( 0.0 , 5.0 ) arg_14 = iap . SimplexNoise ( arg_3 = arg_3 , arg_4 = arg_4 if arg_4 is not None else arg_12 ) if arg_5 != 1 : arg_14 = iap . IterativeNoiseAggregator ( arg_14 , arg_5 = arg_5 , arg_6 = arg_6 ) if arg_7 is False or ( ia . is_single_number ( arg_7 ) and arg_7 <= 0.01 ) : arg_14 = iap . Sigmoid . create_for_noise ( arg_14 , threshold = arg_8 if arg_8 is not None else arg_13 , activated = arg_7 ) if arg_9 is None : arg_9 = \"Unnamed%s\" % ( ia . 
caller_name ( ) , ) return AlphaElementwise ( factor = arg_14 , arg_0 = arg_0 , arg_1 = arg_1 , arg_2 = arg_2 , arg_9 = arg_9 , arg_10 = arg_10 , arg_11 = arg_11 )","id_":253388,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmenters\/blend.py#L797-L980","negative":"Return whether the input course or program exist."} {"query":"use previously calculated variation of the rate to estimate the uncertainty in a particular numdate due to rate variation .","positive":"def Func ( arg_0 , arg_1 , arg_2 = ( 0.05 , 0.095 ) ) : if hasattr ( arg_1 , \"numdate_rate_variation\" ) : from scipy . special import erfinv arg_3 = [ np . sqrt ( 2.0 ) * erfinv ( - 1.0 + 2.0 * arg_7 ) if arg_7 * ( 1.0 - arg_7 ) else 0 for arg_7 in arg_2 ] arg_4 , arg_5 , arg_6 = [ arg_7 [ 1 ] for arg_7 in arg_1 . numdate_rate_variation ] return np . array ( [ arg_5 + arg_7 * np . abs ( arg_8 - arg_5 ) for arg_7 , arg_8 in zip ( arg_3 , ( arg_4 , arg_6 ) ) ] ) else : return None","id_":253389,"task_name":"https:\/\/github.com\/neherlab\/treetime\/blob\/f6cdb58d19243a18ffdaa2b2ec71872fa00e65c0\/treetime\/clock_tree.py#L760-L780","negative":"Print status of containers and networks"} {"query":"Unlink path but do not complain if file does not exist .","positive":"def Func ( arg_0 ) : try : os . unlink ( arg_0 ) except OSError as err : if err . errno != errno . ENOENT : raise","id_":253390,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/utilities.py#L689-L695","negative":"Register a new range type as a PostgreSQL range.\n\n >>> register_range_type(\"int4range\", intrange, conn)\n\n The above will make sure intrange is regarded as an int4range for queries\n and that int4ranges will be cast into intrange when fetching rows.\n\n pgrange should be the full name including schema for the custom range type.\n\n Note that adaption is global, meaning if a range type is passed to a regular\n psycopg2 connection it will adapt it to its proper range type. Parsing of\n rows from the database however is not global and just set on a per connection\n basis."} {"query":"reset output buffer re - parse entire source file and return output Since parsing involves a good deal of randomness this is an easy way to get new output without having to reload a grammar file each time .","positive":"def Func ( arg_0 ) : arg_0 . reset ( ) arg_0 . parse ( arg_0 . source ) return arg_0 . output ( )","id_":253391,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/kgp.py#L147-L156","negative":"Checks if a bundle exists at the provided path\n\n :param path: Bundle path\n :return: bool"} {"query":"Return CLEF German stemmer plus stem .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = normalize ( 'NFC' , text_type ( arg_1 . lower ( ) ) ) arg_1 = arg_1 . translate ( arg_0 . _accents ) arg_2 = len ( arg_1 ) - 1 if arg_2 > 4 and arg_1 [ - 3 : ] == 'ern' : arg_1 = arg_1 [ : - 3 ] elif arg_2 > 3 and arg_1 [ - 2 : ] in { 'em' , 'en' , 'er' , 'es' } : arg_1 = arg_1 [ : - 2 ] elif arg_2 > 2 and ( arg_1 [ - 1 ] == 'e' or ( arg_1 [ - 1 ] == 's' and arg_1 [ - 2 ] in arg_0 . _st_ending ) ) : arg_1 = arg_1 [ : - 1 ] arg_2 = len ( arg_1 ) - 1 if arg_2 > 4 and arg_1 [ - 3 : ] == 'est' : arg_1 = arg_1 [ : - 3 ] elif arg_2 > 3 and ( arg_1 [ - 2 : ] in { 'er' , 'en' } or ( arg_1 [ - 2 : ] == 'st' and arg_1 [ - 3 ] in arg_0 . 
_st_ending ) ) : arg_1 = arg_1 [ : - 2 ] return arg_1","id_":253392,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/stemmer\/_clef_german_plus.py#L52-L104","negative":"Replace all the tracks in a playlist, overwriting its existing tracks. \n This powerful request can be useful for replacing tracks, re-ordering existing tracks, or clearing the playlist.\n\n Parameters\n ----------\n playlist : Union[str, PLaylist]\n The playlist to modify\n tracks : Sequence[Union[str, Track]]\n Tracks to place in the playlist"} {"query":"Adds monitors to the network","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_3 [ 'neurons_e' ] arg_5 = [ ] arg_0 . spike_monitor = SpikeMonitor ( arg_4 ) arg_5 . append ( arg_0 . spike_monitor ) arg_0 . V_monitor = StateMonitor ( arg_4 , 'V' , record = list ( arg_1 . neuron_records ) ) arg_5 . append ( arg_0 . V_monitor ) arg_0 . I_syn_e_monitor = StateMonitor ( arg_4 , 'I_syn_e' , record = list ( arg_1 . neuron_records ) ) arg_5 . append ( arg_0 . I_syn_e_monitor ) arg_0 . I_syn_i_monitor = StateMonitor ( arg_4 , 'I_syn_i' , record = list ( arg_1 . neuron_records ) ) arg_5 . append ( arg_0 . I_syn_i_monitor ) arg_2 . add ( * arg_5 ) arg_3 [ 'monitors' ] = arg_5","id_":253393,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/examples\/example_24_large_scale_brian2_simulation\/clusternet.py#L711-L740","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Helper to _covariance and _variance which computes a shared scale .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . total_concentration [ ... , tf . newaxis ] return tf . sqrt ( ( 1. + arg_1 \/ arg_0 . total_count [ ... , tf . newaxis ] ) \/ ( 1. + arg_1 ) )","id_":253394,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/dirichlet_multinomial.py#L322-L327","negative":"Downloads all variable star observations by a given observer.\n\n Performs a series of HTTP requests to AAVSO's WebObs search and\n downloads the results page by page. Each page is then passed to\n :py:class:`~pyaavso.parsers.webobs.WebObsResultsParser` and parse results\n are added to the final observation list."} {"query":"Unset default value for AleaIdField .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_2 . get_model ( arg_0 , arg_1 . model_name ) for arg_5 in arg_4 . objects . 
values_list ( 'pk' , flat = True ) : get_meteor_id ( arg_4 , arg_5 )","id_":253395,"task_name":"https:\/\/github.com\/jazzband\/django-ddp\/blob\/1e1954b06fe140346acea43582515991685e4e01\/dddp\/migrations\/__init__.py#L52-L56","negative":"Sets general options used by plugins and streams originating\n from this session object.\n\n :param key: key of the option\n :param value: value to set the option to\n\n\n **Available options**:\n\n ======================== =========================================\n hds-live-edge ( float) Specify the time live HDS\n streams will start from the edge of\n stream, default: ``10.0``\n\n hds-segment-attempts (int) How many attempts should be done\n to download each HDS segment, default: ``3``\n\n hds-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hds-segment-timeout (float) HDS segment connect and read\n timeout, default: ``10.0``\n\n hds-timeout (float) Timeout for reading data from\n HDS streams, default: ``60.0``\n\n hls-live-edge (int) How many segments from the end\n to start live streams on, default: ``3``\n\n hls-segment-attempts (int) How many attempts should be done\n to download each HLS segment, default: ``3``\n\n hls-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hls-segment-timeout (float) HLS segment connect and read\n timeout, default: ``10.0``\n\n hls-timeout (float) Timeout for reading data from\n HLS streams, default: ``60.0``\n\n http-proxy (str) Specify a HTTP proxy to use for\n all HTTP requests\n\n https-proxy (str) Specify a HTTPS proxy to use for\n all HTTPS requests\n\n http-cookies (dict or str) A dict or a semi-colon (;)\n delimited str of cookies to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-headers (dict or str) A dict or semi-colon (;)\n delimited str of headers to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-query-params (dict or str) A dict or a ampersand (&)\n delimited string of query parameters to\n add to each HTTP request,\n e.g. ``foo=bar&baz=qux``\n\n http-trust-env (bool) Trust HTTP settings set in the\n environment, such as environment\n variables (HTTP_PROXY, etc) and\n ~\/.netrc authentication\n\n http-ssl-verify (bool) Verify SSL certificates,\n default: ``True``\n\n http-ssl-cert (str or tuple) SSL certificate to use,\n can be either a .pem file (str) or a\n .crt\/.key pair (tuple)\n\n http-timeout (float) General timeout used by all HTTP\n requests except the ones covered by\n other options, default: ``20.0``\n\n http-stream-timeout (float) Timeout for reading data from\n HTTP streams, default: ``60.0``\n\n subprocess-errorlog (bool) Log errors from subprocesses to\n a file located in the temp directory\n\n subprocess-errorlog-path (str) Log errors from subprocesses to\n a specific file\n\n ringbuffer-size (int) The size of the internal ring\n buffer used by most stream types,\n default: ``16777216`` (16MB)\n\n rtmp-proxy (str) Specify a proxy (SOCKS) that RTMP\n streams will use\n\n rtmp-rtmpdump (str) Specify the location of the\n rtmpdump executable used by RTMP streams,\n e.g. ``\/usr\/local\/bin\/rtmpdump``\n\n rtmp-timeout (float) Timeout for reading data from\n RTMP streams, default: ``60.0``\n\n ffmpeg-ffmpeg (str) Specify the location of the\n ffmpeg executable use by Muxing streams\n e.g. 
``\/usr\/local\/bin\/ffmpeg``\n\n ffmpeg-verbose (bool) Log stderr from ffmpeg to the\n console\n\n ffmpeg-verbose-path (str) Specify the location of the\n ffmpeg stderr log file\n\n ffmpeg-video-transcode (str) The codec to use if transcoding\n video when muxing with ffmpeg\n e.g. ``h264``\n\n ffmpeg-audio-transcode (str) The codec to use if transcoding\n audio when muxing with ffmpeg\n e.g. ``aac``\n\n stream-segment-attempts (int) How many attempts should be done\n to download each segment, default: ``3``.\n General option used by streams not\n covered by other options.\n\n stream-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``.\n General option used by streams not\n covered by other options.\n\n stream-segment-timeout (float) Segment connect and read\n timeout, default: ``10.0``.\n General option used by streams not\n covered by other options.\n\n stream-timeout (float) Timeout for reading data from\n stream, default: ``60.0``.\n General option used by streams not\n covered by other options.\n\n locale (str) Locale setting, in the RFC 1766 format\n eg. en_US or es_ES\n default: ``system locale``.\n\n user-input-requester (UserInputRequester) instance of UserInputRequester\n to collect input from the user at runtime. Must be\n set before the plugins are loaded.\n default: ``UserInputRequester``.\n ======================== ========================================="} {"query":"Convert directory path to uri","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . replace ( arg_0 . root_path , arg_0 . package_name ) if arg_2 . startswith ( os . path . sep ) : arg_2 = arg_2 [ 1 : ] return arg_2 . replace ( os . path . sep , '.' )","id_":253396,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-docs\/src\/product\/sphinxext\/apigen.py#L154-L159","negative":"Deserialize a dataframe.\n\n Parameters\n ----------\n reader : file\n File-like object to read from. Must be opened in binary mode.\n data_type_id : dict\n Serialization format of the raw data.\n See the azureml.DataTypeIds class for constants.\n\n Returns\n -------\n pandas.DataFrame\n Dataframe object."} {"query":"Plotting wrapper for labeled intervals","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 , arg_3 = arg_0 . to_interval_values ( ) return mir_eval . display . labeled_Func ( arg_2 , arg_3 , ** arg_1 )","id_":253397,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/display.py#L64-L68","negative":"Deserialize a dataframe.\n\n Parameters\n ----------\n reader : file\n File-like object to read from. Must be opened in binary mode.\n data_type_id : dict\n Serialization format of the raw data.\n See the azureml.DataTypeIds class for constants.\n\n Returns\n -------\n pandas.DataFrame\n Dataframe object."} {"query":"Determine the assay of self .","positive":"def Func ( arg_0 ) : arg_1 = sum ( arg_0 . compound_masses ) return [ arg_2 \/ arg_1 for arg_2 in arg_0 . 
compound_masses ]","id_":253398,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/modelling\/process\/materials\/chem.py#L383-L391","negative":"Returns how the result count compares to the query options.\n\n The return value is negative if too few results were found, zero if enough were found, and\n positive if too many were found.\n\n Returns:\n int: -1, 0, or 1."} {"query":"r Method to calculate the integral of a property over temperature with respect to temperature using a specified method . Implements the analytical integrals of all available methods except for tabular data the case of multiple coefficient sets needed to encompass the temperature range of any of the ZABRANSKY methods and the CSP methods using the vapor phase properties .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_3 == ZABRANSKY_SPLINE : return arg_0 . Zabransky_spline . Func ( arg_1 , arg_2 ) elif arg_3 == ZABRANSKY_SPLINE_C : return arg_0 . Zabransky_spline_iso . Func ( arg_1 , arg_2 ) elif arg_3 == ZABRANSKY_SPLINE_SAT : return arg_0 . Zabransky_spline_sat . Func ( arg_1 , arg_2 ) elif arg_3 == ZABRANSKY_QUASIPOLYNOMIAL : return arg_0 . Zabransky_quasipolynomial . Func ( arg_1 , arg_2 ) elif arg_3 == ZABRANSKY_QUASIPOLYNOMIAL_C : return arg_0 . Zabransky_quasipolynomial_iso . Func ( arg_1 , arg_2 ) elif arg_3 == ZABRANSKY_QUASIPOLYNOMIAL_SAT : return arg_0 . Zabransky_quasipolynomial_sat . Func ( arg_1 , arg_2 ) elif arg_3 == POLING_CONST : return arg_0 . POLING_constant * log ( arg_2 \/ arg_1 ) elif arg_3 == CRCSTD : return arg_0 . CRCSTD_constant * log ( arg_2 \/ arg_1 ) elif arg_3 == DADGOSTAR_SHAW : arg_4 = ( Dadgostar_Shaw_integral_over_T ( arg_2 , arg_0 . similarity_variable ) - Dadgostar_Shaw_integral_over_T ( arg_1 , arg_0 . similarity_variable ) ) return property_mass_to_molar ( arg_4 , arg_0 . MW ) elif arg_3 in arg_0 . tabular_data or arg_3 == COOLPROP or arg_3 in [ ROWLINSON_POLING , ROWLINSON_BONDI ] : return float ( quad ( lambda T : arg_0 . calculate ( T , arg_3 ) \/ T , arg_1 , arg_2 ) [ 0 ] ) else : raise Exception ( 'Method not valid' )","id_":253399,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/heat_capacity.py#L2142-L2188","negative":"Add a node to run on failure.\n\n =====API DOCS=====\n Add a node to run on failure.\n\n :param parent: Primary key of parent node to associate failure node to.\n :type parent: int\n :param child: Primary key of child node to be associated.\n :type child: int\n :param `**kwargs`: Fields used to create child node if ``child`` is not provided.\n :returns: Dictionary of only one key \"changed\", which indicates whether the association succeeded.\n :rtype: dict\n\n =====API DOCS====="} {"query":"Find an existing profile dir by profile name return its ProfileDir .","positive":"def Func ( arg_0 , arg_1 , arg_2 = u'default' , arg_3 = None ) : arg_4 = u'profile_' + arg_2 arg_5 = [ os . getcwdu ( ) , arg_1 ] for arg_6 in arg_5 : profile_dir = os . path . join ( arg_6 , arg_4 ) if os . path . 
isdir ( profile_dir ) : return arg_0 ( location = profile_dir , arg_3 = arg_3 ) else : raise ProfileDirError ( 'Profile directory not found in paths: %s' % arg_4 )","id_":253400,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/profiledir.py#L184-L209","negative":"Query for null or blank field."} {"query":"A list of row indices to remove . There are two caveats . First this is a potentially slow operation . Second pattern indices will shift if patterns before them are removed .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = numpy . array ( arg_1 ) arg_0 . _categoryList = numpy . delete ( numpy . array ( arg_0 . _categoryList ) , arg_2 ) . tolist ( ) if arg_0 . fixedCapacity : arg_0 . _categoryRecencyList = numpy . delete ( numpy . array ( arg_0 . _categoryRecencyList ) , arg_2 ) . tolist ( ) for arg_5 in reversed ( arg_1 ) : arg_0 . _partitionIdList . pop ( arg_5 ) arg_0 . _rebuildPartitionIdMap ( arg_0 . _partitionIdList ) if arg_0 . useSparseMemory : for arg_6 in arg_1 [ : : - 1 ] : arg_0 . _Memory . deleteRow ( arg_6 ) else : arg_0 . _M = numpy . delete ( arg_0 . _M , arg_2 , 0 ) arg_8 = len ( arg_1 ) arg_9 = arg_0 . _numPatterns - arg_8 if arg_0 . useSparseMemory : if arg_0 . _Memory is not None : assert arg_0 . _Memory . nRows ( ) == arg_9 else : assert arg_0 . _M . shape [ 0 ] == arg_9 assert len ( arg_0 . _categoryList ) == arg_9 arg_0 . _numPatterns -= arg_8 return arg_8","id_":253401,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/knn_classifier.py#L349-L393","negative":"Start the timer."} {"query":"Attempt to detect if a device at this address is present on the I2C bus . Will send out the device s address for writing and verify an ACK is received . Returns true if the ACK is received and false if not .","positive":"def Func ( arg_0 ) : arg_0 . _idle ( ) arg_0 . _transaction_start ( ) arg_0 . _i2c_start ( ) arg_0 . _i2c_write_bytes ( [ arg_0 . _address_byte ( False ) ] ) arg_0 . _i2c_stop ( ) arg_1 = arg_0 . _transaction_end ( ) if len ( arg_1 ) != 1 : raise RuntimeError ( 'Expected 1 response byte but received {0} byte(s).' . format ( len ( arg_1 ) ) ) return ( ( arg_1 [ 0 ] & 0x01 ) == 0x00 )","id_":253402,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_GPIO\/blob\/a92a23d6b5869663b2bc1ccf78bb11585076a9c4\/Adafruit_GPIO\/FT232H.py#L748-L761","negative":"Get a temp filename for atomic download."} {"query":"This just uncompresses the sqlitecurve . Should be independent of OS .","positive":"def Func ( arg_0 , arg_1 = False ) : arg_2 = arg_0 . replace ( '.gz' , '' ) try : if os . path . exists ( arg_2 ) and not arg_1 : return arg_2 else : with gzip . open ( arg_0 , 'rb' ) as infd : with open ( arg_2 , 'wb' ) as outfd : shutil . copyfileobj ( infd , outfd ) if os . path . exists ( arg_2 ) : return arg_2 except Exception as e : return None","id_":253403,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/hatsurveys\/hatlc.py#L456-L479","negative":"Get course's duration as a timedelta.\n\n Arguments:\n obj (CourseOverview): CourseOverview object\n\n Returns:\n (timedelta): Duration of a course."} {"query":"Get the Marathon networking mode for the app .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . get ( 'networks' ) if arg_1 : return arg_1 [ - 1 ] . get ( 'mode' , 'container' ) arg_2 = arg_0 . 
get ( 'container' ) if arg_2 is not None and 'docker' in arg_2 : arg_3 = arg_2 [ 'docker' ] . get ( 'network' ) if arg_3 == 'USER' : return 'container' elif arg_3 == 'BRIDGE' : return 'container\/bridge' return 'container' if _is_legacy_ip_per_task ( arg_0 ) else 'host'","id_":253404,"task_name":"https:\/\/github.com\/praekeltfoundation\/marathon-acme\/blob\/b1b71e3dde0ba30e575089280658bd32890e3325\/marathon_acme\/marathon_util.py#L34-L53","negative":"Remove all binary files in the adslib directory."} {"query":"Return the number of times b occurs in a .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 0 for arg_3 in arg_0 : if arg_3 == arg_1 : arg_2 += 1 return arg_2","id_":253405,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/ouroboros\/operator.py#L153-L159","negative":"Update repository from its remote.\n\n Calling this method, the repository will be synchronized with\n the remote repository using 'fetch' command for 'heads' refs.\n Any commit stored in the local copy will be removed; refs\n will be overwritten.\n\n :raises RepositoryError: when an error occurs updating the\n repository"} {"query":"Implement a lookup for object level permissions . Basically the same as ModelAdmin . has_delete_permission but also passes the obj parameter in .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if settings . TREE_EDITOR_OBJECT_PERMISSIONS : arg_3 = arg_0 . opts arg_4 = arg_1 . user . has_perm ( arg_3 . app_label + '.' + arg_3 . get_delete_permission ( ) , arg_2 ) else : arg_4 = True return arg_4 and super ( TreeEditor , arg_0 ) . Func ( arg_1 , arg_2 )","id_":253406,"task_name":"https:\/\/github.com\/20tab\/twentytab-treeeditor\/blob\/f89d459b1348961880cd488df95690e68529f96b\/treeeditor\/admin.py#L363-L374","negative":"Stop the timer\n\n Returns:\n The time the timer was stopped"} {"query":"Removes the topology from the local cache .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] for arg_4 in arg_0 . topologies : if ( arg_4 . name == arg_1 and arg_4 . state_manager_name == arg_2 ) : if ( arg_1 , arg_2 ) in arg_0 . topologyInfos : arg_0 . topologyInfos . pop ( ( arg_1 , arg_2 ) ) else : arg_3 . append ( arg_4 ) arg_0 . topologies = arg_3","id_":253407,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/tracker.py#L244-L258","negative":"Convenience function to efficiently construct a MultivariateNormalDiag."} {"query":"Wrapper around json . loads .","positive":"def Func ( arg_0 , arg_1 = \"JSON\" , arg_2 = arg_3 , arg_4 = None ) : try : arg_5 = json . loads ( arg_0 ) except ValueError as err : arg_6 = repr ( arg_0 ) if len ( arg_6 ) > 35 : arg_6 = arg_6 [ : 35 ] + \" ...\" else : arg_6 = arg_0 raise arg_2 ( \"Unable to parse {0}: {1} ({2})\" . format ( arg_1 , err , arg_6 ) ) if arg_4 : arg_5 = arg_4 . validate ( arg_5 , arg_1 = arg_1 , arg_2 = arg_2 ) return arg_5","id_":253408,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/utils\/__init__.py#L52-L71","negative":"Revoke the token and remove the cookie."} {"query":"Set train summary . A TrainSummary object contains information necessary for the optimizer to know how often the logs are recorded where to store the logs and how to retrieve them etc . For details refer to the docs of TrainSummary .","positive":"def Func ( arg_0 , arg_1 ) : callBigDlFunc ( arg_0 . 
bigdl_type , \"setTrainSummary\" , arg_0 . value , arg_1 ) return arg_0","id_":253409,"task_name":"https:\/\/github.com\/intel-analytics\/BigDL\/blob\/e9c19788285986ab789a2e2998f9a85d7524779f\/pyspark\/bigdl\/optim\/optimizer.py#L768-L780","negative":"Disconnect internal signals so unit can be reused by parent unit"} {"query":"r Dictionary of atomic fractions for each atom in the mixture .","positive":"def Func ( arg_0 ) : arg_1 = dict ( ) for arg_2 , arg_3 in zip ( arg_0 . zs , arg_0 . atomss ) : for arg_4 , arg_5 in arg_3 . iteritems ( ) : if arg_4 in arg_1 : arg_1 [ arg_4 ] += arg_2 * arg_5 else : arg_1 [ arg_4 ] = arg_2 * arg_5 arg_6 = sum ( arg_1 . values ( ) ) return { arg_4 : arg_7 \/ arg_6 for arg_4 , arg_7 in arg_1 . iteritems ( ) }","id_":253410,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/mixture.py#L966-L983","negative":"delete a backend, and update the secrets file"} {"query":"Delete all affected samples for a case from MatchMaker","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] if not arg_1 or not arg_2 : return 'Please check that Matchmaker connection parameters are valid' for arg_4 in arg_0 [ 'mme_submission' ] [ 'patients' ] : arg_5 = arg_4 [ 'id' ] arg_6 = '' . join ( [ arg_1 , '\/patient\/delete\/' , arg_5 ] ) arg_7 = matchmaker_request ( arg_6 = arg_6 , token = arg_2 , method = 'DELETE' , ) arg_3 . append ( { 'patient_id' : arg_5 , 'message' : arg_7 . get ( 'message' ) , 'status_code' : arg_7 . get ( 'status_code' ) } ) return arg_3","id_":253411,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/controllers.py#L695-L730","negative":"Deploys a version to an environment"} {"query":"Return a SDK client initialized with auth file .","positive":"def Func ( arg_0 , arg_1 = None , ** arg_2 ) : arg_1 = arg_1 or os . environ [ 'AZURE_AUTH_LOCATION' ] with io . open ( arg_1 , 'r' , encoding = 'utf-8-sig' ) as auth_fd : arg_3 = json . load ( auth_fd ) return get_client_from_json_dict ( arg_0 , arg_3 , ** arg_2 )","id_":253412,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-common\/azure\/common\/client_factory.py#L186-L244","negative":"Find matching q-value for each score in 'scores'"} {"query":"filter alist by taking _out_ all glyph names that are in filter","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = 0 arg_3 = [ ] for arg_4 in arg_0 : try : arg_5 = arg_1 . index ( arg_4 ) except : arg_3 . append ( arg_4 ) return arg_3","id_":253413,"task_name":"https:\/\/github.com\/aholkner\/bacon\/blob\/edf3810dcb211942d392a8637945871399b0650d\/native\/Vendor\/FreeType\/src\/tools\/glnames.py#L5171-L5183","negative":"Wrapper function for scoop, that does not configure logging"} {"query":"Convert journal name to Inspire s short form .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 : return '' , '' if not arg_1 : return arg_0 , '' if len ( arg_0 ) < 2 : return arg_0 , '' arg_2 = '' if ( arg_0 [ - 1 ] <= 'Z' and arg_0 [ - 1 ] >= 'A' ) and ( arg_0 [ - 2 ] == '.' or arg_0 [ - 2 ] == ' ' ) : arg_2 += arg_0 [ - 1 ] arg_0 = arg_0 [ : - 1 ] arg_0 = arg_0 . strip ( ) if arg_0 . upper ( ) in arg_1 : arg_0 = arg_1 [ arg_0 . upper ( ) ] . strip ( ) elif arg_0 in arg_1 : arg_0 = arg_1 [ arg_0 ] . strip ( ) elif '.' in arg_0 : arg_3 = arg_0 . replace ( '. ' , ' ' ) arg_3 = arg_3 . replace ( '.' , ' ' ) . strip ( ) . 
upper ( ) if arg_3 in arg_1 : arg_0 = arg_1 [ arg_3 ] . strip ( ) arg_0 = arg_0 . replace ( '. ' , '.' ) return arg_0 , arg_2","id_":253414,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/utils.py#L160-L186","negative":"Add noise to the given input.\n\n Parameters:\n -----------------------------------------------\n input: the input to add noise to\n noise: how much noise to add\n doForeground: If true, turn off some of the 1 bits in the input\n doBackground: If true, turn on some of the 0 bits in the input"} {"query":"Converts xml response to service bus metrics objects","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = minidom . parseString ( arg_0 ) arg_3 = arg_1 ( ) arg_4 = dict ( vars ( arg_3 ) ) for arg_5 in _MinidomXmlToObject . get_children_from_path ( arg_2 , 'entry' ) : for arg_6 in _MinidomXmlToObject . get_children_from_path ( arg_5 , 'content' , 'properties' ) : for arg_7 in arg_4 : arg_8 = _get_serialization_name ( arg_7 ) arg_9 = _MinidomXmlToObject . get_child_nodes ( arg_6 , arg_8 ) if not arg_9 : continue arg_10 = arg_9 [ 0 ] arg_11 = arg_10 . getAttributeNS ( \"http:\/\/schemas.microsoft.com\/ado\/2007\/08\/dataservices\/metadata\" , 'type' ) arg_12 = _ServiceBusManagementXmlSerializer . odata_converter ( arg_10 . firstChild . nodeValue , arg_11 ) setattr ( arg_3 , arg_7 , arg_12 ) for arg_7 , arg_13 in _MinidomXmlToObject . get_entry_properties_from_node ( arg_5 , include_id = True , use_title_as_id = False ) . items ( ) : if arg_7 in arg_4 : continue setattr ( arg_3 , arg_7 , arg_13 ) return arg_3","id_":253415,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/_serialization.py#L1574-L1642","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."} {"query":"set up ssh tunnels if needed .","positive":"def Func ( arg_0 ) : if not arg_0 . sshserver and not arg_0 . sshkey : return if arg_0 . sshkey and not arg_0 . sshserver : arg_0 . sshserver = arg_0 . ip arg_0 . ip = LOCALHOST arg_3 = dict ( arg_2 = arg_0 . ip , arg_5 = arg_0 . shell_port , arg_6 = arg_0 . iopub_port , arg_7 = arg_0 . stdin_port , arg_8 = arg_0 . hb_port ) arg_0 . log . info ( \"Forwarding connections to %s via %s\" % ( arg_0 . ip , arg_0 . sshserver ) ) arg_0 . ip = LOCALHOST try : arg_4 = tunnel_to_kernel ( arg_3 , arg_0 . sshserver , arg_0 . sshkey ) except : arg_0 . log . error ( \"Could not setup tunnels\" , exc_info = True ) arg_0 . exit ( 1 ) arg_0 . shell_port , arg_0 . iopub_port , arg_0 . stdin_port , arg_0 . hb_port = arg_4 arg_9 = arg_0 . connection_file arg_10 , arg_11 = os . path . splitext ( arg_9 ) arg_10 = os . path . basename ( arg_10 ) arg_0 . connection_file = os . path . basename ( arg_10 ) + '-ssh' + arg_11 arg_0 . log . 
critical ( \"To connect another client via this tunnel, use:\" ) arg_0 . log . critical ( \"--existing %s\" % arg_0 . connection_file )","id_":253416,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/consoleapp.py#L272-L308","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"A simplified URL to be used for caching the given query .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = { 'Operation' : arg_0 . Operation , 'Service' : \"AWSECommerceService\" , 'Version' : arg_0 . Version , } arg_2 . update ( arg_1 ) arg_3 = SERVICE_DOMAINS [ arg_0 . Region ] [ 0 ] return \"http:\/\/\" + arg_3 + \"\/onca\/xml?\" + _quote_query ( arg_2 )","id_":253417,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/bottlenose\/api.py#L168-L179","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"register an options provider","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : assert arg_1 . priority <= 0 , \"provider's priority can't be >= 0\" for arg_3 in range ( len ( arg_0 . options_providers ) ) : if arg_1 . priority > arg_0 . options_providers [ arg_3 ] . priority : arg_0 . options_providers . insert ( arg_3 , arg_1 ) break else : arg_0 . options_providers . append ( arg_1 ) arg_4 = [ option for option in arg_1 . options if \"group\" not in option [ 1 ] ] arg_5 = getattr ( arg_1 , \"option_groups\" , ( ) ) if arg_2 and arg_4 : arg_0 . add_option_group ( arg_1 . name . upper ( ) , arg_1 . __doc__ , arg_4 , arg_1 , ) else : for arg_6 , arg_7 in arg_4 : arg_0 . add_optik_option ( arg_1 , arg_0 . cmdline_parser , arg_6 , arg_7 ) for arg_8 , arg_9 in arg_5 : arg_8 = arg_8 . upper ( ) arg_10 = [ option for option in arg_1 . options if option [ 1 ] . get ( \"group\" , \"\" ) . upper ( ) == arg_8 ] arg_0 . add_option_group ( arg_8 , arg_9 , arg_10 , arg_1 )","id_":253418,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/config.py#L540-L570","negative":"Apply a quick patch-up to a Filterbank header by overwriting a header value\n\n\n Args:\n filename (str): name of file to open and fix. WILL BE MODIFIED.\n keyword (stt): header keyword to update\n new_value (long, double, angle or string): New value to write.\n\n Notes:\n This will overwrite the current value of the blimpy with a desired\n 'fixed' version. Note that this has limited support for patching\n string-type values - if the length of the string changes, all hell will\n break loose."} {"query":"Converts an Endpoint to a JSON endpoint dict .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = { } if arg_1 . service_name : arg_3 [ 'serviceName' ] = arg_1 . service_name elif arg_2 : arg_3 [ 'serviceName' ] = \"\" if arg_1 . port and arg_1 . port != 0 : arg_3 [ 'port' ] = arg_1 . port if arg_1 . ipv4 is not None : arg_3 [ 'ipv4' ] = arg_1 . ipv4 if arg_1 . ipv6 is not None : arg_3 [ 'ipv6' ] = arg_1 . 
ipv6 return arg_3","id_":253419,"task_name":"https:\/\/github.com\/Yelp\/py_zipkin\/blob\/0944d9a3fb1f1798dbb276694aeed99f2b4283ba\/py_zipkin\/encoding\/_encoders.py#L159-L185","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"write a single command with variable number of arguments . after the command the device must return ACK","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_4 = arg_3 . setdefault ( 'ok' , False ) arg_0 . _wakeup ( ) if arg_2 : arg_1 = \"%s %s\" % ( arg_1 , ' ' . join ( str ( a ) for a in arg_2 ) ) for arg_5 in xrange ( 3 ) : log . info ( \"send: \" + arg_1 ) arg_0 . port . write ( arg_1 + '\\n' ) if arg_4 : arg_6 = arg_0 . port . read ( len ( arg_0 . OK ) ) log_raw ( 'read' , arg_6 ) if arg_6 == arg_0 . OK : return else : arg_6 = arg_0 . port . read ( len ( arg_0 . ACK ) ) log_raw ( 'read' , arg_6 ) if arg_6 == arg_0 . ACK : return raise NoDeviceException ( 'Can not access weather station' )","id_":253420,"task_name":"https:\/\/github.com\/cmcginty\/PyWeather\/blob\/8c25d9cd1fa921e0a6e460d523656279cac045cb\/weather\/stations\/davis.py#L395-L418","negative":"Return the thresholded z-scored `icc`."} {"query":"Get top centrality dictionary .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 [ arg_4 ] = 30 ) -> Mapping [ BaseEntity , arg_4 ] : arg_5 = nx . betweenness_centrality ( arg_0 ) arg_6 = Counter ( arg_5 ) return dict ( arg_6 . most_common ( arg_2 ) )","id_":253421,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/summary\/node_properties.py#L125-L129","negative":"Show entire demo on screen, block by block"} {"query":"counterpart to _update_hasher","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . hexdigest ( ) arg_4 = _convert_hexstr_base ( arg_3 , arg_2 ) arg_5 = arg_4 [ : arg_1 ] return arg_5","id_":253422,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_hash.py#L698-L706","negative":"Return all available input formats.\n\n Returns\n -------\n formats : list\n all available input formats"} {"query":"Lookup observations by geo coordinates .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = requests . get ( API_ENDPOINT_GEO % ( arg_0 , arg_1 ) , params = { 'token' : arg_2 } ) if arg_3 . status_code == 200 and arg_3 . json ( ) [ \"status\" ] == \"ok\" : return parse_observation_response ( arg_3 . json ( ) [ \"data\" ] ) return { }","id_":253423,"task_name":"https:\/\/github.com\/valentinalexeev\/pwaqi\/blob\/81a1fa1ad87be7ba015c1cb07c52c7760ca99d8c\/pwaqi\/__init__.py#L31-L41","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Retrieve the complete information of the given stream .","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : arg_3 = c_int ( ) arg_4 = lib . lsl_get_fullFunc ( arg_0 . 
obj , c_double ( arg_1 ) , byref ( arg_3 ) ) handle_error ( arg_3 ) return StreamInfo ( handle = arg_4 )","id_":253424,"task_name":"https:\/\/github.com\/labstreaminglayer\/liblsl-Python\/blob\/1ff6fe2794f8dba286b7491d1f7a4c915b8a0605\/pylsl\/pylsl.py#L688-L705","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Add platform specific checks","positive":"def Func ( arg_0 ) : if not arg_0 . is_valid : raise PolyaxonDeploymentConfigError ( 'Deployment type `{}` not supported' . format ( arg_0 . deployment_type ) ) Func = False if arg_0 . is_kubernetes : Func = arg_0 . check_for_kubernetes ( ) elif arg_0 . is_docker_compose : Func = arg_0 . check_for_docker_compose ( ) elif arg_0 . is_docker : Func = arg_0 . check_for_docker ( ) elif arg_0 . is_heroku : Func = arg_0 . check_for_heroku ( ) if not Func : raise PolyaxonDeploymentConfigError ( 'Deployment `{}` is not valid' . format ( arg_0 . deployment_type ) )","id_":253425,"task_name":"https:\/\/github.com\/polyaxon\/polyaxon-cli\/blob\/a7f5eed74d4d909cad79059f3c21c58606881449\/polyaxon_cli\/managers\/deploy.py#L119-L135","negative":"r'''Method to calculate heat capacity of a solid at temperature `T`\n with a given method.\n\n This method has no exception handling; see `T_dependent_property`\n for that.\n\n Parameters\n ----------\n T : float\n Temperature at which to calculate heat capacity, [K]\n method : str\n Name of the method to use\n\n Returns\n -------\n Cp : float\n Heat capacity of the solid at T, [J\/mol\/K]"} {"query":"Load plugins in nose . plugins . builtin","positive":"def Func ( arg_0 ) : from nose . plugins import builtin for arg_1 in builtin . plugins : arg_0 . addPlugin ( arg_1 ( ) ) super ( BuiltinPluginManager , arg_0 ) . Func ( )","id_":253426,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/plugins\/manager.py#L409-L415","negative":"Returns a string contains start time and the secondary training job status message.\n\n :param job_description: Returned response from DescribeTrainingJob call\n :type job_description: dict\n :param prev_description: Previous job description from DescribeTrainingJob call\n :type prev_description: dict\n\n :return: Job status string to be printed."} {"query":"Custom version of splitext that doesn t perform splitext on directories","positive":"def Func ( arg_0 ) : return ( ( arg_0 , '' ) if os . path . isdir ( arg_0 ) else os . path . 
splitext ( arg_0 ) )","id_":253427,"task_name":"https:\/\/github.com\/jaraco\/jaraco.path\/blob\/39e4da09f325382e21b0917b1b5cd027edce8728\/jaraco\/path.py#L46-L50","negative":"Returns grade data for the given account_id and term_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/analytics.html#method.analytics_api.department_grades"} {"query":"Stores value in memory as a big endian","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 1 ) : arg_0 . memory . write_BE ( arg_1 , arg_2 , arg_3 ) for arg_4 in range ( arg_3 ) : arg_0 . _publish ( 'did_evm_write_memory' , arg_1 + arg_4 , Operators . EXTRACT ( arg_2 , ( arg_3 - arg_4 - 1 ) * 8 , 8 ) )","id_":253428,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/evm.py#L1101-L1105","negative":"Logs into the specified ftp server and returns connector."} {"query":"Translates round trip symbols to sectors .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . copy ( ) arg_2 . symbol = arg_2 . symbol . apply ( lambda x : arg_1 . get ( x , 'No Sector Mapping' ) ) arg_2 = arg_2 . dropna ( axis = 0 ) return arg_2","id_":253429,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/round_trips.py#L322-L346","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Generates a list of active integrated channels for active customers filtered from the given options .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . get_channel_classes ( arg_1 . get ( 'channel' ) ) arg_3 = { 'active' : True , 'enterprise_customer__active' : True , } arg_4 = arg_0 . get_enterprise_customer ( arg_1 . get ( 'enterprise_customer' ) ) if arg_4 : arg_3 [ 'enterprise_customer' ] = arg_4 for arg_5 in arg_2 : for arg_6 in arg_5 . objects . filter ( ** arg_3 ) : yield arg_6","id_":253430,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/integrated_channel\/management\/commands\/__init__.py#L54-L73","negative":"Build an enum statement"} {"query":"Computes the temporal coverage of each source feed","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = _n_gtfs_sources ( arg_0 ) [ 0 ] arg_3 = None arg_4 = None if arg_2 > 1 : for arg_5 in range ( arg_2 ) : arg_6 = \"feed_\" + str ( arg_5 ) + \"_\" arg_7 = arg_6 + \"calendar_start\" arg_8 = arg_6 + \"calendar_end\" arg_9 = arg_0 . conn . cursor ( ) . execute ( 'SELECT min(date), max(date) FROM trips, days ' 'WHERE trips.trip_I = days.trip_I AND trip_id LIKE ?;' , ( arg_6 + '%' , ) ) . fetchone ( ) arg_1 [ arg_7 ] = arg_9 [ 0 ] arg_1 [ arg_8 ] = arg_9 [ 1 ] if arg_9 [ 0 ] is not None and arg_9 [ 1 ] is not None : if not arg_3 and not arg_4 : arg_3 = arg_9 [ 0 ] arg_4 = arg_9 [ 1 ] else : if arg_0 . get_day_start_ut ( arg_9 [ 0 ] ) > arg_0 . get_day_start_ut ( arg_3 ) : arg_3 = arg_9 [ 0 ] if arg_0 . get_day_start_ut ( arg_9 [ 1 ] ) < arg_0 . 
get_day_start_ut ( arg_4 ) : arg_4 = arg_9 [ 1 ] arg_1 [ \"latest_feed_start_date\" ] = arg_3 arg_1 [ \"earliest_feed_end_date\" ] = arg_4 else : arg_1 [ \"latest_feed_start_date\" ] = arg_1 [ \"start_date\" ] arg_1 [ \"earliest_feed_end_date\" ] = arg_1 [ \"end_date\" ] return arg_1","id_":253431,"task_name":"https:\/\/github.com\/CxAalto\/gtfspy\/blob\/bddba4b74faae6c1b91202f19184811e326547e5\/gtfspy\/stats.py#L357-L399","negative":"Set the data type of the cluster.\n\n Parameters:\n -----------\n cluster_dtype : numpy.dtype or equivalent\n Defines the dtype of the cluster array."} {"query":"Estimate the photometric uncertainties on each data point following Equation A . 2 of The Paper . Based on the kepcal package of Dan Foreman - Mackey .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . photometry_array . T arg_1 \/= np . median ( arg_1 , axis = 1 ) [ : , None ] arg_2 = np . median ( arg_1 , axis = 0 ) arg_3 , arg_4 = np . shape ( arg_1 ) arg_5 = np . empty ( ( arg_3 , 4 ) ) arg_6 = arg_0 . qs . astype ( int ) for arg_7 in range ( 4 ) : arg_5 [ : , arg_7 ] = np . median ( ( arg_1 \/ arg_2 ) [ : , arg_6 == arg_7 ] , axis = 1 ) arg_8 = ( arg_1 - arg_5 [ : , arg_6 ] * arg_2 ) ** 2 arg_9 = arg_5 [ : , arg_6 ] arg_10 = arg_9 * arg_2 [ None , : ] arg_11 = np . log ( np . nanmedian ( arg_8 , axis = 0 ) ) arg_12 = np . log ( 0.1 * np . nanmedian ( np . abs ( np . diff ( arg_1 , axis = 1 ) ) ) ) arg_13 = np . sqrt ( np . exp ( 2 * ( arg_12 \/ arg_10 ) ) + arg_9 ** 2 * np . exp ( arg_11 ) [ None , : ] ) arg_0 . modeled_uncert = arg_13 arg_0 . target_uncert = arg_13 [ 0 ]","id_":253432,"task_name":"https:\/\/github.com\/benmontet\/f3\/blob\/b2e1dc250e4e3e884a54c501cd35cf02d5b8719e\/f3\/photometry.py#L585-L613","negative":"Gets the level for the variant.\n\n :param int level: the current variant level\n :param int variant: the value for this level if variant\n\n :returns: a level for the object and one for the function\n :rtype: int * int"} {"query":"Static internal method to work out rotation to create the passed in qubit from the zero vector .","positive":"def Func ( arg_0 ) : [ arg_1 , arg_2 ] = arg_0 arg_1 = complex ( arg_1 ) arg_2 = complex ( arg_2 ) arg_3 = np . absolute ( arg_1 ) arg_4 = float ( np . sqrt ( arg_3 ** 2 + np . absolute ( arg_2 ) ** 2 ) ) if arg_4 < _EPS : arg_5 = 0 arg_6 = 0 arg_4 = 0 arg_7 = 0 else : arg_5 = float ( 2 * np . arccos ( arg_3 \/ arg_4 ) ) arg_8 = np . angle ( arg_1 ) arg_9 = np . angle ( arg_2 ) arg_7 = arg_8 + arg_9 arg_6 = arg_9 - arg_8 return arg_4 * np . exp ( 1.J * arg_7 \/ 2 ) , arg_5 , arg_6","id_":253433,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/extensions\/initializer.py#L151-L174","negative":"Cycles through notifications with latest results from data feeds."} {"query":"Return a Python AST Node for a fn expression .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 [ arg_6 ] = None , arg_7 : arg_5 [ arg_8 ] = None , ) -> GeneratedPyAST : assert arg_2 . op == NodeOp . FN if len ( arg_2 . methods ) == 1 : return __single_arityFunc ( arg_0 , arg_2 , next ( iter ( arg_2 . methods ) ) , arg_4 = arg_4 , arg_7 = arg_7 ) else : return __multi_arityFunc ( arg_0 , arg_2 , arg_2 . 
methods , arg_4 = arg_4 , arg_7 = arg_7 )","id_":253434,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/generator.py#L1213-L1228","negative":"Whether a connection can be established between those two meshes."} {"query":"Push a sample into the outlet .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0.0 , arg_3 = True ) : if len ( arg_1 ) == arg_0 . channel_count : if arg_0 . channel_format == cf_string : arg_1 = [ v . encode ( 'utf-8' ) for v in arg_1 ] handle_error ( arg_0 . do_Func ( arg_0 . obj , arg_0 . sample_type ( * arg_1 ) , c_double ( arg_2 ) , c_int ( arg_3 ) ) ) else : raise ValueError ( \"length of the data must correspond to the \" \"stream's channel count.\" )","id_":253435,"task_name":"https:\/\/github.com\/labstreaminglayer\/liblsl-Python\/blob\/1ff6fe2794f8dba286b7491d1f7a4c915b8a0605\/pylsl\/pylsl.py#L430-L455","negative":"Reassemble a Binder object coming out of the database."} {"query":"Define a write - only property that in addition to the given setter function also provides a setter decorator defined as the property s getter function .","positive":"def Func ( arg_0 ) : def fget ( arg_1 ) : def inner ( arg_2 ) : arg_0 ( arg_1 , arg_2 ) def outer ( arg_2 = None ) : if arg_2 : inner ( arg_2 ) else : return inner return outer arg_3 = arg_0 . __doc__ return property ( fget , arg_0 , None , arg_3 )","id_":253436,"task_name":"https:\/\/github.com\/elliterate\/capybara.py\/blob\/0c6ae449cc37e4445ec3cd6af95674533beedc6c\/capybara\/utils.py#L181-L231","negative":"Returns a specific volume"} {"query":"Writes the assembly to a new file .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : logger . debug ( \"Writing the filtered assembly into: {}\" . format ( arg_1 ) ) with open ( arg_1 , \"w\" ) as fh : for arg_3 , arg_4 in arg_0 . contigs . items ( ) : if arg_3 not in arg_0 . filtered_ids and arg_2 : fh . write ( \">{}_{}\\\\n{}\\\\n\" . format ( arg_0 . sample , arg_4 [ \"header\" ] , arg_4 [ \"sequence\" ] ) )","id_":253437,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/templates\/process_viral_assembly.py#L407-L429","negative":"Initialize the BLE provider. Must be called once before any other\n calls are made to the provider."} {"query":"Bundle the app and return the static url to the bundle .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = arg_0 . get_paths ( ) arg_3 = arg_0 . opts if arg_0 . system . _has_jspm_log ( ) : arg_0 . command += ' --log {log}' arg_3 . setdefault ( 'log' , 'err' ) if arg_3 . get ( 'minify' ) : arg_0 . command += ' --minify' if arg_3 . get ( 'skip_source_maps' ) : arg_0 . command += ' --skip-source-maps' try : arg_4 = arg_0 . command . format ( app = arg_0 . app , arg_1 = arg_1 , ** arg_3 ) arg_5 = subprocess . Popen ( arg_4 , shell = True , cwd = arg_0 . system . cwd , stdout = arg_0 . stdout , stdin = arg_0 . stdin , stderr = arg_0 . stderr ) arg_6 , arg_7 = arg_5 . communicate ( ) if arg_7 and arg_0 . system . _has_jspm_log ( ) : arg_8 = 'Could not Func \\'%s\\': \\n%s' logger . warn ( arg_8 , arg_0 . app , arg_7 ) raise BundleError ( arg_8 % ( arg_0 . app , arg_7 ) ) if arg_6 . strip ( ) : logger . info ( arg_6 ) except ( IOError , OSError ) as e : if isinstance ( e , BundleError ) : raise raise BundleError ( 'Unable to apply %s (%r): %s' % ( arg_0 . __class__ . __name__ , arg_4 , e ) ) else : if not arg_3 . 
get ( 'sfx' ) : arg_9 = find_sourcemap_comment ( arg_1 ) with open ( arg_1 , 'a' ) as of : of . write ( \"\\nSystem.import('{app}{ext}');\\n{sourcemap}\" . format ( app = arg_0 . app , ext = '.js' if arg_0 . needs_ext ( ) else '' , arg_9 = arg_9 if arg_9 else '' , ) ) return arg_2","id_":253438,"task_name":"https:\/\/github.com\/sergei-maertens\/django-systemjs\/blob\/efd4a3862a39d9771609a25a5556f36023cf6e5c\/systemjs\/base.py#L140-L185","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Load json from file handle .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , str ) : return arg_0 ( json . loads ( arg_1 ) ) else : return arg_0 ( json . load ( arg_1 ) )","id_":253439,"task_name":"https:\/\/github.com\/bprinty\/gems\/blob\/3ff76407af0e71621dada744cd964611e998699c\/gems\/datatypes.py#L101-L115","negative":"Validates the model using a series of checks on bits of the data."} {"query":"Main script for pyconfig command .","positive":"def Func ( ) : arg_0 = argparse . ArgumentParser ( description = \"Helper for working with \" \"pyconfigs\" ) arg_1 = arg_0 . add_mutually_exclusive_group ( ) arg_1 . add_argument ( '-f' , '--filename' , help = \"parse an individual file or directory\" , metavar = 'F' ) arg_1 . add_argument ( '-m' , '--module' , help = \"parse a package or module, recursively looking inside it\" , metavar = 'M' ) arg_0 . add_argument ( '-v' , '--view-call' , help = \"show the actual pyconfig call made (default: show namespace)\" , action = 'store_true' ) arg_0 . add_argument ( '-l' , '--load-configs' , help = \"query the currently set value for each key found\" , action = 'store_true' ) arg_2 = arg_0 . add_mutually_exclusive_group ( ) arg_2 . add_argument ( '-a' , '--all' , help = \"show keys which don't have defaults set\" , action = 'store_true' ) arg_2 . add_argument ( '-k' , '--only-keys' , help = \"show a list of discovered keys without values\" , action = 'store_true' ) arg_0 . add_argument ( '-n' , '--natural-sort' , help = \"sort by filename and line (default: alphabetical by key)\" , action = 'store_true' ) arg_0 . add_argument ( '-s' , '--source' , help = \"show source annotations (implies --natural-sort)\" , action = 'store_true' ) arg_0 . add_argument ( '-c' , '--color' , help = \"toggle output colors (default: %s)\" % bool ( pygments ) , action = 'store_const' , default = bool ( pygments ) , const = ( not bool ( pygments ) ) ) arg_3 = arg_0 . parse_args ( ) if arg_3 . color and not pygments : _error ( \"Pygments is required for color output.\\n\" \" pip install pygments\" ) if arg_3 . module : _handle_module ( arg_3 ) if arg_3 . filename : _handle_file ( arg_3 )","id_":253440,"task_name":"https:\/\/github.com\/shakefu\/pyconfig\/blob\/000cb127db51e03cb4070aae6943e956193cbad5\/pyconfig\/scripts.py#L21-L68","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Upload a file to s3 .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . solid_config [ 'bucket' ] arg_3 = arg_0 . solid_config [ 'key' ] arg_0 . resources . s3 . put_object ( Bucket = arg_2 , Body = arg_1 . read ( ) , Key = arg_3 , ** ( arg_0 . solid_config . 
get ( 'kwargs' ) or { } ) ) yield Result ( arg_2 , 'bucket' ) yield Result ( arg_3 , 'key' )","id_":253441,"task_name":"https:\/\/github.com\/dagster-io\/dagster\/blob\/4119f8c773089de64831b1dfb9e168e353d401dc\/examples\/airline-demo\/airline_demo\/solids.py#L180-L197","negative":"Returns a dictionary with all the past baking statuses of a single book."} {"query":"Parse an input source with HTML text into an Amara 3 tree","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = False ) : def get_tree_instance ( arg_5 , arg_4 = arg_4 ) : return treebuilder ( arg_4 ) arg_6 = html5lib . HTMLParser ( tree = get_tree_instance ) arg_7 = arg_6 . Func ( arg_0 ) arg_8 = next ( ( e for e in arg_7 . root_nodes if isinstance ( e , element ) ) , None ) return arg_8","id_":253442,"task_name":"https:\/\/github.com\/uogbuji\/amara3-xml\/blob\/88c18876418cffc89bb85b4a3193e5002b6b39a6\/pylib\/uxml\/html5.py#L230-L250","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Update one variant document in the database .","positive":"def Func ( arg_0 , arg_1 ) : LOG . debug ( 'Updating variant %s' , arg_1 . get ( 'simple_id' ) ) arg_2 = arg_0 . variant_collection . find_one_and_replace ( { '_id' : arg_1 [ '_id' ] } , arg_1 , return_document = pymongo . ReturnDocument . AFTER ) return arg_2","id_":253443,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/variant_loader.py#L37-L55","negative":"Computes the light curve model"} {"query":"Find executables in a path .","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 in os . listdir ( arg_0 ) : arg_3 = os . path . join ( arg_0 , arg_2 ) if ( os . access ( arg_3 , os . X_OK ) and not os . path . isdir ( arg_3 ) and arg_2 not in [ 'GMXRC' , 'GMXRC.bash' , 'GMXRC.csh' , 'GMXRC.zsh' , 'demux.pl' , 'xplor2gmx.pl' ] ) : arg_1 . append ( arg_2 ) return arg_1","id_":253444,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/tools.py#L178-L194","negative":"Revoke the token and remove the cookie."} {"query":"converts a timestamp to seconds","positive":"def Func ( arg_0 ) : try : return int ( arg_0 ) except ValueError : pass arg_1 = ( _Func_re . match ( arg_0 ) or _Func_2_re . match ( arg_0 ) ) if not arg_1 : raise ValueError arg_2 = 0 arg_2 += int ( arg_1 . group ( \"hours\" ) or \"0\" ) * 60 * 60 arg_2 += int ( arg_1 . group ( \"minutes\" ) or \"0\" ) * 60 arg_2 += int ( arg_1 . group ( \"seconds\" ) or \"0\" ) return arg_2","id_":253445,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/utils\/times.py#L20-L48","negative":"validate source directory names in components"} {"query":"Runs HTTP GET request to retrieve the list of experiments .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . EXPERIMENTS_URI_FMT . format ( arg_1 ) return arg_0 . 
_send_get_req ( arg_2 )","id_":253446,"task_name":"https:\/\/github.com\/Azure\/Azure-MachineLearning-ClientLibrary-Python\/blob\/d1211b289747671898eb063013e0dc53d3c80acd\/azureml\/http.py#L65-L68","negative":"store metric in data tree and calc offset signs\n\n sign < 0 is CYAN, means metric value is lower than previous,\n sign > 1 is YELLOW, means metric value is higher than previous,\n sign == 0 is WHITE, means initial or equal metric value"} {"query":"Yield code with unused imports removed .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = False , arg_4 = False , arg_5 = False , arg_6 = False , ) : arg_7 = SAFE_IMPORTS if arg_1 : arg_7 |= frozenset ( arg_1 ) del arg_1 arg_8 = check ( arg_0 ) if arg_6 : arg_9 = frozenset ( ) else : arg_9 = frozenset ( unused_import_line_numbers ( arg_8 ) ) arg_10 = collections . defaultdict ( lambda : [ ] ) for arg_11 , arg_12 in unused_import_module_name ( arg_8 ) : arg_10 [ arg_11 ] . append ( arg_12 ) if arg_2 and not ( re . search ( r'\\b__all__\\b' , arg_0 ) or re . search ( r'\\bdel\\b' , arg_0 ) ) : arg_13 = frozenset ( star_import_used_line_numbers ( arg_8 ) ) if len ( arg_13 ) > 1 : arg_13 = frozenset ( ) else : arg_14 = [ ] for arg_11 , arg_15 , arg_16 in star_import_usage_undefined_name ( arg_8 ) : arg_14 . append ( arg_15 ) if not arg_14 : arg_13 = frozenset ( ) else : arg_13 = frozenset ( ) if arg_5 : arg_17 = frozenset ( unused_variable_line_numbers ( arg_8 ) ) else : arg_17 = frozenset ( ) if arg_4 : arg_18 = frozenset ( duplicate_key_line_numbers ( arg_8 , arg_0 ) ) else : arg_18 = frozenset ( ) arg_19 = get_messages_by_line ( arg_8 ) arg_20 = io . StringIO ( arg_0 ) arg_21 = '' for arg_11 , arg_22 in enumerate ( arg_20 . readlines ( ) , start = 1 ) : if '#' in arg_22 : yield arg_22 elif arg_11 in arg_9 : yield filter_unused_import ( arg_22 , unused_module = arg_10 [ arg_11 ] , arg_3 = arg_3 , arg_7 = arg_7 , arg_21 = arg_21 ) elif arg_11 in arg_17 : yield filter_unused_variable ( arg_22 ) elif arg_11 in arg_18 : yield filter_duplicate_key ( arg_22 , arg_19 [ arg_11 ] , arg_11 , arg_18 , arg_0 ) elif arg_11 in arg_13 : yield filter_star_import ( arg_22 , arg_14 ) else : yield arg_22 arg_21 = arg_22","id_":253447,"task_name":"https:\/\/github.com\/myint\/autoflake\/blob\/68fea68646922b920d55975f9f2adaeafd84df4f\/autoflake.py#L330-L411","negative":"Return a list of all enrollments for the passed section_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/enrollments.html#method.enrollments_api.index"} {"query":"Creates a new channel for receiving push data .","positive":"async def Func ( arg_0 ) : logger . info ( 'Requesting new gsessionid and SID...' ) arg_0 . _sid_param = None arg_0 . _gsessionid_param = None arg_3 = await arg_0 . send_maps ( [ ] ) arg_0 . _sid_param , arg_0 . _gsessionid_param = _parse_sid_response ( arg_3 . body ) logger . info ( 'New SID: {}' . format ( arg_0 . _sid_param ) ) logger . info ( 'New gsessionid: {}' . format ( arg_0 . _gsessionid_param ) )","id_":253448,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/channel.py#L241-L260","negative":"Stores all explored parameter names for internal recall"} {"query":"load the secrets credentials file with the Globus OAuthTokenResponse","positive":"def Func ( arg_0 ) : arg_0 . auth = arg_0 . _get_and_update_setting ( 'GLOBUS_AUTH_RESPONSE' ) arg_0 . transfer = arg_0 . 
_get_and_update_setting ( 'GLOBUS_TRANSFER_RESPONSE' )","id_":253449,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/globus\/__init__.py#L66-L73","negative":"Subtract the arg from the value."} {"query":"This function dispatches event messages to the correct functions . You should override this method only if you are not satisfied with the automatic dispatching to on_ - prefixed methods . You could then implement your own dispatch . See the source code for inspiration .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 [ 'args' ] arg_3 = arg_1 [ 'name' ] if not allowed_event_name_regex . match ( arg_3 ) : arg_0 . error ( \"unallowed_event_name\" , \"name must only contains alpha numerical characters\" ) return arg_4 = 'on_' + arg_3 . replace ( ' ' , '_' ) return arg_0 . call_method_with_acl ( arg_4 , arg_1 , * arg_2 )","id_":253450,"task_name":"https:\/\/github.com\/abourget\/gevent-socketio\/blob\/1cdb1594a315326987a17ce0924ea448a82fab01\/socketio\/namespace.py#L180-L225","negative":"Add an HTTP header to response object.\n\n Arguments:\n name (str): HTTP header field name\n value (str): HTTP header field value"} {"query":"Write DDL of table indexes to the output file","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . f . write ( '\\n' . join ( super ( PostgresFileWriter , arg_0 ) . Func ( arg_1 ) ) )","id_":253451,"task_name":"https:\/\/github.com\/philipsoutham\/py-mysql2pgsql\/blob\/66dc2a3a3119263b3fe77300fb636346509787ef\/mysql2pgsql\/lib\/postgres_file_writer.py#L82-L90","negative":"Rounds predictions and calculates accuracy in terms of absolute coincidence.\n\n Args:\n y_true: list of true values\n y_predicted: list of predicted values\n\n Returns:\n portion of absolutely coincidental samples"} {"query":"Return a valid python filename in the current directory .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_0 = os . path . expanduser ( arg_0 ) if arg_1 is None : arg_2 = ( sys . platform == 'win32' ) else : arg_2 = arg_1 arg_0 = unquote_filename ( arg_0 , arg_2 = arg_2 ) if not os . path . isfile ( arg_0 ) and not arg_0 . endswith ( '.py' ) : arg_0 += '.py' if os . path . isfile ( arg_0 ) : return arg_0 else : raise IOError , 'File `%r` not found.' % arg_0","id_":253452,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/path.py#L88-L110","negative":"Returns an aggregator connection."} {"query":"Convert UTC datetime into user interface string .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = '' if arg_2 : arg_3 += '\\n' + arg_1 . get ( 'date' , '' ) + '\\n' arg_3 += arg_1 . get ( 'time' , '' ) return arg_0 . astimezone ( tz = None ) . strftime ( arg_3 )","id_":253453,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/ui\/__main__.py#L607-L613","negative":"Sets the review comment. Raises CardinalityError if\n already set. OrderError if no reviewer defined before."} {"query":"Test must raise one of expected exceptions to pass .","positive":"def Func ( * arg_0 ) : arg_1 = ' or ' . join ( [ e . __name__ for e in arg_0 ] ) def decorate ( arg_2 ) : arg_3 = arg_2 . 
__name__ def arg_7 ( * arg_4 , ** arg_5 ) : try : arg_2 ( * arg_4 , ** arg_5 ) except arg_0 : pass except : raise else : arg_6 = \"%s() did not raise %s\" % ( arg_3 , arg_1 ) raise AssertionError ( arg_6 ) arg_7 = make_decorator ( arg_2 ) ( arg_7 ) return arg_7 return decorate","id_":253454,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/tools\/nontrivial.py#L39-L70","negative":"Decode the data passed in and potentially flush the decoder."} {"query":"Validate that jobs executed after their dependencies .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 : arg_3 = arg_1 [ arg_2 ] . metadata . started for arg_4 in arg_0 . predecessors ( arg_2 ) : arg_5 = arg_1 [ arg_4 ] . metadata . completed assert arg_3 > arg_5 , \"%s should have happened after %s\" % ( arg_2 , arg_4 )","id_":253455,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/share\/doc\/ipython\/examples\/parallel\/dagdeps.py#L64-L70","negative":"write lines, one by one, separated by \\n to device"} {"query":"Decorator that protect methods with HTTP authentication .","positive":"def Func ( arg_0 , arg_1 ) : def auth_decorator ( arg_2 ) : def inner ( arg_3 , * arg_4 , ** arg_5 ) : if arg_3 . get_authenticated_user ( arg_1 , arg_0 ) : return arg_2 ( arg_3 , * arg_4 , ** arg_5 ) return inner return auth_decorator","id_":253456,"task_name":"https:\/\/github.com\/gvalkov\/tornado-http-auth\/blob\/9eb225c1740fad1e53320b55d8d4fc6ab4ba58b6\/tornado_http_auth.py#L227-L234","negative":"Parse a hub key into a dictionary of component parts\n\n :param key: str, a hub key\n :returns: dict, hub key split into parts\n :raises: ValueError"} {"query":"Reanalyze data for a single ABF . Also remakes child and parent html .","positive":"def Func ( arg_0 ) : assert os . path . exists ( arg_0 ) and arg_0 . endswith ( \".abf\" ) arg_1 , arg_2 = os . path . split ( arg_0 ) arg_3 = os . path . splitext ( arg_2 ) [ 0 ] arg_4 = INDEX ( arg_1 ) arg_4 . analyzeABF ( arg_3 ) arg_4 . scan ( ) arg_4 . html_single_basic ( [ arg_3 ] , overwrite = True ) arg_4 . html_single_plot ( [ arg_3 ] , overwrite = True ) arg_4 . scan ( ) arg_4 . html_index ( ) return","id_":253457,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/swhlab\/indexing\/indexing.py#L279-L292","negative":"Adds all parameters to `traj`"} {"query":"For each new message build its platform specific message object and get a response .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . build_message ( arg_1 ) if not arg_2 : logger . error ( '[%s] Unable to build Message with data, data=%s, error' , arg_0 . engine_name , arg_1 ) return logger . info ( '[%s] New message from %s: %s' , arg_0 . engine_name , arg_2 . user , arg_2 . text ) arg_3 = await arg_0 . get_response ( arg_2 ) if arg_3 : await arg_0 . send_response ( arg_3 )","id_":253458,"task_name":"https:\/\/github.com\/rougeth\/bottery\/blob\/1c724b867fa16708d59a3dbba5dd2c3de85147a9\/bottery\/platforms.py#L92-L112","negative":"Replace target with replacement"} {"query":"Set a clinvar submission ID to closed","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : LOG . info ( 'closing clinvar submission \"%s\"' , arg_2 ) if arg_3 == 'open' : arg_0 . clinvar_submission_collection . update_many ( { 'user_id' : arg_1 } , { '$set' : { 'status' : 'closed' , 'updated_at' : datetime . now ( ) } } ) arg_4 = arg_0 . 
clinvar_submission_collection . find_one_and_update ( { '_id' : ObjectId ( arg_2 ) } , { '$set' : { 'status' : arg_3 , 'updated_at' : datetime . now ( ) } } , return_document = pymongo . ReturnDocument . AFTER ) return arg_4","id_":253459,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/clinvar.py#L160-L188","negative":"Finds the first operator in the list, converts it and its operands to a OptreeNode, then\n returns a new list with the operator and operands replaced by the new OptreeNode."} {"query":"Converts a node set to surface .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . nodes . copy ( ) arg_3 = arg_2 . iloc [ 0 ] . copy ( ) arg_3 [ \"coords\" ] *= np . nan arg_3 [ \"sets\" ] = True arg_2 . loc [ 0 ] = arg_3 arg_5 = arg_0 . split ( \"surfaces\" ) . unstack ( ) arg_6 = pd . DataFrame ( arg_2 . sets [ arg_1 ] . loc [ arg_5 . values . flatten ( ) ] . values . reshape ( arg_5 . shape ) . prod ( axis = 1 ) . astype ( np . bool ) , index = arg_5 . index ) . unstack ( ) . fillna ( False ) for arg_7 in arg_6 . keys ( ) : arg_0 . elements [ \"surfaces\" , arg_1 , \"f{0}\" . format ( arg_7 [ 1 ] + 1 ) ] = arg_6 . loc [ : , arg_7 ]","id_":253460,"task_name":"https:\/\/github.com\/lcharleux\/argiope\/blob\/8170e431362dc760589f7d141090fd133dece259\/argiope\/mesh.py#L508-L528","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Returns the hash value associated with a specific block index .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : return arg_0 . _call ( JSONRPCMethods . GET_BLOCK_HASH . value , [ arg_1 , ] , ** arg_2 )","id_":253461,"task_name":"https:\/\/github.com\/ellmetha\/neojsonrpc\/blob\/e369b633a727482d5f9e310f0c3337ae5f7265db\/neojsonrpc\/client.py#L135-L144","negative":"Calculate the modelled progress state for the given time moment.\n\n :returns: tuple (x, v) of the progress level and progress speed."} {"query":"Load an address provided the public key .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True , arg_3 = 56 , arg_4 = None ) : arg_1 = PublicKey ( arg_1 , arg_4 = arg_4 or Prefix . prefix ) if arg_2 : arg_5 = arg_1 . compressed ( ) else : arg_5 = arg_1 . uncompressed ( ) arg_6 = hashlib . sha256 ( unhexlify ( arg_5 ) ) . hexdigest ( ) arg_7 = hexlify ( ripemd160 ( arg_6 ) ) . decode ( \"ascii\" ) arg_8 = ( \"%.2x\" % arg_3 ) + arg_7 arg_9 = arg_8 + hexlify ( doublesha256 ( arg_8 ) [ : 4 ] ) . decode ( \"ascii\" ) arg_9 = hexlify ( ripemd160 ( arg_9 ) ) . decode ( \"ascii\" ) return arg_0 ( arg_9 , arg_4 = arg_1 . prefix )","id_":253462,"task_name":"https:\/\/github.com\/xeroc\/python-graphenelib\/blob\/8bb5396bc79998ee424cf3813af478304173f3a6\/graphenebase\/account.py#L158-L174","negative":"Calculate a t-test score for the difference between two samples.\n\n Args:\n sample1: one sample.\n sample2: the other sample.\n\n Returns:\n The t-test score, as a float."} {"query":"Merge two dictionaries .","positive":"def Func ( arg_0 , arg_1 ) : return dict ( ( str ( arg_2 ) , arg_0 . get ( arg_2 ) or arg_1 . 
get ( arg_2 ) ) for arg_2 in set ( arg_1 ) | set ( arg_0 ) )","id_":253463,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/utils\/rcfile.py#L14-L22","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"Extract an alphabetically sorted list of elements from the material s compounds .","positive":"def Func ( arg_0 ) : arg_1 = stoich . elements ( arg_0 . compounds ) return sorted ( list ( arg_1 ) )","id_":253464,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/modelling\/process\/materials\/thermo.py#L246-L255","negative":"Perform a rachted step, calculating a new shared secret from the public key and\n deriving new chain keys from this secret.\n\n New Diffie-Hellman calculations are only performed if the public key is different\n from the previous one.\n\n :param other_pub: A bytes-like object encoding the public key of the other\n Diffie-Hellman ratchet to synchronize with."} {"query":"extract the parts of a StreamItem that go into a kvlayer key convert StreamItem to blob for storage .","positive":"def Func ( arg_0 ) : arg_1 = key_for_stream_item ( arg_0 ) arg_2 = streamcorpus . serialize ( arg_0 ) arg_3 , arg_2 = streamcorpus . compress_and_encrypt ( arg_2 ) assert not arg_3 , arg_3 return arg_1 , arg_2","id_":253465,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/_kvlayer.py#L411-L422","negative":"Return the RSSI signal strength in decibels."} {"query":"add optional tracker_url argument","positive":"def Func ( arg_0 ) : arg_0 . add_argument ( '--tracker_url' , metavar = '(tracker url; default: \"' + DEFAULT_TRACKER_URL + '\")' , type = str , default = DEFAULT_TRACKER_URL ) return arg_0","id_":253466,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/explorer\/src\/python\/args.py#L85-L91","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Sets gender and relevant country values for names dictionary of detector","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if '+' in arg_1 : for arg_4 in [ '' , ' ' , '-' ] : arg_0 . Func ( arg_1 . replace ( '+' , arg_4 ) , arg_2 , arg_3 ) else : if arg_1 not in arg_0 . names : arg_0 . names [ arg_1 ] = { } arg_0 . names [ arg_1 ] [ arg_2 ] = arg_3","id_":253467,"task_name":"https:\/\/github.com\/ferhatelmas\/sexmachine\/blob\/85d33bb47ccc017676e69788750f116e391f52db\/sexmachine\/detector.py#L64-L72","negative":"Generate a square lattice with auxiliary nodes for spanning detection\n\n Parameters\n ----------\n\n length : int\n Number of nodes in one dimension, excluding the auxiliary nodes.\n\n Returns\n -------\n\n networkx.Graph\n A square lattice graph with auxiliary nodes for spanning cluster\n detection\n\n See Also\n --------\n\n sample_states : spanning cluster detection"} {"query":"Read config values from kwargs .","positive":"def Func ( arg_0 , ** arg_1 ) : for arg_2 , arg_3 in iitems ( arg_1 ) : if arg_3 is not None : if arg_2 in arg_0 . 
MUST_BE_LIST and isinstance ( arg_3 , string_class ) : arg_3 = [ arg_3 ] setattr ( arg_0 , arg_2 , arg_3 )","id_":253468,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/config.py#L148-L154","negative":"issue a command to read the archive records after a known time stamp."} {"query":"A replacement for the deprecated add_path method","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : if nx . __version__ [ 0 ] == \"1\" : return super ( ) . Func ( arg_1 , ** arg_2 ) else : return nx . Func ( arg_0 , arg_1 , ** arg_2 )","id_":253469,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/core\/model.py#L85-L90","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Get impls from their interfaces .","positive":"def Func ( arg_0 ) : if arg_0 is None : return None elif isinstance ( arg_0 , Mapping ) : return { arg_1 : arg_0 [ arg_1 ] . _impl for arg_1 in arg_0 } elif isinstance ( arg_0 , Sequence ) : return [ arg_0 . _impl for arg_0 in arg_0 ] else : return arg_0 . _impl","id_":253470,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/core\/base.py#L81-L93","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Get gene info from Entrez .","positive":"def Func ( arg_0 : arg_1 [ arg_2 [ arg_3 , arg_4 ] ] ) : arg_5 = PUBMED_GENE_QUERY_URL . format ( ',' . join ( arg_3 ( x ) . strip ( ) for x in arg_0 ) ) arg_6 = requests . get ( arg_5 ) arg_7 = ElementTree . fromstring ( arg_6 . content ) return { arg_8 . attrib [ 'uid' ] : { 'summary' : _sanitize ( arg_8 . find ( 'Summary' ) . text ) , 'description' : arg_8 . find ( 'Description' ) . text } for arg_8 in arg_7 . 
findall ( '.\/DocumentSummarySet\/DocumentSummary' ) }","id_":253471,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/document_utils\/document_utils.py#L52-L64","negative":"Returns mappable data for a random subset of voxels.\n\n May be useful as a baseline in predictive analyses--e.g., to compare\n performance of a more principled feature selection method with simple\n random selection.\n\n Args:\n dataset: A Dataset instance\n n_voxels: An integer specifying the number of random voxels to select.\n\n Returns:\n A 2D numpy array with (randomly-selected) voxels in rows and mappables\n in columns."} {"query":"Applies a sequence of slice or copy - with - overrides operations to dist .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 , arg_4 in arg_2 : arg_0 = _apply_single_step ( arg_0 , arg_1 , arg_3 , arg_4 ) return arg_0","id_":253472,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/internal\/slicing.py#L158-L162","negative":"Get the context for this view."} {"query":"Split the date string using translations in locale info .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if not arg_1 : return arg_1 arg_3 = arg_0 . _get_Func_relative_regex_cache ( ) arg_4 = arg_0 . _get_match_relative_regex_cache ( ) arg_5 = arg_3 . Func ( arg_1 ) for arg_6 , arg_7 in enumerate ( arg_5 ) : if arg_4 . match ( arg_7 ) : arg_5 [ arg_6 ] = [ arg_7 ] continue arg_5 [ arg_6 ] = arg_0 . _Func_by_known_words ( arg_7 , arg_2 ) return list ( filter ( bool , chain ( * arg_5 ) ) )","id_":253473,"task_name":"https:\/\/github.com\/scrapinghub\/dateparser\/blob\/11a761c99d3ee522a3c63756b70c106a579e8b5c\/dateparser\/languages\/dictionary.py#L116-L145","negative":"Expand the rank of x up to static_event_rank times for broadcasting.\n\n The static event rank was checked to not be None at construction time.\n\n Args:\n x: A tensor to expand.\n Returns:\n The expanded tensor."} {"query":"Checks whether path belongs to standard library or installed modules .","positive":"def Func ( arg_0 ) : if 'site-packages' in arg_0 : return True for arg_1 in _STDLIB_PATHS : if fnmatch . fnmatchcase ( arg_0 , arg_1 + '*' ) : return True return False","id_":253474,"task_name":"https:\/\/github.com\/nvdv\/vprof\/blob\/4c3ff78f8920ab10cb9c00b14143452aa09ff6bb\/vprof\/code_heatmap.py#L18-L25","negative":"Save a vectorized image to file."} {"query":"Calculates average dictionary from list of dictionaries for given label","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return defaultdict ( float , pd . DataFrame . from_records ( filter_by_label ( arg_0 , arg_1 , arg_2 ) [ 0 ] ) . mean ( ) . to_dict ( ) )","id_":253475,"task_name":"https:\/\/github.com\/MuhammedHasan\/sklearn_utils\/blob\/337c3b7a27f4921d12da496f66a2b83ef582b413\/sklearn_utils\/utils\/data_utils.py#L23-L35","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"Updates the header information from the original file to the selection .","positive":"def Func ( arg_0 ) : if arg_0 . header [ b'foff' ] < 0 : arg_0 . header [ b'fch1' ] = arg_0 . container . f_stop else : arg_0 . header [ b'fch1' ] = arg_0 . container . f_start arg_0 . header [ b'nchans' ] = arg_0 . container . selection_shape [ arg_0 . freq_axis ] arg_0 . header [ b'tstart' ] = arg_0 . container . 
populate_timestamps ( update_header = True )","id_":253476,"task_name":"https:\/\/github.com\/UCBerkeleySETI\/blimpy\/blob\/b8822d3e3e911944370d84371a91fa0c29e9772e\/blimpy\/waterfall.py#L167-L181","negative":"Convert reflection coefficients to inverse sine parameters.\n\n :param k: reflection coefficients\n :return: inverse sine parameters\n\n .. seealso:: :func:`is2rc`, :func:`rc2poly`, :func:`rc2acC`, :func:`rc2lar`.\n\n Reference: J.R. Deller, J.G. Proakis, J.H.L. Hansen, \"Discrete-Time\n Processing of Speech Signals\", Prentice Hall, Section 7.4.5."} {"query":"Returns a versioned URI string for this class","positive":"def Func ( arg_0 ) : arg_1 = 'v{0}' . format ( getattr ( arg_0 , 'RESOURCE_VERSION' , '1' ) ) return \"\/{0}\/{1}\" . format ( arg_1 , class_to_api_name ( arg_0 . class_name ( ) ) )","id_":253477,"task_name":"https:\/\/github.com\/solvebio\/solvebio-python\/blob\/b29614643043afd19c1d8074e8f25c6700d51a73\/solvebio\/resource\/apiresource.py#L49-L52","negative":"Execute the enrich phase for a given backend section\n\n :param config: a Mordred config object\n :param backend_section: the backend section where the enrich phase is executed"} {"query":"Set the revocation timestamp .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = _lib . X509_REVOKED_get0_revocationDate ( arg_0 . _revoked ) return _set_asn1_time ( arg_2 , arg_1 )","id_":253478,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/crypto.py#L2058-L2067","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minimum number of particles required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"This method is called when a signal is received .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 . print_method : arg_0 . print_method ( '\\nProgram received signal %s.' % arg_0 . signame ) if arg_0 . print_stack : import traceback arg_3 = traceback . format_stack ( arg_2 ) for arg_4 in arg_3 : if arg_4 [ - 1 ] == '\\n' : arg_4 = arg_4 [ 0 : - 1 ] arg_0 . print_method ( arg_4 ) pass pass if arg_0 . b_stop : arg_5 = arg_0 . dbgr . core arg_6 = arg_5 . trace_hook_suspend arg_5 . trace_hook_suspend = True arg_5 . stop_reason = ( 'intercepting signal %s (%d)' % ( arg_0 . signame , arg_1 ) ) arg_5 . processor . event_processor ( arg_2 , 'signal' , arg_1 ) arg_5 . trace_hook_suspend = arg_6 pass if arg_0 . pass_along : if arg_0 . old_Funcr : arg_0 . old_Funcr ( arg_1 , arg_2 ) pass pass return","id_":253479,"task_name":"https:\/\/github.com\/rocky\/python3-trepan\/blob\/14e91bc0acce090d67be145b1ac040cab92ac5f3\/trepan\/lib\/sighandler.py#L466-L494","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Rename key src to dst if dst doesn t already exist","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 . exists ( arg_2 ) : return False return arg_0 . 
_rc_rename ( arg_1 , arg_2 )","id_":253480,"task_name":"https:\/\/github.com\/salimane\/rediscluster-py\/blob\/4fe4d928cd6fe3e7564f7362e3996898bda5a285\/rediscluster\/cluster_client.py#L478-L483","negative":"Fetch the comments of a given event."} {"query":"Filter bot from real users .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . small_delay ( ) arg_1 = arg_0 . convert_to_user_id ( arg_1 ) if not arg_1 : return False if arg_1 in arg_0 . whitelist : return True if arg_1 in arg_0 . blacklist : return False arg_2 = arg_0 . get_user_info ( arg_1 ) if not arg_2 : return True arg_3 = arg_0 . skipped_file if \"following_count\" in arg_2 and arg_2 [ \"following_count\" ] > arg_0 . max_following_to_block : arg_4 = 'following_count > bot.max_following_to_block, skipping!' arg_0 . console_print ( arg_4 , 'red' ) arg_3 . append ( arg_1 ) return False if search_stop_words_in_user ( arg_0 , arg_2 ) : arg_4 = '`bot.search_stop_words_in_user` found in user, skipping!' arg_3 . append ( arg_1 ) return False return True","id_":253481,"task_name":"https:\/\/github.com\/instagrambot\/instabot\/blob\/d734f892ac4cc35d22746a4f2680425ffaff0927\/instabot\/bot\/bot_filter.py#L230-L257","negative":"Get base-64 encoded data as a string for the given image. Fallback to return\n fallback_image_file if cannot get the image data or img is None.\n @param {Image} img - The PIL Image to get src data for\n @param {String} fallback_image_file - The filename of the image file,\n to be used when image data capture fails\n @return {String} The base-64 encoded image data string, or path to the file\n itself if not supported."} {"query":"Configure the coloring of the widget","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_0 . config . ZMQInteractiveShell . colors except AttributeError : arg_2 = None try : arg_3 = arg_0 . config . IPythonWidget . syntax_style except AttributeError : arg_3 = None try : arg_4 = arg_0 . config . IPythonWidget . style_sheet except AttributeError : arg_4 = None if arg_2 : arg_2 = arg_2 . lower ( ) if arg_2 in ( 'lightbg' , 'light' ) : arg_2 = 'lightbg' elif arg_2 in ( 'dark' , 'linux' ) : arg_2 = 'linux' else : arg_2 = 'nocolor' elif arg_3 : if arg_3 == 'bw' : arg_2 = 'nocolor' elif styles . dark_style ( arg_3 ) : arg_2 = 'linux' else : arg_2 = 'lightbg' else : arg_2 = None if arg_3 : arg_1 . style_sheet = styles . sheet_from_template ( arg_3 , arg_2 ) arg_1 . syntax_style = arg_3 arg_1 . _syntax_style_changed ( ) arg_1 . _style_sheet_changed ( ) elif arg_2 : arg_1 . set_default_style ( arg_2 = arg_2 ) if arg_0 . stylesheet : if os . path . isfile ( arg_0 . stylesheet ) : with open ( arg_0 . stylesheet ) as f : arg_4 = f . read ( ) else : raise IOError ( \"Stylesheet %r not found.\" % arg_0 . stylesheet ) if arg_4 : arg_1 . style_sheet = arg_4 arg_1 . _style_sheet_changed ( )","id_":253482,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/qtconsoleapp.py#L268-L325","negative":"Gets the 3 GeoTiff vlrs from the vlr_list and parse them into\n a nicer structure\n\n Parameters\n ----------\n vlr_list: pylas.vrls.vlrslist.VLRList list of vlrs from a las file\n\n Raises\n ------\n IndexError if any of the needed GeoTiffVLR is not found in the list\n\n Returns\n -------\n List of GeoTiff keys parsed from the VLRs"} {"query":"Components are positioned relative to their container . 
Use this method to position the bottom - left corner of the components at the origin .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 : if isinstance ( arg_1 , Ellipse ) : arg_1 . x_origin = arg_1 . e_width arg_1 . y_origin = arg_1 . e_height elif isinstance ( arg_1 , ( Polygon , BSpline ) ) : arg_4 = min ( [ t [ 0 ] for t in arg_1 . points ] ) arg_5 = min ( [ t [ 1 ] for t in arg_1 . points ] ) arg_1 . points = [ ( p [ 0 ] - arg_4 , p [ 1 ] - arg_5 ) for p in arg_1 . points ] elif isinstance ( arg_1 , Text ) : arg_7 = str_to_font ( str ( arg_1 . pen . font ) ) arg_1 . text_x = 0 arg_1 . text_y = 0","id_":253483,"task_name":"https:\/\/github.com\/rwl\/godot\/blob\/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f\/godot\/util.py#L139-L160","negative":"Create template config for specified template name.\n\n .. __: https:\/\/api.go.cd\/current\/#create-template-config\n\n Returns:\n Response: :class:`gocd.api.response.Response` object"} {"query":"Count the number of citations from each year .","positive":"def Func ( arg_0 : arg_1 ) -> typing . Counter [ int ] : arg_2 = defaultdict ( set ) for arg_3 , arg_3 , arg_4 in arg_0 . edges ( arg_4 = True ) : if CITATION not in arg_4 or CITATION_DATE not in arg_4 [ CITATION ] : continue try : arg_5 = _ensure_datetime ( arg_4 [ CITATION ] [ CITATION_DATE ] ) arg_2 [ arg_5 . year ] . add ( ( arg_4 [ CITATION ] [ CITATION_TYPE ] , arg_4 [ CITATION ] [ CITATION_REFERENCE ] ) ) except Exception : continue return count_dict_values ( arg_2 )","id_":253484,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/summary\/provenance.py#L224-L238","negative":"Whether a connection can be established between those two meshes."} {"query":"Gets the raw record .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = { 'objectID' : arg_0 . objectID ( arg_1 ) } if arg_2 : if isinstance ( arg_2 , str ) : arg_2 = ( arg_2 , ) for arg_4 in arg_2 : arg_5 = arg_0 . __translate_fields . get ( arg_4 , None ) if arg_5 : arg_3 [ arg_5 ] = arg_0 . __named_fields [ arg_5 ] ( arg_1 ) else : for arg_5 , arg_6 in arg_0 . __named_fields . items ( ) : arg_3 [ arg_5 ] = arg_6 ( arg_1 ) if arg_0 . geo_field : arg_7 = arg_0 . geo_field ( arg_1 ) if isinstance ( arg_7 , tuple ) : arg_3 [ '_geoloc' ] = { 'lat' : arg_7 [ 0 ] , 'lng' : arg_7 [ 1 ] } elif isinstance ( arg_7 , dict ) : arg_0 . _validate_geolocation ( arg_7 ) arg_3 [ '_geoloc' ] = arg_7 elif isinstance ( arg_7 , list ) : [ arg_0 . _validate_geolocation ( arg_8 ) for arg_8 in arg_7 ] arg_3 [ '_geoloc' ] = arg_7 if arg_0 . tags : if callable ( arg_0 . tags ) : arg_3 [ '_tags' ] = arg_0 . tags ( arg_1 ) if not isinstance ( arg_3 [ '_tags' ] , list ) : arg_3 [ '_tags' ] = list ( arg_3 [ '_tags' ] ) logger . debug ( 'BUILD %s FROM %s' , arg_3 [ 'objectID' ] , arg_0 . 
model ) return arg_3","id_":253485,"task_name":"https:\/\/github.com\/algolia\/algoliasearch-django\/blob\/ca219db41eb56bdd1c0389cdc1508a41698958d7\/algoliasearch_django\/models.py#L197-L238","negative":"Adds all parameters to `traj`"} {"query":"Expand a range with a multiplicative or additive constant","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = 0 , arg_3 = 1 ) : arg_4 = arg_0 try : arg_4 [ 0 ] except TypeError : arg_4 = ( arg_4 , arg_4 ) if zero_range ( arg_4 ) : arg_5 = arg_4 [ 0 ] - arg_3 \/ 2 , arg_4 [ 0 ] + arg_3 \/ 2 else : arg_6 = ( arg_4 [ 1 ] - arg_4 [ 0 ] ) * arg_1 + arg_2 arg_5 = arg_4 [ 0 ] - arg_6 , arg_4 [ 1 ] + arg_6 return arg_5","id_":253486,"task_name":"https:\/\/github.com\/has2k1\/mizani\/blob\/312d0550ee0136fd1b0384829b33f3b2065f47c8\/mizani\/bounds.py#L446-L509","negative":"Instantiate a GTFS object by computing\n\n Parameters\n ----------\n gtfs_directory: str\n path to the directory for importing the database"} {"query":"Return the url corresponding to the given notebook file","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . urls if isinstance ( arg_2 , dict ) : return arg_2 . get ( arg_1 ) elif isstring ( arg_2 ) : if not arg_2 . endswith ( '\/' ) : arg_2 += '\/' return arg_2 + arg_1","id_":253487,"task_name":"https:\/\/github.com\/Chilipp\/sphinx-nbexamples\/blob\/08e0319ff3c70f8a931dfa8890caf48add4d0470\/sphinx_nbexamples\/__init__.py#L912-L932","negative":"This returns an array of each sector and performance for the current trading day. Performance is based on each sector ETF.\n\n https:\/\/iexcloud.io\/docs\/api\/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result"} {"query":"Convert phrase to normalized file name .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = re . sub ( r\"[^\\w\\s\\.]\" , '' , arg_1 . strip ( ) . lower ( ) ) arg_2 = re . sub ( r\"\\s+\" , '_' , arg_2 ) return arg_2 + '.png'","id_":253488,"task_name":"https:\/\/github.com\/swistakm\/pyimgui\/blob\/04dd78053900bf69e0ce7638d1b7036bf2181982\/doc\/source\/custom_directives.py#L152-L159","negative":"Produces a list of ports to be updated async."} {"query":"Get merge notes","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = arg_0 . client . notes ( GitLabClient . MERGES , arg_1 ) for arg_4 in arg_3 : for arg_5 in json . loads ( arg_4 ) : arg_6 = arg_5 [ 'id' ] arg_5 [ 'award_emoji_data' ] = arg_0 . __get_note_award_emoji ( GitLabClient . MERGES , arg_1 , arg_6 ) arg_2 . append ( arg_5 ) return arg_2","id_":253489,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/gitlab.py#L277-L291","negative":"Parses package fields."} {"query":"Register jinja filters vars functions .","positive":"def Func ( arg_0 ) : import jinja2 from . utils import filters , permissions , helpers if arg_0 . debug or arg_0 . testing : arg_1 = jinja2 . ChoiceLoader ( [ arg_0 . jinja_loader , jinja2 . FileSystemLoader ( [ os . path . join ( arg_0 . config . get ( 'PROJECT_PATH' ) , 'application\/macros' ) , os . path . join ( arg_0 . config . get ( 'PROJECT_PATH' ) , 'application\/pages' ) ] ) ] ) else : arg_1 = jinja2 . ChoiceLoader ( [ arg_0 . jinja_loader , jinja2 . FileSystemLoader ( [ os . path . join ( arg_0 . config . get ( 'PROJECT_PATH' ) , 'output\/macros' ) , os . path . join ( arg_0 . config . get ( 'PROJECT_PATH' ) , 'output\/pages' ) ] ) ] ) arg_0 . jinja_loader = arg_1 arg_0 . jinja_env . filters . 
update ( { 'timesince' : filters . timesince } ) def url_for_other_page ( arg_3 ) : arg_4 = request . view_args . copy ( ) arg_5 = request . args . copy ( ) . to_dict ( ) arg_6 = dict ( arg_4 . items ( ) + arg_5 . items ( ) ) arg_6 [ 'page' ] = arg_3 return url_for ( request . endpoint , ** arg_6 ) arg_7 = { } for arg_8 , arg_9 in iteritems ( arg_0 . url_map . _rules_by_endpoint ) : if any ( arg_10 in arg_8 for arg_10 in [ '_debug_toolbar' , 'debugtoolbar' , 'static' ] ) : continue arg_7 [ arg_8 ] = [ { 'rule' : rule . rule } for rule in arg_9 ] arg_0 . jinja_env . globals . update ( { 'absolute_url_for' : helpers . absolute_url_for , 'url_for_other_page' : url_for_other_page , 'rules' : arg_7 , 'permissions' : permissions } )","id_":253490,"task_name":"https:\/\/github.com\/hustlzp\/Flask-Boost\/blob\/d0308408ebb248dd752b77123b845f8ec637fab2\/flask_boost\/project\/application\/__init__.py#L79-L125","negative":"Returns assignment data for the given course_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/analytics.html#method.analytics_api.course_assignments"} {"query":"Attempts to find the Teradata install directory with the defaults for a given platform . Should always return None when the defaults are not present and the TERADATA_HOME environment variable wasn t explicitly set to the correct install location .","positive":"def Func ( ) : if platform . system ( ) == 'Windows' : if is_64bit ( ) : return latest_teradata_version ( \"C:\/Program Files\/Teradata\/Client\" ) else : return latest_teradata_version ( \"C:\/Program Files (x86)\/Teradata\/Client\" ) elif platform . system ( ) == 'Linux' : return latest_teradata_version ( \"\/opt\/teradata\/client\" ) elif platform . system ( ) == 'Darwin' : return latest_teradata_version ( \"\/Library\/Application Support\/teradata\/client\" ) else : return latest_teradata_version ( \"\/opt\/teradata\/client\" )","id_":253491,"task_name":"https:\/\/github.com\/capitalone\/giraffez\/blob\/6b4d27eb1a1eaf188c6885c7364ef27e92b1b957\/setup.py#L76-L102","negative":"Show device heap size"} {"query":"Acquire the lock to read","positive":"def Func ( arg_0 ) : arg_0 . _order_mutex . acquire ( ) arg_0 . _readers_mutex . acquire ( ) if arg_0 . _readers == 0 : arg_0 . _access_mutex . acquire ( ) arg_0 . _readers += 1 arg_0 . _order_mutex . release ( ) arg_0 . _readers_mutex . release ( )","id_":253492,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-kingarthur\/blob\/9d6a638bee68d5e5c511f045eeebf06340fd3252\/arthur\/utils.py#L45-L56","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Method to stream data into BigQuery one record at a time without needing to run a load job","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = False , arg_6 = False , arg_7 = False ) : arg_8 = arg_1 if arg_1 else arg_0 . project_id arg_9 = { \"rows\" : arg_4 , \"ignoreUnknownValues\" : arg_5 , \"kind\" : \"bigquery#tableDataInsertAllRequest\" , \"skipInvalidRows\" : arg_6 , } try : arg_0 . log . info ( 'Inserting %s row(s) into Table %s:%s.%s' , len ( arg_4 ) , arg_8 , arg_2 , arg_3 ) arg_10 = arg_0 . service . tabledata ( ) . insertAll ( projectId = arg_8 , datasetId = arg_2 , tableId = arg_3 , arg_9 = arg_9 ) . execute ( num_retries = arg_0 . num_retries ) if 'insertErrors' not in arg_10 : arg_0 . log . info ( 'All row(s) inserted successfully: %s:%s.%s' , arg_8 , arg_2 , arg_3 ) else : arg_11 = '{} insert error(s) occurred: {}:{}.{}. Details: {}' . 
format ( len ( arg_10 [ 'insertErrors' ] ) , arg_8 , arg_2 , arg_3 , arg_10 [ 'insertErrors' ] ) if arg_7 : raise AirflowException ( 'BigQuery job failed. Error was: {}' . format ( arg_11 ) ) arg_0 . log . info ( arg_11 ) except HttpError as err : raise AirflowException ( 'BigQuery job failed. Error was: {}' . format ( err . content ) )","id_":253493,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/bigquery_hook.py#L1705-L1779","negative":"Release control of QTM."} {"query":"Add a node to always run after the parent is finished .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : return arg_0 . _assoc_or_create ( 'always' , arg_1 , arg_2 , ** arg_3 )","id_":253494,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/resources\/node.py#L229-L245","negative":"Translates round trip symbols to sectors.\n\n Parameters\n ----------\n round_trips : pd.DataFrame\n DataFrame with one row per round trip trade.\n - See full explanation in round_trips.extract_round_trips\n sector_mappings : dict or pd.Series, optional\n Security identifier to sector mapping.\n Security ids as keys, sectors as values.\n\n Returns\n -------\n sector_round_trips : pd.DataFrame\n Round trips with symbol names replaced by sector names."} {"query":"Iterator which visits all suites and suite files yielding test cases and keywords","positive":"def Func ( arg_0 , * arg_1 ) : arg_2 = arg_1 if len ( arg_1 ) > 0 else [ SuiteFile , ResourceFile , SuiteFolder , Testcase , Keyword ] for arg_3 in arg_0 . robot_files : if arg_3 . __class__ in arg_2 : yield arg_3 if isinstance ( arg_3 , SuiteFolder ) : for arg_4 in arg_3 . Func ( ) : if arg_4 . __class__ in arg_2 : yield arg_4 else : for arg_4 in arg_3 . Func ( * arg_1 ) : yield arg_4","id_":253495,"task_name":"https:\/\/github.com\/boakley\/robotframework-lint\/blob\/3e3578f4e39af9af9961aa0a715f146b74474091\/rflint\/parser\/parser.py#L73-L89","negative":"Associate an existing reservedIP to a deployment.\n\n name:\n Required. Name of the reserved IP address.\n\n service_name:\n Required. Name of the hosted service.\n\n deployment_name:\n Required. Name of the deployment.\n\n virtual_ip_name:\n Optional. Name of the VirtualIP in case of multi Vip tenant.\n If this value is not specified default virtualIP is used\n for this operation."} {"query":"Adds a segment to the download pool and write queue .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . closed : return if arg_1 is not None : arg_2 = arg_0 . executor . submit ( arg_0 . fetch , arg_1 , retries = arg_0 . retries ) else : arg_2 = None arg_0 . queue ( arg_0 . futures , ( arg_1 , arg_2 ) )","id_":253496,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/stream\/segmented.py#L111-L122","negative":"Build a notebook model from database record."} {"query":"Convert a decimal number to hexadecimal","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = '' for arg_3 in range ( 0 , 4 ) : arg_4 = arg_0 . hexChars [ ( arg_1 >> ( arg_3 * 8 + 4 ) ) & 0x0F ] arg_5 = arg_0 . hexChars [ ( arg_1 >> ( arg_3 * 8 ) ) & 0x0F ] arg_2 += ( arg_4 + arg_5 ) return arg_2","id_":253497,"task_name":"https:\/\/github.com\/futapi\/fut\/blob\/3792c9eee8f5884f38a02210e649c46c6c7a756d\/fut\/EAHashingAlgorithm.py#L26-L36","negative":"Adds all parameters to `traj`"} {"query":"Asynchronous connection listener . 
Starts a handler for each connection .","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : arg_3 , arg_4 = arg_1 . accept ( ) arg_5 = arg_3 . makefile ( arg_3 ) arg_0 . shell = ShoebotCmd ( arg_0 . bot , stdin = arg_5 , stdout = arg_5 , intro = INTRO ) print ( _ ( \"Connected\" ) ) GObject . io_add_watch ( arg_3 , GObject . IO_IN , arg_0 . handler ) if arg_0 . shell . intro : arg_0 . shell . stdout . write ( str ( arg_0 . shell . intro ) + \"\\n\" ) arg_0 . shell . stdout . flush ( ) return True","id_":253498,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/sbio\/socket_server.py#L80-L91","negative":"Stores all explored parameter names for internal recall"} {"query":"Parse source text to find executable lines excluded lines etc .","positive":"def Func ( arg_0 ) : try : arg_0 . _raw_parse ( ) except ( tokenize . TokenError , IndentationError ) : arg_1 , arg_2 , arg_1 = sys . exc_info ( ) arg_3 , arg_4 = arg_2 . args raise NotPython ( \"Couldn't parse '%s' as Python source: '%s' at %s\" % ( arg_0 . filename , arg_3 , arg_4 ) ) arg_5 = arg_0 . first_lines ( arg_0 . excluded ) arg_6 = arg_0 . first_lines ( arg_0 . statement_starts , arg_5 , arg_0 . docstrings ) return arg_6 , arg_5","id_":253499,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/parser.py#L199-L226","negative":"Update the profile picture for the current user.\n\n Args:\n image (file): a file-like object to read the image from"} {"query":"Resize an image to fit the passed in width keeping the aspect ratio the same","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 \/ arg_0 . size [ 0 ] arg_3 = arg_0 . size [ 1 ] * arg_2 arg_4 = arg_0 . resize ( ( int ( arg_1 ) , int ( arg_3 ) ) , PIL . Image . ANTIALIAS ) return arg_4","id_":253500,"task_name":"https:\/\/github.com\/stevelittlefish\/littlefish\/blob\/6deee7f81fab30716c743efe2e94e786c6e17016\/littlefish\/imageutil.py#L184-L196","negative":"Gets predictions for a given utterance, in the form of intents and\n entities. The current maximum query size is 500 characters.\n\n :param app_id: The LUIS application ID (Guid).\n :type app_id: str\n :param query: The utterance to predict.\n :type query: str\n :param timezone_offset: The timezone offset for the location of the\n request.\n :type timezone_offset: float\n :param verbose: If true, return all intents instead of just the top\n scoring intent.\n :type verbose: bool\n :param staging: Use the staging endpoint slot.\n :type staging: bool\n :param spell_check: Enable spell checking.\n :type spell_check: bool\n :param bing_spell_check_subscription_key: The subscription key to use\n when enabling Bing spell check\n :type bing_spell_check_subscription_key: str\n :param log: Log query (default is true)\n :type log: bool\n :param dict custom_headers: headers that will be added to the request\n :param bool raw: returns the direct response alongside the\n deserialized response\n :param operation_config: :ref:`Operation configuration\n overrides`.\n :return: LuisResult or ClientRawResponse if raw=true\n :rtype:\n ~azure.cognitiveservices.language.luis.runtime.models.LuisResult or\n ~msrest.pipeline.ClientRawResponse\n :raises:\n :class:`APIErrorException`"} {"query":"Call each callable in callables suppressing any exceptions supplied . 
If no exception classes are supplied all Exceptions will be suppressed .","positive":"def Func ( arg_0 , * arg_1 ) : if not arg_1 : arg_1 = Exception , for arg_2 in arg_0 : try : yield arg_2 ( ) except arg_1 : pass","id_":253501,"task_name":"https:\/\/github.com\/jaraco\/jaraco.itertools\/blob\/0dc47c8924fa3d9ab676c3a6e195f03f728b72c6\/jaraco\/itertools.py#L931-L952","negative":"Call the restful endpoint to merge two RAMON objects into one.\n\n Arguments:\n token (str): The token to inspect\n channel (str): The channel to inspect\n ids (int[]): the list of the IDs to merge\n delete (bool : False): Whether to delete after merging.\n\n Returns:\n json: The ID as returned by ndstore"} {"query":"Get the list of parameter names for the object","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . __init__ arg_2 , arg_3 = inspect . getargspec ( arg_1 ) [ : 2 ] if arg_3 is not None : raise RuntimeError ( 'BaseTransformer objects cannot have varargs' ) arg_2 . pop ( 0 ) arg_2 . sort ( ) return arg_2","id_":253502,"task_name":"https:\/\/github.com\/bmcfee\/muda\/blob\/ff82efdfaeb98da0a9f9124845826eb20536a9ba\/muda\/base.py#L21-L33","negative":"Given a KeyboardModifiers flags object, return whether the Control\n key is down.\n\n Parameters:\n -----------\n include_command : bool, optional (default True)\n Whether to treat the Command key as a (mutually exclusive) synonym\n for Control when in Mac OS."} {"query":"Calculates J along the direction .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . state . residuals . copy ( ) . ravel ( ) arg_2 = np . zeros ( arg_0 . param_vals . size ) arg_3 = arg_0 . param_vals . copy ( ) arg_4 = [ ] for arg_5 in range ( arg_0 . param_vals . size ) : arg_2 *= 0 arg_2 [ arg_5 ] += arg_0 . dl arg_0 . update_function ( arg_3 + arg_2 ) arg_6 = arg_0 . state . residuals . copy ( ) . ravel ( ) arg_4 . append ( ( arg_6 - arg_1 ) \/ arg_0 . dl ) arg_0 . update_function ( arg_3 ) return np . array ( arg_4 )","id_":253503,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/opt\/optimize.py#L1570-L1583","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Replace vars and copy .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 , arg_4 = mkstemp ( ) with io . open ( arg_4 , 'w' , encoding = 'utf-8' ) as new_file : with io . open ( arg_0 , 'r' , encoding = 'utf-8' ) as old_file : for arg_5 in old_file : arg_6 = arg_5 . replace ( '#{project}' , arg_2 ) . replace ( '#{project|title}' , arg_2 . title ( ) ) new_file . write ( arg_6 ) shutil . copy ( arg_4 , arg_1 ) os . close ( arg_3 )","id_":253504,"task_name":"https:\/\/github.com\/hustlzp\/Flask-Boost\/blob\/d0308408ebb248dd752b77123b845f8ec637fab2\/flask_boost\/cli.py#L316-L330","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Operates on item_dict","positive":"def Func ( arg_0 ) : arg_0 [ 'type' ] = arg_0 [ 'name' ] if len ( arg_0 [ 'summary_fields' ] ) == 0 : arg_0 [ 'resource_name' ] = None arg_0 [ 'resource_type' ] = None else : arg_1 = arg_0 [ 'summary_fields' ] arg_0 [ 'resource_name' ] = arg_1 . get ( 'resource_name' , '[unknown]' ) arg_0 [ 'resource_type' ] = arg_1 . get ( 'resource_type' , '[unknown]' )","id_":253505,"task_name":"https:\/\/github.com\/ansible\/tower-cli\/blob\/a2b151fed93c47725018d3034848cb3a1814bed7\/tower_cli\/resources\/role.py#L183-L201","negative":"Run length encoding for binhex4.\n The CPython implementation does not do run length encoding\n of \\x90 characters. 
This implementation does."} {"query":"Use the create_bucket API to create a new bucket","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = S3URL ( arg_1 ) message ( 'Creating %s' , arg_1 ) if not arg_0 . opt . dry_run : arg_3 = arg_0 . s3 . Func ( Bucket = arg_2 . bucket ) if arg_3 [ 'ResponseMetadata' ] [ \"HTTPStatusCode\" ] == 200 : message ( 'Done.' ) else : raise Failure ( 'Unable to create bucket %s' % arg_1 )","id_":253506,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L824-L834","negative":"Clean up stats file, if configured to do so."} {"query":"Make a dictionary with filename and base64 file data","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 , dict ) : arg_2 = arg_1 [ 'file' ] if 'filename' in arg_1 : arg_3 = arg_1 [ 'filename' ] else : arg_3 = arg_2 . name else : arg_2 = arg_1 arg_3 = arg_1 . name arg_4 = base64 . b64encode ( arg_2 . read ( ) ) return { 'id' : arg_3 , 'data' : arg_4 . decode ( ) if six . PY3 else arg_4 , }","id_":253507,"task_name":"https:\/\/github.com\/sendwithus\/sendwithus_python\/blob\/8ae50d514febd44f7d9be3c838b4d92f99412832\/sendwithus\/__init__.py#L452-L468","negative":"Forces unregistration of tree admin class with following re-registration."} {"query":"Import a backup from Cloud Storage to Cloud Datastore .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None ) : arg_6 = arg_0 . get_conn ( ) arg_7 = 'gs:\/\/' + '\/' . join ( filter ( None , [ arg_1 , arg_3 , arg_2 ] ) ) if not arg_4 : arg_4 = { } if not arg_5 : arg_5 = { } arg_8 = { 'inputUrl' : arg_7 , 'entityFilter' : arg_4 , 'labels' : arg_5 , } arg_9 = ( arg_6 . projects ( ) . import_ ( projectId = arg_0 . project_id , arg_8 = arg_8 ) . execute ( num_retries = arg_0 . num_retries ) ) return arg_9","id_":253508,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/datastore_hook.py#L297-L337","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."} {"query":"Adds an edge to the graph .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : arg_4 = arg_0 . add_node ( arg_1 ) arg_5 = arg_0 . add_node ( arg_2 ) if \"directed\" in arg_0 . trait_names ( ) : arg_6 = arg_0 . directed else : arg_6 = False if arg_0 . default_edge is not None : arg_7 = arg_0 . default_edge . clone_traits ( copy = \"deep\" ) arg_7 . tail_node = arg_4 arg_7 . head_node = arg_5 arg_7 . conn = \"->\" if arg_6 else \"--\" arg_7 . set ( ** arg_3 ) else : arg_7 = Edge ( arg_4 , arg_5 , arg_6 , ** arg_3 ) if \"strict\" in arg_0 . trait_names ( ) : if not arg_0 . strict : arg_0 . edges . append ( arg_7 ) else : arg_0 . edges . append ( arg_7 ) else : arg_0 . edges . append ( arg_7 )","id_":253509,"task_name":"https:\/\/github.com\/rwl\/godot\/blob\/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f\/godot\/base_graph.py#L519-L548","negative":"Creates a tempfile and starts the given editor, returns the data afterwards."} {"query":"Calculate error bars from scikit - learn RandomForest estimators .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = True , arg_5 = False , arg_6 = None ) : if arg_3 is None : arg_3 = calc_inbag ( arg_1 . shape [ 0 ] , arg_0 ) arg_7 = np . array ( [ tree . predict ( arg_2 ) for tree in arg_0 ] ) . T arg_8 = np . mean ( arg_7 , 0 ) arg_9 = arg_7 - arg_8 arg_10 = arg_0 . 
n_estimators arg_11 = _core_computation ( arg_1 , arg_2 , arg_3 , arg_9 , arg_10 , arg_5 , arg_6 ) arg_12 = _bias_correction ( arg_11 , arg_3 , arg_9 , arg_10 ) if np . max ( arg_3 ) == 1 : arg_13 = 1 \/ ( 1 - np . mean ( arg_3 ) ) ** 2 arg_12 *= arg_13 if not arg_4 : return arg_12 if arg_12 . shape [ 0 ] <= 20 : print ( \"No calibration with n_samples <= 20\" ) return arg_12 if arg_4 : arg_14 = 2 arg_15 = np . ceil ( arg_10 \/ arg_14 ) arg_16 = copy . deepcopy ( arg_0 ) arg_16 . estimators_ = np . random . permutation ( arg_16 . estimators_ ) [ : int ( arg_15 ) ] arg_16 . n_estimators = int ( arg_15 ) arg_19 = Func ( arg_16 , arg_1 , arg_2 , arg_4 = False , arg_5 = arg_5 , arg_6 = arg_6 ) arg_20 = np . mean ( ( arg_19 - arg_12 ) ** 2 ) arg_21 = arg_15 \/ arg_10 arg_22 = ( arg_21 ** 2 + ( 1 - arg_21 ) ** 2 ) \/ ( 2 * ( 1 - arg_21 ) ** 2 ) * arg_20 arg_23 = calibrateEB ( arg_12 , arg_22 ) return arg_23","id_":253510,"task_name":"https:\/\/github.com\/scikit-learn-contrib\/forest-confidence-interval\/blob\/401c63a74a27d775eff0f72b6c20ffd568491fe0\/forestci\/forestci.py#L167-L275","negative":"Modify an existing lock's timeout.\n\n token:\n Valid lock token.\n timeout:\n Suggested lifetime in seconds (-1 for infinite).\n The real expiration time may be shorter than requested!\n Returns:\n Lock dictionary.\n Raises ValueError, if token is invalid."} {"query":"add josa at the end of this word","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : arg_3 = arg_0 . strip ( ) [ - 1 ] try : arg_4 , arg_4 , arg_5 = letter . decompose ( arg_3 ) except NotHangulException : arg_5 = letter . get_substituent_of ( arg_3 ) if arg_5 in ( '' , arg_1 [ 'except' ] ) : return arg_0 + arg_1 [ 'has' ] return arg_0 + arg_1 [ 'not' ]","id_":253511,"task_name":"https:\/\/github.com\/bluedisk\/hangul-toolkit\/blob\/f36b534ee339263fb72e687b732697cc7ed290dc\/hgtk\/josa.py#L34-L45","negative":"Clone throttles without memory"} {"query":"If the build location was a temporary directory this will move it to a new more permanent location","positive":"def Func ( arg_0 ) : if arg_0 . source_dir is not None : return assert arg_0 . req is not None assert arg_0 . _temp_build_dir arg_1 = arg_0 . _temp_build_dir arg_2 = arg_0 . _ideal_build_dir del arg_0 . _ideal_build_dir if arg_0 . editable : arg_3 = arg_0 . name . lower ( ) else : arg_3 = arg_0 . name arg_4 = os . path . join ( arg_2 , arg_3 ) if not os . path . exists ( arg_2 ) : logger . debug ( 'Creating directory %s' , arg_2 ) _make_build_dir ( arg_2 ) if os . path . exists ( arg_4 ) : raise InstallationError ( 'A package already exists in %s; please remove it to continue' % display_path ( arg_4 ) ) logger . debug ( 'Moving package %s from %s to new location %s' , arg_0 , display_path ( arg_1 ) , display_path ( arg_4 ) , ) shutil . move ( arg_1 , arg_4 ) arg_0 . _temp_build_dir = arg_4 arg_0 . source_dir = arg_4 arg_0 . _egg_info_path = None","id_":253512,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/pip\/req\/req_install.py#L235-L264","negative":"write lines, one by one, separated by \\n to device"} {"query":"Stop any currently running Docker containers associated with Dusty or associated with the provided apps_or_services . Does not remove the service s containers .","positive":"def Func ( arg_0 = None , arg_1 = False ) : if arg_0 : log_to_client ( \"Stopping the following apps or services: {}\" . format ( ', ' . 
join ( arg_0 ) ) ) else : log_to_client ( \"Stopping all running containers associated with Dusty\" ) compose . stop_running_services ( arg_0 ) if arg_1 : compose . rm_containers ( arg_0 )","id_":253513,"task_name":"https:\/\/github.com\/gamechanger\/dusty\/blob\/dc12de90bb6945023d6f43a8071e984313a1d984\/dusty\/commands\/run.py#L99-L110","negative":"Set last modified time for destPath to timeStamp on epoch-format"} {"query":"r Method to calculate heat capacity of a solid at temperature T with a given method .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 == PERRY151 : arg_3 = ( arg_0 . PERRY151_const + arg_0 . PERRY151_lin * arg_1 + arg_0 . PERRY151_quadinv \/ arg_1 ** 2 + arg_0 . PERRY151_quad * arg_1 ** 2 ) * calorie elif arg_2 == CRCSTD : arg_3 = arg_0 . CRCSTD_Cp elif arg_2 == LASTOVKA_S : arg_3 = Lastovka_solid ( arg_1 , arg_0 . similarity_variable ) arg_3 = property_mass_to_molar ( arg_3 , arg_0 . MW ) elif arg_2 in arg_0 . tabular_data : arg_3 = arg_0 . interpolate ( arg_1 , arg_2 ) return arg_3","id_":253514,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/heat_capacity.py#L2523-L2552","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"Return a version of this object represented entirely using integer values for the relative attributes .","positive":"def Func ( arg_0 ) : arg_1 = int ( arg_0 . days ) arg_2 = round ( arg_0 . hours + 24 * ( arg_0 . days - arg_1 ) , 11 ) arg_3 = int ( arg_2 ) arg_4 = round ( arg_0 . minutes + 60 * ( arg_2 - arg_3 ) , 10 ) arg_5 = int ( arg_4 ) arg_6 = round ( arg_0 . seconds + 60 * ( arg_4 - arg_5 ) , 8 ) arg_7 = int ( arg_6 ) arg_8 = round ( arg_0 . microseconds + 1e6 * ( arg_6 - arg_7 ) ) return arg_0 . __class__ ( years = arg_0 . years , months = arg_0 . months , arg_1 = arg_1 , arg_3 = arg_3 , arg_5 = arg_5 , arg_7 = arg_7 , arg_8 = arg_8 , leapdays = arg_0 . leapdays , year = arg_0 . year , month = arg_0 . month , day = arg_0 . day , weekday = arg_0 . weekday , hour = arg_0 . hour , minute = arg_0 . minute , second = arg_0 . second , microsecond = arg_0 . microsecond )","id_":253515,"task_name":"https:\/\/github.com\/MacHu-GWU\/superjson-project\/blob\/782ca4b2edbd4b4018b8cedee42eeae7c921b917\/superjson\/pkg\/dateutil\/relativedelta.py#L268-L302","negative":"Gets the AwsGlueCatalogHook"} {"query":"Launch a subshell","positive":"def Func ( arg_0 = None ) : if arg_0 : arg_1 . environ [ 'PROMPT' ] = prompt ( arg_0 ) subprocess . call ( cmd ( ) , env = arg_1 . environ . data )","id_":253516,"task_name":"https:\/\/github.com\/cpenv\/cpenv\/blob\/afbb569ae04002743db041d3629a5be8c290bd89\/cpenv\/shell.py#L67-L73","negative":"Get a single publication."} {"query":"n - qubit input state for QFT that produces output 1 .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 in range ( arg_2 ) : arg_0 . h ( arg_1 [ arg_3 ] ) arg_0 . u1 ( math . pi \/ float ( 2 ** ( arg_3 ) ) , arg_1 [ arg_3 ] ) . 
inverse ( )","id_":253517,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/examples\/python\/qft.py#L24-L28","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"} {"query":"Draw solid triangle with points x0 y0 - x1 y1 - x2 y2","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 = None , arg_8 = False ) : arg_9 = arg_10 = arg_20 = arg_19 = 0 if arg_2 > arg_4 : arg_2 , arg_4 = arg_4 , arg_2 arg_1 , arg_3 = arg_3 , arg_1 if arg_4 > arg_6 : arg_6 , arg_4 = arg_4 , arg_6 arg_5 , arg_3 = arg_3 , arg_5 if arg_2 > arg_4 : arg_2 , arg_4 = arg_4 , arg_2 arg_1 , arg_3 = arg_3 , arg_1 if arg_2 == arg_6 : arg_9 = arg_10 = arg_1 if arg_3 < arg_9 : arg_9 = arg_3 elif arg_3 > arg_10 : arg_10 = arg_3 if arg_5 < arg_9 : arg_9 = arg_5 elif arg_5 > arg_10 : arg_10 = arg_5 _draw_fast_hline ( arg_0 , arg_9 , arg_2 , arg_10 - arg_9 + 1 , arg_7 , arg_8 ) arg_11 = arg_3 - arg_1 arg_12 = arg_4 - arg_2 arg_13 = arg_5 - arg_1 arg_14 = arg_6 - arg_2 arg_15 = arg_5 - arg_3 arg_16 = arg_6 - arg_4 arg_17 = 0 arg_18 = 0 if arg_4 == arg_6 : arg_19 = arg_4 else : arg_19 = arg_4 - 1 for arg_20 in range ( arg_20 , arg_19 + 1 ) : arg_9 = arg_1 + arg_17 \/ arg_12 arg_10 = arg_1 + arg_18 \/ arg_14 arg_17 += arg_11 arg_18 += arg_13 if arg_9 > arg_10 : arg_9 , arg_10 = arg_10 , arg_9 _draw_fast_hline ( arg_0 , arg_9 , arg_20 , arg_10 - arg_9 + 1 , arg_7 , arg_8 ) arg_17 = arg_15 * ( arg_20 - arg_4 ) arg_18 = arg_13 * ( arg_20 - arg_2 ) for arg_20 in range ( arg_20 , arg_6 + 1 ) : arg_9 = arg_3 + arg_17 \/ arg_16 arg_10 = arg_1 + arg_18 \/ arg_14 arg_17 += arg_15 arg_18 += arg_13 if arg_9 > arg_10 : arg_9 , arg_10 = arg_10 , arg_9 _draw_fast_hline ( arg_0 , arg_9 , arg_20 , arg_10 - arg_9 + 1 , arg_7 , arg_8 )","id_":253518,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/layout\/matrix_drawing.py#L289-L359","negative":"Set the rotation of this body using a rotation matrix.\n\n Parameters\n ----------\n rotation : sequence of 9 floats\n The desired rotation matrix for this body."} {"query":"Constructs function encapsulated in the graph and the session .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : import keras . backend as K @ wraps ( arg_0 ) def _wrapped ( * arg_3 , ** arg_4 ) : with arg_1 . as_default ( ) : K . set_session ( arg_2 ) return arg_0 ( * arg_3 , ** arg_4 ) return _wrapped","id_":253519,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/models\/tf_backend.py#L31-L40","negative":"Remove all binary files in the adslib directory."} {"query":"Monkey - patch compiler to allow for removal of default compiler flags .","positive":"def Func ( arg_0 ) : import arg_18 . ccompiler def _Func ( arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = 0 , arg_7 = None , arg_8 = None , arg_9 = None ) : for arg_10 in arg_0 : if arg_10 in arg_1 . compiler_so : arg_1 . compiler_so . remove ( arg_10 ) arg_4 , arg_11 , arg_8 , arg_12 , arg_13 = arg_1 . _setup_compile ( arg_3 , arg_4 , arg_5 , arg_2 , arg_9 , arg_8 ) arg_14 = arg_1 . _get_cc_args ( arg_12 , arg_6 , arg_7 ) for arg_15 in arg_11 : try : arg_16 , arg_17 = arg_13 [ arg_15 ] except KeyError : continue arg_1 . _compile ( arg_15 , arg_16 , arg_17 , arg_14 , arg_8 , arg_12 ) return arg_11 arg_18 . ccompiler . CCompiler . 
compile = _Func","id_":253520,"task_name":"https:\/\/github.com\/capitalone\/giraffez\/blob\/6b4d27eb1a1eaf188c6885c7364ef27e92b1b957\/setup.py#L40-L62","negative":"Processes a scheduled consumption request that has completed\n\n :type token: RequestToken\n :param token: The token associated to the consumption\n request that is used to identify the request."} {"query":"Read incoming message .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . packet with arg_0 . __Func_lock : arg_2 = arg_0 . __buffer while len ( arg_2 ) < arg_1 : arg_2 += arg_0 . _Func_data ( ) arg_3 = arg_0 . __unpack ( arg_2 [ : arg_1 ] ) [ 0 ] + arg_1 while len ( arg_2 ) < arg_3 : arg_2 += arg_0 . _Func_data ( ) arg_4 , arg_0 . __buffer = decode ( arg_2 [ arg_1 : ] ) return arg_4","id_":253521,"task_name":"https:\/\/github.com\/hdima\/erlport\/blob\/246b7722d62b87b48be66d9a871509a537728962\/priv\/python3\/erlport\/erlproto.py#L84-L95","negative":"Add file to the patch with patch_name.\n If patch_name is None or empty the topmost patch will be used.\n Adding an already added patch will raise an QuiltError if ignore is\n False."} {"query":"login and update cached cookies","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . logger . debug ( 'login ...' ) arg_3 = arg_0 . session . http . get ( arg_0 . login_url ) arg_4 = arg_0 . _input_re . findall ( arg_3 . text ) if not arg_4 : raise PluginError ( 'Missing input data on login website.' ) arg_5 = { } for arg_6 in arg_4 : try : arg_7 = arg_0 . _name_re . search ( arg_6 ) . group ( 1 ) except AttributeError : continue try : arg_8 = arg_0 . _value_re . search ( arg_6 ) . group ( 1 ) except AttributeError : arg_8 = '' arg_5 [ arg_7 ] = arg_8 arg_9 = { 'ctl00$Login1$UserName' : arg_1 , 'ctl00$Login1$Password' : arg_2 , 'ctl00$Login1$LoginButton.x' : '0' , 'ctl00$Login1$LoginButton.y' : '0' } arg_5 . update ( arg_9 ) arg_3 = arg_0 . session . http . post ( arg_0 . login_url , arg_5 = arg_5 ) for arg_10 in arg_0 . session . http . cookies : arg_0 . _session_attributes . set ( arg_10 . name , arg_10 . value , expires = 3600 * 24 ) if arg_0 . _session_attributes . get ( 'ASP.NET_SessionId' ) and arg_0 . _session_attributes . get ( '.abportail1' ) : arg_0 . logger . debug ( 'New session data' ) arg_0 . set_expires_time_cache ( ) return True else : arg_0 . logger . error ( 'Failed to login, check your username\/password' ) return False","id_":253522,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/plugins\/abweb.py#L93-L135","negative":"Downloads a MP4 or WebM file that is associated with the video at the URL passed.\n\n :param str url: URL of the video to be downloaded\n :return str: Filename of the file in local storage"} {"query":"Scan the provided policy directory for all JSON policy files .","positive":"def Func ( arg_0 ) : arg_1 = [ os . path . join ( arg_0 , x ) for x in os . listdir ( arg_0 ) if x . endswith ( \".json\" ) ] return sorted ( arg_1 )","id_":253523,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/services\/server\/monitor.py#L25-L30","negative":"Subtract the arg from the value."} {"query":"Write the ObjectDefaults structure encoding to the data stream .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_2_0 ) : if arg_2 < arg_3 . KMIPVersion . KMIP_2_0 : raise exceptions . VersionNotSupported ( \"KMIP {} does not support the ObjectDefaults object.\" . format ( arg_2 . 
value ) ) arg_6 = BytearrayStream ( ) if arg_0 . _object_type : arg_0 . _object_type . Func ( arg_6 , arg_2 = arg_2 ) else : raise exceptions . InvalidField ( \"The ObjectDefaults structure is missing the object type \" \"field.\" ) if arg_0 . _attributes : arg_0 . _attributes . Func ( arg_6 , arg_2 = arg_2 ) else : raise exceptions . InvalidField ( \"The ObjectDefaults structure is missing the attributes field.\" ) arg_0 . length = arg_6 . length ( ) super ( ObjectDefaults , arg_0 ) . Func ( arg_1 , arg_2 = arg_2 ) arg_1 . Func ( arg_6 . buffer )","id_":253524,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/objects.py#L3755-L3801","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Load and return a PySchema class from an avsc string","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 , str ) : arg_0 = arg_0 . decode ( \"utf8\" ) arg_1 = json . loads ( arg_0 ) return AvroSchemaParser ( ) . parse_schema_struct ( arg_1 )","id_":253525,"task_name":"https:\/\/github.com\/spotify\/pyschema\/blob\/7e6c3934150bcb040c628d74ace6caf5fcf867df\/pyschema_extensions\/avro_schema_parser.py#L49-L57","negative":"Execute the enrich phase for a given backend section\n\n :param config: a Mordred config object\n :param backend_section: the backend section where the enrich phase is executed"} {"query":"Public form to add an event .","positive":"def Func ( arg_0 ) : arg_1 = AddEventForm ( arg_0 . POST or None ) if arg_1 . is_valid ( ) : arg_2 = arg_1 . save ( commit = False ) arg_2 . sites = settings . SITE_ID arg_2 . submitted_by = arg_0 . user arg_2 . approved = True arg_2 . slug = slugify ( arg_2 . name ) arg_2 . save ( ) messages . success ( arg_0 , 'Your event has been added.' ) return HttpResponseRedirect ( reverse ( 'events_index' ) ) return render ( arg_0 , 'happenings\/event_form.html' , { 'form' : arg_1 , 'form_title' : 'Add an event' } )","id_":253526,"task_name":"https:\/\/github.com\/tBaxter\/tango-happenings\/blob\/cb3c49ea39e0a6cef9c6ffb534c2fbf401139ba2\/build\/lib\/happenings\/views.py#L214-L229","negative":"Remove rows with NAs from the H2OFrame.\n\n :returns: new H2OFrame with all rows from the original frame containing any NAs removed."} {"query":"Returns encodings for all the records","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . fields [ 0 ] . numEncodings assert ( all ( arg_2 . numEncodings == arg_1 for arg_2 in arg_0 . fields ) ) arg_3 = [ arg_0 . getEncoding ( index ) for index in range ( arg_1 ) ] return arg_3","id_":253527,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/generators\/data_generator.py#L301-L308","negative":"Adds all parameters to `traj`"} {"query":"Remove the key from the request cache and from memcache .","positive":"def Func ( arg_0 , * arg_1 ) : arg_2 = get_cache ( ) arg_3 = arg_0 . 
get_cache_key ( * arg_1 ) if arg_3 in arg_2 : del arg_2 [ arg_3 ]","id_":253528,"task_name":"https:\/\/github.com\/Patreon\/multiget-cache-py\/blob\/824ec4809c97cc7e0035810bd9fefd1262de3318\/multiget_cache\/base_cache_wrapper.py#L55-L60","negative":"Gets status of response."} {"query":"Synchronous DELETE request . data must be a JSONable value .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None ) : if not arg_2 : arg_2 = '' arg_3 = arg_3 or { } arg_4 = arg_4 or { } arg_6 = arg_0 . _build_endpoint_url ( arg_1 , arg_2 ) arg_0 . _authenticate ( arg_3 , arg_4 ) return make_Func_request ( arg_6 , arg_3 , arg_4 , arg_5 = arg_5 )","id_":253529,"task_name":"https:\/\/github.com\/ozgur\/python-firebase\/blob\/6b96b326f6d8f477503ca42fdfbd81bcbe1f9e0d\/firebase\/firebase.py#L371-L380","negative":"Print out stats about how files are getting processed.\n\n :param known_file_paths: a list of file paths that may contain Airflow\n DAG definitions\n :type known_file_paths: list[unicode]\n :return: None"} {"query":"returns an iterator to grab four lines at a time","positive":"def Func ( arg_0 ) : if arg_0 [ 0 ] . endswith ( \".gz\" ) : arg_1 = gzip . open else : arg_1 = open arg_2 = arg_1 ( arg_0 [ 0 ] , 'r' ) arg_3 = iter ( arg_2 ) arg_4 = itertools . izip ( arg_3 , arg_3 , arg_3 , arg_3 ) if arg_0 [ 1 ] : arg_5 = arg_1 ( arg_0 [ 1 ] , 'r' ) arg_6 = iter ( arg_5 ) arg_7 = itertools . izip ( arg_6 , arg_6 , arg_6 , arg_6 ) arg_8 = itertools . izip ( arg_4 , arg_7 ) else : arg_5 = 0 arg_8 = itertools . izip ( arg_4 , iter ( int , 1 ) ) def feedme ( arg_8 ) : for arg_9 in arg_8 : yield arg_9 arg_10 = feedme ( arg_8 ) return arg_10 , arg_2 , arg_5","id_":253530,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/demultiplex.py#L398-L426","negative":"Register unit object on interface level object"} {"query":"Look for inputs of the app that are remote files . Submit stage_in apps for such files and replace the file objects in the inputs list with corresponding DataFuture objects .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_1 == 'data_manager' : return arg_2 , arg_3 arg_4 = arg_3 . get ( 'inputs' , [ ] ) for arg_5 , arg_6 in enumerate ( arg_4 ) : if isinstance ( arg_6 , File ) and arg_6 . is_remote ( ) : arg_4 [ arg_5 ] = arg_0 . data_manager . stage_in ( arg_6 , arg_1 ) for arg_7 , arg_6 in arg_3 . items ( ) : if isinstance ( arg_6 , File ) and arg_6 . is_remote ( ) : arg_3 [ arg_7 ] = arg_0 . data_manager . stage_in ( arg_6 , arg_1 ) arg_8 = list ( arg_2 ) for arg_5 , arg_6 in enumerate ( arg_8 ) : if isinstance ( arg_6 , File ) and arg_6 . is_remote ( ) : arg_8 [ arg_5 ] = arg_0 . data_manager . stage_in ( arg_6 , arg_1 ) return tuple ( arg_8 ) , arg_3","id_":253531,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/dataflow\/dflow.py#L461-L490","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. {'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"If there s no log configuration set up a default handler .","positive":"def Func ( arg_0 ) : if log . handlers : return arg_1 = logging . StreamHandler ( ) arg_2 = logging . Formatter ( '%(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s' ) arg_1 . setFormatter ( arg_2 ) log . 
addHandler ( arg_1 )","id_":253532,"task_name":"https:\/\/github.com\/rollbar\/pyrollbar\/blob\/33ef2e723a33d09dd6302f978f4a3908be95b9d2\/rollbar\/contrib\/django\/middleware.py#L236-L246","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Parse any HDL type to this transaction template instance","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_4 ) -> None : arg_0 . bitAddr = arg_3 arg_5 = False if isinstance ( arg_1 , Bits ) : arg_6 = arg_0 . _loadFromBits elif isinstance ( arg_1 , HStruct ) : arg_6 = arg_0 . _loadFromHStruct elif isinstance ( arg_1 , HArray ) : arg_6 = arg_0 . _loadFromArray elif isinstance ( arg_1 , HStream ) : arg_6 = arg_0 . _loadFromHStream elif isinstance ( arg_1 , HUnion ) : arg_6 = arg_0 . _loadFromUnion arg_5 = True else : raise TypeError ( \"expected instance of HdlType\" , arg_1 ) arg_0 . bitAddrEnd = arg_6 ( arg_1 , arg_3 ) arg_0 . childrenAreChoice = arg_5","id_":253533,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/hdl\/transTmpl.py#L184-L205","negative":"set current cursor position"} {"query":"Create the droplet with object properties .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : for arg_3 in arg_2 . keys ( ) : setattr ( arg_0 , arg_3 , arg_2 [ arg_3 ] ) if not arg_0 . size_slug and arg_0 . size : arg_0 . size_slug = arg_0 . size arg_5 = Droplet . __get_ssh_keys_id_or_fingerprint ( arg_0 . ssh_keys , arg_0 . token , arg_0 . name ) arg_6 = { \"name\" : arg_0 . name , \"size\" : arg_0 . size_slug , \"image\" : arg_0 . image , \"region\" : arg_0 . region , \"ssh_keys\" : arg_5 , \"backups\" : bool ( arg_0 . backups ) , \"ipv6\" : bool ( arg_0 . ipv6 ) , \"private_networking\" : bool ( arg_0 . private_networking ) , \"volumes\" : arg_0 . volumes , \"tags\" : arg_0 . tags , \"monitoring\" : bool ( arg_0 . monitoring ) , } if arg_0 . user_data : arg_6 [ \"user_data\" ] = arg_0 . user_data arg_6 = arg_0 . get_data ( \"droplets\/\" , type = POST , params = arg_6 ) if arg_6 : arg_0 . id = arg_6 [ 'droplet' ] [ 'id' ] arg_8 = arg_6 [ 'links' ] [ 'actions' ] [ 0 ] [ 'id' ] arg_0 . action_ids = [ ] arg_0 . action_ids . append ( arg_8 )","id_":253534,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Droplet.py#L529-L570","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. 
seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Create an interrupt event handle .","positive":"def Func ( ) : class SECURITY_ATTRIBUTES ( ctypes . Structure ) : arg_0 = [ ( \"nLength\" , ctypes . c_int ) , ( \"lpSecurityDescriptor\" , ctypes . c_void_p ) , ( \"bInheritHandle\" , ctypes . c_int ) ] arg_1 = SECURITY_ATTRIBUTES ( ) arg_2 = ctypes . pointer ( arg_1 ) arg_1 . nLength = ctypes . sizeof ( SECURITY_ATTRIBUTES ) arg_1 . lpSecurityDescriptor = 0 arg_1 . bInheritHandle = 1 return ctypes . windll . kernel32 . CreateEventA ( arg_2 , False , False , '' )","id_":253535,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/zmq\/parentpoller.py#L67-L92","negative":"Write the index.html file for this report."} {"query":"Given a module that contains a list of some types find all symbols in the module that do not start with _ and attempt to import them as types .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in ( x for x in dir ( arg_1 ) if not x . startswith ( '_' ) ) : arg_3 = getattr ( arg_1 , arg_2 ) try : arg_0 . inject_type ( arg_2 , arg_3 ) except ArgumentError : pass","id_":253536,"task_name":"https:\/\/github.com\/iotile\/typedargs\/blob\/0a5091a664b9b4d836e091e9ba583e944f438fd8\/typedargs\/typeinfo.py#L336-L348","negative":"Release the semaphore\n\n :param tag: A tag identifying what is releasing the semaphore\n :param acquire_token: The token returned from when the semaphore was\n acquired. Note that this is not really needed to directly use this\n class but is needed for API compatibility with the\n SlidingWindowSemaphore implementation."} {"query":"Merges the contents of two lists into a new list .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = list ( arg_0 ) for arg_3 in arg_1 : if arg_3 not in arg_2 : arg_2 . append ( arg_3 ) return arg_2","id_":253537,"task_name":"https:\/\/github.com\/jayclassless\/tidypy\/blob\/3c3497ca377fbbe937103b77b02b326c860c748f\/src\/tidypy\/util.py#L23-L40","negative":"Output some information on CUDA-enabled devices on your computer,\n including current memory usage. Modified to only get number of devices.\n\n It's a port of https:\/\/gist.github.com\/f0k\/0d6431e3faa60bffc788f8b4daa029b1\n from C to Python with ctypes, so it can run without compiling\n anything. Note that this is a direct translation with no attempt to\n make the code Pythonic. It's meant as a general demonstration on how\n to obtain CUDA device information from Python without resorting to\n nvidia-smi or a compiled Python extension.\n\n\n .. note:: Author: Jan Schl\u00fcter, https:\/\/gist.github.com\/63a664160d016a491b2cbea15913d549.git"} {"query":"Routes Alexa requests to appropriate handlers .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> arg_2 : arg_3 = arg_1 [ 'request' ] [ 'type' ] arg_4 = arg_1 [ 'request' ] [ 'requestId' ] log . debug ( f'Received request. Type: {request_type}, id: {request_id}' ) if arg_3 in arg_0 . handled_requests . keys ( ) : arg_5 : arg_2 = arg_0 . handled_requests [ arg_3 ] ( arg_1 ) else : arg_5 : arg_2 = arg_0 . handled_requests [ '_unsupported' ] ( arg_1 ) log . warning ( f'Unsupported request type: {request_type}, request id: {request_id}' ) arg_0 . 
_rearm_self_destruct ( ) return arg_5","id_":253538,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/utils\/alexa\/conversation.py#L79-L99","negative":"Write the content of the `meta_dict` into `filename`.\n\n Parameters\n ----------\n filename: str\n Path to the output file\n\n meta_dict: dict\n Dictionary with the fields of the metadata .mhd file"} {"query":"Send an email to invite a non - Google contact to Hangouts .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = hangouts_pb2 . SendOffnetworkInvitationResponse ( ) await arg_0 . _pb_request ( 'devices\/sendoffnetworkinvitation' , arg_1 , arg_2 ) return arg_2","id_":253539,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/client.py#L601-L609","negative":"Read attribute from sysfs and return as string"} {"query":"Choose tasks for a given stream_id and values and Returns a list of target tasks","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 not in arg_0 . targets : return [ ] arg_3 = [ ] for arg_4 in arg_0 . targets [ arg_1 ] : arg_3 . extend ( arg_4 . Func ( arg_2 ) ) return arg_3","id_":253540,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/utils\/misc\/custom_grouping_helper.py#L52-L60","negative":"Returns a decorator to swallow a requests exception for modules that\n are not accessible without logging in, and turn it into an Unavailable\n exception."} {"query":"Naive depth - search into a directory for files with a given extension .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 3 , arg_3 = True ) : assert arg_2 >= 1 arg_1 = arg_1 . strip ( os . extsep ) arg_4 = list ( ) for arg_5 in range ( 1 , arg_2 + 1 ) : arg_6 = os . path . sep . join ( [ \"*\" ] * arg_5 ) arg_7 = os . path . join ( arg_0 , os . extsep . join ( [ arg_6 , arg_1 ] ) ) arg_4 += glob . glob ( arg_7 ) if arg_3 : arg_4 . sort ( ) return arg_4","id_":253541,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/util.py#L182-L223","negative":"Returns a decorator to swallow a requests exception for modules that\n are not accessible without logging in, and turn it into an Unavailable\n exception."} {"query":"segment the raw text into paragraphs","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 in arg_0 : arg_2 = arg_2 . strip ( ) if len ( arg_2 ) < 1 : if len ( arg_1 ) > 0 : yield \"\\n\" . join ( arg_1 ) arg_1 = [ ] else : arg_1 . append ( arg_2 ) if len ( arg_1 ) > 0 : yield \"\\n\" . join ( arg_1 )","id_":253542,"task_name":"https:\/\/github.com\/DerwenAI\/pytextrank\/blob\/181ea41375d29922eb96768cf6550e57a77a0c95\/pytextrank\/pytextrank.py#L34-L51","negative":"Extract metadata entries from an xml node"} {"query":"Context manager which yields a function for adding multiple routes from a given module .","positive":"def Func ( arg_0 : arg_1 . Application , arg_3 = None , arg_4 : arg_5 = None , arg_6 : arg_5 = None ) : if isinstance ( arg_3 , ( arg_5 , bytes ) ) : arg_3 = importlib . import_module ( arg_3 ) def add_route ( arg_7 , arg_8 , arg_9 , arg_10 = None ) : if isinstance ( arg_9 , ( arg_5 , bytes ) ) : if not arg_3 : raise ValueError ( 'Must pass module to Func if passing handler name strings.' ) arg_10 = arg_10 or arg_9 arg_9 = getattr ( arg_3 , arg_9 ) else : arg_10 = arg_10 or arg_9 . __name__ arg_8 = make_path ( arg_8 , arg_4 ) arg_10 = '.' . 
join ( ( arg_6 , arg_10 ) ) if arg_6 else arg_10 return arg_0 . router . add_route ( arg_7 , arg_8 , arg_9 , arg_10 = arg_10 ) yield add_route","id_":253543,"task_name":"https:\/\/github.com\/sloria\/aiohttp_utils\/blob\/e5b41452f8077e7d749715606b1560f4b50e3d71\/aiohttp_utils\/routing.py#L85-L150","negative":"Returns Gcp Video Intelligence Service client\n\n :rtype: google.cloud.videointelligence_v1.VideoIntelligenceServiceClient"} {"query":"Calculate statistics for given data .","positive":"def Func ( arg_0 , arg_1 = [ 'mean' , 'std' ] , arg_2 = [ ] ) : arg_3 = { 'mean' : numpy . mean , 'avg' : numpy . mean , 'std' : numpy . std , 'standard_deviation' : numpy . std , 'median' : numpy . median , 'min' : numpy . amin , 'max' : numpy . amax } arg_4 = { } arg_5 = { } if len ( arg_0 ) == 0 : return arg_4 , arg_5 for arg_6 in arg_1 : if arg_6 in arg_3 . keys ( ) : arg_4 [ arg_6 ] = arg_3 [ arg_6 ] ( arg_0 ) else : logger . error ( \"Unsupported stat : \" + str ( arg_6 ) ) for arg_7 in arg_2 : if isinstance ( arg_7 , float ) or isinstance ( arg_7 , int ) : arg_5 [ arg_7 ] = numpy . percentile ( arg_0 , arg_7 ) else : logger . error ( \"Unsupported percentile requested (should be int or float): \" + str ( arg_7 ) ) return arg_4 , arg_5","id_":253544,"task_name":"https:\/\/github.com\/linkedin\/naarad\/blob\/261e2c0760fd6a6b0ee59064180bd8e3674311fe\/src\/naarad\/utils.py#L619-L651","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Convert to a value to send to Octave .","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , OctaveUserClass ) or not arg_1 . _attrs : return dict ( ) arg_2 = [ ] arg_3 = [ ] for arg_4 in arg_1 . _attrs : arg_2 . append ( ( str ( arg_4 ) , object ) ) arg_3 . append ( getattr ( arg_1 , arg_4 ) ) arg_5 = np . array ( [ tuple ( arg_3 ) ] , arg_2 ) return MatlabObject ( arg_5 , arg_1 . _name )","id_":253545,"task_name":"https:\/\/github.com\/blink1073\/oct2py\/blob\/bfc69d2168ae3d98258f95bbc55a858c21836b58\/oct2py\/dynamic.py#L187-L200","negative":"Given a Dusty repo object, clone the remote into Dusty's local repos\n directory if it does not already exist."} {"query":"Decorated function is a no - op if TEMPLATE_DEBUG is False","positive":"def Func ( arg_0 ) : def _ ( * arg_1 , ** arg_2 ) : arg_3 = getattr ( settings , 'TEMPLATE_DEBUG' , False ) return arg_0 ( * arg_1 , ** arg_2 ) if arg_3 else '' return _","id_":253546,"task_name":"https:\/\/github.com\/calebsmith\/django-template-debug\/blob\/f3d52638da571164d63e5c8331d409b0743c628f\/template_debug\/templatetags\/debug_tags.py#L18-L23","negative":"Get the context for this view."} {"query":"HTTP Get Request","positive":"def Func ( arg_0 ) : return requests . get ( arg_0 . _url , data = arg_0 . _data , headers = arg_0 . _headers , auth = ( arg_0 . _email , arg_0 . _api_token ) )","id_":253547,"task_name":"https:\/\/github.com\/boundary\/pulse-api-cli\/blob\/b01ca65b442eed19faac309c9d62bbc3cb2c098f\/boundary\/api_call.py#L186-L190","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Return an approximate number of queued tasks in the queue .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = arg_0 . _query_queued ( 'COUNT(*) AS count' , arg_1 = arg_1 ) return arg_2 [ 0 ] . 
count","id_":253548,"task_name":"https:\/\/github.com\/memsql\/memsql-python\/blob\/aac223a1b937d5b348b42af3c601a6c685ca633a\/memsql\/common\/sql_step_queue\/queue.py#L50-L53","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Run the algorithm on an undirected graph .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . arguments [ '{VERBOSE}' ] = str ( arg_0 . verbose ) . upper ( ) arg_0 . arguments [ '{SCORE}' ] = arg_0 . score arg_0 . arguments [ '{BETA}' ] = str ( arg_0 . beta ) arg_0 . arguments [ '{OPTIM}' ] = str ( arg_0 . optim ) . upper ( ) arg_0 . arguments [ '{ALPHA}' ] = str ( arg_0 . alpha ) arg_4 = DataFrame ( list ( nx . edges ( arg_2 ) ) , columns = [ \"from\" , \"to\" ] ) arg_5 = DataFrame ( list ( nx . edges ( nx . DiGraph ( DataFrame ( - nx . adj_matrix ( arg_2 , weight = None ) . to_dense ( ) + 1 , columns = list ( arg_2 . nodes ( ) ) , index = list ( arg_2 . nodes ( ) ) ) ) ) ) , columns = [ \"from\" , \"to\" ] ) arg_6 = arg_0 . _run_bnlearn ( arg_1 , arg_4 = arg_4 , arg_5 = arg_5 , verbose = arg_0 . verbose ) return nx . relabel_nodes ( nx . DiGraph ( arg_6 ) , { arg_7 : arg_8 for arg_7 , arg_8 in enumerate ( arg_1 . columns ) } )","id_":253549,"task_name":"https:\/\/github.com\/Diviyan-Kalainathan\/CausalDiscoveryToolbox\/blob\/be228b078ba9eb76c01b3ccba9a1c0ad9e9e5ed1\/cdt\/causality\/graph\/bnlearn.py#L140-L166","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Process a challenge and return the response .","positive":"def Func ( arg_0 , Func ) : if not Func : logger . debug ( \"Empty challenge\" ) return Failure ( \"bad-challenge\" ) if arg_0 . _server_first_message : return arg_0 . _final_challenge ( Func ) arg_2 = SERVER_FIRST_MESSAGE_RE . match ( Func ) if not arg_2 : logger . debug ( \"Bad challenge syntax: {0!r}\" . format ( Func ) ) return Failure ( \"bad-challenge\" ) arg_0 . _server_first_message = Func arg_4 = arg_2 . group ( \"mext\" ) if arg_4 : logger . debug ( \"Unsupported extension received: {0!r}\" . format ( arg_4 ) ) return Failure ( \"bad-challenge\" ) arg_5 = arg_2 . group ( \"nonce\" ) if not arg_5 . startswith ( arg_0 . _c_nonce ) : logger . debug ( \"Nonce does not start with our nonce\" ) return Failure ( \"bad-challenge\" ) arg_6 = arg_2 . group ( \"salt\" ) try : arg_6 = a2b_base64 ( arg_6 ) except ValueError : logger . debug ( \"Bad base64 encoding for salt: {0!r}\" . format ( arg_6 ) ) return Failure ( \"bad-challenge\" ) arg_7 = arg_2 . 
group ( \"iteration_count\" ) try : arg_7 = int ( arg_7 ) except ValueError : logger . debug ( \"Bad iteration_count: {0!r}\" . format ( arg_7 ) ) return Failure ( \"bad-challenge\" ) return arg_0 . _make_response ( arg_5 , arg_6 , arg_7 )","id_":253550,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/sasl\/scram.py#L247-L297","negative":"Sets niceness of a process"} {"query":"Jumps short if parity .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . PC = Operators . ITEBV ( arg_0 . address_bit_size , arg_0 . PF , arg_1 . read ( ) , arg_0 . PC )","id_":253551,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L3434-L3441","negative":"This decorator wraps descriptor methods with a new method that tries\n to delegate to a function of the same name defined on the owner instance\n for convenience for dispatcher clients."} {"query":"Make table definitions","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . cursor ( ) if arg_0 . tabledef is None : return if not arg_0 . tabledef . startswith ( 'CREATE' ) : arg_2 . execute ( 'CREATE TABLE IF NOT EXISTS %s %s' % ( arg_0 . table , arg_0 . tabledef ) ) else : arg_2 . execute ( arg_0 . tabledef ) arg_1 . commit ( )","id_":253552,"task_name":"https:\/\/github.com\/CxAalto\/gtfspy\/blob\/bddba4b74faae6c1b91202f19184811e326547e5\/gtfspy\/import_loaders\/table_loader.py#L239-L259","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Plot the paulivec representation of a quantum state .","positive":"def Func ( arg_0 , arg_1 = \"\" , arg_2 = None , arg_3 = None ) : if not HAS_MATPLOTLIB : raise ImportError ( 'Must have Matplotlib installed.' ) arg_0 = _validate_input_state ( arg_0 ) if arg_2 is None : arg_2 = ( 7 , 5 ) arg_4 = int ( np . log2 ( len ( arg_0 ) ) ) arg_5 = list ( map ( lambda x : x . to_label ( ) , pauli_group ( arg_4 ) ) ) arg_6 = list ( map ( lambda x : np . real ( np . trace ( np . dot ( x . to_matrix ( ) , arg_0 ) ) ) , pauli_group ( arg_4 ) ) ) arg_7 = len ( arg_6 ) if arg_3 is None : arg_3 = \"#648fff\" arg_8 = np . arange ( arg_7 ) arg_9 = 0.5 arg_10 , arg_11 = plt . subplots ( arg_2 = arg_2 ) arg_11 . grid ( zorder = 0 , linewidth = 1 , linestyle = '--' ) arg_11 . bar ( arg_8 , arg_6 , arg_9 , arg_3 = arg_3 , zorder = 2 ) arg_11 . axhline ( linewidth = 1 , arg_3 = 'k' ) arg_11 . set_ylabel ( 'Expectation value' , fontsize = 14 ) arg_11 . set_xticks ( arg_8 ) arg_11 . set_yticks ( [ - 1 , - 0.5 , 0 , 0.5 , 1 ] ) arg_11 . set_xticklabels ( arg_5 , fontsize = 14 , rotation = 70 ) arg_11 . set_xlabel ( 'Pauli' , fontsize = 14 ) arg_11 . set_ylim ( [ - 1 , 1 ] ) arg_11 . set_facecolor ( '#eeeeee' ) for arg_12 in arg_11 . xaxis . get_major_ticks ( ) + arg_11 . yaxis . get_major_ticks ( ) : arg_12 . label . set_fontsize ( 14 ) arg_11 . set_title ( arg_1 , fontsize = 16 ) plt . 
close ( arg_10 ) return arg_10","id_":253553,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/visualization\/state_visualization.py#L344-L390","negative":"Leave a one-to-one conversation.\n\n One-to-one conversations are \"sticky\"; they can't actually be deleted.\n This API clears the event history of the specified conversation up to\n ``delete_upper_bound_timestamp``, hiding it if no events remain."} {"query":"Asserts that val is string or iterable and starts with prefix .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 is None : raise TypeError ( 'given prefix arg must not be none' ) if isinstance ( arg_0 . val , str_types ) : if not isinstance ( arg_1 , str_types ) : raise TypeError ( 'given prefix arg must be a string' ) if len ( arg_1 ) == 0 : raise ValueError ( 'given prefix arg must not be empty' ) if not arg_0 . val . startswith ( arg_1 ) : arg_0 . _err ( 'Expected <%s> to start with <%s>, but did not.' % ( arg_0 . val , arg_1 ) ) elif isinstance ( arg_0 . val , Iterable ) : if len ( arg_0 . val ) == 0 : raise ValueError ( 'val must not be empty' ) arg_2 = next ( iter ( arg_0 . val ) ) if arg_2 != arg_1 : arg_0 . _err ( 'Expected %s to start with <%s>, but did not.' % ( arg_0 . val , arg_1 ) ) else : raise TypeError ( 'val is not a string or iterable' ) return arg_0","id_":253554,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L609-L628","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Asynchronously request a URL and get the encoded text content of the body .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = logging . getLogger ( __name__ ) async with arg_1 . get ( arg_0 ) as response : arg_2 . info ( 'Downloading %r' , arg_0 ) return await response . text ( )","id_":253555,"task_name":"https:\/\/github.com\/lsst-sqre\/lsst-projectmeta-kit\/blob\/ac8d4ff65bb93d8fdeb1b46ae6eb5d7414f1ae14\/lsstprojectmeta\/tex\/lsstbib.py#L22-L42","negative":"Perform dimensionality reduction on X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape (n_samples, n_features)\n New data.\n\n Returns\n -------\n X_new : array, shape (n_samples, n_components)\n Reduced version of X. This will always be a dense array."} {"query":"Round each value of a column","positive":"def Func ( arg_0 , * , arg_1 : arg_2 , arg_3 : arg_4 , arg_5 : arg_2 = None ) : arg_5 = arg_5 or arg_1 arg_0 [ arg_5 ] = arg_0 [ arg_1 ] . 
round ( arg_3 ) return arg_0","id_":253556,"task_name":"https:\/\/github.com\/ToucanToco\/toucan-data-sdk\/blob\/c3ca874e1b64f4bdcc2edda750a72d45d1561d8a\/toucan_data_sdk\/utils\/postprocess\/math.py#L195-L238","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"If filename was used output a filename along with multifile numbered filenames will be used .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . buff : return arg_0 . buff elif arg_0 . multifile : return arg_0 . file_root + \"_%03d\" % arg_1 + arg_0 . file_ext else : return arg_0 . filename","id_":253557,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/core\/cairo_sink.py#L64-L78","negative":"Is real time subscription running."} {"query":"Adds the specified value as a new version of the specified secret resource .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = None , arg_6 = False , ** arg_7 ) : arg_8 = models . SecretValueResourceDescription ( arg_3 = arg_3 , arg_4 = arg_4 ) arg_9 = arg_0 . Func . metadata [ 'url' ] arg_10 = { 'secretResourceName' : arg_0 . _serialize . url ( \"secret_resource_name\" , arg_1 , 'str' , skip_quote = True ) , 'secretValueResourceName' : arg_0 . _serialize . url ( \"secret_value_resource_name\" , arg_2 , 'str' , skip_quote = True ) } arg_9 = arg_0 . _client . format_url ( arg_9 , ** arg_10 ) arg_11 = { } arg_11 [ 'api-version' ] = arg_0 . _serialize . query ( \"self.api_version\" , arg_0 . api_version , 'str' ) arg_12 = { } arg_12 [ 'Accept' ] = 'application\/json' arg_12 [ 'Content-Type' ] = 'application\/json; charset=utf-8' if arg_5 : arg_12 . update ( arg_5 ) arg_13 = arg_0 . _serialize . body ( arg_8 , 'SecretValueResourceDescription' ) arg_14 = arg_0 . _client . put ( arg_9 , arg_11 , arg_12 , arg_13 ) arg_15 = arg_0 . _client . send ( arg_14 , stream = False , ** arg_7 ) if arg_15 . status_code not in [ 200 , 201 , 202 ] : raise models . FabricErrorException ( arg_0 . _deserialize , arg_15 ) arg_16 = None if arg_15 . status_code == 200 : arg_16 = arg_0 . _deserialize ( 'SecretValueResourceDescription' , arg_15 ) if arg_15 . status_code == 201 : arg_16 = arg_0 . _deserialize ( 'SecretValueResourceDescription' , arg_15 ) if arg_6 : arg_17 = ClientRawResponse ( arg_16 , arg_15 ) return arg_17 return arg_16","id_":253558,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicefabric\/azure\/servicefabric\/operations\/mesh_secret_value_operations.py#L38-L110","negative":"Gets information relating to the opening coin toss.\n\n Keys are:\n * wonToss - contains the ID of the team that won the toss\n * deferred - bool whether the team that won the toss deferred it\n\n :returns: Dictionary of coin toss-related info."} {"query":"Gets the unique backends that are available .","positive":"def Func ( ) : arg_0 = IBMQ . 
backends ( ) arg_1 = [ ] arg_2 = [ ] for arg_3 in arg_0 : if arg_3 . name ( ) not in arg_2 and not arg_3 . configuration ( ) . simulator : arg_1 . append ( arg_3 ) arg_2 . append ( arg_3 . name ( ) ) if not arg_1 : raise QiskitError ( 'No backends available.' ) return arg_1","id_":253559,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/tools\/monitor\/backend_overview.py#L21-L39","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."} {"query":"Robust solve Ax = y .","positive":"def Func ( arg_0 , arg_1 ) : from numpy_sugar . linalg import Func as _Func try : arg_2 = _Func ( arg_0 , arg_1 ) except LinAlgError : arg_3 = \"Could not converge to solve Ax=y.\" arg_3 += \" Setting x to zero.\" warnings . warn ( arg_3 , RuntimeWarning ) arg_2 = zeros ( arg_0 . shape [ 0 ] ) return arg_2","id_":253560,"task_name":"https:\/\/github.com\/limix\/glimix-core\/blob\/cddd0994591d100499cc41c1f480ddd575e7a980\/glimix_core\/_util\/solve.py#L37-L51","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Squeezes a bracketing interval containing the minimum .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None ) : arg_6 = ( arg_1 . x < arg_3 . x ) & ( arg_3 . x < arg_2 . x ) if arg_5 is not None : arg_6 = arg_6 & arg_5 arg_7 = ( arg_3 . df < 0 ) & ( arg_3 . f <= arg_4 ) arg_8 = arg_6 & ( arg_3 . df < 0 ) & ( arg_3 . f > arg_4 ) arg_9 = val_where ( arg_6 & arg_7 , arg_3 , arg_1 ) arg_10 = val_where ( arg_6 & ~ arg_7 , arg_3 , arg_2 ) arg_11 = _IntermediateResult ( iteration = tf . convert_to_tensor ( value = 0 ) , stopped = ~ arg_8 , failed = tf . zeros_like ( arg_6 ) , num_evals = tf . convert_to_tensor ( value = 0 ) , arg_9 = arg_9 , arg_10 = arg_10 ) return _bisect ( arg_0 , arg_11 , arg_4 )","id_":253561,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/linesearch\/internal\/hager_zhang_lib.py#L301-L423","negative":"Leave a one-to-one conversation.\n\n One-to-one conversations are \"sticky\"; they can't actually be deleted.\n This API clears the event history of the specified conversation up to\n ``delete_upper_bound_timestamp``, hiding it if no events remain."} {"query":"converts pb kvs to dict","positive":"def Func ( arg_0 , arg_1 = True ) : arg_2 = { } for arg_3 in arg_0 : if arg_3 . value : arg_2 [ arg_3 . key ] = arg_3 . value elif arg_3 . serialized_value : if topology_pb2 . JAVA_SERIALIZED_VALUE == arg_3 . type : arg_5 = _convert_java_value ( arg_3 , arg_1 = arg_1 ) if arg_5 is not None : arg_2 [ arg_3 . key ] = arg_5 else : arg_2 [ arg_3 . key ] = _raw_value ( arg_3 ) return arg_2","id_":253562,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/tracker.py#L39-L57","negative":"Start spark and hdfs worker containers\n\n :param job: The underlying job."} {"query":"r Return fall - out .","positive":"def Func ( arg_0 ) : if arg_0 . _fp + arg_0 . _tn == 0 : return float ( 'NaN' ) return arg_0 . _fp \/ ( arg_0 . _fp + arg_0 . 
_tn )","id_":253563,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/stats\/_confusion_table.py#L570-L593","negative":"Read the data encoding the Cancel response payload and decode it into\n its constituent parts.\n\n Args:\n input_stream (stream): A data stream containing encoded object\n data, supporting a read method; usually a BytearrayStream\n object.\n kmip_version (KMIPVersion): An enumeration defining the KMIP\n version with which the object will be decoded. Optional,\n defaults to KMIP 1.0.\n\n Raises:\n ValueError: Raised if the data attribute is missing from the\n encoded payload."} {"query":"Splits the docstring of the given value into it s summary and body .","positive":"def Func ( arg_0 ) : arg_1 = textwrap . dedent ( getattr ( arg_0 , '__doc__' , '' ) ) if not arg_1 : return None arg_2 = arg_1 . strip ( ) . split ( '\\n\\n' , 1 ) try : arg_3 = arg_2 [ 1 ] except IndexError : arg_3 = None return Docstring ( arg_2 [ 0 ] , arg_3 )","id_":253564,"task_name":"https:\/\/github.com\/disqus\/django-mailviews\/blob\/9993d5e911d545b3bc038433986c5f6812e7e965\/mailviews\/utils.py#L10-L26","negative":"Disconnect any connected devices that have any of the specified\n service UUIDs."} {"query":"return a list of GithubComponentVersion objects for all tags","positive":"def Func ( arg_0 ) : return [ GithubComponentVersion ( '' , arg_1 [ 0 ] , arg_1 [ 1 ] , arg_0 . name , cache_key = _createCacheKey ( 'tag' , arg_1 [ 0 ] , arg_1 [ 1 ] , arg_0 . name ) ) for arg_1 in arg_0 . _getTags ( ) ]","id_":253565,"task_name":"https:\/\/github.com\/ARMmbed\/yotta\/blob\/56bc1e56c602fa20307b23fe27518e9cd6c11af1\/yotta\/lib\/github_access.py#L269-L276","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Builds the RETURNING part of the query .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . connection . ops . quote_name return ' RETURNING %s' % arg_1 ( arg_0 . query . model . _meta . pk . attname )","id_":253566,"task_name":"https:\/\/github.com\/SectorLabs\/django-postgres-extra\/blob\/eef2ed5504d225858d4e4f5d77a838082ca6053e\/psqlextra\/compiler.py#L46-L50","negative":"Handle marking messages as read and keeping client active."} {"query":"Get the owner name of a file or directory .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = arg_2 and run_as_root or arg_0 . run with arg_0 . settings ( hide ( 'running' , 'stdout' ) , warn_only = True ) : arg_4 = arg_3 ( 'stat -c %%U \"%(path)s\"' % locals ( ) ) if arg_4 . 
failed and 'stat: illegal option' in arg_4 : return arg_3 ( 'stat -f %%Su \"%(path)s\"' % locals ( ) ) return arg_4","id_":253567,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/files.py#L140-L152","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Wrapper function that first configures logging and starts a single run afterwards .","positive":"def Func ( arg_0 ) : _configure_niceness ( arg_0 ) _configure_logging ( arg_0 ) arg_1 = arg_0 [ 'result_queue' ] arg_2 = _sigint_handling_single_run ( arg_0 ) arg_1 . put ( arg_2 ) arg_1 . close ( )","id_":253568,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/environment.py#L127-L134","negative":"Validates API Root information. Raises errors for required\n properties."} {"query":"Run migrations in online mode .","positive":"def Func ( ) : arg_0 = settings . engine with arg_0 . connect ( ) as connection : context . configure ( connection = connection , transaction_per_migration = True , target_metadata = target_metadata , compare_type = COMPARE_TYPE , ) with context . begin_transaction ( ) : context . run_migrations ( )","id_":253569,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/migrations\/env.py#L68-L86","negative":"Convert this unnormalized batch to an instance of Batch.\n\n As this method is intended to be called before augmentation, it\n assumes that none of the ``*_aug`` attributes is yet set.\n It will produce an AssertionError otherwise.\n\n The newly created Batch's ``*_unaug`` attributes will match the ones\n in this batch, just in normalized form.\n\n Returns\n -------\n imgaug.augmentables.batches.Batch\n The batch, with ``*_unaug`` attributes being normalized."} {"query":"Return option as an expanded path .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return os . path . expanduser ( os . path . expandvars ( arg_0 . get ( arg_1 , arg_2 ) ) )","id_":253570,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/config.py#L547-L549","negative":"Returns an invoice object for a given cart at its current revision.\n If such an invoice does not exist, the cart is validated, and if valid,\n an invoice is generated."} {"query":"This bins the phased mag series using the given binsize .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0.002 , arg_3 = 9 ) : arg_4 = np . arange ( 0.0 , 1.0 , arg_2 ) arg_5 = npdigitize ( arg_0 , arg_4 ) arg_6 , arg_7 = [ ] , [ ] for arg_8 in npunique ( arg_5 ) : arg_9 = arg_5 == arg_8 arg_10 = arg_0 [ arg_9 ] arg_11 = arg_1 [ arg_9 ] if arg_9 . size > arg_3 : arg_6 . append ( npmedian ( arg_10 ) ) arg_7 . append ( npmedian ( arg_11 ) ) return np . array ( arg_6 ) , np . array ( arg_7 )","id_":253571,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/periodbase\/_oldpf.py#L226-L246","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"When we get an event from js lookup the node and invoke the action on the enaml node .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = json . loads ( arg_1 ) log . debug ( f'Update from js: {change}' ) arg_3 = arg_2 . 
get ( 'ref' ) if not arg_3 : return arg_4 = arg_0 . viewer . xpath ( '\/\/*[@ref=$ref]' , arg_3 = arg_3 ) if not arg_4 : return arg_5 = arg_4 [ 0 ] if arg_2 . get ( 'type' ) and arg_2 . get ( 'name' ) : if arg_2 [ 'type' ] == 'event' : arg_6 = getattr ( arg_5 , arg_2 [ 'name' ] ) arg_6 ( ) elif arg_2 [ 'type' ] == 'update' : setattr ( arg_5 , arg_2 [ 'name' ] , arg_2 [ 'value' ] ) else : log . warning ( f\"Unhandled event {self} {node}: {change}\" )","id_":253572,"task_name":"https:\/\/github.com\/codelv\/enaml-web\/blob\/88f1131a7b3ba9e83467b4f44bc3bab6f0de7559\/examples\/dataframe_viewer\/app.py#L69-L95","negative":"Get the context for this view."} {"query":"Stores a particular item to disk .","positive":"def Func ( arg_0 , arg_1 , arg_2 , * arg_3 , ** arg_4 ) : arg_5 = True try : arg_5 = arg_0 . _srvc_opening_routine ( 'a' , arg_1 , arg_4 ) if arg_1 == pypetconstants . MERGE : arg_0 . _trj_merge_trajectories ( * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . BACKUP : arg_0 . _trj_backup_trajectory ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . PREPARE_MERGE : arg_0 . _trj_prepare_merge ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . TRAJECTORY : arg_0 . _trj_Func_trajectory ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . SINGLE_RUN : arg_0 . _srn_Func_single_run ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 in pypetconstants . LEAF : arg_0 . _prm_Func_parameter_or_result ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . DELETE : arg_0 . _all_delete_parameter_or_result_or_group ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . GROUP : arg_0 . _grp_Func_group ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . TREE : arg_0 . _tree_Func_sub_branch ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . DELETE_LINK : arg_0 . _lnk_delete_link ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . LIST : arg_0 . _srvc_Func_several_items ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . ACCESS_DATA : return arg_0 . _hdf5_interact_with_data ( arg_2 , * arg_3 , ** arg_4 ) elif arg_1 == pypetconstants . OPEN_FILE : arg_5 = False arg_0 . _keep_open = True arg_0 . _node_processing_timer . active = False elif arg_1 == pypetconstants . CLOSE_FILE : arg_5 = True arg_0 . _keep_open = False elif arg_1 == pypetconstants . FLUSH : arg_0 . _hdf5file . flush ( ) else : raise pex . NoSuchServiceError ( 'I do not know how to handle `%s`' % arg_1 ) except : arg_0 . _logger . error ( 'Failed storing `%s`' % str ( arg_2 ) ) raise finally : arg_0 . _srvc_closing_routine ( arg_5 )","id_":253573,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/storageservice.py#L965-L1337","negative":"If you want to use functionalities related to Controls and Patterns in a new thread.\n You must call InitializeUIAutomationInCurrentThread first in the thread\n and call UninitializeUIAutomationInCurrentThread when the thread exits.\n But you can't use use a Control or a Pattern created in a different thread.\n So you can't create a Control or a Pattern in main thread and then pass it to a new thread and use it."} {"query":"Loads a description file and returns it as a module .","positive":"def Func ( arg_0 ) : global g_descriptionImportCount if not os . path . isfile ( arg_0 ) : raise RuntimeError ( ( \"Experiment description file %s does not exist or \" + \"is not a file\" ) % ( arg_0 , ) ) arg_1 = imp . 
load_source ( \"pf_description%d\" % g_descriptionImportCount , arg_0 ) g_descriptionImportCount += 1 if not hasattr ( arg_1 , \"descriptionInterface\" ) : raise RuntimeError ( \"Experiment description file %s does not define %s\" % ( arg_0 , \"descriptionInterface\" ) ) if not isinstance ( arg_1 . descriptionInterface , exp_description_api . DescriptionIface ) : raise RuntimeError ( ( \"Experiment description file %s defines %s but it \" + \"is not DescriptionIface-based\" ) % ( arg_0 , name ) ) return arg_1","id_":253574,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/frameworks\/opf\/helpers.py#L80-L104","negative":"Saves any changes to this record set."} {"query":"Detects if lock client was forked .","positive":"def Func ( arg_0 ) : if arg_0 . _pid is None : arg_0 . _pid = os . getpid ( ) if arg_0 . _context is not None : arg_2 = os . getpid ( ) if arg_2 != arg_0 . _pid : arg_0 . _logger . debug ( 'Fork detected: My pid `%s` != os pid `%s`. ' 'Restarting connection.' % ( str ( arg_0 . _pid ) , str ( arg_2 ) ) ) arg_0 . _context = None arg_0 . _pid = arg_2","id_":253575,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/utils\/mpwrappers.py#L589-L604","negative":"For a 2D array and mask, map the values of all unmasked pixels to a 1D array.\n\n The pixel coordinate origin is at the top left corner of the 2D array and goes right-wards and downwards, such\n that for an array of shape (3,3) where all pixels are unmasked:\n\n - pixel [0,0] of the 2D array will correspond to index 0 of the 1D array.\n - pixel [0,1] of the 2D array will correspond to index 1 of the 1D array.\n - pixel [1,0] of the 2D array will correspond to index 4 of the 1D array.\n\n Parameters\n ----------\n mask : ndarray\n A 2D array of bools, where *False* values mean unmasked and are included in the mapping.\n array_2d : ndarray\n The 2D array of values which are mapped to a 1D array.\n\n Returns\n --------\n ndarray\n A 1D array of values mapped from the 2D array with dimensions (total_unmasked_pixels).\n\n Examples\n --------\n mask = np.array([[True, False, True],\n [False, False, False]\n [True, False, True]])\n\n array_2d = np.array([[1.0, 2.0, 3.0],\n [4.0, 5.0, 6.0],\n [7.0, 8.0, 9.0]])\n\n array_1d = map_2d_array_to_masked_1d_array_from_array_2d_and_mask(mask=mask, array_2d=array_2d)"} {"query":"Applies one step of Euler - Maruyama method .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None ) : with tf . compat . v1 . name_scope ( arg_5 , 'malaFunc' , [ arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ] ) : arg_6 = [ ] for arg_7 , arg_8 , arg_9 , arg_10 , arg_11 in zip ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_12 = arg_8 + arg_9 + arg_11 * tf . sqrt ( arg_10 ) * arg_7 arg_6 . append ( arg_12 ) return arg_6","id_":253576,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/mcmc\/langevin.py#L630-L688","negative":"Sets the new size and buffer size internally"} {"query":"Attaches a CDROM to a server .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = '{ \"id\": \"' + arg_3 + '\" }' arg_5 = arg_0 . 
_perform_request ( url = '\/datacenters\/%s\/servers\/%s\/cdroms' % ( arg_1 , arg_2 ) , method = 'POST' , arg_4 = arg_4 ) return arg_5","id_":253577,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1493-L1516","negative":"in reversed order"} {"query":"Compute the minimal distance between each point on self and other .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : import shapely . geometry from . kps import Keypoint if isinstance ( arg_1 , Keypoint ) : arg_1 = shapely . geometry . Point ( ( arg_1 . x , arg_1 . y ) ) elif isinstance ( arg_1 , LineString ) : if len ( arg_1 . coords ) == 0 : return arg_2 elif len ( arg_1 . coords ) == 1 : arg_1 = shapely . geometry . Point ( arg_1 . coords [ 0 , : ] ) else : arg_1 = shapely . geometry . LineString ( arg_1 . coords ) elif isinstance ( arg_1 , tuple ) : assert len ( arg_1 ) == 2 arg_1 = shapely . geometry . Point ( arg_1 ) else : raise ValueError ( ( \"Expected Keypoint or LineString or tuple (x,y), \" + \"got type %s.\" ) % ( type ( arg_1 ) , ) ) return [ shapely . geometry . Point ( arg_3 ) . distance ( arg_1 ) for arg_3 in arg_0 . coords ]","id_":253578,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/lines.py#L160-L201","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Build a clinVar submission form for a variant .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = controllers . Func_export ( store , arg_0 , arg_1 , arg_2 ) if request . method == 'GET' : return arg_3 else : arg_4 = request . form . to_dict ( ) arg_5 = set_submission_objects ( arg_4 ) arg_6 = store . get_open_Func_submission ( current_user . email , arg_0 ) arg_7 = store . add_to_submission ( arg_6 [ '_id' ] , arg_5 ) return redirect ( url_for ( 'cases.Func_submissions' , arg_0 = arg_0 ) )","id_":253579,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/variants\/views.py#L387-L402","negative":"Return a dictionary of circuit properties."} {"query":"Add all filenames in the given list to the parser s set .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_1 : arg_3 = open ( arg_2 , 'r' ) try : arg_0 . add_bpmn_xml ( ET . parse ( arg_3 ) , arg_2 = arg_2 ) finally : arg_3 . 
close ( )","id_":253580,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/bpmn\/parser\/BpmnParser.py#L119-L128","negative":"Augment a sample shape to broadcast batch dimensions.\n\n Computes an augmented sample shape, so that any batch dimensions not\n part of the distribution `partial_batch_dist` are treated as identical\n distributions.\n\n # partial_batch_dist.batch_shape = [ 7]\n # full_sample_and_batch_shape = [3, 4, 7]\n # => return an augmented sample shape of [3, 4] so that\n # partial_batch_dist.sample(augmented_sample_shape) has combined\n # sample and batch shape of [3, 4, 7].\n\n Args:\n partial_batch_dist: `tfd.Distribution` instance with batch shape a\n prefix of `full_sample_and_batch_shape`.\n full_sample_and_batch_shape: a Tensor or Tensor-like shape.\n validate_args: if True, check for shape errors at runtime.\n Returns:\n augmented_sample_shape: sample shape such that\n `partial_batch_dist.sample(augmented_sample_shape)` has combined\n sample and batch shape of `full_sample_and_batch_shape`.\n\n Raises:\n ValueError: if `partial_batch_dist.batch_shape` has more dimensions than\n `full_sample_and_batch_shape`.\n NotImplementedError: if broadcasting would be required to make\n `partial_batch_dist.batch_shape` into a prefix of\n `full_sample_and_batch_shape` ."} {"query":"Call a method .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . URL % { 'base' : arg_0 . base_url , 'method' : arg_1 } arg_2 [ '__conduit__' ] = { 'token' : arg_0 . api_token } arg_4 = { 'params' : json . dumps ( arg_2 , sort_keys = True ) , 'output' : 'json' , '__conduit__' : True } logger . debug ( \"Phabricator Conduit client requests: %s params: %s\" , arg_1 , str ( arg_4 ) ) arg_5 = arg_0 . fetch ( arg_3 , payload = arg_4 , arg_1 = HttpClient . POST , verify = False ) arg_6 = arg_5 . json ( ) if arg_6 [ 'error_code' ] : raise ConduitError ( error = arg_6 [ 'error_info' ] , code = arg_6 [ 'error_code' ] ) return arg_5 . text","id_":253581,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/phabricator.py#L575-L607","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Get IDs or data for studies that meet specific criteria .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = 0.001 , arg_6 = 0.0 , arg_7 = arg_8 . sum , arg_10 = 'ids' , arg_11 = 6 ) : arg_12 = [ ] if arg_1 is not None : if arg_10 == 'weights' : if arg_2 is not None or arg_3 is not None or arg_4 is not None : raise ValueError ( \"return_type cannot be 'weights' when feature-based \" \"search is used in conjunction with other search \" \"modes.\" ) return arg_0 . feature_table . get_ids ( arg_1 , arg_5 , arg_7 , get_weights = True ) else : arg_12 . append ( arg_0 . feature_table . get_ids ( arg_1 , arg_5 , arg_7 ) ) if arg_2 is not None : arg_13 = arg_0 . feature_table . get_ids_by_expression ( arg_2 , arg_5 , arg_7 ) arg_12 . append ( list ( arg_13 ) ) if arg_3 is not None : arg_3 = arg_0 . 
masker . mask ( arg_3 , in_global_mask = True ) . astype ( bool ) arg_14 = arg_8 . sum ( arg_3 ) arg_15 = arg_0 . image_table . data . T . dot ( arg_3 ) . astype ( float ) if isinstance ( arg_6 , float ) : arg_15 \/= arg_14 arg_16 = arg_8 . where ( arg_15 > arg_6 ) [ 0 ] arg_12 . append ( [ arg_0 . image_table . ids [ arg_17 ] for arg_17 in arg_16 ] ) if arg_4 is not None : arg_11 = float ( arg_11 ) arg_18 = set ( ) for arg_19 in arg_4 : arg_20 = arg_8 . array ( arg_19 , dtype = float ) arg_21 = arg_0 . activations [ 'x' ] arg_22 = arg_0 . activations [ 'y' ] arg_23 = arg_0 . activations [ 'z' ] arg_24 = arg_8 . sqrt ( arg_8 . square ( arg_21 - arg_20 [ 0 ] ) + arg_8 . square ( arg_22 - arg_20 [ 1 ] ) + arg_8 . square ( arg_23 - arg_20 [ 2 ] ) ) arg_25 = arg_8 . where ( ( arg_24 > 5.5 ) & ( arg_24 < 6.5 ) ) [ 0 ] arg_26 = arg_24 [ arg_25 ] arg_18 |= set ( arg_0 . activations [ arg_24 <= arg_11 ] [ 'id' ] . unique ( ) ) arg_12 . append ( arg_18 ) arg_27 = list ( reduce ( lambda arg_21 , arg_22 : set ( arg_21 ) & set ( arg_22 ) , arg_12 ) ) if arg_10 == 'ids' : return arg_27 elif arg_10 == 'data' : return arg_0 . get_image_data ( arg_27 )","id_":253582,"task_name":"https:\/\/github.com\/neurosynth\/neurosynth\/blob\/948ce7edce15d7df693446e76834e0c23bfe8f11\/neurosynth\/base\/dataset.py#L241-L374","negative":"Remove workspace from config file."} {"query":"Log msg % args at level level once per n times .","positive":"def Func ( arg_0 , arg_1 , arg_2 , * arg_3 ) : arg_4 = _GetNextLogCountPerToken ( _GetFileAndLine ( ) ) log_if ( arg_0 , arg_1 , not ( arg_4 % arg_2 ) , * arg_3 )","id_":253583,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/logging\/tl_logging.py#L163-L176","negative":"This function returns a dictionary representation of a docker-compose.yml file, based on assembled_specs from\n the spec_assembler, and port_specs from the port_spec compiler"} {"query":"If the mode changes . Refresh the items .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . block if arg_2 and arg_0 . is_initialized and arg_1 [ 'type' ] == 'update' : if arg_1 [ 'oldvalue' ] == 'replace' : raise NotImplementedError for arg_3 in arg_0 . children : arg_2 . children . remove ( arg_3 ) arg_3 . set_parent ( None ) arg_0 . refresh_items ( )","id_":253584,"task_name":"https:\/\/github.com\/codelv\/enaml-web\/blob\/88f1131a7b3ba9e83467b4f44bc3bab6f0de7559\/web\/core\/block.py#L67-L78","negative":"Show Program Landing page for the Enterprise's Program.\n\n Render the Enterprise's Program Enrollment page for a specific program.\n The Enterprise and Program are both selected by their respective UUIDs.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer UUID query parameter ``enterprise_uuid`` found in request.\n * No enterprise customer found against the enterprise customer\n uuid ``enterprise_uuid`` in the request kwargs.\n * No Program can be found given ``program_uuid`` either at all or associated with\n the Enterprise.."} {"query":"Initializes all components required to run a dag for a specified date range and calls helper method to execute the tasks .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = BackfillJob . _DagRunTaskStatus ( ) arg_3 = arg_0 . bf_start_date arg_4 = arg_0 . dag . get_run_dates ( arg_3 = arg_3 , end_date = arg_0 . bf_end_date ) if arg_0 . run_backwards : arg_5 = [ t . task_id for t in arg_0 . 
dag . task_dict . values ( ) if t . depends_on_past ] if arg_5 : raise AirflowException ( 'You cannot backfill backwards because one or more tasks depend_on_past: {}' . format ( \",\" . join ( arg_5 ) ) ) arg_4 = arg_4 [ : : - 1 ] if len ( arg_4 ) == 0 : arg_0 . log . info ( \"No run dates were found for the given dates and dag interval.\" ) return arg_6 = None if not arg_0 . donot_pickle and arg_0 . executor . __class__ not in ( executors . LocalExecutor , executors . SequentialExecutor ) : arg_7 = DagPickle ( arg_0 . dag ) arg_1 . add ( arg_7 ) arg_1 . commit ( ) arg_6 = arg_7 . id arg_8 = arg_0 . executor arg_8 . start ( ) arg_2 . total_runs = len ( arg_4 ) try : arg_10 = arg_2 . total_runs while arg_10 > 0 : arg_11 = [ run_date for run_date in arg_4 if run_date not in arg_2 . executed_dag_run_dates ] arg_0 . Func_for_run_dates ( arg_4 = arg_11 , arg_2 = arg_2 , arg_8 = arg_8 , arg_6 = arg_6 , arg_3 = arg_3 , arg_1 = arg_1 ) arg_10 = ( arg_2 . total_runs - len ( arg_2 . executed_dag_run_dates ) ) arg_12 = arg_0 . _collect_errors ( arg_2 = arg_2 , arg_1 = arg_1 ) if arg_12 : raise AirflowException ( arg_12 ) if arg_10 > 0 : arg_0 . log . info ( \"max_active_runs limit for dag %s has been reached \" \" - waiting for other dag runs to finish\" , arg_0 . dag_id ) time . sleep ( arg_0 . delay_on_limit_secs ) except ( KeyboardInterrupt , SystemExit ) : arg_0 . log . warning ( \"Backfill terminated by user.\" ) arg_0 . _set_unfinished_dag_runs_to_failed ( arg_2 . active_runs ) finally : arg_1 . commit ( ) arg_8 . end ( ) arg_0 . log . info ( \"Backfill done. Exiting.\" )","id_":253585,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/jobs.py#L2461-L2536","negative":"Replace all the tracks in a playlist, overwriting its existing tracks. \n This powerful request can be useful for replacing tracks, re-ordering existing tracks, or clearing the playlist.\n\n Parameters\n ----------\n playlist : Union[str, PLaylist]\n The playlist to modify\n tracks : Sequence[Union[str, Track]]\n Tracks to place in the playlist"} {"query":"loads all of the artists albums depending on how many the artist has this may be a long operation .","positive":"async def Func ( arg_0 , * , arg_1 = 'US' ) -> List [ Album ] : from . album import Album arg_2 = [ ] arg_3 = 0 arg_4 = await arg_0 . total_albums ( arg_1 = arg_1 ) while len ( arg_2 ) < arg_4 : arg_5 = await arg_0 . __client . http . artist_albums ( arg_0 . id , limit = 50 , arg_3 = arg_3 , arg_1 = arg_1 ) arg_3 += 50 arg_2 += list ( Album ( arg_0 . __client , arg_6 ) for arg_6 in arg_5 [ 'items' ] ) return arg_2","id_":253586,"task_name":"https:\/\/github.com\/mental32\/spotify.py\/blob\/bb296cac7c3dd289908906b7069bd80f43950515\/spotify\/models\/artist.py#L76-L101","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"} {"query":"Read one message unit . It s possible however that more than one message will be set in a receive so we will have to buffer that for the next read . EOFError will be raised on EOF .","positive":"def Func ( arg_0 ) : if arg_0 . state == 'connected' : if 0 == len ( arg_0 . buf ) : arg_0 . buf = arg_0 . inout . recv ( Mtcpfns . TCP_MAX_PACKET ) if 0 == ( arg_0 . buf ) : arg_0 . state = 'disconnected' raise EOFError pass arg_0 . buf , arg_3 = Mtcpfns . unpack_msg ( arg_0 . buf ) return arg_3 . decode ( 'utf-8' ) else : raise IOError ( \"Func called in state: %s.\" % arg_0 . 
state )","id_":253587,"task_name":"https:\/\/github.com\/rocky\/python3-trepan\/blob\/14e91bc0acce090d67be145b1ac040cab92ac5f3\/trepan\/inout\/tcpclient.py#L84-L100","negative":"Overwrite the file with new data. You probably shouldn't do\n this yourself, it's easy to screw up your whole file with this."} {"query":"Get the ASN and the IP Block announcing the IP at a specific date .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 , arg_2 , arg_4 = arg_0 . run ( arg_1 , arg_2 ) arg_5 = next ( ( i for i , j in enumerate ( arg_3 ) if j is not None ) , None ) if arg_5 is not None : arg_6 = arg_4 [ arg_5 ] if arg_6 != '0.0.0.0\/0' : return arg_2 , arg_3 [ arg_5 ] , arg_6 return None","id_":253588,"task_name":"https:\/\/github.com\/CIRCL\/IP-ASN-history\/blob\/2e02ced01a08531a007d9cd71547c8248570de1b\/client\/ipasn_redis\/api.py#L91-L117","negative":"Execute show subcommand."} {"query":"Convert a message in CaseInsensitiveDict to dict .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . pop ( arg_0 . MESSAGE_ID_FIELD ) arg_3 = arg_1 . pop ( arg_0 . DATE_FIELD ) arg_4 = { k : v for k , v in arg_1 . items ( ) } arg_4 [ arg_0 . MESSAGE_ID_FIELD ] = arg_2 arg_4 [ arg_0 . DATE_FIELD ] = arg_3 return arg_4","id_":253589,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/mbox.py#L284-L297","negative":"Attempts to fetch streams repeatedly\n until some are returned or limit hit."} {"query":"Create Invenio - Deposit - REST blueprint .","positive":"def Func ( arg_0 ) : arg_1 = Blueprint ( 'invenio_deposit_rest' , __name__ , url_prefix = '' , ) create_error_handlers ( arg_1 ) for arg_2 , arg_3 in ( arg_0 or { } ) . items ( ) : arg_3 = deepcopy ( arg_3 ) if 'files_serializers' in arg_3 : arg_4 = arg_3 . get ( 'files_serializers' ) arg_4 = { mime : obj_or_import_string ( func ) for mime , func in arg_4 . items ( ) } del arg_3 [ 'files_serializers' ] else : arg_4 = { } if 'record_serializers' in arg_3 : arg_5 = arg_3 . get ( 'record_serializers' ) arg_5 = { mime : obj_or_import_string ( func ) for mime , func in arg_5 . items ( ) } else : arg_5 = { } arg_6 = arg_3 . pop ( 'file_list_route' , '{0}\/files' . format ( arg_3 [ 'item_route' ] ) ) arg_7 = arg_3 . pop ( 'file_item_route' , '{0}\/files\/' . format ( arg_3 [ 'item_route' ] ) ) arg_3 . setdefault ( 'search_class' , DepositSearch ) arg_8 = obj_or_import_string ( arg_3 [ 'search_class' ] ) arg_3 . setdefault ( 'record_class' , Deposit ) arg_9 = obj_or_import_string ( arg_3 [ 'record_class' ] ) arg_3 . setdefault ( 'indexer_class' , None ) for arg_10 in records_rest_url_rules ( arg_2 , ** arg_3 ) : arg_1 . add_url_rule ( ** arg_10 ) arg_11 = { } if arg_3 . get ( 'search_index' ) : arg_11 [ 'index' ] = arg_3 [ 'search_index' ] if arg_3 . get ( 'search_type' ) : arg_11 [ 'doc_type' ] = arg_3 [ 'search_type' ] arg_12 = dict ( read_permission_factory = obj_or_import_string ( arg_3 . get ( 'read_permission_factory_imp' ) ) , create_permission_factory = obj_or_import_string ( arg_3 . get ( 'create_permission_factory_imp' ) ) , update_permission_factory = obj_or_import_string ( arg_3 . get ( 'update_permission_factory_imp' ) ) , delete_permission_factory = obj_or_import_string ( arg_3 . get ( 'delete_permission_factory_imp' ) ) , arg_9 = arg_9 , arg_8 = partial ( arg_8 , ** arg_11 ) , default_media_type = arg_3 . get ( 'default_media_type' ) , ) arg_13 = DepositActionResource . as_view ( DepositActionResource . view_name . 
format ( arg_2 ) , arg_5 = arg_5 , pid_type = arg_3 [ 'pid_type' ] , arg_12 = arg_12 , ) arg_1 . add_url_rule ( '{0}\/actions\/' . format ( arg_3 [ 'item_route' ] , ',' . join ( extract_actions_from_class ( arg_9 ) ) , ) , view_func = arg_13 , methods = [ 'POST' ] , ) arg_14 = DepositFilesResource . as_view ( DepositFilesResource . view_name . format ( arg_2 ) , arg_5 = arg_4 , pid_type = arg_3 [ 'pid_type' ] , arg_12 = arg_12 , ) arg_1 . add_url_rule ( arg_6 , view_func = arg_14 , methods = [ 'GET' , 'POST' , 'PUT' ] , ) arg_15 = DepositFileResource . as_view ( DepositFileResource . view_name . format ( arg_2 ) , arg_5 = arg_4 , pid_type = arg_3 [ 'pid_type' ] , arg_12 = arg_12 , ) arg_1 . add_url_rule ( arg_7 , view_func = arg_15 , methods = [ 'GET' , 'PUT' , 'DELETE' ] , ) return arg_1","id_":253590,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-deposit\/blob\/f243ea1d01ab0a3bc92ade3262d1abdd2bc32447\/invenio_deposit\/views\/rest.py#L66-L187","negative":"Total time since the timer is started.\n\n Returns (float): Time in seconds."} {"query":"Create a StoreItem from a result out of CosmosDB .","positive":"def Func ( arg_0 , arg_1 ) -> StoreItem : arg_2 = arg_1 . get ( 'document' ) arg_2 [ 'e_tag' ] = arg_1 . get ( '_etag' ) return StoreItem ( ** arg_2 )","id_":253591,"task_name":"https:\/\/github.com\/Microsoft\/botbuilder-python\/blob\/274663dd91c811bae6ac4488915ba5880771b0a7\/libraries\/botbuilder-azure\/botbuilder\/azure\/cosmosdb_storage.py#L163-L174","negative":"check unreachable code"} {"query":"Flatten lists of indices and ensure bounded by a known dim .","positive":"def Func ( arg_0 , arg_1 ) : if not all ( [ arg_2 . dtype == int for arg_2 in arg_0 ] ) : raise ValueError ( \"indices must be integers\" ) if npany ( asarray ( arg_0 ) >= arg_1 ) : raise ValueError ( \"indices out of bounds for axis with size %s\" % arg_1 ) return arg_0 . flatten ( )","id_":253592,"task_name":"https:\/\/github.com\/bolt-project\/bolt\/blob\/9cd7104aa085498da3097b72696184b9d3651c51\/bolt\/utils.py#L85-L103","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Given a set of transaction hashes returns the corresponding bundles sorted by tail transaction timestamp .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ) : arg_1 = list ( arg_1 ) if not arg_1 : return [ ] arg_3 = [ ] arg_4 = set ( ) arg_5 = set ( ) arg_6 = GetTrytesCommand ( arg_0 ) ( hashes = arg_1 ) arg_7 = list ( map ( Transaction . from_tryte_string , arg_6 [ 'trytes' ] , ) ) for arg_8 in arg_7 : if arg_8 . is_tail : arg_4 . add ( arg_8 . hash ) else : arg_5 . add ( arg_8 . bundle_hash ) if arg_5 : for arg_8 in find_transaction_objects ( arg_0 = arg_0 , bundles = list ( arg_5 ) , ) : if arg_8 . is_tail : if arg_8 . hash not in arg_4 : arg_7 . append ( arg_8 ) arg_4 . add ( arg_8 . hash ) arg_9 = [ arg_8 for arg_8 in arg_7 if arg_8 . hash in arg_4 ] if arg_2 : arg_10 = GetLatestInclusionCommand ( arg_0 ) ( hashes = list ( arg_4 ) , ) for arg_8 in arg_9 : arg_8 . is_confirmed = arg_10 [ 'states' ] . get ( arg_8 . hash ) for arg_8 in arg_9 : arg_12 = GetBundlesCommand ( arg_0 ) ( transaction = arg_8 . hash ) arg_13 = arg_12 [ 'bundles' ] if arg_2 : for arg_14 in arg_13 : arg_14 . is_confirmed = arg_8 . is_confirmed arg_3 . extend ( arg_13 ) return list ( sorted ( arg_3 , key = lambda bundle_ : bundle_ . 
tail_transaction . timestamp , ) )","id_":253593,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/commands\/extended\/utils.py#L70-L145","negative":"Return True if we should retry, False otherwise."} {"query":"Fetch information about a repository .","positive":"def Func ( arg_0 , arg_1 , Func ) : arg_3 = urijoin ( arg_0 . base_url , arg_0 . RREPOSITORY , arg_1 , Func ) logger . debug ( \"DockerHub client requests: %s\" , arg_3 ) arg_4 = arg_0 . fetch ( arg_3 ) return arg_4 . text","id_":253594,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/dockerhub.py#L191-L200","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."} {"query":"Update metadata with the action history","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . package print ( \"Including history of actions\" ) with cd ( arg_0 . rootdir ) : arg_2 = \".dgit\/log.json\" if os . path . exists ( arg_2 ) : arg_3 = open ( arg_2 ) . readlines ( ) arg_4 = [ ] for arg_5 in arg_3 : try : arg_5 = json . loads ( arg_5 ) for arg_6 in [ 'code' ] : if arg_6 not in arg_5 or arg_5 [ arg_6 ] == None : arg_5 [ arg_6 ] = \"...\" arg_4 . append ( arg_5 ) except : pass arg_1 [ 'actions' ] = arg_4","id_":253595,"task_name":"https:\/\/github.com\/pingali\/dgit\/blob\/ecde01f40b98f0719dbcfb54452270ed2f86686d\/dgitcore\/datasets\/common.py#L517-L538","negative":"Send a Gauge metric with the specified value"} {"query":"Returns an authorized HTTP object to be used to build a Google cloud service hook connection .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _get_credentials ( ) arg_2 = httplib2 . Http ( ) arg_3 = google_auth_httplib2 . AuthorizedHttp ( arg_1 , arg_2 = arg_2 ) return arg_3","id_":253596,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_api_base_hook.py#L137-L146","negative":"Leave group."} {"query":"Returns a dictionary of arguments for DBUtils . SteadyDB . SteadyDBConnection constructor .","positive":"def Func ( ) : return dict ( creator = pymysql , host = Configuration . get ( 'nupic.cluster.database.host' ) , port = int ( Configuration . get ( 'nupic.cluster.database.port' ) ) , user = Configuration . get ( 'nupic.cluster.database.user' ) , passwd = Configuration . get ( 'nupic.cluster.database.passwd' ) , charset = 'utf8' , use_unicode = True , setsession = [ 'SET AUTOCOMMIT = 1' ] )","id_":253597,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/database\/connection.py#L643-L656","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."} {"query":"Parse a JIRA API raw response .","positive":"def Func ( arg_0 ) : arg_1 = json . loads ( arg_0 ) arg_2 = arg_1 [ 'issues' ] for arg_3 in arg_2 : yield arg_3","id_":253598,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/jira.py#L221-L234","negative":"Sequentially update the actors, the world, and the messaging system. 
\n The theater terminates once all of the actors indicate that they are done."} {"query":"Import keys to the stash from either a list of keys or a file","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None ) : arg_0 . _assert_valid_stash ( ) if not ( bool ( arg_2 ) ^ bool ( arg_3 ) ) : raise GhostError ( 'You must either provide a path to an exported stash file ' 'or a list of key dicts to import' ) if arg_3 : with open ( arg_3 ) as stash_file : arg_2 = json . Funcs ( stash_file . read ( ) ) arg_4 = arg_1 != arg_0 . passphrase if arg_4 : arg_5 = Stash ( TinyDBStorage ( 'stub' ) , arg_1 ) for arg_6 in arg_2 : arg_0 . put ( name = arg_6 [ 'name' ] , value = arg_5 . _decrypt ( arg_6 [ 'value' ] ) if arg_4 else arg_6 [ 'value' ] , metadata = arg_6 [ 'metadata' ] , description = arg_6 [ 'description' ] , lock = arg_6 . get ( 'lock' ) , key_type = arg_6 . get ( 'type' ) , encrypt = arg_4 )","id_":253599,"task_name":"https:\/\/github.com\/nir0s\/ghost\/blob\/77da967a4577ca4cf100cfe34e87b39ad88bf21c\/ghost.py#L494-L528","negative":"Check the spacing of a single equals sign."} {"query":"Parses a query string into a dict .","positive":"def Func ( arg_0 , arg_1 = \"query string\" , arg_2 = arg_3 , arg_4 = None , ** arg_5 ) : arg_6 = dict ( parse_qsl ( arg_0 , ** arg_5 ) ) if arg_4 : arg_6 = arg_4 . validate ( arg_6 , arg_1 = arg_1 , arg_2 = arg_2 ) return arg_6","id_":253600,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/utils\/__init__.py#L108-L119","negative":"Sets the package verification code, if not already set.\n code - A string.\n Raises CardinalityError if already defined.\n Raises OrderError if no package previously defined.\n Raises Value error if doesn't match verifcode form"} {"query":"Reimplemented to the store history .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = False ) : if not arg_2 : arg_4 = arg_0 . input_buffer if arg_1 is None else arg_1 arg_5 = super ( HistoryConsoleWidget , arg_0 ) . Func ( arg_1 , arg_2 , arg_3 ) if arg_5 and not arg_2 : arg_4 = arg_4 . rstrip ( ) if arg_4 and ( not arg_0 . _history or arg_0 . _history [ - 1 ] != arg_4 ) : arg_0 . _history . append ( arg_4 ) arg_0 . _history_edits = { } arg_0 . _history_index = len ( arg_0 . _history ) return arg_5","id_":253601,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/history_console_widget.py#L38-L60","negative":"Makes closure which creates `loc`, `scale` params from `tf.get_variable`.\n\n This function produces a closure which produces `loc`, `scale` using\n `tf.get_variable`. The closure accepts the following arguments:\n\n dtype: Type of parameter's event.\n shape: Python `list`-like representing the parameter's event shape.\n name: Python `str` name prepended to any created (or existing)\n `tf.Variable`s.\n trainable: Python `bool` indicating all created `tf.Variable`s should be\n added to the graph collection `GraphKeys.TRAINABLE_VARIABLES`.\n add_variable_fn: `tf.get_variable`-like `callable` used to create (or\n access existing) `tf.Variable`s.\n\n Args:\n is_singular: Python `bool` indicating if `scale is None`. Default: `False`.\n loc_initializer: Initializer function for the `loc` parameters.\n The default is `tf.random_normal_initializer(mean=0., stddev=0.1)`.\n untransformed_scale_initializer: Initializer function for the `scale`\n parameters. 
Default value: `tf.random_normal_initializer(mean=-3.,\n stddev=0.1)`. This implies the softplus transformed result is initialized\n near `0`. It allows a `Normal` distribution with `scale` parameter set to\n this value to approximately act like a point mass.\n loc_regularizer: Regularizer function for the `loc` parameters.\n The default (`None`) is to use the `tf.get_variable` default.\n untransformed_scale_regularizer: Regularizer function for the `scale`\n parameters. The default (`None`) is to use the `tf.get_variable` default.\n loc_constraint: An optional projection function to be applied to the\n loc after being updated by an `Optimizer`. The function must take as input\n the unprojected variable and must return the projected variable (which\n must have the same shape). Constraints are not safe to use when doing\n asynchronous distributed training.\n The default (`None`) is to use the `tf.get_variable` default.\n untransformed_scale_constraint: An optional projection function to be\n applied to the `scale` parameters after being updated by an `Optimizer`\n (e.g. used to implement norm constraints or value constraints). The\n function must take as input the unprojected variable and must return the\n projected variable (which must have the same shape). Constraints are not\n safe to use when doing asynchronous distributed training. The default\n (`None`) is to use the `tf.get_variable` default.\n\n Returns:\n default_loc_scale_fn: Python `callable` which instantiates `loc`, `scale`\n parameters from args: `dtype, shape, name, trainable, add_variable_fn`."} {"query":"Determine the geometry of the Voronoi pixelization by alligning it with the outer - most coordinates on a \\ grid plus a small buffer .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = 1e-8 ) : arg_6 = np . min ( arg_1 [ : , 0 ] ) - arg_5 arg_7 = np . max ( arg_1 [ : , 0 ] ) + arg_5 arg_8 = np . min ( arg_1 [ : , 1 ] ) - arg_5 arg_9 = np . max ( arg_1 [ : , 1 ] ) + arg_5 arg_10 = ( arg_7 - arg_6 , arg_9 - arg_8 ) arg_11 = ( ( arg_7 + arg_6 ) \/ 2.0 , ( arg_9 + arg_8 ) \/ 2.0 ) return arg_0 . Geometry ( arg_10 = arg_10 , arg_2 = arg_2 , arg_11 = arg_11 , arg_3 = arg_3 , arg_4 = arg_4 )","id_":253602,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/model\/inversion\/pixelizations.py#L238-L264","negative":"Set train summary. A TrainSummary object contains information\n necessary for the optimizer to know how often the logs are recorded,\n where to store the logs and how to retrieve them, etc. For details,\n refer to the docs of TrainSummary.\n\n\n :param summary: a TrainSummary object"} {"query":"Filter tags according due_tag option .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = arg_0 . options . due_tag arg_4 = [ arg_8 [ \"name\" ] for arg_8 in arg_1 ] try : arg_5 = arg_4 . index ( arg_3 ) except ValueError : arg_0 . warn_if_tag_not_found ( arg_3 , \"due-tag\" ) return copy . deepcopy ( arg_1 ) arg_6 = arg_1 [ arg_5 ] arg_7 = arg_0 . get_time_of_tag ( arg_6 ) for arg_8 in arg_1 : arg_9 = arg_0 . get_time_of_tag ( arg_8 ) if arg_9 <= arg_7 : arg_2 . 
append ( arg_8 ) return arg_2","id_":253603,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/generator.py#L1010-L1034","negative":"Returns mappable data for a random subset of voxels.\n\n May be useful as a baseline in predictive analyses--e.g., to compare\n performance of a more principled feature selection method with simple\n random selection.\n\n Args:\n dataset: A Dataset instance\n n_voxels: An integer specifying the number of random voxels to select.\n\n Returns:\n A 2D numpy array with (randomly-selected) voxels in rows and mappables\n in columns."} {"query":"Allocates a call id and emit .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = ( ) , arg_6 = False , arg_7 = None , arg_8 = False , arg_9 = None ) : arg_10 = arg_0 . collector if not arg_10 . is_running ( ) : arg_10 . start ( ) arg_11 = uuid4_bytes ( ) arg_12 = ( DUPLEX if arg_0 . socket is arg_10 . socket else arg_10 . topic ) arg_13 = arg_0 . _make_header ( arg_2 , arg_11 , arg_12 , arg_1 ) arg_14 = arg_0 . _pack ( arg_3 , arg_4 , arg_6 ) def send_call ( ) : try : safe ( send , arg_0 . socket , arg_13 , arg_14 , arg_5 , zmq . NOBLOCK ) except zmq . Again : raise Undelivered ( 'emission was not delivered' ) arg_10 . prepare ( arg_11 , arg_0 , arg_2 , arg_3 , arg_4 ) send_call ( ) return arg_10 . establish ( arg_11 , arg_0 . timeout , arg_7 , send_call if arg_8 else None , arg_9 = arg_9 )","id_":253604,"task_name":"https:\/\/github.com\/sublee\/zeronimo\/blob\/b216638232932718d2cbc5eabd870c8f5b5e83fb\/zeronimo\/core.py#L454-L475","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Display help messages for the given message identifiers","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_1 : try : for arg_3 in arg_0 . get_message_definitions ( arg_2 ) : print ( arg_3 . format_help ( checkerref = True ) ) print ( \"\" ) except UnknownMessageError as ex : print ( ex ) print ( \"\" ) continue","id_":253605,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/message\/message_store.py#L192-L202","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Use kwargs to select for desired attributes from list of zipcode dicts","positive":"def Func ( arg_0 = arg_1 , ** arg_2 ) : return [ arg_3 for arg_3 in arg_0 if all ( [ arg_4 in arg_3 and arg_3 [ arg_4 ] == arg_5 for arg_4 , arg_5 in arg_2 . items ( ) ] ) ]","id_":253606,"task_name":"https:\/\/github.com\/seanpianka\/Zipcodes\/blob\/c815226de7a12e659f3198a23de942e354c8a001\/zipcodes\/__init__.py#L87-L89","negative":"Propagate \"clk\" clock and reset \"rst\" signal to all subcomponents"} {"query":"Combines self and another_layout into an edge map .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = dict ( ) for arg_3 , arg_4 in arg_0 . get_virtual_bits ( ) . items ( ) : if arg_4 not in arg_1 . 
_p2v : raise LayoutError ( 'The wire_map_from_layouts() method does not support when the' ' other layout (another_layout) is smaller.' ) arg_2 [ arg_3 ] = arg_1 [ arg_4 ] return arg_2","id_":253607,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/transpiler\/layout.py#L230-L257","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Loads config checking CLI arguments for a config file","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 [ arg_4 ] ) -> arg_3 [ arg_4 ] : from django . core . management . base import arg_10 arg_5 = arg_10 . create_parser def patched_parser ( arg_6 , arg_7 , arg_8 ) : arg_9 = arg_5 ( arg_6 , arg_7 , arg_8 ) argparser_add_argument ( arg_9 , arg_0 ) return arg_9 arg_10 . create_parser = patched_parser try : arg_9 = argparse . ArgumentParser ( add_help = False ) argparser_add_argument ( arg_9 , arg_0 ) arg_12 , arg_13 = arg_9 . parse_known_args ( arg_2 ) arg_0 . load ( arg_12 . config ) yield arg_13 finally : arg_10 . create_parser = arg_5","id_":253608,"task_name":"https:\/\/github.com\/lincolnloop\/goodconf\/blob\/19515da5783f86b9516dbf81531107c2d9eae567\/goodconf\/contrib\/django.py#L10-L33","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Generates a bytecode from an object .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 ) -> arg_3 : if isinstance ( arg_0 , pyte . superclasses . _PyteOp ) : return arg_0 . to_bytes ( arg_2 ) elif isinstance ( arg_0 , ( pyte . superclasses . _PyteAugmentedComparator , pyte . superclasses . _PyteAugmentedValidator . _FakeMathematicalOP ) ) : return arg_0 . to_bytes ( arg_2 ) elif isinstance ( arg_0 , pyte . superclasses . _PyteAugmentedValidator ) : arg_0 . validate ( ) return arg_0 . to_load ( ) elif isinstance ( arg_0 , int ) : return arg_0 . to_bytes ( ( arg_0 . bit_length ( ) + 7 ) \/\/ 8 , byteorder = \"little\" ) or b'' elif isinstance ( arg_0 , arg_3 ) : return arg_0 else : raise TypeError ( \"`{}` was not a valid bytecode-encodable item\" . format ( arg_0 ) )","id_":253609,"task_name":"https:\/\/github.com\/Fuyukai\/Pyte\/blob\/7ef04938d80f8b646bd73d976ac9787a5b88edd9\/pyte\/util.py#L68-L90","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"Determine the URL corresponding to Python object","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 != 'py' : return None arg_2 = arg_1 [ 'module' ] arg_3 = arg_1 [ 'fullname' ] arg_4 = sys . modules . get ( arg_2 ) if arg_4 is None : return None arg_5 = arg_4 for arg_6 in arg_3 . split ( '.' ) : try : arg_5 = getattr ( arg_5 , arg_6 ) except : return None try : arg_7 = inspect . getsourcefile ( arg_5 ) except : arg_7 = None if not arg_7 : return None try : arg_8 , arg_9 = inspect . getsourcelines ( arg_5 ) except : arg_9 = None if arg_9 : arg_10 = \"#L%d-L%d\" % ( arg_9 , arg_9 + len ( arg_8 ) - 1 ) else : arg_10 = \"\" arg_7 = relpath ( arg_7 , start = dirname ( scisalt . 
__file__ ) ) if 'dev' in scisalt . __version__ : return \"http:\/\/github.com\/joelfrederico\/SciSalt\/blob\/master\/scisalt\/%s%s\" % ( arg_7 , arg_10 ) else : return \"http:\/\/github.com\/joelfrederico\/SciSalt\/blob\/v%s\/scisalt\/%s%s\" % ( scisalt . __version__ , arg_7 , arg_10 )","id_":253610,"task_name":"https:\/\/github.com\/joelfrederico\/SciSalt\/blob\/7bf57c49c7dde0a8b0aa337fbd2fbd527ce7a67f\/docs\/conf.py#L358-L403","negative":"Gets a list of snapshots for a cluster\n\n :param cluster_identifier: unique identifier of a cluster\n :type cluster_identifier: str"} {"query":"return at most n array items move the cursor .","positive":"def Func ( arg_0 , arg_1 ) : while len ( arg_0 . pool ) < arg_1 : arg_0 . cur = arg_0 . files . next ( ) arg_0 . pool = numpy . append ( arg_0 . pool , arg_0 . fetch ( arg_0 . cur ) , axis = 0 ) arg_4 = arg_0 . pool [ : arg_1 ] if arg_1 == len ( arg_0 . pool ) : arg_0 . pool = arg_0 . fetch ( None ) else : arg_0 . pool = arg_0 . pool [ arg_1 : ] return arg_4","id_":253611,"task_name":"https:\/\/github.com\/rainwoodman\/sharedmem\/blob\/b23e59c1ed0e28f7b6c96c17a04d55c700e06e3a\/contrib\/multipartstream.py#L31-L44","negative":"Create a dictionary of datasets and a material object for air.\n\n :return: (Material, {str, DataSet})"} {"query":"parse topology location","positive":"def Func ( arg_0 ) : try : arg_1 = arg_0 [ 'cluster\/[role]\/[env]' ] . split ( '\/' ) arg_2 = arg_0 [ 'topology-name' ] arg_1 . append ( arg_2 ) if len ( arg_1 ) != 4 : raise return arg_1 except Exception : Log . error ( 'Invalid topology location' ) raise","id_":253612,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/explorer\/src\/python\/physicalplan.py#L61-L72","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Executed when script is run as - is .","positive":"def Func ( ) : for arg_0 in locate_files ( ROOT_DIR ) : print ( \"Processing %s\" % arg_0 ) with open ( arg_0 , \"rt\" ) as f : arg_1 = list ( tokenize . generate_tokens ( f . readline ) ) arg_2 = tokenize . untokenize ( arg_1 ) arg_3 = normalize_tokens ( arg_1 ) arg_4 = tokenize . untokenize ( arg_3 ) assert arg_2 == arg_4","id_":253613,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-bindings\/bin\/pymagic.py#L70-L80","negative":"Gets back all response headers."} {"query":"Bookmark group .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 , arg_3 = get_project_group_or_local ( arg_0 . obj . get ( 'project' ) , arg_0 . obj . get ( 'group' ) ) try : PolyaxonClient ( ) . experiment_group . Func ( arg_1 , arg_2 , arg_3 ) except ( PolyaxonHTTPError , PolyaxonShouldExitError , PolyaxonClientException ) as e : Printer . print_error ( 'Could not Func group `{}`.' . format ( arg_3 ) ) Printer . print_error ( 'Error message `{}`.' . format ( e ) ) sys . exit ( 1 ) Printer . print_success ( \"Experiments group is Funced.\" )","id_":253614,"task_name":"https:\/\/github.com\/polyaxon\/polyaxon-cli\/blob\/a7f5eed74d4d909cad79059f3c21c58606881449\/polyaxon_cli\/cli\/experiment_group.py#L318-L345","negative":"Revoke the token and remove the cookie."} {"query":"Uses default program defined by the system to open a file . This is done via os . startfile on windows open on mac and xdg - open on linux .","positive":"def Func ( arg_0 , arg_1 = True ) : from ubelt import util_cmd if arg_1 : print ( '[ubelt] Func(\"{}\")' . 
format ( arg_0 ) ) arg_0 = normpath ( arg_0 ) if not exists ( arg_0 ) : raise Exception ( 'Cannot start nonexistant file: %r' % arg_0 ) if not WIN32 : import pipes arg_0 = pipes . quote ( arg_0 ) if LINUX : arg_2 = util_cmd . cmd ( ( 'xdg-open' , arg_0 ) , detach = True , arg_1 = arg_1 ) elif DARWIN : arg_2 = util_cmd . cmd ( ( 'open' , arg_0 ) , detach = True , arg_1 = arg_1 ) elif WIN32 : os . Func ( arg_0 ) arg_2 = None else : raise RuntimeError ( 'Unknown Platform' ) if arg_2 is not None : if not arg_2 [ 'proc' ] : raise Exception ( 'Func failed' )","id_":253615,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_platform.py#L231-L273","negative":"Random sampler for equal_splits functions"} {"query":"Update all parameters which are defined on self from otherObj","positive":"def Func ( arg_0 , arg_1 : \"PropDeclrCollector\" , arg_2 , arg_3 : arg_4 , arg_5 : arg_6 ) -> None : arg_7 = arg_4 ( ) if arg_3 is not None : arg_3 = arg_4 ( arg_3 ) for arg_8 in arg_0 . _params : arg_9 = arg_5 + arg_8 . _scopes [ arg_0 ] [ 1 ] try : arg_10 = getattr ( arg_1 , arg_9 ) if not isinstance ( arg_10 , Param ) : continue except AttributeError : continue if arg_3 and arg_10 in arg_3 : arg_7 . add ( arg_10 ) continue arg_2 ( arg_0 , arg_8 , arg_10 ) if arg_3 is not None : assert arg_7 == arg_3","id_":253616,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/synthesizer\/interfaceLevel\/propDeclrCollector.py#L213-L243","negative":"Utility method to visualize decision boundaries in R^2.\n\n Args:\n features: Input points, as a Numpy `array` of shape `[num_examples, 2]`.\n labels: Numpy `float`-like array of shape `[num_examples, 1]` giving a\n label for each point.\n true_w_b: A `tuple` `(w, b)` where `w` is a Numpy array of\n shape `[2]` and `b` is a scalar `float`, interpreted as a\n decision rule of the form `dot(features, w) + b > 0`.\n candidate_w_bs: Python `iterable` containing tuples of the same form as\n true_w_b.\n fname: The filename to save the plot as a PNG image (Python `str`)."} {"query":"Return the list of all sub - directories of path .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . albums [ arg_1 ] . subdirs : arg_3 = os . path . normpath ( join ( arg_1 , arg_2 ) ) yield arg_3 , arg_0 . albums [ arg_3 ] for arg_4 , arg_5 in arg_0 . Func ( arg_3 ) : yield arg_4 , arg_0 . albums [ arg_3 ]","id_":253617,"task_name":"https:\/\/github.com\/saimn\/sigal\/blob\/912ca39991355d358dc85fd55c7aeabdd7acc386\/sigal\/gallery.py#L657-L664","negative":"Create a tar file based on the list of files passed"} {"query":"End a group . See begin_group for more details .","positive":"def Func ( arg_0 , arg_1 = 0 , arg_2 = '' ) : arg_0 . indentation -= arg_1 arg_3 = arg_0 . group_stack . pop ( ) if not arg_3 . breakables : arg_0 . group_queue . remove ( arg_3 ) if arg_2 : arg_0 . text ( arg_2 )","id_":253618,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/pretty.py#L257-L264","negative":"Parse a Supybot IRC log file.\n\n The method parses the Supybot IRC log file and returns an iterator of\n dictionaries. 
Each one of this, contains a message from the file.\n\n :param filepath: path to the IRC log file\n\n :returns: a generator of parsed messages\n\n :raises ParseError: raised when the format of the Supybot log file\n is invalid\n :raises OSError: raised when an error occurs reading the\n given file"} {"query":"Add new index values .","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : arg_3 = arg_0 . _index_keys_for ( arg_1 , * arg_2 ) arg_4 = map ( lambda k : ( k , '0' ) , arg_3 ) arg_0 . kvl . put ( arg_0 . INDEX_TABLE , * arg_4 )","id_":253619,"task_name":"https:\/\/github.com\/dossier\/dossier.store\/blob\/b22ffe2470bba9fcc98a30cb55b437bfa1521e7f\/dossier\/store\/store.py#L431-L444","negative":"Disallow users other than the user whose email is being reset."} {"query":"Returns the task ids allocated for the given component id","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] for arg_3 , arg_4 in arg_0 . task_to_component_map . items ( ) : if arg_4 == arg_1 : arg_2 . append ( arg_3 ) return arg_2","id_":253620,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/utils\/topology\/topology_context_impl.py#L110-L116","negative":"Write or append data into the given file path.\n\n :param data_to_write: The data to write.\n :type data_to_write: str\n\n :param overwrite:\n Tell us if we have to overwrite the\n content of the file we are working with.\n :type overwrite: bool"} {"query":"Overridden to not get rid of newlines","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = [ ] for arg_5 in arg_1 . splitlines ( False ) : if arg_5 : arg_4 . extend ( textwrap . wrap ( arg_5 . strip ( ) , arg_2 , initial_indent = arg_3 , subsequent_indent = arg_3 ) ) else : arg_4 . append ( arg_5 ) arg_1 = \"\\n\" . join ( arg_4 ) return arg_1","id_":253621,"task_name":"https:\/\/github.com\/Jaymon\/captain\/blob\/4297f32961d423a10d0f053bc252e29fbe939a47\/captain\/parse.py#L382-L401","negative":"Raises OrderError if no package or file defined.\n Raises CardinalityError if more than one type set.\n Raises SPDXValueError if type is unknown."} {"query":"Create Django form processing class - based view from injector class .","positive":"def Func ( arg_0 ) : arg_1 = create_handler ( FormView , arg_0 ) apply_form_methods ( arg_1 , arg_0 ) return arg_0 . let ( as_view = arg_1 . 
as_view )","id_":253622,"task_name":"https:\/\/github.com\/dry-python\/dependencies\/blob\/297912cbc6482ba26b3104729645f3a2aba5facc\/src\/dependencies\/contrib\/_django.py#L16-L21","negative":"Stop streaming frames."} {"query":"Delete subfield from position specified .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None ) : arg_5 = record_get_subfields ( arg_0 , arg_1 , arg_3 = arg_3 , arg_4 = arg_4 ) try : del arg_5 [ arg_2 ] except IndexError : raise InvenioBibRecordFieldError ( \"The record does not contain the subfield \" \"'%(subfieldIndex)s' inside the field (local: \" \"'%(fieldIndexLocal)s, global: '%(fieldIndexGlobal)s' ) of tag \" \"'%(tag)s'.\" % { \"subfieldIndex\" : arg_2 , \"fieldIndexLocal\" : str ( arg_4 ) , \"fieldIndexGlobal\" : str ( arg_3 ) , \"tag\" : arg_1 } ) if not arg_5 : if arg_3 is not None : for arg_6 , arg_7 in enumerate ( arg_0 [ arg_1 ] ) : if arg_7 [ 4 ] == arg_3 : del arg_0 [ arg_1 ] [ arg_6 ] else : del arg_0 [ arg_1 ] [ arg_4 ] if not arg_0 [ arg_1 ] : del arg_0 [ arg_1 ]","id_":253623,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/bibrecord.py#L888-L922","negative":"This function creates the command list from available information"} {"query":"Builds a url to a gravatar profile from an email address .","positive":"def Func ( arg_0 , arg_1 = arg_2 ) : if arg_1 : arg_3 = GRAVATAR_SECURE_URL else : arg_3 = GRAVATAR_URL arg_4 = calculate_gravatar_hash ( arg_0 ) arg_5 = '{base}{hash}' . format ( base = arg_3 , hash = arg_4 ) return arg_5","id_":253624,"task_name":"https:\/\/github.com\/twaddington\/django-gravatar\/blob\/c4849d93ed43b419eceff0ff2de83d4265597629\/django_gravatar\/helpers.py#L90-L108","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Loading of client_secrets JSON file optionally backed by a cache .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = 'oauth2client:secrets#ns' if not arg_1 : return _Func ( arg_0 ) arg_3 = arg_1 . get ( arg_0 , namespace = arg_2 ) if arg_3 is None : arg_4 , arg_5 = _Func ( arg_0 ) arg_3 = { arg_4 : arg_5 } arg_1 . set ( arg_0 , arg_3 , namespace = arg_2 ) return next ( six . iteritems ( arg_3 ) )","id_":253625,"task_name":"https:\/\/github.com\/googleapis\/oauth2client\/blob\/50d20532a748f18e53f7d24ccbe6647132c979a9\/oauth2client\/clientsecrets.py#L129-L173","negative":"A utility function that creates a list of enumeration values from a bit\n mask for a specific mask enumeration class.\n\n Args:\n enumeration (class): The enumeration class from which to draw\n enumeration values.\n mask (int): The bit mask from which to identify enumeration values.\n\n Returns:\n list: A list of enumeration values corresponding to the bit mask."} {"query":"Scales each value in the pixels of the image .","positive":"def Func ( arg_0 , arg_1 = 0.9 , arg_2 = None , arg_3 = False ) : arg_2 = arg_2 if arg_2 is not None else ( - np . inf , np . inf ) if arg_3 : arg_4 = 1 + np . random . uniform ( - arg_1 , arg_1 ) arg_0 = arg_0 * arg_4 else : arg_0 = arg_0 * arg_1 if len ( arg_2 ) == 2 : arg_0 = np . 
clip ( arg_0 , arg_2 [ 0 ] , arg_2 [ 1 ] ) else : raise Exception ( \"clip : tuple of 2 numbers\" ) return arg_0","id_":253626,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/prepro.py#L1861-L1907","negative":"Return a AzureDLFileSystem object."} {"query":"Rename the model itself","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _impl . system . Func_model ( new_name = arg_1 , old_name = arg_0 . name )","id_":253627,"task_name":"https:\/\/github.com\/fumitoh\/modelx\/blob\/0180da34d052c44fb94dab9e115e218bbebfc9c3\/modelx\/core\/model.py#L107-L109","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Map a function to all grid_stack in a grid - stack","positive":"def Func ( arg_0 , arg_1 , * arg_2 ) : return GridStack ( * [ arg_1 ( * arg_3 ) for arg_3 in zip ( arg_0 , * arg_2 ) ] )","id_":253628,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/data\/array\/grids.py#L250-L252","negative":"Returns an optional AudioTrack."} {"query":"Finds gaps in the provided time - series and indexes them into groups .","positive":"def Func ( arg_0 , arg_1 = 4.0 ) : arg_2 = np . diff ( arg_0 ) arg_3 = np . where ( arg_2 > arg_1 ) [ 0 ] if len ( arg_3 ) > 0 : arg_4 = [ ] for arg_5 , arg_6 in enumerate ( arg_3 ) : if arg_5 == 0 : arg_4 . append ( slice ( 0 , arg_6 + 1 ) ) else : arg_4 . append ( slice ( arg_3 [ arg_5 - 1 ] + 1 , arg_6 + 1 ) ) arg_4 . append ( slice ( arg_3 [ - 1 ] + 1 , len ( arg_0 ) ) ) else : arg_4 = [ slice ( 0 , len ( arg_0 ) ) ] return len ( arg_4 ) , arg_4","id_":253629,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/lcmath.py#L58-L114","negative":"Main executor of the trimmomatic_report template.\n\n Parameters\n ----------\n log_files : list\n List of paths to the trimmomatic log files."} {"query":"Submit a task to any of a subset of our targets .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_2 : arg_3 = [ arg_0 . loads [ i ] for i in arg_2 ] else : arg_3 = arg_0 . loads arg_4 = arg_0 . scheme ( arg_3 ) if arg_2 : arg_4 = arg_2 [ arg_4 ] arg_5 = arg_0 . targets [ arg_4 ] arg_0 . engine_stream . send ( arg_5 , flags = zmq . SNDMORE , copy = False ) arg_0 . engine_stream . send_multipart ( arg_1 . raw_msg , copy = False ) arg_0 . add_job ( arg_4 ) arg_0 . pending [ arg_5 ] [ arg_1 . msg_id ] = arg_1 arg_8 = dict ( arg_7 = arg_1 . msg_id , engine_id = arg_5 . decode ( 'ascii' ) ) arg_0 . session . send ( arg_0 . mon_stream , 'task_destination' , arg_8 = arg_8 , ident = [ b'tracktask' , arg_0 . ident ] )","id_":253630,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/controller\/scheduler.py#L543-L563","negative":"Put a key inside the stash\n\n if key exists and modify true: delete and create\n if key exists and modify false: fail\n if key doesn't exist and modify true: fail\n if key doesn't exist and modify false: create\n\n `name` is unique and cannot be changed.\n\n `value` must be provided if the key didn't already exist, otherwise,\n the previous value will be retained.\n\n `created_at` will be left unmodified if the key\n already existed. 
Otherwise, the current time will be used.\n\n `modified_at` will be changed to the current time\n if the field is being modified.\n\n `metadata` will be updated if provided. If it wasn't\n provided the field from the existing key will be used and the\n same goes for the `uid` which will be generated if it didn't\n previously exist.\n\n `lock` will lock the key to prevent it from being modified or deleted\n\n `add` allows to add values to an existing key instead of overwriting.\n\n Returns the id of the key in the database"} {"query":"Augmenter that detects all edges in images marks them in a black and white image and then overlays the result with the original image .","positive":"def Func ( arg_0 = 0 , arg_1 = None , arg_2 = False , arg_3 = None ) : arg_4 = iap . handle_continuous_param ( arg_0 , \"alpha\" , value_range = ( 0 , 1.0 ) , tuple_to_uniform = True , list_to_choice = True ) def create_matrices ( arg_5 , arg_6 , arg_7 ) : arg_8 = arg_4 . draw_sample ( arg_3 = arg_7 ) ia . do_assert ( 0 <= arg_8 <= 1.0 ) arg_9 = np . array ( [ [ 0 , 0 , 0 ] , [ 0 , 1 , 0 ] , [ 0 , 0 , 0 ] ] , dtype = np . float32 ) arg_10 = np . array ( [ [ 0 , 1 , 0 ] , [ 1 , - 4 , 1 ] , [ 0 , 1 , 0 ] ] , dtype = np . float32 ) arg_11 = ( 1 - arg_8 ) * arg_9 + arg_8 * arg_10 return [ arg_11 ] * arg_6 if arg_1 is None : arg_1 = \"Unnamed%s\" % ( ia . caller_name ( ) , ) return Convolve ( create_matrices , arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 )","id_":253631,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmenters\/convolutional.py#L382-L445","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"One of the Bohachevsky functions","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = arg_0 arg_3 = arg_1 ** 2 + 2 * arg_2 ** 2 - 0.3 * np . cos ( 3 * np . pi * arg_1 ) - 0.4 * np . cos ( 4 * np . pi * arg_2 ) + 0.7 arg_4 = np . array ( [ 2 * arg_1 + 0.3 * np . sin ( 3 * np . pi * arg_1 ) * 3 * np . pi , 4 * arg_2 + 0.4 * np . sin ( 4 * np . pi * arg_2 ) * 4 * np . pi , ] ) return arg_3 , arg_4","id_":253632,"task_name":"https:\/\/github.com\/nirum\/descent\/blob\/074c8452f15a0da638668a4fe139fde06ccfae7f\/descent\/objectives.py#L148-L156","negative":"Extracts returns based on interesting events. See\n gen_date_range_interesting.\n\n Parameters\n ----------\n returns : pd.Series\n Daily returns of the strategy, noncumulative.\n - See full explanation in tears.create_full_tear_sheet.\n\n Returns\n -------\n ranges : OrderedDict\n Date ranges, with returns, of all valid events."} {"query":"Put and return the only unique identifier possible its path","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . client . write ( arg_0 . _key_path ( arg_1 [ 'name' ] ) , ** arg_1 ) return arg_0 . _key_path ( arg_1 [ 'name' ] )","id_":253633,"task_name":"https:\/\/github.com\/nir0s\/ghost\/blob\/77da967a4577ca4cf100cfe34e87b39ad88bf21c\/ghost.py#L878-L882","negative":"Creates a tempfile and starts the given editor, returns the data afterwards."} {"query":"Resolve the logging path from job and task properties .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_1 . logging : return for arg_3 in arg_2 : arg_4 = provider_base . format_logging_uri ( arg_1 . logging . uri , arg_0 , arg_3 . task_metadata ) arg_5 = job_model . LoggingParam ( arg_4 , arg_1 . logging . file_provider ) if arg_3 . task_resources : arg_3 . task_resources = arg_3 . task_resources . _replace ( arg_5 = arg_5 ) else : arg_3 . task_resources = job_model . 
Resources ( arg_5 = arg_5 )","id_":253634,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/commands\/dsub.py#L592-L617","negative":"Create a new Set produce by the intersection of 2 Set"} {"query":"Poll from the buffer","positive":"def Func ( arg_0 ) : try : arg_1 = arg_0 . _buffer . get ( block = False ) if arg_0 . _producer_callback is not None : arg_0 . _producer_callback ( ) return arg_1 except Queue . Empty : Log . debug ( \"%s: Empty in Func()\" % str ( arg_0 ) ) raise Queue . Empty","id_":253635,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/utils\/misc\/communicator.py#L64-L77","negative":"Update the HyperLogLog with a new data value in bytes.\n The value will be hashed using the hash function specified by\n the `hashfunc` argument in the constructor.\n\n Args:\n b: The value to be hashed using the hash function specified.\n\n Example:\n To update with a new string value (using the default SHA1 hash\n function, which requires bytes as input):\n\n .. code-block:: python\n\n hll = HyperLogLog()\n hll.update(\"new value\".encode('utf-8'))\n\n We can also use a different hash function, for example, `pyfarmhash`:\n\n .. code-block:: python\n\n import farmhash\n def _hash_32(b):\n return farmhash.hash32(b)\n hll = HyperLogLog(hashfunc=_hash_32)\n hll.update(\"new value\")"} {"query":"Render a LaTeX string to PNG .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = 'mpl' ) : if arg_2 == 'mpl' : arg_3 = Func_mpl elif arg_2 == 'dvipng' : arg_3 = Func_dvipng else : raise ValueError ( 'No such backend {0}' . format ( arg_2 ) ) arg_4 = arg_3 ( arg_0 ) if arg_1 and arg_4 : arg_4 = encodestring ( arg_4 ) return arg_4","id_":253636,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/latextools.py#L34-L58","negative":"Set your typing status in this conversation.\n\n Args:\n typing: (optional) ``TYPING_TYPE_STARTED``, ``TYPING_TYPE_PAUSED``,\n or ``TYPING_TYPE_STOPPED`` to start, pause, or stop typing,\n respectively. Defaults to ``TYPING_TYPE_STARTED``.\n\n Raises:\n .NetworkError: If typing status cannot be set."} {"query":"Decorator to mark plugin functions as entry points for web calls","positive":"def Func ( * arg_0 , ** arg_1 ) : def wrapper ( arg_2 ) : arg_2 . is_Func = True arg_2 . route = arg_0 [ 0 ] arg_2 . form_params = arg_1 . get ( 'form_params' , [ ] ) arg_2 . method = arg_1 . get ( 'method' , 'POST' ) return arg_2 return wrapper","id_":253637,"task_name":"https:\/\/github.com\/arcticfoxnv\/slackminion\/blob\/62ea77aba5ac5ba582793e578a379a76f7d26cdb\/slackminion\/plugin\/__init__.py#L26-L39","negative":"Perform dimensionality reduction on X.\n\n Parameters\n ----------\n X : {array-like, sparse matrix}, shape (n_samples, n_features)\n New data.\n\n Returns\n -------\n X_new : array, shape (n_samples, n_components)\n Reduced version of X. This will always be a dense array."} {"query":"Returns the scaled y positions of the points as doubles","positive":"def Func ( arg_0 ) : return scale_dimension ( arg_0 . Y , arg_0 . header . Func_scale , arg_0 . header . 
Func_offset )","id_":253638,"task_name":"https:\/\/github.com\/tmontaigu\/pylas\/blob\/8335a1a7d7677f0e4bc391bb6fa3c75b42ed5b06\/pylas\/lasdatas\/base.py#L66-L69","negative":"scan through the java output text and extract the bad java messages that may or may not happened when\n unit tests are run. It will not record any bad java messages that are stored in g_ok_java_messages.\n\n :return: none"} {"query":"Convert given string label of decay type to special index","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> int : arg_3 = arg_1 . replace ( '1' , 'one' ) . upper ( ) if arg_3 in arg_0 . __members__ : return DecayType [ arg_3 ] else : raise NotImplementedError","id_":253639,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/models\/lr_scheduled_model.py#L40-L56","negative":"Process data to produce velocity and dropout information."} {"query":"Succeeds if the given parser cannot consume input","positive":"def Func ( arg_0 ) : @ tri def Func_block ( ) : arg_1 = object ( ) arg_2 = optional ( tri ( arg_0 ) , arg_1 ) if arg_2 != arg_1 : fail ( [ \"not \" + _fun_to_str ( arg_0 ) ] ) choice ( Func_block )","id_":253640,"task_name":"https:\/\/github.com\/brehaut\/picoparse\/blob\/5e07c8e687a021bba58a5a2a76696c7a7ff35a1c\/picoparse\/__init__.py#L382-L390","negative":"Write the index.html file for this report."} {"query":"Deserialize a dict of simple types into an instance of this class .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 , arg_3 = arg_0 . schema . load ( arg_1 ) except ValidationError as ex : raise ModelValidationError ( ex . messages , ex . field_names , ex . fields , ex . data , ** ex . kwargs ) from None return arg_2","id_":253641,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/validation\/base.py#L335-L347","negative":"Wait until all pending messages have been sent.\n\n :returns: A list of the send results of all the pending messages. Each\n send result is a tuple with two values. The first is a boolean, indicating `True`\n if the message sent, or `False` if it failed. The second is an error if the message\n failed, otherwise it will be `None`.\n :rtype: list[tuple[bool, ~azure.servicebus.common.errors.MessageSendFailed]]\n\n Example:\n .. literalinclude:: ..\/examples\/async_examples\/test_examples_async.py\n :start-after: [START queue_sender_messages]\n :end-before: [END queue_sender_messages]\n :language: python\n :dedent: 4\n :caption: Schedule messages."} {"query":"Checks whether a HTTP status code is in the category denoted by the hundreds digit .","positive":"def Func ( arg_0 , arg_1 ) : assert arg_1 < 10 , 'HTTP status category must be a one-digit int!' arg_2 = arg_1 * 100 return arg_0 >= arg_2 and arg_0 < arg_2 + 100","id_":253642,"task_name":"https:\/\/github.com\/authomatic\/authomatic\/blob\/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e\/authomatic\/providers\/__init__.py#L533-L541","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Send a validation email to the user s email address .","positive":"def Func ( arg_0 ) : if arg_0 . email_verified : raise ValueError ( _ ( 'Cannot validate already active user.' ) ) arg_1 = Site . objects . get_current ( ) arg_0 . 
validation_notification ( user = arg_0 , arg_1 = arg_1 ) . notify ( )","id_":253643,"task_name":"https:\/\/github.com\/incuna\/django-user-management\/blob\/6784e33191d4eff624d2cf2df9ca01db4f23c9c6\/user_management\/models\/mixins.py#L155-L161","negative":"A list of row indices to remove. There are two caveats. First, this is\n a potentially slow operation. Second, pattern indices will shift if\n patterns before them are removed."} {"query":"Return a distance between two strings .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 ) : if callable ( arg_2 ) : return 1 - arg_2 ( arg_0 , arg_1 ) else : raise AttributeError ( 'Unknown Funcance function: ' + str ( arg_2 ) )","id_":253644,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/distance\/__init__.py#L366-L407","negative":"Load a multipage tiff into a single variable in x,y,z format.\n\n Arguments:\n tiff_filename: Filename of source data\n dtype: data type to use for the returned tensor\n\n Returns:\n Array containing contents from input tiff file in xyz order"} {"query":"Set the access policy on the given DAG s ViewModel .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : def _get_or_create_dag_permission ( arg_3 ) : arg_4 = arg_0 . find_permission_view_menu ( arg_3 , arg_1 ) if not arg_4 : arg_0 . log . info ( \"Creating new permission '%s' on view '%s'\" , arg_3 , arg_1 ) arg_4 = arg_0 . add_permission_view_menu ( arg_3 , arg_1 ) return arg_4 def _revoke_stale_permissions ( arg_5 ) : arg_6 = arg_0 . find_permissions_view_menu ( arg_5 ) for arg_7 in arg_6 : arg_8 = [ arg_9 for arg_9 in arg_7 . role if arg_9 . name != 'Admin' ] for arg_9 in arg_8 : arg_10 = arg_2 . get ( arg_9 . name , { } ) if arg_7 . permission . name not in arg_10 : arg_0 . log . info ( \"Revoking '%s' on DAG '%s' for role '%s'\" , arg_7 . permission , arg_1 , arg_9 . name ) arg_0 . del_permission_role ( arg_9 , arg_7 ) arg_5 = arg_0 . find_view_menu ( arg_1 ) if arg_5 : _revoke_stale_permissions ( arg_5 ) for arg_11 , arg_12 in arg_2 . items ( ) : arg_9 = arg_0 . find_role ( arg_11 ) if not arg_9 : raise AirflowException ( \"The access_control mapping for DAG '{}' includes a role \" \"named '{}', but that role does not exist\" . format ( arg_1 , arg_11 ) ) arg_12 = set ( arg_12 ) arg_13 = arg_12 - arg_0 . DAG_PERMS if arg_13 : raise AirflowException ( \"The access_control map for DAG '{}' includes the following \" \"invalid permissions: {}; The set of valid permissions \" \"is: {}\" . format ( arg_1 , ( arg_12 - arg_0 . DAG_PERMS ) , arg_0 . DAG_PERMS ) ) for arg_3 in arg_12 : arg_4 = _get_or_create_dag_permission ( arg_3 ) arg_0 . add_permission_role ( arg_9 , arg_4 )","id_":253645,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/www\/security.py#L500-L560","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Returns the first recurrence after the given datetime instance . The inc keyword defines what happens if dt is an occurrence . With inc = True if dt itself is an occurrence it will be returned .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if arg_0 . _cache_complete : arg_3 = arg_0 . 
_cache else : arg_3 = arg_0 if arg_2 : for arg_4 in arg_3 : if arg_4 >= arg_1 : return arg_4 else : for arg_4 in arg_3 : if arg_4 > arg_1 : return arg_4 return None","id_":253646,"task_name":"https:\/\/github.com\/MacHu-GWU\/superjson-project\/blob\/782ca4b2edbd4b4018b8cedee42eeae7c921b917\/superjson\/pkg\/dateutil\/rrule.py#L214-L230","negative":"Dump the ocntent into the `file` in binary mode."} {"query":"Transform input according to potentially registered XSLT","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None ) : if str ( arg_3 ) in arg_0 . _Func : arg_5 = arg_0 . _Func [ str ( arg_3 ) ] else : arg_5 = arg_0 . _Func [ \"default\" ] if isinstance ( arg_5 , str ) : with open ( arg_5 ) as f : arg_6 = etree . XSLT ( etree . parse ( f ) ) return etree . tostring ( arg_6 ( arg_2 ) , encoding = str , method = \"html\" , xml_declaration = None , pretty_print = False , with_tail = True , standalone = None ) elif isinstance ( arg_5 , Callable ) : return arg_5 ( arg_1 , arg_2 , arg_3 , arg_4 ) elif arg_5 is None : return etree . tostring ( arg_2 , encoding = str )","id_":253647,"task_name":"https:\/\/github.com\/Capitains\/flask-capitains-nemo\/blob\/8d91f2c05b925a6c8ea8c997baf698c87257bc58\/flask_nemo\/__init__.py#L309-L351","negative":"Reimplemented to the store history."} {"query":"Produces the registration manifest for people with the given product type .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . cleaned_data [ \"product\" ] arg_3 = arg_1 . cleaned_data [ \"category\" ] arg_4 = ( Q ( lineitem__product__in = arg_2 ) | Q ( lineitem__product__category__in = arg_3 ) ) arg_5 = commerce . Invoice . objects . filter ( arg_4 , status = commerce . Invoice . STATUS_PAID , ) . select_related ( \"cart\" , \"user\" , \"user__attendee\" , \"user__attendee__attendeeprofilebase\" ) arg_6 = set ( i . user for i in arg_5 ) arg_7 = commerce . Cart . objects . filter ( user__in = arg_6 ) arg_8 = commerce . ProductItem . objects . filter ( cart__in = arg_7 ) . select_related ( \"product\" , \"product__category\" , \"cart\" , \"cart__user\" , \"cart__user__attendee\" , \"cart__user__attendee__attendeeprofilebase\" ) . order_by ( \"product__category__order\" , \"product__order\" ) arg_6 = { } for arg_9 in arg_8 : arg_10 = arg_9 . cart if arg_10 . user not in arg_6 : arg_6 [ arg_10 . user ] = { \"unpaid\" : [ ] , \"paid\" : [ ] , \"refunded\" : [ ] } arg_8 = arg_6 [ arg_10 . user ] if arg_10 . status == commerce . Cart . STATUS_ACTIVE : arg_8 [ \"unpaid\" ] . append ( arg_9 ) elif arg_10 . status == commerce . Cart . STATUS_PAID : arg_8 [ \"paid\" ] . append ( arg_9 ) elif arg_10 . status == commerce . Cart . STATUS_RELEASED : arg_8 [ \"refunded\" ] . append ( arg_9 ) arg_12 = list ( arg_6 . keys ( ) ) arg_12 . sort ( key = ( lambda i : i . attendee . attendeeprofilebase . attendee_name ( ) . lower ( ) ) ) arg_13 = [ \"User ID\" , \"Name\" , \"Paid\" , \"Unpaid\" , \"Refunded\" ] def format_items ( arg_14 ) : arg_15 = [ '%d x %s' % ( arg_9 . quantity , str ( arg_9 . product ) ) for arg_9 in arg_14 ] return \", \\n\" . join ( arg_15 ) arg_16 = [ ] for arg_11 in arg_12 : arg_8 = arg_6 [ arg_11 ] arg_16 . append ( [ arg_11 . id , arg_11 . attendee . attendeeprofilebase . 
attendee_name ( ) , format_items ( arg_8 [ \"paid\" ] ) , format_items ( arg_8 [ \"unpaid\" ] ) , format_items ( arg_8 [ \"refunded\" ] ) , ] ) return ListReport ( \"Manifest\" , arg_13 , arg_16 )","id_":253648,"task_name":"https:\/\/github.com\/chrisjrn\/registrasion\/blob\/461d5846c6f9f3b7099322a94f5d9911564448e4\/registrasion\/reporting\/views.py#L831-L911","negative":"Context manager that changes to directory `path` and return to CWD\n when exited."} {"query":"Updates info attribute from info dict .","positive":"def Func ( arg_0 ) : if arg_0 . info_dict : arg_1 = [ ] if len ( arg_0 . info_dict ) > 1 : arg_0 . info_dict . pop ( \".\" , None ) for arg_2 , arg_3 in arg_0 . info_dict . items ( ) : if arg_2 == arg_3 : arg_1 . append ( arg_3 ) else : arg_1 . append ( \"=\" . join ( [ arg_2 , arg_3 ] ) ) arg_0 . info = \";\" . join ( arg_1 ) else : arg_0 . info = \".\"","id_":253649,"task_name":"https:\/\/github.com\/umich-brcf-bioinf\/Jacquard\/blob\/83dd61dd2b5e4110468493beec7bc121e6cb3cd1\/jacquard\/utils\/vcf.py#L313-L326","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."} {"query":"Listen for the named event with the specified callback .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : assert callable ( arg_2 ) , 'callback is not callable' if arg_2 in arg_0 . __listeners [ arg_1 ] : raise DuplicateListenerError arg_0 . __listeners [ arg_1 ] . append ( arg_2 )","id_":253650,"task_name":"https:\/\/github.com\/nsqio\/pynsq\/blob\/48bf62d65ea63cddaa401efb23187b95511dbc84\/nsq\/event.py#L44-L57","negative":"Multiply this frame, viewed as a matrix, by another matrix.\n\n :param matrix: another frame that you want to multiply the current frame by; must be compatible with the\n current frame (i.e. its number of rows must be the same as number of columns in the current frame).\n :returns: new H2OFrame, which is the result of multiplying the current frame by ``matrix``."} {"query":"Modulo addition operation","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : try : arg_4 = Operators . ITEBV ( 256 , arg_3 == 0 , 0 , ( arg_1 + arg_2 ) % arg_3 ) except ZeroDivisionError : arg_4 = 0 return arg_4","id_":253651,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/evm.py#L1185-L1191","negative":"Run stochastic volatility model.\n\n This model estimates the volatility of a returns series over time.\n Returns are assumed to be T-distributed. lambda (width of\n T-distributed) is assumed to follow a random-walk.\n\n Parameters\n ----------\n data : pandas.Series\n Return series to model.\n samples : int, optional\n Posterior samples to draw.\n\n Returns\n -------\n model : pymc.Model object\n PyMC3 model containing all random variables.\n trace : pymc3.sampling.BaseTrace object\n A PyMC3 trace object that contains samples for each parameter\n of the posterior.\n\n See Also\n --------\n plot_stoch_vol : plotting of tochastic volatility model"} {"query":"Try to find an InstallationCandidate for req","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . _find_all_versions ( arg_1 . name ) arg_4 = set ( arg_1 . specifier . filter ( [ x . version for x in arg_3 ] , prereleases = ( arg_0 . allow_all_prereleases if arg_0 . 
allow_all_prereleases else None ) , ) ) arg_5 = [ x for x in arg_3 if x . version in arg_4 ] if arg_1 . satisfied_by is not None : arg_5 . insert ( 0 , InstallationCandidate ( arg_1 . name , arg_1 . satisfied_by . version , INSTALLED_VERSION , ) ) arg_6 = True else : arg_6 = False arg_5 = arg_0 . _sort_versions ( arg_5 ) if not arg_2 and arg_6 : if arg_5 [ 0 ] . location is INSTALLED_VERSION : logger . debug ( 'Existing installed version (%s) is most up-to-date and ' 'satisfies requirement' , arg_1 . satisfied_by . version , ) else : logger . debug ( 'Existing installed version (%s) satisfies requirement ' '(most up-to-date version is %s)' , arg_1 . satisfied_by . version , arg_5 [ 0 ] [ 2 ] , ) return None if not arg_5 : logger . critical ( 'Could not find a version that satisfies the requirement %s ' '(from versions: %s)' , arg_1 , ', ' . join ( sorted ( set ( str ( arg_7 . version ) for arg_7 in arg_3 ) , key = parse_version , ) ) ) if arg_0 . need_warn_external : logger . warning ( \"Some externally hosted files were ignored as access to \" \"them may be unreliable (use --allow-external %s to \" \"allow).\" , arg_1 . name , ) if arg_0 . need_warn_unverified : logger . warning ( \"Some insecure and unverifiable files were ignored\" \" (use --allow-unverified %s to allow).\" , arg_1 . name , ) raise DistributionNotFound ( 'No matching distribution found for %s' % arg_1 ) if arg_5 [ 0 ] . location is INSTALLED_VERSION : logger . debug ( 'Installed version (%s) is most up-to-date (past versions: ' '%s)' , arg_1 . satisfied_by . version , ', ' . join ( str ( arg_7 . version ) for arg_7 in arg_5 [ 1 : ] ) or \"none\" , ) raise BestVersionAlreadyInstalled if len ( arg_5 ) > 1 : logger . debug ( 'Using version %s (newest of versions: %s)' , arg_5 [ 0 ] . version , ', ' . join ( str ( arg_7 . version ) for arg_7 in arg_5 ) ) arg_8 = arg_5 [ 0 ] . location if ( arg_8 . verifiable is not None and not arg_8 . verifiable ) : logger . warning ( \"%s is potentially insecure and unverifiable.\" , arg_1 . name , ) return arg_8","id_":253652,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/pip\/index.py#L479-L592","negative":"Test whether a path can be written to."} {"query":"Check whether a year is a leap year .","positive":"def Func ( arg_0 ) : import pandas as pd return pd . Series ( arg_0 ) . dt . is_leap_year . values","id_":253653,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/functions.py#L218-L244","negative":"Init openstack glance mq\n\n 1. Check if enable listening glance notification\n 2. Create consumer\n\n :param mq: class ternya.mq.MQ"} {"query":"Parse a FIQL formatted string into an Expression .","positive":"def Func ( arg_0 ) : arg_1 = 0 arg_2 = None arg_3 = Expression ( ) for ( arg_4 , arg_5 , arg_6 , arg_7 ) in iter_parse ( arg_0 ) : if arg_4 : for arg_8 in arg_4 : if arg_8 == '(' : if isinstance ( arg_2 , BaseExpression ) : raise FiqlFormatException ( \"%s can not be followed by %s\" % ( arg_2 . __class__ , Expression ) ) arg_3 = arg_3 . create_nested_expression ( ) arg_1 += 1 elif arg_8 == ')' : arg_3 = arg_3 . get_parent ( ) arg_2 = arg_3 arg_1 -= 1 else : if not arg_3 . 
has_constraint ( ) : raise FiqlFormatException ( \"%s proceeding initial %s\" % ( Operator , Constraint ) ) if isinstance ( arg_2 , Operator ) : raise FiqlFormatException ( \"%s can not be followed by %s\" % ( Operator , Operator ) ) arg_2 = Operator ( arg_8 ) arg_3 = arg_3 . add_operator ( arg_2 ) if arg_5 : if isinstance ( arg_2 , BaseExpression ) : raise FiqlFormatException ( \"%s can not be followed by %s\" % ( arg_2 . __class__ , Constraint ) ) arg_2 = Constraint ( arg_5 , arg_6 , arg_7 ) arg_3 . add_element ( arg_2 ) if arg_1 != 0 : raise FiqlFormatException ( \"At least one nested expression was not correctly closed\" ) if not arg_3 . has_constraint ( ) : raise FiqlFormatException ( \"Parsed string '%s' contained no constraint\" % arg_0 ) return arg_3","id_":253654,"task_name":"https:\/\/github.com\/sergedomk\/fiql_parser\/blob\/499dd7cd0741603530ce5f3803d92813e74ac9c3\/fiql_parser\/parser.py#L62-L123","negative":"Prepare the actors, the world, and the messaging system to begin \n playing the game.\n \n This method is guaranteed to be called exactly once upon entering the \n game stage."} {"query":"Creates a virtual column which is the equivalent of numpy . arange but uses 0 memory","positive":"def Func ( arg_0 , arg_1 , arg_2 = 1 , arg_3 = 'f8' ) : from . column import ColumnVirtualRange return ColumnVirtualRange ( arg_0 , arg_1 , arg_2 , arg_3 )","id_":253655,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/__init__.py#L587-L590","negative":"Return whether the input course or program exist."} {"query":"Sign inputs in a finalized bundle .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . hash : raise RuntimeError ( 'Cannot sign inputs until bundle is finalized.' ) arg_2 = 0 while arg_2 < len ( arg_0 ) : arg_3 = arg_0 [ arg_2 ] if arg_3 . value < 0 : if arg_3 . address . key_index is None : raise with_context ( exc = ValueError ( 'Unable to sign input {input}; ' '``key_index`` is None ' '(``exc.context`` has more info).' . format ( input = arg_3 . address , ) , ) , context = { 'transaction' : arg_3 , } , ) if arg_3 . address . security_level is None : raise with_context ( exc = ValueError ( 'Unable to sign input {input}; ' '``security_level`` is None ' '(``exc.context`` has more info).' . format ( input = arg_3 . address , ) , ) , context = { 'transaction' : arg_3 , } , ) arg_0 . sign_input_at ( arg_2 , arg_1 . get_key_for ( arg_3 . address ) ) arg_2 += arg_3 . address . security_level else : arg_2 += 1","id_":253656,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/transaction\/creation.py#L386-L438","negative":"Returns a randomly generated permanence value for a synapses that is\n initialized in a connected state. The basic idea here is to initialize\n permanence values very close to synPermConnected so that a small number of\n learning steps could make it disconnected or connected.\n\n Note: experimentation was done a long time ago on the best way to initialize\n permanence values, but the history for this particular scheme has been lost."} {"query":"Registers the models of the app with the given appName for the admin site","positive":"def Func ( arg_0 , arg_1 = [ ] ) : for arg_2 in apps . get_app_config ( arg_0 ) . get_models ( ) : if arg_2 not in arg_1 : admin . site . 
register ( arg_2 )","id_":253657,"task_name":"https:\/\/github.com\/seebass\/django-tooling\/blob\/aaee703040b299cae560c501c94b18e0c2620f0d\/django_tooling\/registeradmin.py#L5-L9","negative":"Performs completion at the current cursor location."} {"query":"Parses a semi - colon delimited list of headers .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 , arg_3 in _parse_keyvalue_list ( arg_1 ) : arg_0 . headers [ arg_2 ] = arg_3","id_":253658,"task_name":"https:\/\/github.com\/streamlink\/streamlink\/blob\/c8ed1daff14ac03195870238b9b900c1109dd5c1\/src\/streamlink\/plugin\/api\/http_session.py#L118-L124","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Converts stderr string to a list .","positive":"def Func ( arg_0 ) : if arg_0 . _streaming : Func = [ ] while not arg_0 . __stderr . empty ( ) : try : arg_2 = arg_0 . __stderr . get_nowait ( ) Func . append ( arg_2 ) except : pass else : Func = arg_0 . __stderr return Func","id_":253659,"task_name":"https:\/\/github.com\/aeroxis\/sultan\/blob\/65b4271a161d6c19a9eb0170b5a95832a139ab7f\/src\/sultan\/result.py#L169-L183","negative":"Compares and exchanges.\n\n Compares the value in the AL, AX, EAX or RAX register (depending on the\n size of the operand) with the first operand (destination operand). If\n the two values are equal, the second operand (source operand) is loaded\n into the destination operand. Otherwise, the destination operand is\n loaded into the AL, AX, EAX or RAX register.\n\n The ZF flag is set if the values in the destination operand and\n register AL, AX, or EAX are equal; otherwise it is cleared. The CF, PF,\n AF, SF, and OF flags are set according to the results of the comparison\n operation::\n\n (* accumulator = AL, AX, EAX or RAX, depending on whether *)\n (* a byte, word, a doubleword or a 64bit comparison is being performed*)\n IF accumulator == DEST\n THEN\n ZF = 1\n DEST = SRC\n ELSE\n ZF = 0\n accumulator = DEST\n FI;\n\n :param cpu: current CPU.\n :param dest: destination operand.\n :param src: source operand."} {"query":"Direct operate a single command","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = arg_4 . PrintingCommandCallback . Get ( ) , arg_7 = arg_8 . TaskConfig ( ) . Default ( ) ) : arg_0 . master . DirectOperate ( arg_1 , arg_2 , arg_3 , arg_7 )","id_":253660,"task_name":"https:\/\/github.com\/ChargePoint\/pydnp3\/blob\/5bcd8240d1fc0aa1579e71f2efcab63b4c61c547\/examples\/master.py#L94-L104","negative":"Generate a header for a tag section with specific parameters.\n\n :param str newer_tag_name: Name (title) of newer tag.\n :param str newer_tag_link: Tag name of newer tag, used for links.\n Could be same as **newer_tag_name** or some\n specific value, like `HEAD`.\n :param datetime newer_tag_time: Date and time when\n newer tag was created.\n :param str older_tag_link: Tag name of older tag, used for links.\n :param str project_url: URL for current project.\n :rtype: str\n :return: Generated ready-to-add tag section."} {"query":"Query group by a group name .","positive":"def Func ( arg_0 , arg_1 ) : try : return arg_0 . query . filter_by ( arg_1 = arg_1 ) . 
one ( ) except NoResultFound : return None","id_":253661,"task_name":"https:\/\/github.com\/inveniosoftware-contrib\/invenio-groups\/blob\/109481d6b02701db00b72223dd4a65e167c589a6\/invenio_groups\/models.py#L282-L291","negative":"Initialize the dictionary of architectures for disassembling via capstone"} {"query":"This creates an SQS queue .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : if not arg_2 : arg_2 = boto3 . client ( 'sqs' ) try : if isinstance ( arg_1 , dict ) : arg_3 = arg_2 . create_queue ( QueueName = arg_0 , Attributes = arg_1 ) else : arg_3 = arg_2 . create_queue ( QueueName = arg_0 ) if arg_3 is not None : return { 'url' : arg_3 [ 'QueueUrl' ] , 'name' : arg_0 } else : LOGERROR ( 'could not create the specified queue: %s with options: %s' % ( arg_0 , arg_1 ) ) return None except Exception as e : LOGEXCEPTION ( 'could not create the specified queue: %s with options: %s' % ( arg_0 , arg_1 ) ) return None","id_":253662,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/awsutils.py#L377-L429","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Submit the selected exercise to the server .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = False ) : if arg_1 is not None : return Func_exercise ( Exercise . byid ( arg_1 ) , arg_2 = arg_2 , request_review = arg_3 ) else : arg_4 = Exercise . get_selected ( ) if not arg_4 : raise NoExerciseSelected ( ) return Func_exercise ( arg_4 , arg_2 = arg_2 , request_review = arg_3 )","id_":253663,"task_name":"https:\/\/github.com\/minttu\/tmc.py\/blob\/212cfe1791a4aab4783f99b665cc32da6437f419\/tmc\/__main__.py#L323-L335","negative":"Parses data from Wikipedia page markup.\n\n The markup comes from Wikipedia's edit page.\n We parse it here into objects containing plain text.\n The light version parses only links to other articles, it's faster than a full parse."} {"query":"Show Program Landing page for the Enterprise s Program .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : verify_edx_resources ( ) arg_4 = Func_enterprise_customer_or_404 ( arg_2 ) arg_5 = Func_global_context ( arg_1 , arg_4 ) arg_6 , arg_7 = arg_0 . Func_program_details ( arg_1 , arg_3 , arg_4 ) if arg_7 : return render ( arg_1 , ENTERPRISE_GENERAL_ERROR_PAGE , context = arg_5 , status = 404 , ) if arg_6 [ 'certificate_eligible_for_program' ] : return redirect ( LMS_PROGRAMS_DASHBOARD_URL . format ( uuid = arg_3 ) ) arg_8 = [ ] for arg_9 in arg_6 [ 'courses' ] : for arg_10 in arg_9 [ 'course_runs' ] : arg_8 . append ( arg_10 [ 'key' ] ) arg_11 = EmbargoApiClient . redirect_if_blocked ( arg_8 , arg_1 . user , Func_ip ( arg_1 ) , arg_1 . path ) if arg_11 : return redirect ( arg_11 ) return arg_0 . Func_enterprise_program_enrollment_page ( arg_1 , arg_4 , arg_6 )","id_":253664,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/views.py#L1523-L1564","negative":"If there are edits to the current input buffer, store them."} {"query":"Start subscribing channels . If the necessary connection isn t open yet it opens now .","positive":"async def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = [ ] for arg_4 in arg_1 : if arg_4 . startswith ( ( 'Q.' , 'T.' , 'A.' , 'AM.' , ) ) : arg_3 . append ( arg_4 ) else : arg_2 . append ( arg_4 ) if len ( arg_2 ) > 0 : await arg_0 . _ensure_ws ( ) await arg_0 . _ws . send ( json . 
dumps ( { 'action' : 'listen' , 'data' : { 'streams' : arg_2 , } } ) ) if len ( arg_3 ) > 0 : await arg_0 . _ensure_nats ( ) await arg_0 . polygon . Func ( arg_3 )","id_":253665,"task_name":"https:\/\/github.com\/alpacahq\/alpaca-trade-api-python\/blob\/9c9dea3b4a37c909f88391b202e86ff356a8b4d7\/alpaca_trade_api\/stream2.py#L76-L99","negative":"Serial call to read month tariffs block into meter object buffer.\n\n Args:\n months_type (int): A :class:`~ekmmeters.ReadMonths` value.\n\n Returns:\n bool: True on completion."} {"query":"Processes Alexa requests from skill server and returns responses to Alexa .","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> arg_2 : arg_3 : bytes = arg_1 [ 'request_body' ] arg_4 : str = arg_1 [ 'signature_chain_url' ] arg_5 : str = arg_1 [ 'signature' ] arg_6 : arg_2 = arg_1 [ 'alexa_request' ] if not arg_0 . _verify_request ( arg_4 , arg_5 , arg_3 ) : return { 'error' : 'failed certificate\/signature check' } arg_7 = arg_6 [ 'request' ] [ 'timestamp' ] arg_8 = datetime . strptime ( arg_7 , '%Y-%m-%dT%H:%M:%SZ' ) arg_9 = datetime . utcnow ( ) arg_10 = arg_9 - arg_8 if arg_9 >= arg_8 else arg_8 - arg_9 if abs ( arg_10 . seconds ) > REQUEST_TIMESTAMP_TOLERANCE_SECS : log . error ( f'Failed timestamp check for request: {request_body.decode(\"utf-8\", \"replace\")}' ) return { 'error' : 'failed request timestamp check' } arg_11 = arg_6 [ 'session' ] [ 'user' ] [ 'userId' ] if arg_11 not in arg_0 . conversations . keys ( ) : if arg_0 . config [ 'multi_instance' ] : arg_12 = arg_0 . _init_agent ( ) log . info ( 'New conversation instance level agent initiated' ) else : arg_12 = arg_0 . agent arg_0 . conversations [ arg_11 ] = Conversation ( config = arg_0 . config , agent = arg_12 , arg_11 = arg_11 , self_destruct_callback = lambda : arg_0 . _del_conversation ( arg_11 ) ) log . info ( f'Created new conversation, key: {conversation_key}' ) arg_14 = arg_0 . conversations [ arg_11 ] arg_15 = arg_14 . handle_request ( arg_6 ) return arg_15","id_":253666,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/utils\/alexa\/bot.py#L148-L194","negative":"Report elapsed time."} {"query":"Download the accessions into a the designated workdir .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = None , arg_3 = 30 , arg_4 = \"_\" , arg_5 = False ) : try : if not os . path . exists ( arg_0 . workdir ) : os . makedirs ( arg_0 . workdir ) arg_0 . _set_vdbconfig_path ( ) if arg_2 : arg_0 . _ipcluster [ \"pids\" ] = { } for arg_7 in arg_2 . ids : arg_8 = arg_2 [ arg_7 ] if not arg_8 . outstanding : arg_9 = arg_8 . apply ( os . getpid ) . get ( ) arg_0 . _ipcluster [ \"pids\" ] [ arg_7 ] = arg_9 arg_0 . _submit_jobs ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , ) except IPyradWarningExit as inst : print ( inst ) except KeyboardInterrupt : print ( \"keyboard interrupt...\" ) except Exception as inst : print ( \"Exception in Func() - {}\" . format ( inst ) ) finally : arg_0 . _restore_vdbconfig_path ( ) arg_10 = os . path . join ( arg_0 . workdir , \"sra\" ) if os . path . exists ( arg_10 ) and ( not os . listdir ( arg_10 ) ) : shutil . rmtree ( arg_10 ) else : try : print ( FAILED_DOWNLOAD . format ( os . listdir ( arg_10 ) ) ) except OSError as inst : raise IPyradWarningExit ( \"Download failed. Exiting.\" ) for arg_11 in os . listdir ( arg_10 ) : arg_12 = arg_11 . split ( \".\" ) [ 0 ] arg_13 = os . path . join ( arg_0 . workdir , \"*_{}*.gz\" . format ( arg_12 ) ) arg_14 = glob . 
glob ( arg_13 ) [ 0 ] if os . path . exists ( arg_14 ) : os . remove ( arg_14 ) shutil . rmtree ( arg_10 ) if arg_2 : try : arg_2 . abort ( ) time . sleep ( 0.5 ) for arg_15 , arg_9 in arg_0 . _ipcluster [ \"pids\" ] . items ( ) : if arg_2 . queue_status ( ) [ arg_15 ] [ \"tasks\" ] : os . kill ( arg_9 , 2 ) time . sleep ( 0.1 ) except ipp . NoEnginesRegistered : pass if not arg_2 . outstanding : arg_2 . purge_everything ( ) else : arg_2 . shutdown ( hub = True , block = False ) arg_2 . close ( ) print ( \"\\nwarning: ipcluster shutdown and must be restarted\" )","id_":253667,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/analysis\/sratools.py#L87-L202","negative":"Init a uniform noise variable."} {"query":"put a list of tasks and their arguments","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . isopen : arg_2 = logging . getLogger ( __name__ ) arg_2 . warning ( 'the drop box is not open' ) return arg_3 = [ ] for arg_4 in arg_1 : try : arg_5 = arg_4 [ 'task' ] arg_6 = arg_4 . get ( 'args' , ( ) ) arg_7 = arg_4 . get ( 'kwargs' , { } ) arg_8 = TaskPackage ( arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = arg_7 ) except TypeError : arg_8 = TaskPackage ( arg_5 = arg_4 , arg_6 = ( ) , arg_7 = { } ) arg_3 . append ( arg_8 ) return arg_0 . dropbox . Func ( arg_3 )","id_":253668,"task_name":"https:\/\/github.com\/alphatwirl\/alphatwirl\/blob\/5138eeba6cd8a334ba52d6c2c022b33c61e3ba38\/alphatwirl\/concurrently\/CommunicationChannel.py#L152-L187","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Override of clean method to perform additional validation","positive":"def Func ( arg_0 ) : arg_1 = super ( EnterpriseCustomerReportingConfigAdminForm , arg_0 ) . Func ( ) arg_2 = arg_1 . get ( 'enterprise_customer' ) arg_3 = [ '{} ({})' . format ( catalog . title , catalog . uuid ) for catalog in arg_1 . get ( 'enterprise_customer_catalogs' ) if catalog . enterprise_customer != arg_2 ] if arg_3 : arg_4 = _ ( 'These catalogs for reporting do not match enterprise' 'customer {enterprise_customer}: {invalid_catalogs}' , ) . format ( enterprise_customer = arg_2 , arg_3 = arg_3 , ) arg_0 . add_error ( 'enterprise_customer_catalogs' , arg_4 )","id_":253669,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/admin\/forms.py#L528-L550","negative":"Generate a square lattice with auxiliary nodes for spanning detection\n\n Parameters\n ----------\n\n length : int\n Number of nodes in one dimension, excluding the auxiliary nodes.\n\n Returns\n -------\n\n networkx.Graph\n A square lattice graph with auxiliary nodes for spanning cluster\n detection\n\n See Also\n --------\n\n sample_states : spanning cluster detection"} {"query":"Add a header check i . e . check whether the header record is consistent with the expected field names .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = arg_4 [ arg_2 ] ) : arg_5 = arg_1 , arg_3 arg_0 . _header_checks . append ( arg_5 )","id_":253670,"task_name":"https:\/\/github.com\/alimanfoo\/csvvalidator\/blob\/50a86eefdc549c48f65a91a5c0a66099010ee65d\/csvvalidator.py#L154-L172","negative":"Set command option defaults."} {"query":"Ensures that we have an open connection to the given peer . Returns the peer id . This should be equal to the given one but it might not if the given peer was say the IP and the peer actually identifies itself with a host name . The returned peer is the real one that should be used . 
This can be handy if we aren t 100% sure of the peer s identity .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 in arg_0 . _connections : return defer . succeed ( arg_1 ) else : arg_2 = arg_0 . _connect ( arg_1 , exact_peer = False ) def connected ( arg_3 ) : return arg_3 . peer arg_2 . addCallback ( connected ) return arg_2","id_":253671,"task_name":"https:\/\/github.com\/pydron\/anycall\/blob\/43add96660258a14b24aa8e8413dffb1741b72d7\/anycall\/connectionpool.py#L114-L131","negative":"Read existing output data from a previous run.\n\n Returns\n -------\n process output : NumPy array (raster) or feature iterator (vector)"} {"query":"Gets list of BoxScore objects corresponding to the box scores from that year .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . get_year_doc ( arg_1 ) arg_3 = arg_2 ( 'table#games' ) arg_4 = sportsref . utils . parse_table ( arg_3 ) if arg_4 . empty : return np . array ( [ ] ) return arg_4 . boxscore_id . values","id_":253672,"task_name":"https:\/\/github.com\/mdgoldberg\/sportsref\/blob\/09f11ac856a23c96d666d1d510bb35d6f050b5c3\/sportsref\/nfl\/teams.py#L146-L159","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Split sentences in groups given a specific group length .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = [ ] for arg_4 in range ( 0 , arg_0 ) : if arg_4 % arg_1 == 0 : if len ( arg_3 ) > 0 : arg_2 . append ( arg_3 ) arg_3 = [ arg_4 ] else : arg_3 . append ( arg_4 ) if len ( arg_3 ) > 0 : arg_2 . append ( arg_3 ) return arg_2","id_":253673,"task_name":"https:\/\/github.com\/hirmeos\/entity-fishing-client-python\/blob\/cd5c6e10c6c4e653669e11d735d5773766986bda\/nerd\/nerd_client.py#L136-L160","negative":"main execution loop. query weather data and post to online service."} {"query":"Generic redirect for item editor .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if '_addanother' in arg_1 . POST : return HttpResponseRedirect ( '..\/item_add\/' ) elif '_save' in arg_1 . POST : return HttpResponseRedirect ( '..\/' ) elif '_continue' in arg_1 . POST : return arg_2 return HttpResponseRedirect ( '' )","id_":253674,"task_name":"https:\/\/github.com\/idlesign\/django-sitetree\/blob\/61de4608e6e415247c75fe8691027d7c4ed0d1e7\/sitetree\/admin.py#L127-L139","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Cleans the temporary fastq files . If they are symlinks the link source is removed","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 : arg_2 = os . path . realpath ( arg_1 ) logger . debug ( \"Removing temporary fastq file path: {}\" . format ( arg_2 ) ) if re . match ( \".*\/work\/.{2}\/.{30}\/.*\" , arg_2 ) : os . 
remove ( arg_2 )","id_":253675,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/templates\/megahit.py#L177-L193","negative":"Return a list of all enrollments for the passed section_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/enrollments.html#method.enrollments_api.index"} {"query":"Extract swagger operation details from colander view definitions .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = { 'responses' : { 'default' : { 'description' : 'UNDOCUMENTED RESPONSE' } } , } arg_4 = arg_2 . get ( 'renderer' , '' ) if \"json\" in arg_4 : arg_5 = [ 'application\/json' ] elif arg_4 == 'xml' : arg_5 = [ 'text\/xml' ] else : arg_5 = None if arg_5 : arg_3 . setdefault ( 'produces' , arg_5 ) arg_6 = arg_2 . get ( 'content_type' ) if arg_6 is not None : arg_6 = to_list ( arg_6 ) arg_6 = [ x for x in arg_6 if not callable ( x ) ] arg_3 [ 'consumes' ] = arg_6 arg_7 = arg_0 . _is_colander_schema ( arg_2 ) if arg_7 : arg_8 = arg_0 . _extract_transform_colander_schema ( arg_2 ) arg_9 = arg_0 . parameters . from_schema ( arg_8 ) else : arg_9 = None if arg_9 : arg_3 [ 'parameters' ] = arg_9 if isinstance ( arg_1 , six . string_types ) : if 'klass' in arg_2 : arg_10 = arg_2 [ 'klass' ] arg_11 = getattr ( arg_10 , arg_1 . lower ( ) ) arg_12 = trim ( arg_11 . __doc__ ) else : arg_12 = str ( trim ( arg_1 . __doc__ ) ) if arg_12 and arg_0 . summary_docstrings : arg_3 [ 'summary' ] = arg_12 if 'response_schemas' in arg_2 : arg_3 [ 'responses' ] = arg_0 . responses . from_schema_mapping ( arg_2 [ 'response_schemas' ] ) if 'tags' in arg_2 : arg_3 [ 'tags' ] = arg_2 [ 'tags' ] if 'operation_id' in arg_2 : arg_3 [ 'operationId' ] = arg_2 [ 'operation_id' ] if 'api_security' in arg_2 : arg_3 [ 'security' ] = arg_2 [ 'api_security' ] return arg_3","id_":253676,"task_name":"https:\/\/github.com\/Cornices\/cornice.ext.swagger\/blob\/c31a5cc8d5dd112b11dc41ccb6d09b423b537abc\/cornice_swagger\/swagger.py#L594-L679","negative":"Awake one process waiting to receive data on fd"} {"query":"pass in a list of options promt the user to select one and return the selected option or None","positive":"def Func ( arg_0 = None ) : if not arg_0 : return None arg_1 = len ( str ( len ( arg_0 ) ) ) for arg_2 , arg_3 in enumerate ( arg_0 ) : arg_5 . stdout . write ( '{:{width}}) {}\\n' . format ( arg_2 + 1 , arg_3 , arg_1 = arg_1 ) ) arg_5 . stdout . write ( '{:>{width}} ' . format ( '#?' , arg_1 = arg_1 + 1 ) ) arg_5 . stdout . flush ( ) if arg_5 . stdin . isatty ( ) : try : arg_4 = raw_input ( ) . strip ( ) except ( EOFError , KeyboardInterrupt ) : arg_4 = '' else : arg_5 . stdin = open ( \"\/dev\/tty\" ) try : arg_4 = '' while True : arg_4 += arg_5 . stdin . read ( 1 ) if arg_4 . endswith ( '\\n' ) : break except ( EOFError , KeyboardInterrupt ) : arg_5 . stdout . flush ( ) pass try : arg_4 = int ( arg_4 ) - 1 except ValueError : return None if arg_4 < 0 or arg_4 >= len ( arg_0 ) : return None return arg_0 [ arg_4 ]","id_":253677,"task_name":"https:\/\/github.com\/askedrelic\/pyselect\/blob\/2f68e3e87e3c44e9d96e1506ba98f9c3a30ded2c\/pyselect.py#L11-L46","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"Run simulation for Unit instance","positive":"def Func ( arg_0 , arg_1 : arg_2 , arg_3 : arg_4 , arg_5 = [ ] ) : arg_6 = arg_0 . config . beforeSim if arg_6 is not None : arg_6 ( arg_0 , arg_1 ) arg_7 = arg_0 . add_process for arg_8 in arg_5 : arg_7 ( arg_8 ( arg_0 ) ) arg_0 . _initUnitSignals ( arg_1 ) arg_0 . 
run ( arg_3 )","id_":253678,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/simulator\/hdlSimulator.py#L538-L551","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Generate message authentication code .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = None if arg_1 in arg_0 . _hash_algorithms . keys ( ) : arg_0 . logger . info ( \"Generating a hash-based message authentication code using \" \"{0}\" . format ( arg_1 . name ) ) arg_5 = arg_0 . _hash_algorithms . get ( arg_1 ) try : arg_6 = hFunc . HMAC ( arg_2 , arg_5 ( ) , backend = default_backend ( ) ) arg_6 . update ( arg_3 ) arg_4 = arg_6 . finalize ( ) except Exception as e : arg_0 . logger . exception ( e ) raise exceptions . CryptographicFailure ( \"An error occurred while computing an HMAC. \" \"See the server log for more information.\" ) elif arg_1 in arg_0 . _symmetric_key_algorithms . keys ( ) : arg_0 . logger . info ( \"Generating a cipher-based message authentication code using \" \"{0}\" . format ( arg_1 . name ) ) arg_7 = arg_0 . _symmetric_key_algorithms . get ( arg_1 ) try : arg_8 = cFunc . CMAC ( arg_7 ( arg_2 ) , backend = default_backend ( ) ) arg_8 . update ( arg_3 ) arg_4 = arg_8 . finalize ( ) except Exception as e : raise exceptions . CryptographicFailure ( \"An error occurred while computing a CMAC. \" \"See the server log for more information.\" ) else : raise exceptions . InvalidField ( \"The cryptographic algorithm ({0}) is not a supported \" \"for a MAC operation.\" . format ( arg_1 ) ) return arg_4","id_":253679,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/services\/server\/crypto\/engine.py#L222-L288","negative":"Convenience function to efficiently construct a MultivariateNormalDiag."} {"query":"Return the Baystat distance .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None ) : return Baystat ( ) . dist ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 )","id_":253680,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/distance\/_baystat.py#L214-L249","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Group iterable by n elements .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : return list ( zip_longest ( * [ iter ( arg_0 ) ] * arg_1 , arg_2 = arg_2 ) )","id_":253681,"task_name":"https:\/\/github.com\/alexprengere\/currencyconverter\/blob\/e3cb0d693819c0c824214225b23a47e9380f71df\/currency_converter\/__main__.py#L16-L25","negative":"Print informations about PyFunceble and the date of generation of a file\n into a given path, if doesn't exist."} {"query":"- Value of purchases and sales divided by either the actual gross book or the portfolio value for the time step .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'AGB' ) : arg_3 = get_txn_vol ( arg_1 ) arg_4 = arg_3 . txn_volume if arg_2 == 'AGB' : arg_5 = arg_0 . drop ( 'cash' , axis = 1 ) . abs ( ) . sum ( axis = 1 ) arg_6 = arg_5 . rolling ( 2 ) . mean ( ) arg_6 . iloc [ 0 ] = arg_5 . 
iloc [ 0 ] \/ 2 elif arg_2 == 'portfolio_value' : arg_6 = arg_0 . sum ( axis = 1 ) else : raise ValueError ( \"Unexpected value for denominator '{}'. The \" \"denominator parameter must be either 'AGB'\" \" or 'portfolio_value'.\" . format ( arg_2 ) ) arg_6 . index = arg_6 . index . normalize ( ) arg_9 = arg_4 . div ( arg_6 , axis = 'index' ) arg_9 = arg_9 . fillna ( 0 ) return arg_9","id_":253682,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/txn.py#L149-L206","negative":"Count the number of non-zero values for each feature in sparse X."} {"query":"Writes data to the zip file and adds it to the manifest dictionary","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . manifest [ arg_1 ] = md5hash ( arg_2 ) arg_0 . package_zip . writestr ( arg_1 , arg_2 )","id_":253683,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/bpmn\/serializer\/Packager.py#L195-L204","negative":"Get a list of variable names from the user's namespace.\n\n Parameters\n ----------\n names : list of strings\n A list of names of variables to be read from the user namespace.\n\n Returns\n -------\n A dict, keyed by the input names and with the repr() of each value."} {"query":"Equation B . 17 of Clauset et al 2009","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = ( arg_0 >= arg_1 ) arg_3 = arg_2 . sum ( ) if arg_3 < 2 : return 0 arg_4 = arg_0 [ arg_2 ] arg_5 = 1.0 + float ( arg_3 ) * ( sum ( log ( arg_4 \/ ( float ( arg_1 ) - 0.5 ) ) ) ) ** - 1 return arg_5","id_":253684,"task_name":"https:\/\/github.com\/keflavich\/plfit\/blob\/7dafa6302b427ba8c89651148e3e9d29add436c3\/plfit\/plfit.py#L999-L1013","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."} {"query":"Release previously - acquired lock .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = False , arg_4 = None ) : arg_0 . __lockImpl . Func ( arg_1 , arg_0 . __selfID , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 )","id_":253685,"task_name":"https:\/\/github.com\/bakwc\/PySyncObj\/blob\/be3b0aaa932d5156f5df140c23c962430f51b7b8\/pysyncobj\/batteries.py#L520-L533","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Creates the email","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = arg_0 . charset or 'utf-8' arg_3 = arg_0 . attachments or [ ] if len ( arg_3 ) == 0 and not arg_0 . html : arg_4 = arg_0 . _mimetext ( arg_0 . body ) elif len ( arg_3 ) > 0 and not arg_0 . html : arg_4 = MIMEMultipart ( ) arg_4 . attach ( arg_0 . _mimetext ( arg_0 . body ) ) else : arg_4 = MIMEMultipart ( ) arg_5 = MIMEMultipart ( 'alternative' ) arg_5 . attach ( arg_0 . _mimetext ( arg_0 . body , 'plain' ) ) arg_5 . attach ( arg_0 . _mimetext ( arg_0 . html , 'html' ) ) arg_4 . attach ( arg_5 ) if arg_0 . charset : arg_4 [ 'Subject' ] = Header ( arg_0 . 
subject , arg_2 ) else : arg_4 [ 'Subject' ] = arg_0 . subject arg_6 = arg_0 . sender or arg_1 if arg_6 is not None : arg_4 [ 'From' ] = sanitize_address ( arg_6 , arg_2 ) arg_4 [ 'To' ] = ', ' . join ( list ( set ( sanitize_addresses ( arg_0 . recipients , arg_2 ) ) ) ) arg_4 [ 'Date' ] = formatdate ( arg_0 . date , localtime = True ) arg_4 [ 'Message-ID' ] = arg_0 . msgId if arg_0 . cc : arg_4 [ 'Cc' ] = ', ' . join ( list ( set ( sanitize_addresses ( arg_0 . cc , arg_2 ) ) ) ) if arg_0 . reply_to : arg_4 [ 'Reply-To' ] = sanitize_address ( arg_0 . reply_to , arg_2 ) if arg_0 . extra_headers : for arg_7 , arg_8 in arg_0 . extra_headers . items ( ) : arg_4 [ arg_7 ] = arg_8 for arg_9 in arg_3 : arg_10 = MIMEBase ( * arg_9 . content_type . split ( '\/' ) ) arg_10 . set_payload ( arg_9 . data ) encode_base64 ( arg_10 ) try : arg_9 . filename and arg_9 . filename . encode ( 'ascii' ) except UnicodeEncodeError : arg_11 = arg_9 . filename if not PY3 : arg_11 = arg_11 . encode ( 'utf8' ) arg_10 . add_header ( 'Content-Disposition' , arg_9 . disposition , arg_11 = ( 'UTF8' , '' , arg_11 ) ) else : arg_10 . add_header ( 'Content-Disposition' , '%s;filename=%s' % ( arg_9 . disposition , arg_9 . filename ) ) for arg_12 , arg_13 in arg_9 . headers : arg_10 . add_header ( arg_12 , arg_13 ) arg_4 . attach ( arg_10 ) return arg_4 . Func ( )","id_":253686,"task_name":"https:\/\/github.com\/nicolas-van\/mailflash\/blob\/794598d9df0e343bb1f64b03d09a68a540229774\/mailflash.py#L306-L374","negative":"Read attribute from sysfs and return as string"} {"query":"Make a prediction with the component described in corresponding configuration file .","positive":"def Func ( arg_0 : arg_1 [ arg_2 , arg_3 , arg_4 ] , arg_5 : arg_6 = 1 , arg_7 : arg_8 [ arg_2 ] = None ) -> None : if arg_7 is None or arg_7 == '-' : if sys . stdin . isatty ( ) : raise RuntimeError ( 'To process data from terminal please use interact mode' ) arg_9 = sys . stdin else : arg_9 = open ( arg_7 , encoding = 'utf8' ) arg_10 : Chainer = build_model ( arg_0 ) arg_11 = len ( arg_10 . in_x ) while True : arg_12 = list ( ( l . strip ( ) for l in islice ( arg_9 , arg_5 * arg_11 ) ) ) if not arg_12 : break arg_13 = [ ] for arg_14 in range ( arg_11 ) : arg_13 . append ( arg_12 [ arg_14 : : arg_11 ] ) arg_15 = arg_10 ( * arg_13 ) if len ( arg_10 . out_params ) == 1 : arg_15 = [ arg_15 ] for arg_15 in zip ( * arg_15 ) : arg_15 = json . dumps ( arg_15 , ensure_ascii = False ) print ( arg_15 , flush = True ) if arg_9 is not sys . stdin : arg_9 . close ( )","id_":253687,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/commands\/infer.py#L92-L122","negative":"Call each callable in callables, suppressing any exceptions supplied. If\n\tno exception classes are supplied, all Exceptions will be suppressed.\n\n\t>>> import functools\n\t>>> c1 = functools.partial(int, 'a')\n\t>>> c2 = functools.partial(int, '10')\n\t>>> list(suppress_exceptions((c1, c2)))\n\t[10]\n\t>>> list(suppress_exceptions((c1, c2), KeyError))\n\tTraceback (most recent call last):\n\t...\n\tValueError: invalid literal for int() with base 10: 'a'"} {"query":"r Sample from the specified distribution .","positive":"def Func ( arg_0 , arg_1 = None ) : from numpy_sugar import epsilon from numpy_sugar . linalg import sum2diag from numpy_sugar . random import multivariate_normal if arg_1 is None : arg_1 = RandomState ( ) arg_2 = arg_0 . _mean . value ( ) arg_3 = arg_0 . _cov . value ( ) . copy ( ) sum2diag ( arg_3 , + epsilon . 
small , out = arg_3 ) return arg_0 . _lik . Func ( multivariate_normal ( arg_2 , arg_3 , arg_1 ) , arg_1 )","id_":253688,"task_name":"https:\/\/github.com\/limix\/glimix-core\/blob\/cddd0994591d100499cc41c1f480ddd575e7a980\/glimix_core\/random\/_ggp.py#L52-L77","negative":"Calculate hexadecimal representation of the phrase."} {"query":"Appends a child to the object s children .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if isinstance ( arg_2 , Subgraph ) : arg_1 . subgraphs . append ( arg_2 ) elif isinstance ( arg_2 , Cluster ) : arg_1 . clusters . append ( arg_2 ) elif isinstance ( arg_2 , Node ) : arg_1 . nodes . append ( arg_2 ) elif isinstance ( arg_2 , Edge ) : arg_1 . edges . append ( arg_2 ) else : pass","id_":253689,"task_name":"https:\/\/github.com\/rwl\/godot\/blob\/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f\/godot\/ui\/graph_tree.py#L90-L106","negative":"Run stochastic volatility model.\n\n This model estimates the volatility of a returns series over time.\n Returns are assumed to be T-distributed. lambda (width of\n T-distributed) is assumed to follow a random-walk.\n\n Parameters\n ----------\n data : pandas.Series\n Return series to model.\n samples : int, optional\n Posterior samples to draw.\n\n Returns\n -------\n model : pymc.Model object\n PyMC3 model containing all random variables.\n trace : pymc3.sampling.BaseTrace object\n A PyMC3 trace object that contains samples for each parameter\n of the posterior.\n\n See Also\n --------\n plot_stoch_vol : plotting of tochastic volatility model"} {"query":"Process a message received from remote .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . ws . closed : return None try : safe_call ( arg_0 . logger . debug , '< %s %r' , arg_0 , arg_1 ) for arg_2 in arg_0 . ddp_frames_from_message ( arg_1 ) : arg_0 . process_ddp ( arg_2 ) signals . request_finished . send ( sender = arg_0 . __class__ ) except geventwebsocket . WebSocketError : arg_0 . ws . close ( )","id_":253690,"task_name":"https:\/\/github.com\/jazzband\/django-ddp\/blob\/1e1954b06fe140346acea43582515991685e4e01\/dddp\/websocket.py#L183-L196","negative":"Load a default value for redshift from config and set it as the redshift for source or lens galaxies that have\n falsey redshifts\n\n Parameters\n ----------\n key: str\n\n Returns\n -------\n decorator\n A decorator that wraps the setter function to set defaults"} {"query":"Handler for del command","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . validate ( 'cmd|s3' , arg_1 ) arg_2 = arg_1 [ 1 ] arg_0 . s3handler ( ) . del_files ( arg_2 )","id_":253691,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L1727-L1731","negative":"Save filter script to an mlx file"} {"query":"regress tip values against branch values","positive":"def Func ( arg_0 , arg_1 = None ) : arg_0 . _calculate_averages ( ) arg_2 = base_Func ( arg_0 . tree . root . Q , arg_1 ) arg_2 [ 'r_val' ] = arg_0 . explained_variance ( ) return arg_2","id_":253692,"task_name":"https:\/\/github.com\/neherlab\/treetime\/blob\/f6cdb58d19243a18ffdaa2b2ec71872fa00e65c0\/treetime\/treeregression.py#L292-L310","negative":"Returns a decorator to swallow a requests exception for modules that\n are not accessible without logging in, and turn it into an Unavailable\n exception."} {"query":"Take doc and create a new doc using only keys from the fields list . Supports referencing fields using dotted notation a . b . c so we can parse nested fields the way MongoDB does . The nested field class is a hack . 
It should be a sub - class of dict .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 is None or len ( arg_1 ) == 0 : return arg_0 arg_2 = Nested_Dict ( { } ) arg_3 = Nested_Dict ( arg_0 ) for arg_4 in arg_1 : if arg_3 . has_key ( arg_4 ) : arg_2 . set_value ( arg_4 , arg_3 . get_value ( arg_4 ) ) return arg_2 . dict_value ( )","id_":253693,"task_name":"https:\/\/github.com\/jdrumgoole\/pymongo_formatter\/blob\/313fef8f2ff5e7d4f1515ea59a99ec25f7999e7b\/pymongo_formatter\/formatter.py#L80-L99","negative":"The factory method to create WebDriverResult from JSON Object.\n\n Args:\n obj(dict): The JSON Object returned by server."} {"query":"Sonify a jams annotation through mir_eval","positive":"def Func ( arg_0 , arg_1 = 22050 , arg_2 = None , ** arg_3 ) : arg_4 = None if arg_2 is None : arg_2 = arg_0 . duration if arg_2 is not None : arg_4 = int ( arg_2 * arg_1 ) if arg_0 . namespace in SONIFY_MAPPING : arg_5 = coerce_annotation ( arg_0 , arg_0 . namespace ) return SONIFY_MAPPING [ arg_0 . namespace ] ( arg_5 , arg_1 = arg_1 , arg_4 = arg_4 , ** arg_3 ) for arg_6 , arg_7 in six . iteritems ( SONIFY_MAPPING ) : try : arg_5 = coerce_annotation ( arg_0 , arg_6 ) return arg_7 ( arg_5 , arg_1 = arg_1 , arg_4 = arg_4 , ** arg_3 ) except NamespaceError : pass raise NamespaceError ( 'Unable to Func annotation of namespace=\"{:s}\"' . format ( arg_0 . namespace ) )","id_":253694,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/sonify.py#L187-L239","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"End of a configuration session . Tells the router we re done managing admin functionality .","positive":"def Func ( arg_0 ) : _LOGGER . info ( \"Config finish\" ) if not arg_0 . config_started : return True arg_1 , arg_2 = arg_0 . _make_request ( SERVICE_DEVICE_CONFIG , \"ConfigurationFinished\" , { \"NewStatus\" : \"ChangesApplied\" } ) arg_0 . config_started = not arg_1 return arg_1","id_":253695,"task_name":"https:\/\/github.com\/MatMaul\/pynetgear\/blob\/247d6b9524fcee4b2da0e65ca12c52ebdd3676b2\/pynetgear\/__init__.py#L295-L308","negative":"Add a torque to this body.\n\n Parameters\n ----------\n force : 3-tuple of float\n A vector giving the torque along each world or body coordinate axis.\n relative : bool, optional\n If False, the torque values are assumed to be given in the world\n coordinate frame. If True, they are assumed to be given in the\n body-relative coordinate frame. Defaults to False."} {"query":"Get an asset","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . get ( '\/assets\/{}' . format ( arg_1 ) ) return Asset ( arg_2 )","id_":253696,"task_name":"https:\/\/github.com\/alpacahq\/alpaca-trade-api-python\/blob\/9c9dea3b4a37c909f88391b202e86ff356a8b4d7\/alpaca_trade_api\/rest.py#L229-L232","negative":"Clean up a bunch of loose files."} {"query":"Get the data in JSON form","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = True ) : arg_3 = [ ] if arg_2 : arg_4 = arg_0 . 
get_translated_data ( ) else : arg_4 = arg_0 . data for arg_5 in arg_4 : arg_3 . append ( arg_4 [ arg_5 ] ) if arg_1 : arg_3 = json . dumps ( arg_3 , indent = 2 , separators = ( ',' , ': ' ) ) else : arg_3 = json . dumps ( arg_3 ) return arg_3","id_":253697,"task_name":"https:\/\/github.com\/zwischenloesung\/ardu-report-lib\/blob\/51bd4a07e036065aafcb1273b151bea3fdfa50fa\/libardurep\/datastore.py#L190-L205","negative":"Return the maximum file descriptor value."} {"query":"Provides permissions for mongoadmin for use in the context","positive":"def Func ( arg_0 , arg_1 = { } ) : arg_1 [ 'has_view_permission' ] = arg_0 . mongoadmin . has_view_permission ( arg_0 . request ) arg_1 [ 'has_edit_permission' ] = arg_0 . mongoadmin . has_edit_permission ( arg_0 . request ) arg_1 [ 'has_add_permission' ] = arg_0 . mongoadmin . has_add_permission ( arg_0 . request ) arg_1 [ 'has_delete_permission' ] = arg_0 . mongoadmin . has_delete_permission ( arg_0 . request ) return arg_1","id_":253698,"task_name":"https:\/\/github.com\/jazzband\/django-mongonaut\/blob\/5485b2e029dff8ae267a4cb39c92d0a72cb5b144\/mongonaut\/mixins.py#L119-L126","negative":"Get the context for this view."} {"query":"Create a field by field info dict .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . get ( 'type' ) if arg_1 not in FIELDS_NAME_MAP : raise ValueError ( _ ( 'not support this field: {}' ) . format ( arg_1 ) ) arg_2 = FIELDS_NAME_MAP . get ( arg_1 ) arg_3 = dict ( arg_0 ) arg_3 . pop ( 'type' ) return arg_2 . from_dict ( arg_3 )","id_":253699,"task_name":"https:\/\/github.com\/ausaki\/python-validator\/blob\/a3e591b1eae6d7a70f894c203dbd7195f929baa8\/validator\/fields.py#L29-L39","negative":"Run according to options in sys.argv and diff classifiers."} {"query":"Comparison for y coordinate","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _is_coordinate ( arg_1 ) if arg_0 . y > arg_1 . y : return True else : return False","id_":253700,"task_name":"https:\/\/github.com\/katerina7479\/pypdflite\/blob\/ac2501f30d6619eae9dea5644717575ca9263d0a\/pypdflite\/pdfobjects\/pdfcursor.py#L120-L126","negative":"Returns a dictionary with all the past baking statuses of a single book."} {"query":"Delete the password for the username of the service .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_0 . connected ( arg_1 ) : raise PasswordDeleteError ( \"Cancelled by user\" ) if not arg_0 . iface . hasEntry ( arg_0 . handle , arg_1 , arg_2 , arg_0 . appid ) : raise PasswordDeleteError ( \"Password not found\" ) arg_0 . iface . removeEntry ( arg_0 . handle , arg_1 , arg_2 , arg_0 . appid )","id_":253701,"task_name":"https:\/\/github.com\/jaraco\/keyring\/blob\/71c798378e365286b7cc03c06e4d7d24c7de8fc4\/keyring\/backends\/kwallet.py#L116-L124","negative":"Add noise to the given input.\n\n Parameters:\n -----------------------------------------------\n input: the input to add noise to\n noise: how much noise to add\n doForeground: If true, turn off some of the 1 bits in the input\n doBackground: If true, turn on some of the 0 bits in the input"} {"query":"Helper method for print_full_documentation .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if not arg_2 : arg_2 = sys . stdout arg_3 = arg_1 . get ( \"doc\" ) arg_4 = arg_1 . get ( \"module\" ) arg_5 = arg_1 . get ( \"msgs\" ) arg_6 = arg_1 . get ( \"options\" ) arg_7 = arg_1 . get ( \"reports\" ) arg_8 = \"%s checker\" % ( arg_0 . replace ( \"_\" , \" \" ) . title ( ) ) if arg_4 : print ( \".. 
_%s:\\n\" % arg_4 , file = arg_2 ) print ( arg_8 , file = arg_2 ) print ( \"~\" * len ( arg_8 ) , file = arg_2 ) print ( \"\" , file = arg_2 ) if arg_4 : print ( \"This checker is provided by ``%s``.\" % arg_4 , file = arg_2 ) print ( \"Verbatim name of the checker is ``%s``.\" % arg_0 , file = arg_2 ) print ( \"\" , file = arg_2 ) if arg_3 : arg_9 = \"{} Documentation\" . format ( arg_8 ) print ( arg_9 , file = arg_2 ) print ( \"^\" * len ( arg_9 ) , file = arg_2 ) print ( cleandoc ( arg_3 ) , file = arg_2 ) print ( \"\" , file = arg_2 ) if arg_6 : arg_9 = \"{} Options\" . format ( arg_8 ) print ( arg_9 , file = arg_2 ) print ( \"^\" * len ( arg_9 ) , file = arg_2 ) _rest_format_section ( arg_2 , None , arg_6 ) print ( \"\" , file = arg_2 ) if arg_5 : arg_9 = \"{} Messages\" . format ( arg_8 ) print ( arg_9 , file = arg_2 ) print ( \"^\" * len ( arg_9 ) , file = arg_2 ) for arg_10 , arg_11 in sorted ( arg_5 . items ( ) , key = lambda kv : ( _MSG_ORDER . index ( kv [ 0 ] [ 0 ] ) , kv [ 1 ] ) ) : arg_11 = build_message_definition ( arg_0 , arg_10 , arg_11 ) print ( arg_11 . format_help ( checkerref = False ) , file = arg_2 ) print ( \"\" , file = arg_2 ) if arg_7 : arg_9 = \"{} Reports\" . format ( arg_8 ) print ( arg_9 , file = arg_2 ) print ( \"^\" * len ( arg_9 ) , file = arg_2 ) for arg_12 in arg_7 : print ( \":%s: %s\" % arg_12 [ : 2 ] , file = arg_2 ) print ( \"\" , file = arg_2 ) print ( \"\" , file = arg_2 )","id_":253702,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/message\/message_handler_mix_in.py#L403-L459","negative":"Handle the case where the employee on SAPSF's side is marked as inactive."} {"query":"Convert schemas to be compatible with storage schemas .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = deepcopy ( arg_1 ) for arg_2 in arg_1 : for arg_3 in arg_2 . get ( 'foreignKeys' , [ ] ) : arg_4 = arg_3 [ 'reference' ] [ 'resource' ] if arg_4 != 'self' : if arg_4 not in arg_0 : arg_5 = 'Not resource \"%s\" for foreign key \"%s\"' arg_5 = arg_5 % ( arg_4 , arg_3 ) raise ValueError ( arg_5 ) arg_3 [ 'reference' ] [ 'resource' ] = arg_0 [ arg_4 ] return arg_1","id_":253703,"task_name":"https:\/\/github.com\/frictionlessdata\/datapackage-py\/blob\/aca085ea54541b087140b58a81332f8728baeeb2\/datapackage\/pushpull.py#L202-L229","negative":"Download outputs for job.\n\n Uses [Caching](\/references\/polyaxon-cli\/#caching)\n\n Examples:\n\n \\b\n ```bash\n $ polyaxon job -j 1 outputs\n ```"} {"query":"enters funcs for pairs","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 ) : LOGGER . info ( \"edges in Func %s\" , arg_3 ) arg_10 = arg_4 [ arg_0 , : , arg_3 [ 0 ] : arg_3 [ 1 ] + 1 ] arg_11 = arg_5 [ arg_0 , arg_3 [ 0 ] : arg_3 [ 1 ] + 1 , ] arg_12 = arg_4 [ arg_0 , : , arg_3 [ 2 ] : arg_3 [ 3 ] + 1 ] arg_13 = arg_5 [ arg_0 , arg_3 [ 2 ] : arg_3 [ 3 ] + 1 , ] arg_14 = np . all ( arg_10 == \"N\" , axis = 1 ) arg_15 = arg_14 + arg_6 LOGGER . info ( \"nsidx %s, nalln %s, smask %s\" , arg_15 , arg_14 , arg_6 ) arg_7 = arg_7 + np . invert ( arg_15 ) . astype ( np . int32 ) LOGGER . info ( \"samplecov %s\" , arg_7 ) arg_16 = np . sum ( np . invert ( arg_15 ) . astype ( np . int32 ) ) LOGGER . info ( \"idx %s\" , arg_16 ) arg_8 [ arg_16 ] += 1 arg_10 = arg_10 [ ~ arg_15 , ] arg_12 = arg_12 [ ~ arg_15 , ] arg_17 = arg_1 [ ~ arg_15 ] arg_18 = \"\\n\" . join ( [ name + s1 . tostring ( ) + \"nnnn\" + s2 . 
tostring ( ) for name , s1 , s2 in zip ( arg_17 , arg_10 , arg_12 ) ] ) arg_19 = [ \"-\" if arg_11 [ i , 0 ] else \"*\" if arg_11 [ i , 1 ] else \" \" for i in range ( len ( arg_11 ) ) ] arg_20 = [ \"-\" if arg_13 [ i , 0 ] else \"*\" if arg_13 [ i , 1 ] else \" \" for i in range ( len ( arg_13 ) ) ] arg_18 += \"\\n\" + arg_2 + \"\" . join ( arg_19 ) + \" \" + \"\" . join ( arg_20 ) + \"|{}|\" . format ( arg_0 + arg_9 ) return arg_18 , arg_7 , arg_8","id_":253704,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/write_outfiles.py#L727-L780","negative":"Retrieves the most recent timestamp of the media in the static root.\n\n If last_timestamp is given, retrieves the first timestamp more recent than this value."} {"query":"Create a new config file at the default location .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 ) : arg_7 = os . path . dirname ( arg_1 ) if not os . path . exists ( arg_7 ) : os . makedirs ( arg_7 ) arg_8 = configparser . ConfigParser ( ) arg_8 . add_section ( \"twtxt\" ) arg_8 . set ( \"twtxt\" , \"nick\" , arg_2 ) arg_8 . set ( \"twtxt\" , \"twtfile\" , arg_3 ) arg_8 . set ( \"twtxt\" , \"twturl\" , arg_4 ) arg_8 . set ( \"twtxt\" , \"disclose_identity\" , str ( arg_5 ) ) arg_8 . set ( \"twtxt\" , \"character_limit\" , \"140\" ) arg_8 . set ( \"twtxt\" , \"character_warning\" , \"140\" ) arg_8 . add_section ( \"following\" ) if arg_6 : arg_8 . set ( \"following\" , \"twtxt\" , \"https:\/\/buckket.org\/twtxt_news.txt\" ) arg_9 = arg_0 ( arg_1 , arg_8 ) arg_9 . write_config ( ) return arg_9","id_":253705,"task_name":"https:\/\/github.com\/buckket\/twtxt\/blob\/6c8ad8ef3cbcf0dd335a12285d8b6bbdf93ce851\/twtxt\/config.py#L63-L93","negative":"Show character in readable format"} {"query":"Helper to read the contents of the given file or path into a string with the given encoding . Encoding defaults to utf - 8 other useful encodings are ascii and latin - 1 .","positive":"def Func ( arg_0 , arg_1 = 'utf-8' ) : try : arg_2 = arg_0 . read ( ) except AttributeError : try : with open ( arg_0 , 'r' ) as fp : arg_2 = fp . read ( ) except TypeError : raise ValueError ( 'val must be file or path, but was type <%s>' % type ( arg_0 ) . __name__ ) except OSError : if not isinstance ( arg_0 , str_types ) : raise ValueError ( 'val must be file or path, but was type <%s>' % type ( arg_0 ) . __name__ ) raise if sys . version_info [ 0 ] == 3 and type ( arg_2 ) is bytes : return arg_2 . decode ( arg_1 , 'replace' ) elif sys . version_info [ 0 ] == 2 and arg_1 == 'ascii' : return arg_2 . encode ( 'ascii' , 'replace' ) else : try : return arg_2 . decode ( arg_1 , 'replace' ) except AttributeError : pass return arg_2","id_":253706,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L101-L131","negative":"This functions returns a list of jobs"} {"query":"Convert a datetime object into a valid STIX timestamp string .","positive":"def Func ( arg_0 ) : if arg_0 . tzinfo is None or arg_0 . tzinfo . utcoffset ( arg_0 ) is None : arg_1 = pytz . utc . localize ( arg_0 ) else : arg_1 = arg_0 . astimezone ( pytz . utc ) arg_2 = arg_1 . strftime ( \"%Y-%m-%dT%H:%M:%S\" ) arg_3 = arg_1 . strftime ( \"%f\" ) arg_4 = getattr ( arg_0 , \"precision\" , None ) if arg_4 == \"second\" : pass elif arg_4 == \"millisecond\" : arg_2 = arg_2 + \".\" + arg_3 [ : 3 ] elif arg_1 . microsecond > 0 : arg_2 = arg_2 + \".\" + arg_3 . 
rstrip ( \"0\" ) return arg_2 + \"Z\"","id_":253707,"task_name":"https:\/\/github.com\/oasis-open\/cti-taxii-client\/blob\/b4c037fb61d8b8892af34423e2c67c81218d6f8e\/taxii2client\/__init__.py#L48-L74","negative":"Show course track selection page for the enterprise.\n\n Based on `enterprise_uuid` in URL, the view will decide which\n enterprise customer's course enrollment page is to use.\n\n Unauthenticated learners will be redirected to enterprise-linked SSO.\n\n A 404 will be raised if any of the following conditions are met:\n * No enterprise customer uuid kwarg `enterprise_uuid` in request.\n * No enterprise customer found against the enterprise customer\n uuid `enterprise_uuid` in the request kwargs.\n * No course is found in database against the provided `course_id`."} {"query":"Use this if you want to create a new contact from an existing . vcf file .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : return arg_0 ( arg_1 , arg_2 , arg_3 , None , arg_4 )","id_":253708,"task_name":"https:\/\/github.com\/scheibler\/khard\/blob\/0f69430c2680f1ff5f073a977a3c5b753b96cc17\/khard\/carddav_object.py#L98-L105","negative":"Check if the device is on."} {"query":"Convert . egg - info directory with PKG - INFO to the Metadata 1 . 3 aka old - draft Metadata 2 . 0 format .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = read_pkg_info ( arg_1 ) arg_2 . replace_header ( 'Metadata-Version' , '2.0' ) arg_3 = os . path . join ( arg_0 , 'requires.txt' ) if os . path . exists ( arg_3 ) : arg_4 = open ( arg_3 ) . read ( ) for arg_5 , arg_6 in pkg_resources . split_sections ( arg_4 ) : arg_7 = '' if arg_5 and ':' in arg_5 : arg_5 , arg_7 = arg_5 . split ( ':' , 1 ) if arg_5 : arg_2 [ 'Provides-Extra' ] = arg_5 if arg_7 : arg_7 += \" and \" arg_7 += 'extra == %s' % repr ( arg_5 ) if arg_7 : arg_7 = '; ' + arg_7 for arg_8 in convert_requirements ( arg_6 ) : arg_2 [ 'Requires-Dist' ] = arg_8 + arg_7 arg_9 = arg_2 [ 'Description' ] if arg_9 : arg_2 . set_payload ( dedent_description ( arg_2 ) ) del arg_2 [ 'Description' ] return arg_2","id_":253709,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/wheel\/metadata.py#L226-L255","negative":"Execute show subcommand."} {"query":"Returns a DataFrame of shooting stats .","positive":"def Func ( arg_0 , arg_1 = 'R' , arg_2 = False ) : return arg_0 . _get_stats_table ( 'shooting' , arg_1 = arg_1 , arg_2 = arg_2 )","id_":253710,"task_name":"https:\/\/github.com\/mdgoldberg\/sportsref\/blob\/09f11ac856a23c96d666d1d510bb35d6f050b5c3\/sportsref\/nba\/players.py#L185-L187","negative":"Unlock a message for processing by other receivers on a given\n subscription. This operation deletes the lock object, causing the\n message to be unlocked. A message must have first been locked by a\n receiver before this operation is called.\n\n topic_name:\n Name of the topic.\n subscription_name:\n Name of the subscription.\n sequence_number:\n The sequence number of the message to be unlocked as returned in\n BrokerProperties['SequenceNumber'] by the Peek Message operation.\n lock_token:\n The ID of the lock as returned by the Peek Message operation in\n BrokerProperties['LockToken']"} {"query":"Updates a running PowerShell command with more data .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None , arg_6 = False , arg_7 = True , ** arg_8 ) : arg_9 = arg_0 . 
_Func_initial ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = True , ** arg_8 ) def get_long_running_output ( arg_10 ) : arg_11 = arg_0 . _deserialize ( 'PowerShellCommandResults' , arg_10 ) if arg_6 : arg_12 = ClientRawResponse ( arg_11 , arg_10 ) return arg_12 return arg_11 arg_13 = arg_8 . get ( 'long_running_operation_timeout' , arg_0 . config . long_running_operation_timeout ) if arg_7 is True : arg_14 = ARMPolling ( arg_13 , ** arg_8 ) elif arg_7 is False : arg_14 = NoPolling ( ) else : arg_14 = arg_7 return LROPoller ( arg_0 . _client , arg_9 , get_long_running_output , arg_14 )","id_":253711,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-mgmt-servermanager\/azure\/mgmt\/servermanager\/operations\/power_shell_operations.py#L328-L381","negative":"Delete a milestone request"} {"query":"Sends header payload and topics through a ZeroMQ socket .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = ( ) , arg_4 = 0 ) : arg_5 = [ ] arg_5 . extend ( arg_3 ) arg_5 . append ( SEAM ) arg_5 . extend ( arg_1 ) arg_5 . append ( arg_2 ) return eintr_retry_zmq ( arg_0 . Func_multipart , arg_5 , arg_4 )","id_":253712,"task_name":"https:\/\/github.com\/sublee\/zeronimo\/blob\/b216638232932718d2cbc5eabd870c8f5b5e83fb\/zeronimo\/messaging.py#L72-L87","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"like with_objattr but enter context one by one .","positive":"def Func ( * arg_0 ) : def _wrap ( arg_1 ) : @ functools . wraps ( arg_1 ) def wrapper ( arg_2 , * arg_3 , ** arg_4 ) : with contextlib . ExitStack ( ) as stack : for arg_5 in arg_0 : stack . enter_context ( getattr ( arg_2 , arg_5 ) ) return arg_1 ( arg_2 , * arg_3 , ** arg_4 ) return wrapper return _wrap","id_":253713,"task_name":"https:\/\/github.com\/Jasily\/jasily-python\/blob\/1c821a120ebbbbc3c5761f5f1e8a73588059242a\/jasily\/lang\/with_any.py#L63-L79","negative":"Same as `send_stream_error`, but expects `lock` acquired."} {"query":"Check if the criteria for Pathogenic are fulfilled","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_0 : if arg_1 : return True if arg_2 : if arg_3 : return True if len ( arg_2 ) >= 2 : return True if len ( arg_3 ) >= 2 : return True if arg_1 : if len ( arg_1 ) >= 2 : return True if arg_2 : if len ( arg_2 ) >= 3 : return True elif len ( arg_2 ) >= 2 : if len ( arg_3 ) >= 2 : return True elif len ( arg_3 ) >= 4 : return True return False","id_":253714,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/utils\/acmg.py#L2-L55","negative":"Replace munged string components with their original\n representation."} {"query":"Declaration of routing","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . 
add_route arg_1 ( 'get-content' , '\/contents\/{ident_hash}' ) arg_1 ( 'get-resource' , '\/resources\/{hash}' ) arg_1 ( 'license-request' , '\/contents\/{uuid}\/licensors' ) arg_1 ( 'roles-request' , '\/contents\/{uuid}\/roles' ) arg_1 ( 'acl-request' , '\/contents\/{uuid}\/permissions' ) arg_1 ( 'publications' , '\/publications' ) arg_1 ( 'get-publication' , '\/publications\/{id}' ) arg_1 ( 'publication-license-acceptance' , '\/publications\/{id}\/license-acceptances\/{uid}' ) arg_1 ( 'publication-role-acceptance' , '\/publications\/{id}\/role-acceptances\/{uid}' ) arg_1 ( 'collate-content' , '\/contents\/{ident_hash}\/collate-content' ) arg_1 ( 'bake-content' , '\/contents\/{ident_hash}\/baked' ) arg_1 ( 'moderation' , '\/moderations' ) arg_1 ( 'moderate' , '\/moderations\/{id}' ) arg_1 ( 'moderation-rss' , '\/feeds\/moderations.rss' ) arg_1 ( 'api-keys' , '\/api-keys' ) arg_1 ( 'api-key' , '\/api-keys\/{id}' )","id_":253715,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/views\/__init__.py#L5-L34","negative":"Clean up stats file, if configured to do so."} {"query":"Decorator for methods accepting old_path and new_path .","positive":"def Func ( arg_0 , arg_1 ) : def _wrapper ( arg_2 , arg_3 , arg_4 , * arg_5 , ** arg_6 ) : arg_7 , arg_8 , arg_9 = _resolve_path ( arg_3 , arg_2 . managers ) arg_10 , arg_11 , arg_12 = _resolve_path ( arg_4 , arg_2 . managers , ) if arg_8 is not arg_11 : raise HTTPError ( 400 , \"Can't move files between backends ({old} -> {new})\" . format ( old = arg_3 , new = arg_4 , ) ) assert arg_10 == arg_7 arg_13 = getattr ( arg_11 , arg_0 ) ( arg_9 , arg_12 , * arg_5 , ** arg_6 ) if arg_1 and arg_10 : return _apply_prefix ( arg_10 , arg_13 ) else : return arg_13 return _wrapper","id_":253716,"task_name":"https:\/\/github.com\/quantopian\/pgcontents\/blob\/ed36268b7917332d16868208e1e565742a8753e1\/pgcontents\/hybridmanager.py#L133-L164","negative":"Retrieve the last analog data value received for the specified pin.\n\n :param pin: Selected pin\n\n :return: The last value entered into the analog response table."} {"query":"Sets the main channel names based on the provided input and output channel suffixes . This is performed when connecting processes .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_0 . input_channel = \"{}_in_{}\" . format ( arg_0 . template , arg_1 ) arg_0 . output_channel = \"{}_out_{}\" . format ( arg_0 . template , arg_2 ) arg_0 . lane = arg_3","id_":253717,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/process.py#L330-L348","negative":"Wraps a class with reporting to errors backend by decorating each function of the class.\n Decorators are injected under the classmethod decorator if they exist."} {"query":"Open Tensorboard .","positive":"def Func ( arg_0 = '\/tmp\/tensorflow' , arg_1 = 6006 ) : arg_2 = \"[TL] Open tensorboard, go to localhost:\" + str ( arg_1 ) + \" to access\" arg_3 = \" not yet supported by this function (tl.ops.open_tb)\" if not tl . files . exists_or_mkdir ( arg_0 , verbose = False ) : tl . logging . info ( \"[TL] Log reportory was created at %s\" % arg_0 ) if _platform == \"linux\" or _platform == \"linux2\" : raise NotImplementedError ( ) elif _platform == \"darwin\" : tl . logging . info ( 'OS X: %s' % arg_2 ) subprocess . Popen ( sys . 
prefix + \" | python -m tensorflow.tensorboard --logdir=\" + arg_0 + \" --port=\" + str ( arg_1 ) , shell = True ) elif _platform == \"win32\" : raise NotImplementedError ( \"this function is not supported on the Windows platform\" ) else : tl . logging . info ( _platform + arg_3 )","id_":253718,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/utils.py#L586-L613","negative":"Open a matplotlib figure and plot the array of data on it.\n\n Parameters\n -----------\n array : data.array.scaled_array.ScaledArray\n The 2D array of data which is plotted.\n as_subplot : bool\n Whether the array is plotted as part of a subplot, in which case the grid figure is not opened \/ closed.\n units : str\n The units of the y \/ x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc').\n kpc_per_arcsec : float or None\n The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc.\n figsize : (int, int)\n The size of the figure in (rows, columns).\n aspect : str\n The aspect ratio of the array, specifically whether it is forced to be square ('equal') or adapts its size to \\\n the figure size ('auto').\n cmap : str\n The colormap the array is plotted using, which may be chosen from the standard matplotlib colormaps.\n norm : str\n The normalization of the colormap used to plot the image, specifically whether it is linear ('linear'), log \\\n ('log') or a symmetric log normalization ('symmetric_log').\n norm_min : float or None\n The minimum array value the colormap map spans (all values below this value are plotted the same color).\n norm_max : float or None\n The maximum array value the colormap map spans (all values above this value are plotted the same color).\n linthresh : float\n For the 'symmetric_log' colormap normalization ,this specifies the range of values within which the colormap \\\n is linear.\n linscale : float\n For the 'symmetric_log' colormap normalization, this allowws the linear range set by linthresh to be stretched \\\n relative to the logarithmic range.\n xticks_manual : [] or None\n If input, the xticks do not use the array's default xticks but instead overwrite them as these values.\n yticks_manual : [] or None\n If input, the yticks do not use the array's default yticks but instead overwrite them as these values."} {"query":"Cast the positional argument at given position into a list if not already a list .","positive":"def Func ( arg_0 ) : @ wrapt . decorator def wrapper ( arg_1 , arg_2 , arg_3 , arg_4 ) : if not isinstance ( arg_3 [ arg_0 ] , list ) : arg_3 = list ( arg_3 ) arg_3 [ arg_0 ] = [ arg_3 [ arg_0 ] ] arg_3 = tuple ( arg_3 ) return arg_1 ( * arg_3 , ** arg_4 ) return wrapper","id_":253719,"task_name":"https:\/\/github.com\/thebigmunch\/gmusicapi-wrapper\/blob\/8708683cd33955def1378fc28319ef37805b851d\/gmusicapi_wrapper\/decorators.py#L12-L24","negative":"Save a vectorized image to file."} {"query":"Clear all the internal data the token needed while it was part of the world .","positive":"def Func ( arg_0 ) : arg_0 . onFunc ( ) arg_0 . _extensions = { } arg_0 . _disable_forum_observation ( ) arg_0 . _world = None arg_0 . _id = None","id_":253720,"task_name":"https:\/\/github.com\/kxgames\/kxg\/blob\/a68c01dc4aa1abf6b3780ba2c65a7828282566aa\/kxg\/tokens.py#L383-L396","negative":"Output profiler report."} {"query":"Detect when a bad built - in is referenced .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = arg_1 . lookup ( arg_1 . 
name ) if not _is_builtin ( arg_2 ) : return if arg_1 . name not in arg_0 . _bad_builtins : return if node_ignores_exception ( arg_1 ) or isinstance ( find_try_except_wrapper_node ( arg_1 ) , astroid . ExceptHandler ) : return arg_4 = arg_1 . name . lower ( ) + \"-builtin\" arg_0 . add_message ( arg_4 , arg_1 = arg_1 )","id_":253721,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/python3.py#L1001-L1014","negative":"Sets the player's paused state."} {"query":"Get the escape sequence for indexed color index .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . Funcs == 16 : if arg_1 >= 8 : return arg_0 . csi ( 'bold' ) + arg_0 . csi ( 'setaf' , arg_1 - 8 ) else : return arg_0 . csi ( 'sgr0' ) + arg_0 . csi ( 'setaf' , arg_1 ) else : return arg_0 . csi ( 'setaf' , arg_1 )","id_":253722,"task_name":"https:\/\/github.com\/tehmaze\/diagram\/blob\/1701526a91c14dc8ebc6452c45c8ec9a563a56db\/diagram.py#L121-L137","negative":"Creates a layer from its config.\n\n This method is the reverse of `get_config`, capable of instantiating the\n same layer from the config dictionary.\n\n Args:\n config: A Python dictionary, typically the output of `get_config`.\n\n Returns:\n layer: A layer instance."} {"query":"Send a message to the room .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = Message ( to_jid = arg_0 . room_jid . bare ( ) , stanza_type = \"groupchat\" , arg_1 = arg_1 ) arg_0 . manager . stream . send ( arg_2 )","id_":253723,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/muc\/muc.py#L494-L504","negative":"Function to create an overview of the services.\n Will print a list of ports found an the number of times the port was seen."} {"query":"Iterates over the labels for the descendants of a given term","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None ) : for arg_5 in _help_iterate_labels ( arg_0 . iter_descendants ( arg_1 , arg_2 , arg_3 = arg_3 , arg_4 = arg_4 ) ) : yield arg_5","id_":253724,"task_name":"https:\/\/github.com\/cthoyt\/ols-client\/blob\/8c6bb54888675652d25324184967392d00d128fc\/src\/ols_client\/client.py#L185-L195","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"} {"query":"Count the number of nodes in each subgraph induced by an annotation .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 = 'Subgraph' ) -> Counter [ int ] : return count_dict_values ( group_nodes_by_annotation ( arg_0 , arg_2 ) )","id_":253725,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/summary\/subgraph_summary.py#L27-L33","negative":"The user is required to have an application secrets file in his\n or her environment. The client exists with error \n if the variable isn't found."} {"query":"Call the timeout handlers due .","positive":"def Func ( arg_0 ) : arg_1 = 0 arg_2 = time . time ( ) arg_3 = None while arg_0 . _timeout_handlers : arg_3 , arg_4 = arg_0 . _timeout_handlers [ 0 ] if arg_3 <= arg_2 : logger . debug ( \"About to call a timeout handler: {0!r}\" . format ( arg_4 ) ) arg_0 . _timeout_handlers = arg_0 . _timeout_handlers [ 1 : ] arg_6 = arg_4 ( ) logger . debug ( \" handler result: {0!r}\" . format ( arg_6 ) ) arg_7 = arg_4 . _pyxmpp_recurring if arg_7 : logger . debug ( \" recurring, restarting in {0} s\" . format ( arg_4 . _pyxmpp_timeout ) ) arg_0 . _timeout_handlers . append ( ( arg_2 + arg_4 . 
_pyxmpp_timeout , arg_4 ) ) arg_0 . _timeout_handlers . sort ( key = lambda x : x [ 0 ] ) elif arg_7 is None and arg_6 is not None : logger . debug ( \" auto-recurring, restarting in {0} s\" . format ( arg_6 ) ) arg_0 . _timeout_handlers . append ( ( arg_2 + arg_6 , arg_4 ) ) arg_0 . _timeout_handlers . sort ( key = lambda x : x [ 0 ] ) arg_1 += 1 else : break if arg_0 . check_events ( ) : return 0 , arg_1 if arg_0 . _timeout_handlers and arg_3 : arg_8 = arg_3 - arg_2 else : arg_8 = None return arg_8 , arg_1","id_":253726,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/mainloop\/base.py#L124-L164","negative":"Fetch items using the given backend.\n\n Generator to get items using the given backend class. When\n an archive manager is given, this function will store\n the fetched items in an `Archive`. If an exception is raised,\n this archive will be removed to avoid corrupted archives.\n\n The parameters needed to initialize the `backend` class and\n get the items are given using `backend_args` dict parameter.\n\n :param backend_class: backend class to fetch items\n :param backend_args: dict of arguments needed to fetch the items\n :param category: category of the items to retrieve.\n If None, it will use the default backend category\n :param filter_classified: remove classified fields from the resulting items\n :param manager: archive manager needed to store the items\n\n :returns: a generator of items"} {"query":"Update the status of the job lists .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 , arg_3 = arg_0 . _s_running , arg_0 . _s_completed , arg_0 . _s_dead arg_4 , arg_5 , arg_6 = arg_0 . _running , arg_0 . _completed , arg_0 . _dead for arg_7 , arg_8 in enumerate ( arg_4 ) : arg_9 = arg_8 . stat_code if arg_9 == arg_1 : continue elif arg_9 == arg_2 : arg_5 . append ( arg_8 ) arg_0 . _comp_report . append ( arg_8 ) arg_4 [ arg_7 ] = False elif arg_9 == arg_3 : arg_6 . append ( arg_8 ) arg_0 . _dead_report . append ( arg_8 ) arg_4 [ arg_7 ] = False arg_4 [ : ] = filter ( None , arg_4 )","id_":253727,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/backgroundjobs.py#L208-L239","negative":"Decorator. Abortable worker. If wrapped task will be cancelled by\n dispatcher, decorator will send ftp codes of successful interrupt.\n\n ::\n\n >>> @worker\n ... async def worker(self, connection, rest):\n ... ..."} {"query":"Returns a link to a view that moves the passed in object up in rank .","positive":"def Func ( arg_0 , arg_1 = 'up' ) : if arg_0 . rank == 1 : return '' arg_2 = ContentType . objects . get_for_model ( arg_0 ) arg_3 = reverse ( 'awl-rankedmodel-move' , args = ( arg_2 . id , arg_0 . id , arg_0 . rank - 1 ) ) return '%s<\/a>' % ( arg_3 , arg_1 )","id_":253728,"task_name":"https:\/\/github.com\/cltrudeau\/django-awl\/blob\/70d469ef9a161c1170b53aa017cf02d7c15eb90c\/awl\/rankedmodel\/admintools.py#L10-L27","negative":"Override of clean method to perform additional validation"} {"query":"Checks for horizontal spacing near commas .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = arg_1 . elided [ arg_2 ] arg_6 = Match ( r'^(.*[^ ({>]){' , arg_5 ) if arg_6 : arg_7 = arg_6 . group ( 1 ) ( arg_8 , arg_9 , arg_10 ) = CloseExpression ( arg_1 , arg_2 , len ( arg_6 . 
group ( 1 ) ) ) arg_11 = '' if arg_10 > - 1 : arg_11 = arg_8 [ arg_10 : ] for arg_12 in xrange ( arg_9 + 1 , min ( arg_9 + 3 , arg_1 . NumLines ( ) - 1 ) ) : arg_11 += arg_1 . elided [ arg_12 ] if ( not Match ( r'^[\\s}]*[{.;,)<>\\]:]' , arg_11 ) and not _IsType ( arg_1 , arg_3 , arg_7 ) ) : arg_4 ( arg_0 , arg_2 , 'whitespace\/braces' , 5 , 'Missing space before {' ) if Search ( r'}else' , arg_5 ) : arg_4 ( arg_0 , arg_2 , 'whitespace\/braces' , 5 , 'Missing space before else' ) if Search ( r':\\s*;\\s*$' , arg_5 ) : arg_4 ( arg_0 , arg_2 , 'whitespace\/semicolon' , 5 , 'Semicolon defining empty statement. Use {} instead.' ) elif Search ( r'^\\s*;\\s*$' , arg_5 ) : arg_4 ( arg_0 , arg_2 , 'whitespace\/semicolon' , 5 , 'Line contains only semicolon. If this should be an empty statement, ' 'use {} instead.' ) elif ( Search ( r'\\s+;\\s*$' , arg_5 ) and not Search ( r'\\bfor\\b' , arg_5 ) ) : arg_4 ( arg_0 , arg_2 , 'whitespace\/semicolon' , 5 , 'Extra space before last semicolon. If this should be an empty ' 'statement, use {} instead.' )","id_":253729,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L3692-L3778","negative":"Save the model in the given directory.\n\n :param saveModelDir: (string)\n Absolute directory path for saving the model. This directory should\n only be used to store a saved model. If the directory does not exist,\n it will be created automatically and populated with model data. A\n pre-existing directory will only be accepted if it contains previously\n saved model data. If such a directory is given, the full contents of\n the directory will be deleted and replaced with current model data."} {"query":"Read phantom tool specific options","positive":"def Func ( arg_0 ) : arg_0 . threads = arg_0 . cfg [ \"threads\" ] or str ( int ( multiprocessing . cpu_count ( ) \/ 2 ) + 1 ) arg_0 . phantom_modules_path = arg_0 . cfg [ \"phantom_modules_path\" ] arg_0 . additional_libs = ' ' . join ( arg_0 . cfg [ \"additional_libs\" ] ) arg_0 . answ_log_level = arg_0 . cfg [ \"writelog\" ] if arg_0 . answ_log_level . lower ( ) in [ '0' , 'false' ] : arg_0 . answ_log_level = 'none' elif arg_0 . answ_log_level . lower ( ) in [ '1' , 'true' ] : arg_0 . answ_log_level = 'all' arg_0 . timeout = parse_duration ( arg_0 . cfg [ \"timeout\" ] ) if arg_0 . timeout > 120000 : logger . warning ( \"You've set timeout over 2 minutes.\" \" Are you a functional tester?\" ) arg_0 . answ_log = arg_0 . core . mkstemp ( \".log\" , \"answ_\" ) arg_0 . core . add_artifact_file ( arg_0 . answ_log ) arg_0 . core . add_artifact_file ( arg_0 . phout_file ) arg_0 . core . add_artifact_file ( arg_0 . stat_log ) arg_0 . phantom_log = arg_0 . core . mkstemp ( \".log\" , \"phantom_\" ) arg_0 . core . add_artifact_file ( arg_0 . phantom_log ) arg_8 = StreamConfig ( arg_0 . core , len ( arg_0 . streams ) , arg_0 . phout_file , arg_0 . answ_log , arg_0 . answ_log_level , arg_0 . timeout , arg_0 . cfg , True ) arg_0 . streams . append ( arg_8 ) for arg_9 in arg_0 . multi ( ) : arg_0 . streams . append ( StreamConfig ( arg_0 . core , len ( arg_0 . streams ) , arg_0 . phout_file , arg_0 . answ_log , arg_0 . answ_log_level , arg_0 . timeout , arg_9 ) ) for arg_10 in arg_0 . streams : arg_10 . Func ( ) if any ( arg_10 . ssl for arg_10 in arg_0 . streams ) : arg_0 . 
additional_libs += ' ssl io_benchmark_method_stream_transport_ssl'","id_":253730,"task_name":"https:\/\/github.com\/yandex\/yandex-tank\/blob\/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b\/yandextank\/plugins\/Phantom\/utils.py#L59-L98","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Generate markdown document from module including API section .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = True , arg_4 = 0 ) : arg_5 = arg_0 . __doc__ arg_6 = doctrim ( arg_5 ) arg_7 = arg_6 . split ( '\\n' ) arg_8 = find_sections ( arg_7 ) if arg_8 : arg_9 = min ( n for n , t in arg_8 ) - 1 else : arg_9 = 1 arg_10 = [ ] arg_11 = [ ] if arg_2 and arg_0 . __all__ : arg_8 . append ( ( arg_9 + 1 , arg_2 ) ) for arg_12 in arg_0 . __all__ : arg_11 . append ( ( arg_9 + 2 , \"`\" + arg_12 + \"`\" ) ) arg_10 += [ '' , '' ] arg_13 = arg_0 . __dict__ [ arg_12 ] if arg_13 . __doc__ : arg_14 , arg_15 = doc2md ( arg_13 . __doc__ , \"`\" + arg_12 + \"`\" , min_level = arg_9 + 2 , more_info = True , arg_3 = False ) arg_11 += arg_15 arg_10 += arg_14 arg_8 += arg_11 arg_16 = next ( ( i for i , l in enumerate ( arg_7 ) if is_heading ( l ) ) , 0 ) arg_14 = [ make_heading ( arg_9 , arg_1 ) , \"\" , ] + arg_7 [ : arg_16 ] if arg_3 : arg_14 += make_toc ( arg_8 , arg_4 ) arg_14 += [ '' ] arg_14 += _doc2md ( arg_7 [ arg_16 : ] ) arg_14 += [ '' , '' , make_heading ( arg_9 + 1 , arg_2 ) , ] if arg_3 : arg_14 += [ '' ] arg_14 += make_toc ( arg_11 , 1 ) arg_14 += arg_10 return \"\\n\" . join ( arg_14 )","id_":253731,"task_name":"https:\/\/github.com\/coldfix\/doc2md\/blob\/afd2876316a715d3401adb442d46c9a07cd7e806\/doc2md.py#L210-L265","negative":"Temporal distance probability density function.\n\n Returns\n -------\n non_delta_peak_split_points: numpy.array\n non_delta_peak_densities: numpy.array\n len(density) == len(temporal_distance_split_points_ordered) -1\n delta_peak_loc_to_probability_mass : dict"} {"query":"Returns a batch of example images and the corresponding labels","positive":"def Func ( arg_0 = 'imagenet' , arg_1 = 0 , arg_2 = 1 , arg_3 = ( 224 , 224 ) , arg_4 = 'channels_last' ) : from PIL import Image arg_5 , arg_6 = [ ] , [ ] arg_7 = os . path . dirname ( __file__ ) arg_8 = os . path . join ( arg_7 , 'data' ) arg_9 = os . listdir ( arg_8 ) for arg_10 in range ( arg_1 , arg_1 + arg_2 ) : arg_11 = arg_10 % 20 arg_12 = [ n for n in arg_9 if '{}_{:02d}_' . format ( arg_0 , arg_11 ) in n ] [ 0 ] arg_13 = int ( arg_12 . split ( '.' ) [ 0 ] . split ( '_' ) [ - 1 ] ) arg_14 = os . path . join ( arg_8 , arg_12 ) arg_15 = Image . open ( arg_14 ) if arg_0 == 'imagenet' : arg_15 = arg_15 . resize ( arg_3 ) arg_15 = np . asarray ( arg_15 , dtype = np . float32 ) if arg_0 != 'mnist' and arg_4 == 'channels_first' : arg_15 = np . transpose ( arg_15 , ( 2 , 0 , 1 ) ) arg_5 . append ( arg_15 ) arg_6 . append ( arg_13 ) arg_6 = np . array ( arg_6 ) arg_5 = np . stack ( arg_5 ) return arg_5 , arg_6","id_":253732,"task_name":"https:\/\/github.com\/bethgelab\/foolbox\/blob\/8ab54248c70e45d8580a7d9ee44c9c0fb5755c4a\/foolbox\/utils.py#L155-L214","negative":"Called when socket is read-ready"} {"query":"Returns an array of length size and type dtype that is everywhere 0 except in the index in pos .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = numpy . 
zeros ( arg_1 , arg_2 = arg_2 ) arg_3 [ arg_0 ] = 1 return arg_3","id_":253733,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/math\/stats.py#L64-L77","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"When a model gets deleted .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = arg_1 [ 'instance' ] signals . delete . send ( arg_0 , pk = arg_2 . pk )","id_":253734,"task_name":"https:\/\/github.com\/SectorLabs\/django-postgres-extra\/blob\/eef2ed5504d225858d4e4f5d77a838082ca6053e\/psqlextra\/manager\/manager.py#L577-L581","negative":"Adds all parameters to `traj`"} {"query":"Create an optimizer . Depending on the input type the returned optimizer can be a local optimizer or a distributed optimizer .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = 32 , arg_5 = None , arg_6 = None , arg_7 = \"float\" ) : if not arg_3 : arg_3 = MaxEpoch ( 1 ) if not arg_5 : arg_5 = SGD ( ) if isinstance ( arg_1 , RDD ) or isinstance ( arg_1 , DataSet ) : return DistriOptimizer ( arg_0 = arg_0 , training_rdd = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_7 = arg_7 ) elif isinstance ( arg_1 , tuple ) and len ( arg_1 ) == 2 : arg_8 , arg_9 = arg_1 return LocalOptimizer ( X = arg_8 , Y = arg_9 , arg_0 = arg_0 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = \"float\" ) else : raise Exception ( \"Not supported training set: %s\" % type ( arg_1 ) )","id_":253735,"task_name":"https:\/\/github.com\/intel-analytics\/BigDL\/blob\/e9c19788285986ab789a2e2998f9a85d7524779f\/pyspark\/bigdl\/optim\/optimizer.py#L848-L894","negative":"Wait for the termination of a process and log its stdout & stderr"} {"query":"Fit lens data with a model tracer automatically determining the type of fit based on the properties of the galaxies in the tracer .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if arg_2 . has_light_profile and not arg_2 . has_pixelization : return LensProfileFit ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 ) elif not arg_2 . has_light_profile and arg_2 . has_pixelization : return LensInversionFit ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = None ) elif arg_2 . has_light_profile and arg_2 . has_pixelization : return LensProfileInversionFit ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = None ) else : raise exc . FittingException ( 'The fit routine did not call a Fit class - check the ' 'properties of the tracer' )","id_":253736,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/lens\/lens_fit.py#L26-L49","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Punctuate author names properly .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . strip ( ) arg_2 = [ x for x in arg_1 . split ( ',' ) if x != '' ] arg_3 = '' for arg_4 , arg_5 in enumerate ( arg_2 ) : arg_6 = arg_5 . strip ( ) . split ( ' ' ) for arg_7 , arg_8 in enumerate ( arg_6 ) : arg_3 += arg_8 if len ( arg_8 ) == 1 : arg_3 += '.' if arg_7 < ( len ( arg_6 ) - 1 ) : arg_3 += ' ' if arg_4 < ( len ( arg_2 ) - 1 ) : arg_3 += ', ' return arg_3 . 
strip ( )","id_":253737,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/utils.py#L343-L361","negative":"Bring the interrupt pin on the GPIO into Linux userspace."} {"query":"Get the names from a namespace that wasn t actually defined .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 ) -> Set [ arg_3 ] : return { arg_5 . name for arg_4 , arg_5 , arg_4 in arg_0 . warnings if isinstance ( arg_5 , UndefinedNamespaceWarning ) and arg_5 . namespace == arg_2 }","id_":253738,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/summary\/error_summary.py#L67-L76","negative":"Query for segment allocations."} {"query":"Handle replies for call tips .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . log . debug ( \"oinfo: %s\" , arg_1 . get ( 'content' , '' ) ) arg_2 = arg_0 . _get_cursor ( ) arg_3 = arg_0 . _request_info . get ( 'call_tip' ) if arg_3 and arg_3 . id == arg_1 [ 'parent_header' ] [ 'msg_id' ] and arg_3 . pos == arg_2 . position ( ) : arg_4 = arg_1 [ 'content' ] if arg_4 . get ( 'ismagic' , False ) : arg_5 , arg_6 = None , None else : arg_5 , arg_6 = call_tip ( arg_4 , format_call = True ) if arg_5 or arg_6 : arg_0 . _call_tip_widget . show_call_info ( arg_5 , arg_6 )","id_":253739,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/frontend_widget.py#L456-L478","negative":"Generates a bytecode from an object.\n\n :param obb: The object to generate.\n :param previous: The previous bytecode to use when generating subobjects.\n :return: The generated bytecode."} {"query":"Create a platform without an ELF loaded .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 ( None ) arg_2 . _init_cpu ( arg_1 ) arg_2 . _init_std_fds ( ) return arg_2","id_":253740,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/linux.py#L452-L462","negative":"Update the profile picture for the current user.\n\n Args:\n image (file): a file-like object to read the image from"} {"query":"Adds a histogram to the plot s figure .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 111 , arg_3 = None , arg_4 = None , arg_5 = None ) : arg_6 = arg_0 . _addBase ( arg_2 , arg_3 = arg_3 , arg_4 = arg_4 ) arg_6 . hist ( arg_1 , arg_5 = arg_5 , color = \"green\" , alpha = 0.8 ) plt . draw ( )","id_":253741,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/monitor_mixin\/plot.py#L89-L102","negative":"This method increases the permanence values of synapses of columns whose\n activity level has been too low. Such columns are identified by having an\n overlap duty cycle that drops too much below those of their peers. The\n permanence values for such columns are increased."} {"query":"Return a function which is the partial application of f with args .","positive":"def Func ( arg_0 , * arg_1 ) : @ functools . wraps ( arg_0 ) def Func_f ( * arg_2 ) : return arg_0 ( * itertools . 
chain ( arg_1 , arg_2 ) ) return Func_f","id_":253742,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/runtime.py#L910-L917","negative":"Remove hidden notes and tag a CERN if detected."} {"query":"Check if the device is on .","positive":"def Func ( arg_0 ) : arg_1 = ( yield from arg_0 . handle_int ( arg_0 . API . get ( 'power' ) ) ) return bool ( arg_1 )","id_":253743,"task_name":"https:\/\/github.com\/zhelev\/python-afsapi\/blob\/bb1990cf1460ae42f2dde75f2291625ddac2c0e4\/afsapi\/__init__.py#L222-L225","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Given a QTextBlock return its unformatted text .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = QtGui . QTextCursor ( arg_1 ) arg_2 . movePosition ( QtGui . QTextCursor . StartOfBlock ) arg_2 . movePosition ( QtGui . QTextCursor . EndOfBlock , QtGui . QTextCursor . KeepAnchor ) return arg_2 . selection ( ) . toPlainText ( )","id_":253744,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/console_widget.py#L1430-L1437","negative":"Generate the dataset dictionary"} {"query":"Replace target with replacement","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . data = arg_0 . data . replace ( arg_1 , arg_2 )","id_":253745,"task_name":"https:\/\/github.com\/fizzbucket\/latexfixer\/blob\/1b127e866fbca9764e638fb05fdd43da9dd1a97b\/latexfixer\/fix.py#L75-L77","negative":"Fetch the events pages of a given group."} {"query":"A convenience method that provides all the features that analyzeSentiment analyzeEntities and analyzeSyntax provide in one call .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None ) : arg_7 = arg_0 . get_conn ( ) return arg_7 . Func ( arg_1 = arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 , )","id_":253746,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_natural_language_hook.py#L163-L195","negative":"Get or set `Settings._wrapped`\n\n :param str path: a python module file,\n if user set it,write config to `Settings._wrapped`\n :param str with_path: search path\n :return: A instance of `Settings`"} {"query":"Move subfield at specified position .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None , arg_5 = None ) : arg_6 = record_get_subfields ( arg_0 , arg_1 , arg_4 = arg_4 , arg_5 = arg_5 ) try : arg_7 = arg_6 . pop ( arg_2 ) arg_6 . insert ( arg_3 , arg_7 ) except IndexError : raise InvenioBibRecordFieldError ( \"There is no subfield with position '%d'.\" % arg_2 )","id_":253747,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/bibrecord.py#L981-L1000","negative":"This function creates the command list from available information"} {"query":"How to use Embedding layer and how to convert IDs to vector IDs to words etc .","positive":"def Func ( ) : arg_0 = 50000 arg_1 = 128 arg_2 = \"model_word2vec_50k_128\" arg_3 = None print ( \"Load existing embedding matrix and dictionaries\" ) arg_4 = tl . files . 
load_npy_to_any ( name = arg_2 + '.npy' ) arg_5 = arg_4 [ 'data' ] arg_6 = arg_4 [ 'count' ] arg_7 = arg_4 [ 'dictionary' ] arg_8 = arg_4 [ 'reverse_dictionary' ] tl . nlp . save_vocab ( arg_6 , name = 'vocab_' + arg_2 + '.txt' ) del arg_4 , arg_5 , arg_6 arg_9 = tl . files . load_npz ( name = arg_2 + '.npz' ) arg_10 = tf . placeholder ( tf . int32 , shape = [ arg_3 ] ) arg_11 = tl . layers . EmbeddingInputlayer ( arg_10 , arg_0 , arg_1 , name = 'emb' ) sess . run ( tf . global_variables_initializer ( ) ) tl . files . assign_params ( sess , [ arg_9 [ 0 ] ] , arg_11 ) arg_11 . print_params ( ) arg_11 . print_layers ( ) arg_12 = b'hello' arg_13 = arg_7 [ arg_12 ] print ( 'word_id:' , arg_13 ) arg_14 = [ b'i' , b'am' , b'tensor' , b'layer' ] arg_15 = tl . nlp . words_to_word_ids ( arg_14 , arg_7 , _UNK ) arg_16 = tl . nlp . word_ids_to_words ( arg_15 , arg_8 ) print ( 'word_ids:' , arg_15 ) print ( 'context:' , arg_16 ) arg_17 = sess . run ( arg_11 . outputs , feed_dict = { arg_10 : [ arg_13 ] } ) print ( 'vector:' , arg_17 . shape ) arg_18 = sess . run ( arg_11 . outputs , feed_dict = { arg_10 : arg_15 } ) print ( 'vectors:' , arg_18 . shape )","id_":253748,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/examples\/text_generation\/tutorial_generate_text.py#L132-L182","negative":"Try loading given config file.\n\n :param str file: full path to the config file to load"} {"query":"much faster implementation for aligning chunks","positive":"def Func ( arg_0 , arg_1 = 5 , arg_2 = False ) : try : with open ( arg_0 , 'rb' ) as infile : arg_3 = infile . read ( ) . split ( \"\/\/\\n\/\/\\n\" ) arg_3 = [ i for i in arg_3 if i ] if not arg_3 : raise IPyradError except ( IOError , IPyradError ) : LOGGER . debug ( \"skipping empty chunk - {}\" . format ( arg_0 ) ) return 0 arg_4 = 0 try : arg_5 = persistent_popen_align3 ( arg_3 , 200 , arg_2 ) except Exception as inst : LOGGER . debug ( \"Error in handle - {} - {}\" . format ( arg_0 , inst ) ) arg_5 = [ ] arg_6 = [ ] for arg_7 in arg_5 : arg_8 = aligned_indel_filter ( arg_7 , arg_1 ) if not arg_8 : arg_6 . append ( arg_7 ) else : arg_4 += 1 if arg_6 : arg_9 = arg_0 . rsplit ( \".\" , 1 ) [ 0 ] + \".aligned\" with open ( arg_9 , 'wb' ) as outfile : outfile . write ( \"\\n\/\/\\n\/\/\\n\" . join ( arg_6 ) + \"\\n\" ) arg_10 = logging . getLevelName ( LOGGER . getEffectiveLevel ( ) ) if not arg_10 == \"DEBUG\" : os . remove ( arg_0 ) return arg_4","id_":253749,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/cluster_within.py#L408-L463","negative":"Run stochastic volatility model.\n\n This model estimates the volatility of a returns series over time.\n Returns are assumed to be T-distributed. lambda (width of\n T-distributed) is assumed to follow a random-walk.\n\n Parameters\n ----------\n data : pandas.Series\n Return series to model.\n samples : int, optional\n Posterior samples to draw.\n\n Returns\n -------\n model : pymc.Model object\n PyMC3 model containing all random variables.\n trace : pymc3.sampling.BaseTrace object\n A PyMC3 trace object that contains samples for each parameter\n of the posterior.\n\n See Also\n --------\n plot_stoch_vol : plotting of stochastic volatility model"} {"query":"Get general stats for the cache .","positive":"def Func ( arg_0 ) : arg_1 = sum ( [ x [ 'expired' ] for _ , x in arg_0 . _CACHE_STATS [ 'access_stats' ] . items ( ) ] ) arg_2 = sum ( [ x [ 'miss' ] for _ , x in arg_0 . 
_CACHE_STATS [ 'access_stats' ] . items ( ) ] ) arg_3 = sum ( [ x [ 'hit' ] for _ , x in arg_0 . _CACHE_STATS [ 'access_stats' ] . items ( ) ] ) return { 'totals' : { 'keys' : len ( arg_0 . _CACHE_STATS [ 'access_stats' ] ) , 'expired' : arg_1 , 'miss' : arg_2 , 'hit' : arg_3 , } }","id_":253750,"task_name":"https:\/\/github.com\/Netflix-Skunkworks\/cloudaux\/blob\/c4b0870c3ac68b1c69e71d33cf78b6a8bdf437ea\/cloudaux\/gcp\/gcpcache.py#L100-L116","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Reconcile this collection with the server .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . challenge . exists ( arg_1 ) : raise Exception ( 'Challenge does not exist on server' ) arg_2 = MapRouletteTaskCollection . from_server ( arg_1 , arg_0 . challenge ) arg_3 = [ ] arg_4 = [ ] arg_5 = [ ] arg_6 = [ ] for arg_7 in arg_0 . tasks : if arg_7 . identifier in [ arg_8 . identifier for arg_8 in arg_2 . tasks ] : if arg_7 == arg_2 . get_by_identifier ( arg_7 . identifier ) : arg_3 . append ( arg_7 ) else : arg_5 . append ( arg_7 ) else : arg_4 . append ( arg_7 ) for arg_7 in arg_2 . tasks : if arg_7 . identifier not in [ arg_7 . identifier for arg_7 in arg_0 . tasks ] : arg_6 . append ( arg_7 ) if arg_4 : arg_9 = MapRouletteTaskCollection ( arg_0 . challenge , tasks = arg_4 ) arg_9 . create ( arg_1 ) if arg_5 : arg_10 = MapRouletteTaskCollection ( arg_0 . challenge , tasks = arg_5 ) arg_10 . update ( arg_1 ) if arg_6 : arg_11 = MapRouletteTaskCollection ( arg_0 . challenge , tasks = arg_6 ) for arg_7 in arg_11 . tasks : arg_7 . status = 'deleted' arg_11 . update ( arg_1 ) return { 'same' : arg_3 , 'new' : arg_4 , 'changed' : arg_5 , 'deleted' : arg_6 }","id_":253751,"task_name":"https:\/\/github.com\/mvexel\/maproulette-api-wrapper\/blob\/835278111afefed2beecf9716a033529304c548f\/maproulette\/taskcollection.py#L60-L108","negative":"Does this filename match any of the patterns?"} {"query":"Registers metrics to context","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = system_config . get_sys_config ( ) arg_3 = float ( arg_2 [ constants . HERON_METRICS_EXPORT_INTERVAL_SEC ] ) arg_4 = arg_1 . get_metrics_collector ( ) super ( ComponentMetrics , arg_0 ) . 
Func ( arg_4 , arg_3 )","id_":253752,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/utils\/metrics\/metrics_helper.py#L174-L182","negative":"This is a decorator that retries a function.\n\n Tries `n` times and catches a given tuple of `errors`.\n\n If the `n` retries are not enough, the error is reraised.\n\n If desired `waits` some seconds.\n\n Optionally takes a 'logger_name' of a given logger to print the caught error."} {"query":"Authenticate using OAuth authorization code .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { 'client_id' : OAUTH2_CLIENT_ID , 'client_secret' : OAUTH2_CLIENT_SECRET , 'code' : arg_1 , 'grant_type' : 'authorization_code' , 'redirect_uri' : 'urn:ietf:wg:oauth:2.0:oob' , } arg_3 = _make_token_request ( arg_0 , arg_2 ) return arg_3 [ 'access_token' ] , arg_3 [ 'refresh_token' ]","id_":253753,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/auth.py#L364-L380","negative":"Translates the given metrics value to JSON string\n\n metrics: A list of dictionaries per OPFTaskDriver.getMetrics():\n\n Returns: JSON string representing the given metrics object."} {"query":"Checks whether there are any bolts that consume any of my streams using custom grouping","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in range ( len ( arg_1 . bolts ) ) : for arg_3 in arg_1 . bolts [ arg_2 ] . inputs : if arg_3 . stream . component_name == arg_0 . my_component_name and arg_3 . gtype == topology_pb2 . Grouping . Value ( \"CUSTOM\" ) : if arg_3 . type == topology_pb2 . CustomGroupingObjectType . Value ( \"PYTHON_OBJECT\" ) : arg_4 = default_serializer . deserialize ( arg_3 . custom_grouping_object ) if isinstance ( arg_4 , str ) : pex_loader . load_pex ( arg_0 . topology_pex_abs_path ) arg_5 = pex_loader . import_and_get_class ( arg_0 . topology_pex_abs_path , arg_4 ) arg_4 = arg_5 ( ) assert isinstance ( arg_4 , ICustomGrouping ) arg_0 . custom_grouper . add ( arg_3 . stream . id , arg_0 . _get_taskids_for_component ( arg_1 . bolts [ arg_2 ] . comp . name ) , arg_4 , arg_0 . my_component_name ) elif arg_3 . type == topology_pb2 . CustomGroupingObjectType . Value ( \"JAVA_OBJECT\" ) : raise NotImplementedError ( \"Java-serialized custom grouping is not yet supported \" \"for python topology\" ) else : raise ValueError ( \"Unrecognized custom grouping type found: %s\" % str ( arg_3 . type ) )","id_":253754,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/utils\/misc\/pplan_helper.py#L233-L257","negative":"Parses a file and returns a document object.\n File, a file like object."} {"query":"dispatch values previously read from a configuration file to each options provider","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . cfgfile_parser for arg_2 in arg_1 . sections ( ) : for arg_3 , arg_4 in arg_1 . items ( arg_2 ) : try : arg_0 . global_set_option ( arg_3 , arg_4 ) except ( KeyError , optparse . 
OptionError ) : continue","id_":253755,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/config.py#L744-L755","negative":"For each element in an H2OFrame, determine if it is NA or not.\n\n :returns: an H2OFrame of 1s and 0s, where 1s mean the values were NAs."} {"query":"Sets up or removes a listener for children being changed on a specified object .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_1 . on_trait_change ( arg_2 , \"subgraphs_items\" , arg_3 = arg_3 , dispatch = \"fast_ui\" ) arg_1 . on_trait_change ( arg_2 , \"clusters_items\" , arg_3 = arg_3 , dispatch = \"fast_ui\" ) arg_1 . on_trait_change ( arg_2 , \"nodes_items\" , arg_3 = arg_3 , dispatch = \"fast_ui\" ) arg_1 . on_trait_change ( arg_2 , \"edges_items\" , arg_3 = arg_3 , dispatch = \"fast_ui\" )","id_":253756,"task_name":"https:\/\/github.com\/rwl\/godot\/blob\/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f\/godot\/ui\/graph_tree.py#L161-L172","negative":"RequestHandler for the OAuth 2.0 redirect callback.\n\n Usage::\n\n app = webapp.WSGIApplication([\n ('\/index', MyIndexHandler),\n ...,\n (decorator.callback_path, decorator.callback_handler())\n ])\n\n Returns:\n A webapp.RequestHandler that handles the redirect back from the\n server during the OAuth 2.0 dance."} {"query":"This method handles the incoming encoder data message and stores the data in the digital response table .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . digital_response_table [ arg_1 [ arg_0 . RESPONSE_TABLE_MODE ] ] [ arg_0 . RESPONSE_TABLE_PIN_DATA_VALUE ] arg_3 = int ( ( arg_1 [ arg_0 . MSB ] << 7 ) + arg_1 [ arg_0 . LSB ] ) if arg_3 > 8192 : arg_3 -= 16384 arg_4 = arg_1 [ 0 ] with arg_0 . pymata . data_lock : arg_0 . digital_response_table [ arg_1 [ arg_0 . RESPONSE_TABLE_MODE ] ] [ arg_0 . RESPONSE_TABLE_PIN_DATA_VALUE ] = arg_3 if arg_2 != arg_3 : arg_8 = arg_0 . digital_response_table [ arg_4 ] [ arg_0 . RESPONSE_TABLE_CALLBACK ] if arg_8 is not None : arg_8 ( [ arg_0 . pymata . ENCODER , arg_4 , arg_0 . digital_response_table [ arg_4 ] [ arg_0 . RESPONSE_TABLE_PIN_DATA_VALUE ] ] )","id_":253757,"task_name":"https:\/\/github.com\/MrYsLab\/PyMata\/blob\/7e0ec34670b5a0d3d6b74bcbe4f3808c845cc429\/PyMata\/pymata_command_handler.py#L554-L575","negative":"Sort a file into a group based on on-disk size.\n\n :param paths: See :func:`fastdupes.groupify`\n\n :param min_size: Files smaller than this size (in bytes) will be ignored.\n :type min_size: :class:`__builtins__.int`\n\n :returns: See :func:`fastdupes.groupify`\n\n .. todo:: Rework the calling of :func:`~os.stat` to minimize the number of\n calls. 
It's a fairly significant percentage of the time taken according\n to the profiler."} {"query":"Generates RGB values from HSV values in line with a typical light spectrum .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 , arg_3 = arg_0 return hsv2rgb_raw ( ( ( arg_1 * 192 ) >> 8 , arg_2 , arg_3 ) )","id_":253758,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/colors\/conversions.py#L63-L67","negative":"Convert this unnormalized batch to an instance of Batch.\n\n As this method is intended to be called before augmentation, it\n assumes that none of the ``*_aug`` attributes is yet set.\n It will produce an AssertionError otherwise.\n\n The newly created Batch's ``*_unaug`` attributes will match the ones\n in this batch, just in normalized form.\n\n Returns\n -------\n imgaug.augmentables.batches.Batch\n The batch, with ``*_unaug`` attributes being normalized."} {"query":"Returns the previous sibling of this node .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_0 . parent is None or arg_0 . index is None : return None for arg_2 in xrange ( arg_0 . index - 1 , - 1 , - 1 ) : if arg_1 is None or arg_0 . parent [ arg_2 ] . tagname == arg_1 : return arg_0 . parent [ arg_2 ]","id_":253759,"task_name":"https:\/\/github.com\/dcwatson\/drill\/blob\/b8a30ec0fd5b5bf55154bd44c1c75f5f5945691b\/drill.py#L471-L482","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"r Return the magnitude of the Fast Fourier Transform of a waveform .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : return abs ( fft ( arg_0 , arg_1 , arg_2 , arg_3 ) )","id_":253760,"task_name":"https:\/\/github.com\/pmacosta\/peng\/blob\/976935377adaa3de26fc5677aceb2cdfbd6f93a7\/peng\/wave_functions.py#L675-L717","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Parse issue and generate single line formatted issue line .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . encapsulate_string ( arg_1 [ 'title' ] ) try : arg_3 = u\"{0} [\\\\#{1}]({2})\" . format ( arg_2 , arg_1 [ \"number\" ] , arg_1 [ \"html_url\" ] ) except UnicodeEncodeError : arg_3 = \"ERROR ERROR ERROR: #{0} {1}\" . format ( arg_1 [ \"number\" ] , arg_1 [ 'title' ] ) print ( arg_3 , '\\n' , arg_1 [ \"html_url\" ] ) return arg_0 . 
issue_line_with_user ( arg_3 , arg_1 )","id_":253761,"task_name":"https:\/\/github.com\/topic2k\/pygcgen\/blob\/c41701815df2c8c3a57fd5f7b8babe702127c8a1\/pygcgen\/generator.py#L462-L487","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Draw solid rectangle with top - left corner at x y width w and height h","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 = None , arg_6 = False ) : for arg_7 in range ( arg_1 , arg_1 + arg_3 ) : _draw_fast_vline ( arg_0 , arg_7 , arg_2 , arg_4 , arg_5 , arg_6 )","id_":253762,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/layout\/matrix_drawing.py#L252-L255","negative":"Collect the learner completion data from the course certificate.\n\n Used for Instructor-paced courses.\n\n If no certificate is found, then returns the completed_date = None, grade = In Progress, on the idea that a\n certificate will eventually be generated.\n\n Args:\n enterprise_enrollment (EnterpriseCourseEnrollment): the enterprise enrollment record for which we need to\n collect completion\/grade data\n\n Returns:\n completed_date: Date the course was completed, this is None if course has not been completed.\n grade: Current grade in the course.\n is_passing: Boolean indicating if the grade is a passing grade or not."} {"query":"Return the current value .","positive":"def Func ( arg_0 ) : if arg_0 . __Func__ is None : try : arg_1 = arg_0 . __dict__ [ 'loader' ] except KeyError : raise AttributeError ( \"Loader is not defined\" ) arg_2 = arg_1 ( ) try : arg_0 . set_Func ( arg_2 ) except TypeError : arg_3 = \"Loader must return variable of type %s or None, got %s\" % ( arg_0 . __dict__ [ 'dtype' ] , type ( arg_2 ) ) raise TypeError ( arg_3 ) return arg_0 . __Func__","id_":253763,"task_name":"https:\/\/github.com\/kadrlica\/pymodeler\/blob\/f426c01416fd4b8fc3afeeb6d3b5d1cb0cb8f8e3\/pymodeler\/parameter.py#L290-L317","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"The reduced chi - square of the linear least squares","positive":"def Func ( arg_0 ) : if arg_0 . _Func is None : arg_0 . _Func = chisquare ( arg_0 . y_unweighted . transpose ( ) , _np . dot ( arg_0 . X_unweighted , arg_0 . beta ) , arg_0 . y_error , ddof = 3 , verbose = False ) return arg_0 . _Func","id_":253764,"task_name":"https:\/\/github.com\/joelfrederico\/SciSalt\/blob\/7bf57c49c7dde0a8b0aa337fbd2fbd527ce7a67f\/scisalt\/scipy\/LinLsqFit_mod.py#L157-L163","negative":"This method is called before first step of simulation."} {"query":"Read and return the dataset contents as binary .","positive":"def Func ( arg_0 ) : return arg_0 . workspace . _rest . read_intermediate_dataset_contents_binary ( arg_0 . workspace . workspace_id , arg_0 . experiment . experiment_id , arg_0 . node_id , arg_0 . 
port_name )","id_":253765,"task_name":"https:\/\/github.com\/Azure\/Azure-MachineLearning-ClientLibrary-Python\/blob\/d1211b289747671898eb063013e0dc53d3c80acd\/azureml\/__init__.py#L612-L619","negative":"Call the timeout handlers due.\n\n :Return: (next_event_timeout, sources_handled) tuple.\n next_event_timeout is number of seconds until the next timeout\n event, sources_handled is number of handlers called."} {"query":"Lists available and visible GBDX tasks .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . gbdx_connection . get ( arg_0 . _base_url ) raise_for_status ( arg_1 ) return arg_1 . json ( ) [ 'tasks' ]","id_":253766,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/task_registry.py#L31-L40","negative":"Reads the contents of the config file"} {"query":"Get a resource matching the supplied type and title . Additional arguments may also be specified that will be passed to the query function .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : arg_4 = arg_0 . __api . resources ( arg_1 = arg_1 , arg_2 = arg_2 , query = EqualsOperator ( \"certname\" , arg_0 . name ) , ** arg_3 ) return next ( Func for Func in arg_4 )","id_":253767,"task_name":"https:\/\/github.com\/voxpupuli\/pypuppetdb\/blob\/cedeecf48014b4ad5b8e2513ca8230c814f45603\/pypuppetdb\/types.py#L488-L498","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Deletes several links from the hard disk .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = [ ] arg_4 = [ ] for arg_5 in arg_1 : if isinstance ( arg_5 , str ) : arg_6 = arg_5 . split ( '.' ) arg_7 = '.' . join ( arg_6 [ : - 1 ] ) arg_8 = arg_6 [ - 1 ] arg_9 = arg_0 . f_get ( arg_7 ) if arg_7 != '' else arg_0 arg_10 = arg_9 . v_full_name + '.' + arg_8 if arg_7 != '' else arg_8 arg_3 . append ( ( pypetconstants . DELETE_LINK , arg_10 ) ) arg_4 . append ( ( arg_9 , arg_8 ) ) else : arg_10 = arg_5 [ 0 ] . v_full_name + '.' + arg_5 [ 1 ] arg_3 . append ( ( pypetconstants . DELETE_LINK , arg_10 ) ) arg_4 . append ( arg_5 ) try : arg_0 . _storage_service . store ( pypetconstants . LIST , arg_3 , trajectory_name = arg_0 . v_name ) except : arg_0 . _logger . error ( 'Could not remove `%s` from the trajectory. Maybe the' ' item(s) was\/were never stored to disk.' % str ( arg_3 ) ) raise if arg_2 : for arg_11 , arg_8 in arg_4 : arg_11 . f_remove_link ( arg_8 )","id_":253768,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/trajectory.py#L3832-L3868","negative":"Enter the new span context. All annotations logged inside this\n context will be attributed to this span. All new spans generated\n inside this context will have this span as their parent.\n\n In the unsampled case, this context still generates new span IDs and\n pushes them onto the threadlocal stack, so downstream services calls\n made will pass the correct headers. However, the logging handler is\n never attached in the unsampled case, so the spans are never logged."} {"query":"Adds an empty generic group under the current node .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : return arg_0 . _nn_interface . 
_add_generic ( arg_0 , type_name = GROUP , group_type_name = GROUP , arg_1 = arg_1 , arg_2 = arg_2 , add_prefix = False )","id_":253769,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L2660-L2678","negative":"Send data synchronous to an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param value: value to write to the storage address of the PLC\n :param Type data_type: type of the data given to the PLC,\n according to PLCTYPE constants"} {"query":"expects a dictionary with mail . keys to create an appropriate smtplib . SMTP instance","positive":"def Func ( ** arg_0 ) : return CustomSMTP ( host = arg_0 . get ( 'mail.host' , 'localhost' ) , port = int ( arg_0 . get ( 'mail.port' , 25 ) ) , user = arg_0 . get ( 'mail.user' ) , password = arg_0 . get ( 'mail.password' ) , timeout = float ( arg_0 . get ( 'mail.timeout' , 60 ) ) , )","id_":253770,"task_name":"https:\/\/github.com\/ZeitOnline\/briefkasten\/blob\/ce6b6eeb89196014fe21d68614c20059d02daa11\/application\/briefkasten\/notifications.py#L26-L34","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Encode argument to be sent in a valid GuacamoleInstruction .","positive":"def Func ( arg_0 ) : arg_1 = utf8 ( arg_0 ) return ELEM_SEP . join ( [ str ( len ( str ( arg_1 ) ) ) , str ( arg_1 ) ] )","id_":253771,"task_name":"https:\/\/github.com\/mohabusama\/pyguacamole\/blob\/344dccc6cb3a9a045afeaf337677e5d0001aa83a\/guacamole\/instruction.py#L133-L148","negative":"Return the most recent timestamp in the operation."} {"query":"Management of the json template .","positive":"def Func ( arg_0 ) : if arg_0 . output : if PyFunceble . path . isfile ( arg_0 . output ) : arg_1 = Dict ( ) . from_json ( File ( arg_0 . output ) . read ( ) ) if isinstance ( arg_1 , list ) : arg_1 . extend ( arg_0 . data_to_print ) arg_1 = List ( arg_1 ) . custom_format ( Sort . standard ) if PyFunceble . CONFIGURATION [ \"hierarchical_sorting\" ] : arg_1 = List ( arg_1 ) . custom_format ( Sort . hierarchical ) Dict ( arg_1 ) . to_json ( arg_0 . output ) else : raise Exception ( \"Output not correctly formatted.\" ) else : Dict ( arg_0 . data_to_print ) . to_json ( arg_0 . output ) else : raise Exception ( \"Empty output given.\" )","id_":253772,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/prints.py#L544-L593","negative":"TARGET power button"} {"query":"Validation email handler .","positive":"def Func ( arg_0 ) : arg_1 = _ ( '{domain} account validate' ) . format ( domain = arg_0 . site . domain ) arg_2 = getattr ( settings , 'DUM_VALIDATE_EMAIL_SUBJECT' , arg_1 ) arg_0 . email_subject = arg_2 email_handler ( arg_0 , validation_email_context )","id_":253773,"task_name":"https:\/\/github.com\/incuna\/django-user-management\/blob\/6784e33191d4eff624d2cf2df9ca01db4f23c9c6\/user_management\/utils\/notifications.py#L46-L51","negative":"Updates the current set of parameter values and previous values,\n sets a flag to re-calculate J.\n\n Parameters\n ----------\n new_vals : numpy.ndarray\n The new values to update to\n incremental : Bool, optional\n Set to True to make it an incremental update relative\n to the old parameters. 
Default is False"} {"query":"Sanity checks a states dict used to define the state space for an MDP . Throws an error or warns if mismatches are found .","positive":"def Func ( arg_0 ) : arg_1 = copy . deepcopy ( arg_0 ) arg_2 = ( 'shape' in arg_1 ) if arg_2 : arg_1 = dict ( arg_4 = arg_1 ) for arg_3 , arg_4 in arg_1 . items ( ) : if isinstance ( arg_4 [ 'shape' ] , int ) : arg_4 [ 'shape' ] = ( arg_4 [ 'shape' ] , ) if 'type' not in arg_4 : arg_4 [ 'type' ] = 'float' return arg_1 , arg_2","id_":253774,"task_name":"https:\/\/github.com\/tensorforce\/tensorforce\/blob\/520a8d992230e382f08e315ede5fc477f5e26bfb\/tensorforce\/contrib\/sanity_check_specs.py#L24-L52","negative":"Extract Packed Floating-Point Values\n\n Extracts 128-bits of packed floating-point values from the source\n operand (second operand) at an 128-bit offset from imm8[0] into the\n destination operand (first operand). The destination may be either an\n XMM register or an 128-bit memory location."} {"query":"Selects an open lswitch for a network .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : if arg_2 is not None : for arg_4 in arg_2 [ \"results\" ] : arg_5 = arg_4 [ \"_relations\" ] [ \"LogicalSwitchStatus\" ] [ \"lport_count\" ] if ( arg_0 . limits [ 'max_ports_per_switch' ] == 0 or arg_5 < arg_0 . limits [ 'max_ports_per_switch' ] ) : return arg_4 [ \"uuid\" ] return None","id_":253775,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/drivers\/nvp_driver.py#L587-L600","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Set new training dataset for optimizer reuse","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : callBigDlFunc ( arg_0 . bigdl_type , \"setTrainData\" , arg_0 . value , arg_1 , arg_2 )","id_":253776,"task_name":"https:\/\/github.com\/intel-analytics\/BigDL\/blob\/e9c19788285986ab789a2e2998f9a85d7524779f\/pyspark\/bigdl\/optim\/optimizer.py#L914-L923","negative":"A utility function that creates a list of enumeration values from a bit\n mask for a specific mask enumeration class.\n\n Args:\n enumeration (class): The enumeration class from which to draw\n enumeration values.\n mask (int): The bit mask from which to identify enumeration values.\n\n Returns:\n list: A list of enumeration values corresponding to the bit mask."} {"query":"Gets the FileDescriptor for the file containing the specified symbol .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = _NormalizeFullyQualifiedName ( arg_1 ) try : return arg_0 . _descriptors [ arg_1 ] . file except KeyError : pass try : return arg_0 . _enum_descriptors [ arg_1 ] . file except KeyError : pass try : arg_2 = arg_0 . _internal_db . Func ( arg_1 ) except KeyError as error : if arg_0 . _descriptor_db : arg_2 = arg_0 . _descriptor_db . Func ( arg_1 ) else : raise error if not arg_2 : raise KeyError ( 'Cannot find a file containing %s' % arg_1 ) return arg_0 . _ConvertFileProtoToFileDescriptor ( arg_2 )","id_":253777,"task_name":"https:\/\/github.com\/ibelie\/typy\/blob\/3616845fb91459aacd8df6bf82c5d91f4542bee7\/typy\/google\/protobuf\/descriptor_pool.py#L208-L241","negative":"This functions returns a list of jobs"} {"query":"Stores a group node to disk","positive":"def Func ( arg_0 , arg_1 = True , arg_2 = arg_3 . STORE_DATA , arg_5 = None ) : arg_6 = arg_0 . _nn_interface . _root_instance arg_7 = arg_6 . v_storage_service arg_7 . store ( arg_3 . GROUP , arg_0 , trajectory_name = arg_6 . 
v_name , arg_1 = arg_1 , arg_2 = arg_2 , arg_5 = arg_5 )","id_":253778,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L3307-L3333","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Apply value predicates on the given record r .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False , arg_4 = True , arg_5 = None ) : for arg_6 , arg_7 , arg_8 , arg_9 , arg_10 in arg_0 . _value_predicates : if arg_1 % arg_10 == 0 : arg_11 = arg_0 . _field_names . index ( arg_6 ) if arg_11 < len ( arg_2 ) : arg_12 = arg_2 [ arg_11 ] try : arg_13 = arg_7 ( arg_12 ) if not arg_13 : arg_14 = { 'code' : arg_8 } if not arg_3 : arg_14 [ 'message' ] = arg_9 arg_14 [ 'row' ] = arg_1 + 1 arg_14 [ 'column' ] = arg_11 + 1 arg_14 [ 'field' ] = arg_6 arg_14 [ 'value' ] = arg_12 arg_14 [ 'record' ] = arg_2 if arg_5 is not None : arg_14 [ 'context' ] = arg_5 yield arg_14 except Exception as e : if arg_4 : arg_14 = { 'code' : UNEXPECTED_EXCEPTION } if not arg_3 : arg_14 [ 'message' ] = MESSAGES [ UNEXPECTED_EXCEPTION ] % ( e . __class__ . __name__ , e ) arg_14 [ 'row' ] = arg_1 + 1 arg_14 [ 'column' ] = arg_11 + 1 arg_14 [ 'field' ] = arg_6 arg_14 [ 'value' ] = arg_12 arg_14 [ 'record' ] = arg_2 arg_14 [ 'exception' ] = e arg_14 [ 'function' ] = '%s: %s' % ( arg_7 . __name__ , arg_7 . __doc__ ) if arg_5 is not None : arg_14 [ 'context' ] = arg_5 yield arg_14","id_":253779,"task_name":"https:\/\/github.com\/alimanfoo\/csvvalidator\/blob\/50a86eefdc549c48f65a91a5c0a66099010ee65d\/csvvalidator.py#L591-L629","negative":"Write the index.html file for this report."} {"query":"Gets the offsets that occur as close as possible to the onsets in the given onset - front .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = [ ] for arg_5 in _get_front_idxs_from_id ( arg_0 , arg_1 ) : arg_6 , arg_7 = _lookup_offset_by_onset_idx ( arg_5 , arg_2 , arg_3 ) arg_4 . append ( ( arg_6 , arg_7 ) ) return arg_4","id_":253780,"task_name":"https:\/\/github.com\/MaxStrange\/AudioSegment\/blob\/1daefb8de626ddff3ff7016697c3ad31d262ecd6\/algorithms\/asa.py#L605-L613","negative":"This functions returns a list of jobs"} {"query":"Setup a grid - stack of grid_stack from a mask sub - grid size and psf - shape .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = RegularGrid . from_mask ( arg_1 ) arg_5 = SubGrid . from_mask_and_sub_grid_size ( arg_1 , arg_2 ) arg_6 = RegularGrid . blurring_grid_from_mask_and_psf_shape ( arg_1 , arg_3 ) return GridStack ( arg_4 , arg_5 , arg_6 )","id_":253781,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/data\/array\/grids.py#L123-L138","negative":"Add an HTTP header to response object.\n\n Arguments:\n name (str): HTTP header field name\n value (str): HTTP header field value"} {"query":"Updates the global list of line error - suppressions .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = Search ( r'\\bNOLINT(NEXTLINE)?\\b(\\([^)]+\\))?' , arg_1 ) if arg_4 : if arg_4 . group ( 1 ) : arg_5 = arg_2 + 1 else : arg_5 = arg_2 arg_6 = arg_4 . group ( 2 ) if arg_6 in ( None , '(*)' ) : _error_suppressions . setdefault ( None , set ( ) ) . add ( arg_5 ) else : if arg_6 . startswith ( '(' ) and arg_6 . endswith ( ')' ) : arg_6 = arg_6 [ 1 : - 1 ] if arg_6 in _ERROR_CATEGORIES : _error_suppressions . setdefault ( arg_6 , set ( ) ) . 
add ( arg_5 ) elif arg_6 not in _LEGACY_ERROR_CATEGORIES : arg_3 ( arg_0 , arg_2 , 'readability\/nolint' , 5 , 'Unknown NOLINT error category: %s' % arg_6 )","id_":253782,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L683-L712","negative":"Return a rasterio.io.MemoryFile instance from input.\n\n Parameters\n ----------\n data : array\n array to be written\n profile : dict\n rasterio profile for MemoryFile"} {"query":"Read a chunk of bytes from queue .","positive":"def Func ( arg_0 , arg_1 = 0 ) : arg_2 = arg_0 . unFunc arg_0 . unFunc = \"\" while arg_2 == \"\" or arg_1 < 0 or ( arg_1 > 0 and len ( arg_2 ) < arg_1 ) : try : arg_2 += compat . to_native ( arg_0 . queue . get ( True , 0.1 ) ) except compat . queue . Empty : if arg_0 . is_closed : break if arg_1 > 0 and len ( arg_2 ) > arg_1 : arg_0 . unFunc = arg_2 [ arg_1 : ] arg_2 = arg_2 [ : arg_1 ] return arg_2","id_":253783,"task_name":"https:\/\/github.com\/mar10\/wsgidav\/blob\/cec0d84222fc24bea01be1cea91729001963f172\/wsgidav\/stream_tools.py#L55-L82","negative":"Creates a service from a constructor and checks which kwargs are not used"} {"query":"Used for breadcrumb dynamic_list_constructor .","positive":"def Func ( arg_0 ) : arg_1 = Group . query . get ( arg_0 ) if arg_1 is not None : return arg_1 . name","id_":253784,"task_name":"https:\/\/github.com\/inveniosoftware-contrib\/invenio-groups\/blob\/109481d6b02701db00b72223dd4a65e167c589a6\/invenio_groups\/views.py#L52-L56","negative":"Decorator. Abortable worker. If wrapped task will be cancelled by\n dispatcher, decorator will send ftp codes of successful interrupt.\n\n ::\n\n >>> @worker\n ... async def worker(self, connection, rest):\n ... ..."} {"query":"Return the partial assignment implied by the current inferences .","positive":"def Func ( arg_0 ) : arg_0 . support_pruning ( ) return dict ( ( arg_1 , arg_0 . curr_domains [ arg_1 ] [ 0 ] ) for arg_1 in arg_0 . vars if 1 == len ( arg_0 . curr_domains [ arg_1 ] ) )","id_":253785,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/csp.py#L127-L131","negative":"we need to be flexible in order to determine which plugin's configuration\n specified and make appropriate configs to metrics collector\n\n :return: SECTION name or None for defaults"} {"query":"Build requirements based on flags","positive":"def Func ( arg_0 = True , arg_1 = True , arg_2 = None ) : arg_3 = list ( BASE_REQUIREMENTS ) if arg_2 is None : arg_2 = is_jython if arg_0 : print ( \"setup options: \" ) print ( \"with_pgi: \" , \"yes\" if arg_2 else \"no\" ) print ( \"with_examples: \" , \"yes\" if arg_1 else \"no\" ) if arg_2 : arg_3 . append ( \"pgi\" ) if arg_0 : print ( \"warning, as of April 2019 typography does not work with pgi\" ) else : arg_3 . append ( PYGOBJECT ) if arg_1 : arg_3 . 
extend ( EXAMPLE_REQUIREMENTS ) if arg_0 : print ( \"\" ) print ( \"\" ) for arg_4 in arg_3 : print ( arg_4 ) return arg_3","id_":253786,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/setup.py#L141-L172","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Builds a single end cluster from the refmapped data .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_5 = arg_0 . _hackersonly [ \"min_SE_refmap_overlap\" ] arg_6 = arg_3 + arg_5 arg_7 = arg_4 - arg_5 if arg_6 > arg_7 : arg_8 = arg_6 arg_6 = arg_7 arg_7 = arg_8 if arg_6 == arg_7 : arg_7 += 1 arg_9 = { } arg_10 = [ ] arg_11 = [ ] arg_11 = arg_1 . fetch ( arg_2 , arg_6 , arg_7 ) for arg_12 in arg_11 : if arg_12 . qname not in arg_9 : arg_9 [ arg_12 . qname ] = arg_12 arg_14 = lambda x : int ( x . split ( \";size=\" ) [ 1 ] . split ( \";\" ) [ 0 ] ) arg_15 = sorted ( arg_9 . keys ( ) , arg_22 = arg_14 , reverse = True ) try : arg_16 = arg_9 [ arg_15 [ 0 ] ] except ValueError : LOGGER . error ( \"Found bad cluster, skipping - key:{} rdict:{}\" . format ( arg_15 [ 0 ] , arg_9 ) ) return \"\" arg_17 = arg_16 . get_reference_positions ( full_length = True ) arg_18 = min ( arg_17 ) arg_19 = max ( arg_17 ) if arg_16 . is_reverse : arg_20 = revcomp ( arg_16 . seq ) else : arg_20 = arg_16 . seq arg_21 = arg_14 ( arg_15 [ 0 ] ) arg_10 . append ( \">{}:{}:{};size={};*\\n{}\" . format ( arg_2 , arg_18 , arg_19 , arg_21 , arg_20 ) ) if len ( arg_15 ) > 1 : for arg_22 in arg_15 [ 1 : ] : arg_23 = False try : arg_16 = arg_9 [ arg_22 ] except ValueError : arg_16 = arg_9 [ arg_22 ] [ 0 ] arg_23 = True if not arg_23 : arg_17 = arg_16 . get_reference_positions ( full_length = True ) arg_24 = min ( arg_17 ) arg_25 = max ( arg_17 ) if arg_16 . is_reverse : arg_20 = revcomp ( arg_16 . seq ) else : arg_20 = arg_16 . seq arg_21 = arg_14 ( arg_22 ) arg_10 . append ( \">{}:{}:{};size={};+\\n{}\" . format ( arg_2 , arg_24 , arg_25 , arg_21 , arg_20 ) ) else : pass return arg_10","id_":253787,"task_name":"https:\/\/github.com\/dereneaton\/ipyrad\/blob\/5eeb8a178160f45faf71bf47cec4abe998a575d1\/ipyrad\/assemble\/refmap.py#L331-L429","negative":"Outputs a list of all plugins Streamlink has loaded."} {"query":"Gets the variable part of the source code for a rule .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = len ( arg_0 . input_source ) + arg_1 . position arg_3 = arg_0 . input_source [ arg_2 : arg_2 + arg_1 . consumed ] . rstrip ( ) return arg_0 . _indent ( arg_3 , depth = arg_0 . indent + \" \" , skip_first_line = True )","id_":253788,"task_name":"https:\/\/github.com\/treycucco\/pyebnf\/blob\/3634ddabbe5d73508bcc20f4a591f86a46634e1d\/pyebnf\/compiler.py#L202-L206","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Run Fastqc on the input reads","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_0 . fileStore . getLocalTempDir ( ) arg_0 . fileStore . readGlobalFile ( arg_1 , os . path . join ( arg_3 , 'R1.fastq' ) ) arg_4 = [ '\/data\/R1.fastq' ] arg_5 = [ 'R1_fastqc.html' , 'R1_fastqc.zip' ] if arg_2 : arg_0 . fileStore . readGlobalFile ( arg_2 , os . path . join ( arg_3 , 'R2.fastq' ) ) arg_4 . extend ( [ '-t' , '2' , '\/data\/R2.fastq' ] ) arg_5 . 
extend ( [ 'R2_fastqc.html' , 'R2_fastqc.zip' ] ) dockerCall ( arg_0 = arg_0 , tool = 'quay.io\/ucsc_cgl\/fastqc:0.11.5--be13567d00cd4c586edf8ae47d991815c8c72a49' , workDir = arg_3 , arg_4 = arg_4 ) arg_6 = [ os . path . join ( arg_3 , x ) for x in arg_5 ] tarball_files ( tar_name = 'fastqc.tar.gz' , file_paths = arg_6 , output_dir = arg_3 ) return arg_0 . fileStore . writeGlobalFile ( os . path . join ( arg_3 , 'fastqc.tar.gz' ) )","id_":253789,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/tools\/QC.py#L8-L30","negative":"Return True if we should retry. False otherwise.\n\n Args:\n exception: An exception to test for transience.\n\n Returns:\n True if we should retry. False otherwise."} {"query":"Choose the name for stepped data file","positive":"def Func ( arg_0 ) : if arg_0 . use_caching : arg_1 = \"|\" arg_2 = hashlib . md5 ( ) arg_3 = \"cache version 6\" + arg_1 + ';' . join ( arg_0 . load_profile . schedule ) + arg_1 + str ( arg_0 . loop_limit ) arg_3 += arg_1 + str ( arg_0 . ammo_limit ) + arg_1 + ';' . join ( arg_0 . load_profile . schedule ) + arg_1 + str ( arg_0 . autocases ) arg_3 += arg_1 + \";\" . join ( arg_0 . uris ) + arg_1 + \";\" . join ( arg_0 . headers ) + arg_1 + arg_0 . http_ver + arg_1 + \";\" . join ( arg_0 . chosen_cases ) arg_3 += arg_1 + str ( arg_0 . enum_ammo ) + arg_1 + str ( arg_0 . ammo_type ) if arg_0 . load_profile . is_instances ( ) : arg_3 += arg_1 + str ( arg_0 . instances ) if arg_0 . ammo_file : arg_4 = resource . get_opener ( arg_0 . ammo_file ) arg_3 += arg_1 + arg_4 . hash else : if not arg_0 . uris : raise RuntimeError ( \"Neither ammofile nor uris specified\" ) arg_3 += arg_1 + ';' . join ( arg_0 . uris ) + arg_1 + ';' . join ( arg_0 . headers ) arg_0 . log . debug ( \"stpd-hash source: %s\" , arg_3 ) arg_2 . update ( arg_3 . encode ( 'utf8' ) ) if not os . path . exists ( arg_0 . cache_dir ) : os . makedirs ( arg_0 . cache_dir ) arg_5 = arg_0 . cache_dir + '\/' + os . path . basename ( arg_0 . ammo_file ) + \"_\" + arg_2 . hexdigest ( ) + \".stpd\" else : arg_5 = os . path . realpath ( \"ammo.stpd\" ) arg_0 . log . debug ( \"Generated cache file name: %s\" , arg_5 ) return arg_5","id_":253790,"task_name":"https:\/\/github.com\/yandex\/yandex-tank\/blob\/d71d63b6ab5de8b8a5ea2b728b6ab9ac0b1ba71b\/yandextank\/stepper\/main.py#L246-L279","negative":"Processes a level of segmentation, and converts it into times.\n\n Parameters\n ----------\n est_idxs: np.array\n Estimated boundaries in frame indeces.\n est_labels: np.array\n Estimated labels.\n N: int\n Number of frames in the whole track.\n frame_times: np.array\n Time stamp for each frame.\n dur: float\n Duration of the audio track.\n\n Returns\n -------\n est_times: np.array\n Estimated segment boundaries in seconds.\n est_labels: np.array\n Estimated labels for each segment."} {"query":"Checks to see if Spark worker and HDFS datanode are still running .","positive":"def Func ( arg_0 ) : arg_1 = _FuncContainerStatus ( arg_0 . sparkContainerID , arg_0 . hdfsContainerID , sparkNoun = 'worker' , hdfsNoun = 'datanode' ) return arg_1","id_":253791,"task_name":"https:\/\/github.com\/BD2KGenomics\/toil-lib\/blob\/022a615fc3dc98fc1aaa7bfd232409962ca44fbd\/src\/toil_lib\/spark.py#L320-L330","negative":"Apply all filters to issues and pull requests.\n\n :param dict older_tag: All issues before this tag's date will be\n excluded. May be special value, if new tag is\n the first tag. 
(Means **older_tag** is when\n the repo was created.)\n :param dict newer_tag: All issues after this tag's date will be\n excluded. May be title of unreleased section.\n :rtype: list(dict), list(dict)\n :return: Filtered issues and pull requests."} {"query":"Reverse all bumpers","positive":"def Func ( arg_0 ) : if not arg_0 . test_drive and arg_0 . bumps : map ( lambda b : b . Func ( ) , arg_0 . bumpers )","id_":253792,"task_name":"https:\/\/github.com\/maxzheng\/bumper-lib\/blob\/32a9dec5448673825bb2d7d92fa68882b597f794\/bumper\/__init__.py#L199-L202","negative":"Using the record length and appropriate start points, seek to the\n country that corresponds to the converted IP address integer.\n Return offset of record.\n\n :arg ipnum: Result of ip2long conversion"} {"query":"Downloads a HTTP resource from url and save to dest . Capable of dealing with Gzip compressed content .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = urllib2 . Request ( arg_0 ) arg_2 . add_header ( 'Accept-encoding' , 'gzip' ) arg_3 = urllib2 . build_opener ( ) arg_4 = arg_3 . open ( arg_2 ) arg_5 = arg_4 . read ( ) if arg_4 . headers . get ( 'content-encoding' , '' ) == 'gzip' : arg_6 = StringIO . StringIO ( arg_5 ) arg_7 = gzip . GzipFile ( fileobj = arg_6 ) arg_5 = arg_7 . read ( ) arg_8 = open ( arg_1 , 'wb' ) arg_8 . write ( arg_5 ) arg_8 . close ( )","id_":253793,"task_name":"https:\/\/github.com\/ricobl\/django-importer\/blob\/6967adfa7a286be7aaf59d3f33c6637270bd9df6\/django_importer\/utils.py#L7-L36","negative":"Returns the value specified in the XDG_CONFIG_HOME environment variable\n or the appropriate default."} {"query":"Print the colored logo based on global results .","positive":"def Func ( arg_0 , arg_1 = False ) : if not PyFunceble . CONFIGURATION [ \"quiet\" ] : arg_2 = [ ] if arg_1 : for arg_3 in PyFunceble . ASCII_PYFUNCEBLE . split ( \"\\n\" ) : arg_2 . append ( PyFunceble . Fore . YELLOW + arg_3 + PyFunceble . Fore . RESET ) elif PyFunceble . INTERN [ \"counter\" ] [ \"percentage\" ] [ \"up\" ] >= 50 : for arg_3 in PyFunceble . ASCII_PYFUNCEBLE . split ( \"\\n\" ) : arg_2 . append ( PyFunceble . Fore . GREEN + arg_3 + PyFunceble . Fore . RESET ) else : for arg_3 in PyFunceble . ASCII_PYFUNCEBLE . split ( \"\\n\" ) : arg_2 . append ( PyFunceble . Fore . RED + arg_3 + PyFunceble . Fore . RESET ) print ( \"\\n\" . join ( arg_2 ) )","id_":253794,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/core.py#L844-L893","negative":"REST Add soundtouch audio effects to a Call"} {"query":"Hashes the data in a file on disk .","positive":"def Func ( arg_0 , arg_1 = 65536 , arg_2 = 1 , arg_3 = arg_4 , arg_5 = arg_4 , arg_6 = arg_4 ) : arg_6 = _rectify_base ( arg_6 ) arg_5 = _rectify_hashlen ( arg_5 ) arg_3 = _rectify_hasher ( arg_3 ) ( ) with open ( arg_0 , 'rb' ) as file : arg_7 = file . read ( arg_1 ) if arg_2 > 1 : while len ( arg_7 ) > 0 : arg_3 . update ( arg_7 ) file . seek ( arg_1 * ( arg_2 - 1 ) , 1 ) arg_7 = file . read ( arg_1 ) else : while len ( arg_7 ) > 0 : arg_3 . update ( arg_7 ) arg_7 = file . 
read ( arg_1 ) arg_8 = _digest_hasher ( arg_3 , arg_5 , arg_6 ) return arg_8","id_":253795,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_hash.py#L772-L854","negative":"This is a decorator that retries a function.\n\n Tries `n` times and catches a given tuple of `errors`.\n\n If the `n` retries are not enough, the error is reraised.\n\n If desired `waits` some seconds.\n\n Optionally takes a 'logger_name' of a given logger to print the caught error."} {"query":"Emit a record . Format the record and send it to the specified addressees .","positive":"def Func ( arg_0 , arg_1 ) : try : import smtplib try : from email . utils import arg_2 except ImportError : arg_2 = arg_0 . date_time arg_3 = arg_0 . mailport if not arg_3 : arg_3 = smtplib . SMTP_PORT arg_4 = smtplib . SMTP ( arg_0 . mailhost , arg_3 ) arg_5 = arg_0 . format ( arg_1 ) arg_5 = \"From: %s\\r\\nTo: %s\\r\\nSubject: %s\\r\\nDate: %s\\r\\n\\r\\n%s\" % ( arg_0 . fromaddr , ',' . join ( arg_0 . toaddrs ) , arg_0 . getSubject ( arg_1 ) , arg_2 ( ) , arg_5 ) if arg_0 . username : arg_4 . ehlo ( ) arg_4 . starttls ( ) arg_4 . ehlo ( ) arg_4 . login ( arg_0 . username , arg_0 . password ) arg_4 . sendmail ( arg_0 . fromaddr , arg_0 . toaddrs , arg_5 ) arg_4 . quit ( ) except ( KeyboardInterrupt , SystemExit ) : raise except : arg_0 . handleError ( arg_1 )","id_":253796,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/log\/handlers.py#L7-L44","negative":"Decorator. Abortable worker. If wrapped task will be cancelled by\n dispatcher, decorator will send ftp codes of successful interrupt.\n\n ::\n\n >>> @worker\n ... async def worker(self, connection, rest):\n ... ..."} {"query":"Transform a given vector to a volume . This is a reshape function for 3D flattened and maybe masked vectors .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'C' ) : if arg_1 . dtype != np . bool : raise ValueError ( \"mask must be a boolean array\" ) if arg_0 . ndim != 1 : raise ValueError ( \"vector must be a 1-dimensional array\" ) if arg_0 . ndim == 2 and any ( arg_3 == 1 for arg_3 in arg_0 . shape ) : log . debug ( 'Got an array of shape {}, flattening for my purposes.' . format ( arg_0 . shape ) ) arg_0 = arg_0 . flatten ( ) arg_4 = np . zeros ( arg_1 . shape [ : 3 ] , dtype = arg_0 . dtype , arg_2 = arg_2 ) arg_4 [ arg_1 ] = arg_0 return arg_4","id_":253797,"task_name":"https:\/\/github.com\/Neurita\/boyle\/blob\/2dae7199849395a209c887d5f30506e1de8a9ad9\/boyle\/nifti\/mask.py#L267-L295","negative":"Return the most recent timestamp in the operation."} {"query":"Gets a list of all suggestions for an object","positive":"def Func ( arg_0 ) : arg_1 = ContentType . objects . get_for_model ( type ( arg_0 ) ) return ObjectViewDictionary . objects . filter ( current_object_id = arg_0 . id , current_content_type = arg_1 ) . extra ( order_by = [ '-visits' ] )","id_":253798,"task_name":"https:\/\/github.com\/dreidev\/Suggestions\/blob\/f04c181dc815d32c35b44c6e1c91521e88a9dd6c\/suggestions\/views.py#L114-L119","negative":"Load a multipage tiff into a single variable in x,y,z format.\n\n Arguments:\n tiff_filename: Filename of source data\n dtype: data type to use for the returned tensor\n\n Returns:\n Array containing contents from input tiff file in xyz order"} {"query":"Get widget for param_name","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 not in arg_0 . _Funcs : arg_0 . _Funcs [ arg_1 ] = arg_0 . _make_Func ( arg_1 ) return arg_0 . 
_Funcs [ arg_1 ]","id_":253799,"task_name":"https:\/\/github.com\/ioam\/parambokeh\/blob\/fb9744f216273c7b24e65d037b1d621c08d7fde6\/parambokeh\/__init__.py#L458-L462","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Sets the current model as orphaned . This is called when the scheduler is about to kill the process to reallocate the worker to a different process .","positive":"def Func ( arg_0 ) : arg_1 = ClientJobsDAO . CMPL_REASON_ORPHAN arg_2 = \"Killed by Scheduler\" arg_0 . _jobsDAO . modelSetCompleted ( arg_0 . _modelID , arg_1 , arg_2 )","id_":253800,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/ModelRunner.py#L968-L975","negative":"Force an audio signal down to mono.\n\n Parameters\n ----------\n y : np.ndarray [shape=(2,n) or shape=(n,)]\n audio time series, either stereo or mono\n\n Returns\n -------\n y_mono : np.ndarray [shape=(n,)]\n `y` as a monophonic time-series\n\n Notes\n -----\n This function caches at level 20.\n\n Examples\n --------\n >>> y, sr = librosa.load(librosa.util.example_audio_file(), mono=False)\n >>> y.shape\n (2, 1355168)\n >>> y_mono = librosa.to_mono(y)\n >>> y_mono.shape\n (1355168,)"} {"query":"Press scrollbar left with number of iterations","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if not arg_0 . verifyscrollbarhorizontal ( arg_1 , arg_2 ) : raise LdtpServerException ( 'Object not horizontal scrollbar' ) arg_4 = arg_0 . _get_object_handle ( arg_1 , arg_2 ) arg_5 = 0 arg_6 = 1.0 \/ 8 arg_7 = False while arg_5 < arg_3 : if arg_4 . AXValue <= 0 : raise LdtpServerException ( 'Minimum limit reached' ) arg_4 . AXValue -= arg_6 time . sleep ( 1.0 \/ 100 ) arg_7 = True arg_5 += 1 if arg_7 : return 1 else : raise LdtpServerException ( 'Unable to decrease scrollbar' )","id_":253801,"task_name":"https:\/\/github.com\/alex-kostirin\/pyatomac\/blob\/3f46f6feb4504315eec07abb18bb41be4d257aeb\/atomac\/ldtpd\/value.py#L284-L316","negative":"APEv2 tag value factory.\n\n Use this if you need to specify the value's type manually. Binary\n and text data are automatically detected by APEv2.__setitem__."} {"query":"Patch information in an existing table . It only updates fileds that are provided in the request object .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = None , arg_9 = None , arg_10 = None , arg_11 = None , arg_12 = None ) : arg_3 = arg_3 if arg_3 is not None else arg_0 . project_id arg_13 = { } if arg_4 is not None : arg_13 [ 'description' ] = arg_4 if arg_5 is not None : arg_13 [ 'expirationTime' ] = arg_5 if arg_6 : arg_13 [ 'externalDataConfiguration' ] = arg_6 if arg_7 is not None : arg_13 [ 'friendlyName' ] = arg_7 if arg_8 : arg_13 [ 'labels' ] = arg_8 if arg_9 : arg_13 [ 'schema' ] = { 'fields' : arg_9 } if arg_10 : arg_13 [ 'timePartitioning' ] = arg_10 if arg_11 : arg_13 [ 'view' ] = arg_11 if arg_12 is not None : arg_13 [ 'requirePartitionFilter' ] = arg_12 arg_0 . log . info ( 'Patching Table %s:%s.%s' , arg_3 , arg_1 , arg_2 ) try : arg_0 . service . tables ( ) . patch ( projectId = arg_3 , datasetId = arg_1 , tableId = arg_2 , body = arg_13 ) . execute ( num_retries = arg_0 . num_retries ) arg_0 . log . 
info ( 'Table patched successfully: %s:%s.%s' , arg_3 , arg_1 , arg_2 ) except HttpError as err : raise AirflowException ( 'BigQuery job failed. Error was: {}' . format ( err . content ) )","id_":253802,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/bigquery_hook.py#L526-L632","negative":"Gets the base class for the custom database back-end.\n\n This should be the Django PostgreSQL back-end. However,\n some people are already using a custom back-end from\n another package. We are nice people and expose an option\n that allows them to configure the back-end we base upon.\n\n As long as the specified base eventually also has\n the PostgreSQL back-end as a base, then everything should\n work as intended."} {"query":"Remove all binary files in the adslib directory .","positive":"def Func ( ) : arg_0 = ( \"adslib\/*.a\" , \"adslib\/*.o\" , \"adslib\/obj\/*.o\" , \"adslib\/*.bin\" , \"adslib\/*.so\" , ) for arg_1 in functools . reduce ( operator . iconcat , [ glob . glob ( p ) for p in arg_0 ] ) : os . remove ( arg_1 )","id_":253803,"task_name":"https:\/\/github.com\/stlehmann\/pyads\/blob\/44bd84394db2785332ac44b2948373916bea0f02\/setup.py#L60-L71","negative":"Update the rate limit and the time to reset\n from the response headers.\n\n :param: response: the response object"} {"query":"Validate a string as a valid storage path","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : if not arg_1 or not isinstance ( arg_1 , str ) or arg_1 [ 0 ] != '\/' or arg_1 == '\/' : raise StorageArgumentException ( 'The path must be a string, start with a slash (\/), and be longer' ' than 1 character.' ) if not arg_2 and len ( [ arg_3 for arg_3 in arg_1 . split ( '\/' ) if arg_3 ] ) == 1 : raise StorageArgumentException ( 'This method does not accept projects in the path.' )","id_":253804,"task_name":"https:\/\/github.com\/HumanBrainProject\/hbp-service-client\/blob\/b338fb41a7f0e7b9d654ff28fcf13a56d03bff4d\/hbp_service_client\/storage_service\/client.py#L262-L271","negative":"Get analog data."} {"query":"Used to plot a set of coordinates .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = True , arg_8 = True , arg_9 = True , ** arg_10 ) : if arg_3 is None and arg_4 is None : arg_4 , arg_3 = _setup_axes ( ) elif arg_3 is None : arg_3 = arg_4 . gca ( ) elif arg_4 is None : arg_4 = arg_3 . get_figure ( ) arg_11 = arg_10 . get ( 'norm' , None ) arg_12 = _mplim . Func ( arg_3 , ** arg_10 ) arg_13 = arg_10 . pop ( 'vmin' , _np . min ( arg_2 ) ) arg_14 = arg_10 . pop ( 'vmax' , _np . max ( arg_2 ) ) if arg_5 is not None : arg_12 . set_cmap ( arg_5 ) arg_15 = _cm . ScalarMappable ( arg_5 = arg_12 . get_cmap ( ) , arg_11 = arg_11 ) arg_15 . set_array ( arg_2 ) if arg_9 : arg_16 , arg_17 = _cb ( arg_3 = arg_3 , arg_12 = arg_15 , arg_4 = arg_4 ) if arg_6 is not None : arg_12 . set_alpha ( arg_6 ) arg_12 . set_data ( arg_0 , arg_1 , arg_2 ) arg_3 . images . append ( arg_12 ) if arg_7 : arg_18 = min ( arg_0 ) arg_19 = max ( arg_0 ) arg_3 . set_xlim ( arg_18 , arg_19 ) if arg_8 : arg_20 = min ( arg_1 ) arg_21 = max ( arg_1 ) arg_3 . 
set_ylim ( arg_20 , arg_21 ) return _SI ( arg_12 = arg_12 , arg_17 = arg_17 , arg_16 = arg_16 )","id_":253805,"task_name":"https:\/\/github.com\/joelfrederico\/SciSalt\/blob\/7bf57c49c7dde0a8b0aa337fbd2fbd527ce7a67f\/scisalt\/matplotlib\/NonUniformImage.py#L13-L84","negative":"Gets back all response headers."} {"query":"Edit additional information about a panel gene .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = store . panel ( arg_0 ) arg_3 = store . hgnc_gene ( arg_1 ) arg_4 = controllers . existing_gene ( store , arg_2 , arg_1 ) arg_5 = PanelGeneForm ( ) arg_6 = [ ] for arg_7 in arg_3 [ 'transcripts' ] : if arg_7 . get ( 'refseq_id' ) : arg_8 = arg_7 . get ( 'refseq_id' ) arg_6 . append ( ( arg_8 , arg_8 ) ) arg_5 . disease_associated_transcripts . choices = arg_6 if arg_5 . validate_on_submit ( ) : arg_11 = 'edit' if arg_4 else 'add' arg_12 = arg_5 . data . copy ( ) if 'csrf_token' in arg_12 : del arg_12 [ 'csrf_token' ] store . add_pending ( arg_2 , arg_3 , arg_11 = arg_11 , info = arg_12 ) return redirect ( url_for ( '.panel' , arg_0 = arg_0 ) ) if arg_4 : for arg_13 in [ 'disease_associated_transcripts' , 'reduced_penetrance' , 'mosaicism' , 'inheritance_models' , 'database_entry_version' , 'comment' ] : arg_14 = getattr ( arg_5 , arg_13 ) if not arg_14 . data : arg_15 = arg_4 . get ( arg_13 ) if arg_15 is not None : arg_14 . process_data ( arg_15 ) return dict ( panel = arg_2 , arg_5 = arg_5 , gene = arg_3 , arg_4 = arg_4 )","id_":253806,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/panels\/views.py#L147-L177","negative":"Updates the target temperature on the NuHeat API\n\n :param temperature: The desired temperature in NuHeat format\n :param permanent: Permanently hold the temperature. If set to False, the schedule will\n resume at the next programmed event"} {"query":"Return log messages for a given SMILES string using the default validations .","positive":"def Func ( arg_0 ) : arg_1 = Chem . MolFromSmiles ( arg_0 ) arg_2 = Validator ( ) . validate ( arg_1 ) return arg_2","id_":253807,"task_name":"https:\/\/github.com\/mcs07\/MolVS\/blob\/d815fe52d160abcecbcbf117e6437bf727dbd8ad\/molvs\/validate.py#L105-L119","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Add a description to a controlled vocabulary .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : if arg_2 not in arg_0 . languages : raise ValueError ( 'Language not present: {}' . format ( arg_2 ) ) arg_0 . controlled_vocabularies [ arg_1 ] [ 0 ] . append ( ( arg_2 , arg_3 ) )","id_":253808,"task_name":"https:\/\/github.com\/dopefishh\/pympi\/blob\/79c747cde45b5ba203ed93154d8c123ac9c3ef56\/pympi\/Elan.py#L171-L183","negative":"create a copy of each selected object"} {"query":"Downloads a native resolution orthorectified chip in tif format from a user - specified catalog id .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 'PAN' , arg_4 = 'TIF' , arg_5 = 'chip.tif' ) : def t2s1 ( arg_6 ) : return str ( arg_6 ) . strip ( '(,)' ) . replace ( ',' , '' ) def t2s2 ( arg_6 ) : return str ( arg_6 ) . strip ( '(,)' ) . 
replace ( ' ' , '' ) if len ( arg_1 ) != 4 : print ( 'Wrong coordinate entry' ) return False arg_7 , arg_8 , arg_9 , arg_10 = arg_1 arg_11 = ( ( arg_7 , arg_8 ) , ( arg_7 , arg_10 ) , ( arg_9 , arg_10 ) , ( arg_9 , arg_8 ) , ( arg_7 , arg_8 ) ) arg_12 = 'POLYGON ((' + ',' . join ( [ t2s1 ( corner ) for corner in arg_11 ] ) + '))' arg_13 = arg_0 . get_images_by_catid_and_aoi ( arg_2 = arg_2 , aoi_wkt = arg_12 ) arg_14 = arg_0 . describe_images ( arg_13 ) arg_15 , arg_16 , arg_17 = None , None , 0 for arg_2 , arg_18 in arg_14 . items ( ) : for arg_19 , arg_20 in arg_18 [ 'parts' ] . items ( ) : if 'PAN' in arg_20 . keys ( ) : arg_15 = arg_20 [ 'PAN' ] [ 'id' ] arg_21 = arg_20 [ 'PAN' ] [ 'bucket' ] if 'WORLDVIEW_8_BAND' in arg_20 . keys ( ) : arg_16 = arg_20 [ 'WORLDVIEW_8_BAND' ] [ 'id' ] arg_17 = 8 arg_21 = arg_20 [ 'WORLDVIEW_8_BAND' ] [ 'bucket' ] elif 'RGBN' in arg_20 . keys ( ) : arg_16 = arg_20 [ 'RGBN' ] [ 'id' ] arg_17 = 4 arg_21 = arg_20 [ 'RGBN' ] [ 'bucket' ] arg_22 = '' if arg_3 == 'PAN' : arg_22 = arg_15 + '?bands=0' elif arg_3 == 'MS' : arg_22 = arg_16 + '?' elif arg_3 == 'PS' : if arg_17 == 8 : arg_22 = arg_16 + '?bands=4,2,1&panId=' + arg_15 elif arg_17 == 4 : arg_22 = arg_16 + '?bands=0,1,2&panId=' + arg_15 arg_23 = '&upperLeft={}&lowerRight={}' . format ( t2s2 ( ( arg_7 , arg_10 ) ) , t2s2 ( ( arg_9 , arg_8 ) ) ) arg_24 = 'https:\/\/idaho.geobigdata.io\/v1\/chip\/bbox\/' + arg_21 + '\/' arg_25 = arg_24 + arg_22 + arg_23 arg_25 += '&format=' + arg_4 + '&token=' + arg_0 . gbdx_connection . access_token arg_26 = requests . get ( arg_25 ) if arg_26 . status_code == 200 : with open ( arg_5 , 'wb' ) as f : f . write ( arg_26 . content ) return True else : print ( 'Cannot download chip' ) return False","id_":253809,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/idaho.py#L138-L217","negative":"r\"\"\"Computes the equivalent noise bandwidth\n\n .. math:: ENBW = N \\frac{\\sum_{n=1}^{N} w_n^2}{\\left(\\sum_{n=1}^{N} w_n \\right)^2}\n\n .. doctest::\n\n >>> from spectrum import create_window, enbw\n >>> w = create_window(64, 'rectangular')\n >>> enbw(w)\n 1.0\n\n The following table contains the ENBW values for some of the\n implemented windows in this module (with N=16384). They have been\n double checked against litterature (Source: [Harris]_, [Marple]_).\n\n If not present, it means that it has not been checked.\n\n =================== ============ =============\n name ENBW litterature\n =================== ============ =============\n rectangular 1. 1.\n triangle 1.3334 1.33\n Hann 1.5001 1.5\n Hamming 1.3629 1.36\n blackman 1.7268 1.73\n kaiser 1.7\n blackmanharris,4 2.004 2.\n riesz 1.2000 1.2\n riemann 1.32 1.3\n parzen 1.917 1.92\n tukey 0.25 1.102 1.1\n bohman 1.7858 1.79\n poisson 2 1.3130 1.3\n hanningpoisson 0.5 1.609 1.61\n cauchy 1.489 1.48\n lanczos 1.3\n =================== ============ ============="} {"query":"Release waiters .","positive":"def Func ( arg_0 , arg_1 ) : super ( PooledAIODatabase , arg_0 ) . Func ( arg_1 ) for arg_2 in arg_0 . _waiters : if not arg_2 . done ( ) : logger . debug ( 'Release a waiter' ) arg_2 . set_result ( True ) break","id_":253810,"task_name":"https:\/\/github.com\/klen\/muffin-peewee\/blob\/8e893e3ea1dfc82fbcfc6efe784308c8d4e2852e\/muffin_peewee\/mpeewee.py#L146-L153","negative":"Revoke the token and remove the cookie."} {"query":"Returns a list of the names of US Government GitHub organizations","positive":"def Func ( ) : arg_0 = set ( ) Func = requests . 
get ( 'https:\/\/government.github.com\/organizations.json' ) . json ( ) arg_0 . update ( Func [ 'governments' ] [ 'U.S. Federal' ] ) arg_0 . update ( Func [ 'governments' ] [ 'U.S. Military and Intelligence' ] ) arg_0 . update ( Func [ 'research' ] [ 'U.S. Research Labs' ] ) return list ( arg_0 )","id_":253811,"task_name":"https:\/\/github.com\/LLNL\/scraper\/blob\/881a316e4c04dfa5a9cf491b7c7f9f997a7c56ea\/scraper\/github\/__init__.py#L14-L31","negative":"Slice all the annotations inside the jam and return as a new `JAMS`\n object.\n\n See `Annotation.slice` for details about how the annotations\n are sliced.\n\n This operation is also documented in the jam-level sandbox\n with a list keyed by ``JAMS.sandbox.slice`` containing a tuple for each\n jam-level slice of the form ``(start_time, end_time)``.\n\n Since slicing is implemented using trimming, the operation will also be\n documented in ``JAMS.sandbox.trim`` as described in `JAMS.trim`.\n\n This function also copies over all of the file metadata from the\n original jam.\n\n Note: slicing will affect the duration of the jam, i.e. the new value\n of ``JAMS.file_metadata.duration`` will be ``end_time - start_time``.\n\n Parameters\n ----------\n start_time : float\n The desired start time for slicing in seconds.\n end_time\n The desired end time for slicing in seconds. Must be greater than\n ``start_time``.\n strict : bool\n When ``False`` (default) observations that lie at the boundaries of\n the slicing range (see `Annotation.slice` for details), will have\n their time and\/or duration adjusted such that only the part of the\n observation that lies within the slice range is kept. When ``True``\n such observations are discarded and not included in the sliced\n annotation.\n\n Returns\n -------\n jam_sliced: JAMS\n The sliced jam with sliced annotations, returned as a new\n JAMS object."} {"query":"Lists all the client nodes registered with Nomad .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = { \"prefix\" : arg_1 } return arg_0 . request ( method = \"get\" , arg_2 = arg_2 ) . json ( )","id_":253812,"task_name":"https:\/\/github.com\/jrxFive\/python-nomad\/blob\/37df37e4de21e6f8ac41c6154e7f1f44f1800020\/nomad\/api\/nodes.py#L64-L77","negative":"Jumps short if not greater.\n\n :param cpu: current CPU.\n :param target: destination operand."} {"query":"Generate an HTML report .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None ) : arg_0 . _harvest_data ( ) arg_0 . config . from_args ( arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , html_dir = arg_2 , arg_6 = arg_6 , html_title = arg_7 , ) arg_8 = HtmlReporter ( arg_0 , arg_0 . config ) return arg_8 . report ( arg_1 )","id_":253813,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/control.py#L637-L662","negative":"Sets the player's paused state."} {"query":"Iterator function traversing the tree below node in breadth first search manner .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = arg_3 ( 'inf' ) , arg_4 = True , arg_5 = False , arg_6 = None ) : if arg_6 is None : arg_6 = lambda x : True arg_7 = IteratorChain ( [ ( 0 , arg_0 . v_name , arg_0 ) ] ) arg_8 = True arg_9 = set ( [ ] ) while True : try : arg_10 , arg_11 , arg_12 = next ( arg_7 ) arg_13 = arg_12 . 
_full_name if arg_8 or arg_6 ( arg_12 ) : if arg_13 in arg_9 : if arg_5 : yield arg_10 , arg_11 , arg_12 elif arg_10 <= arg_2 : if arg_8 : arg_8 = False else : if arg_5 : yield arg_10 , arg_11 , arg_12 else : yield arg_12 if arg_13 in arg_1 : arg_9 . add ( arg_13 ) if not arg_12 . _is_leaf and arg_10 < arg_2 : arg_14 = NaturalNamingInterface . _make_child_iterator ( arg_12 , arg_4 , current_depth = arg_10 ) arg_7 . add ( arg_14 ) except StopIteration : break","id_":253814,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L1841-L1888","negative":"Store the HTTP status and headers to be sent when self.write is\n called."} {"query":"First EOS - generic method ; should be called by all specific EOSs . For solving for T the EOS must provide the method solve_T . For all cases the EOS must provide a_alpha_and_derivatives . Calls set_from_PT once done .","positive":"def Func ( arg_0 ) : arg_0 . check_sufficient_inputs ( ) if arg_0 . V : if arg_0 . P : arg_0 . T = arg_0 . Func_T ( arg_0 . P , arg_0 . V ) arg_0 . a_alpha , arg_0 . da_alpha_dT , arg_0 . d2a_alpha_dT2 = arg_0 . a_alpha_and_derivatives ( arg_0 . T ) else : arg_0 . a_alpha , arg_0 . da_alpha_dT , arg_0 . d2a_alpha_dT2 = arg_0 . a_alpha_and_derivatives ( arg_0 . T ) arg_0 . P = R * arg_0 . T \/ ( arg_0 . V - arg_0 . b ) - arg_0 . a_alpha \/ ( arg_0 . V * arg_0 . V + arg_0 . delta * arg_0 . V + arg_0 . epsilon ) arg_6 = [ arg_0 . V , 1j , 1j ] else : arg_0 . a_alpha , arg_0 . da_alpha_dT , arg_0 . d2a_alpha_dT2 = arg_0 . a_alpha_and_derivatives ( arg_0 . T ) arg_6 = arg_0 . volume_solutions ( arg_0 . T , arg_0 . P , arg_0 . b , arg_0 . delta , arg_0 . epsilon , arg_0 . a_alpha ) arg_0 . set_from_PT ( arg_6 )","id_":253815,"task_name":"https:\/\/github.com\/CalebBell\/thermo\/blob\/3857ed023a3e64fd3039a32d53576c24990ef1c3\/thermo\/eos.py#L100-L119","negative":"Regex that adds a 'SHOULD_SPLIT' marker at the end\n location of each matching group of the given regex.\n\n Arguments\n ---------\n regex : re.Expression\n text : str, same length as split_locations\n split_locations : list, split decisions."} {"query":"Allow provider to extract job - specific metadata from command - line args .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 ) : arg_7 = dsub_util . replace_timezone ( datetime . datetime . now ( ) , tzlocal ( ) ) arg_1 = arg_1 or dsub_util . get_os_user ( ) arg_8 = arg_0 . prepare_job_metadata ( arg_3 . name , arg_2 , arg_1 , arg_7 ) if arg_6 : arg_8 [ 'job-id' ] = uuid . uuid4 ( ) . hex arg_8 [ 'create-time' ] = arg_7 arg_8 [ 'script' ] = arg_3 arg_8 [ 'user-project' ] = arg_5 if arg_4 : arg_8 [ 'task-ids' ] = dsub_util . compact_interval_string ( list ( arg_4 ) ) return arg_8","id_":253816,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/commands\/dsub.py#L560-L589","negative":"Create a bucket."} {"query":"Return section resource for given sis id .","positive":"def Func ( arg_0 , arg_1 , arg_2 = { } ) : return arg_0 . get_section ( arg_0 . 
_sis_id ( arg_1 , sis_field = \"section\" ) , arg_2 )","id_":253817,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/sections.py#L18-L23","negative":"Create event start and end datetimes."} {"query":"Validate a jams file against a schema","positive":"def Func ( arg_0 = None , arg_1 = None ) : arg_2 = load_json ( arg_0 ) for arg_3 in arg_1 : try : arg_4 = load_json ( arg_3 ) jsonschema . Func ( arg_4 , arg_2 ) print '{:s} was successfully Funcd' . format ( arg_3 ) except jsonschema . ValidationError as exc : print '{:s} was NOT successfully Funcd' . format ( arg_3 ) print exc","id_":253818,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/schemata\/validate.py#L31-L44","negative":"Returns a field object instance for a given PrefProxy object.\n\n :param PrefProxy pref_proxy:\n\n :rtype: models.Field"} {"query":"Return the modulo value .","positive":"def Func ( arg_0 , arg_1 ) : try : return valid_numeric ( arg_0 ) % valid_numeric ( arg_1 ) except ( ValueError , TypeError ) : try : return arg_0 % arg_1 except Exception : return ''","id_":253819,"task_name":"https:\/\/github.com\/theduke\/django-baseline\/blob\/7be8b956e53c70b35f34e1783a8fe8f716955afb\/django_baseline\/templatetags\/helpers.py#L166-L174","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Get a pandas dataframe from a Hive query","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'default' ) : import pandas as pd arg_3 = arg_0 . get_results ( arg_1 , arg_2 = arg_2 ) arg_4 = pd . DataFrame ( arg_3 [ 'data' ] ) arg_4 . columns = [ c [ 0 ] for c in arg_3 [ 'header' ] ] return arg_4","id_":253820,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/hive_hooks.py#L940-L963","negative":"This method should only be called while the reference is locked.\n\n Decrements the reference count for the resource. If this process holds\n the only reference at the time we finish dereferencing it; True is\n returned. Operating on the resource after it has been dereferenced is\n undefined behavior.\n\n Dereference queries the value stored in the backend, if any, iff (if\n and only if) this instance is the last reference to that resource. e.g.\n self.count() == 0\n\n :param function callback: A function to execute iff it's determined\n this process holds the only reference to the resource. When there\n is a failure communicating with the backend in the cleanup step the\n callback function will be called an additional time for that\n failure and each subsequent one thereafter. Ensure your callback\n handles this properly.\n :param tuple args: Positional arguments to pass your callback.\n :param dict kwargs: keyword arguments to pass your callback.\n\n :returns: Whether or not there are no more references among all\n processes. True if this was the last reference. False otherwise.\n :rtype: bool"} {"query":"check_install will attempt to run the singularity command and return True if installed . 
The command line utils will not run without this check .","positive":"def Func ( arg_0 = None , arg_1 = True ) : if arg_0 is None : arg_0 = \"singularity\" arg_2 = [ arg_0 , '--version' ] try : arg_3 = run_command ( arg_2 , arg_0 ) except : return False if arg_3 is not None : if arg_1 is False and arg_3 [ 'return_code' ] == 0 : arg_3 = arg_3 [ 'message' ] bot . info ( \"Found %s version %s\" % ( arg_0 . upper ( ) , arg_3 ) ) return True return False","id_":253821,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/utils\/terminal.py#L74-L96","negative":"Get an IO write task for the requested set of data\n\n This task can be ran immediately or be submitted to the IO executor\n for it to run.\n\n :type fileobj: file-like object\n :param fileobj: The file-like object to write to\n\n :type data: bytes\n :param data: The data to write out\n\n :type offset: integer\n :param offset: The offset to write the data to in the file-like object\n\n :returns: An IO task to be used to write data to a file-like object"} {"query":"Transform Kraus representation to Choi representation .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = 0 arg_4 , arg_5 = arg_0 if arg_5 is None : for arg_6 in arg_4 : arg_7 = arg_6 . ravel ( order = 'F' ) arg_3 += np . outer ( arg_7 , arg_7 . conj ( ) ) else : for arg_6 , arg_8 in zip ( arg_4 , arg_5 ) : arg_3 += np . outer ( arg_6 . ravel ( order = 'F' ) , arg_8 . ravel ( order = 'F' ) . conj ( ) ) return arg_3","id_":253822,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/channel\/transformations.py#L180-L191","negative":"Downloads a MP4 or WebM file that is associated with the video at the URL passed.\n\n :param str url: URL of the video to be downloaded\n :return str: Filename of the file in local storage"} {"query":"Remove all tags that have the tag name tag","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in arg_0 . iter ( ) : if arg_2 . tag == arg_1 : arg_2 . getparent ( ) . remove ( arg_2 )","id_":253823,"task_name":"https:\/\/github.com\/PolicyStat\/docx2html\/blob\/2dc4afd1e3a3f2f0b357d0bff903eb58bcc94429\/docx2html\/core.py#L1344-L1350","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Returns the packager detected on the remote system .","positive":"def Func ( ) : import warnings warnings . filterwarnings ( \"ignore\" , category = DeprecationWarning ) arg_0 = get_rc ( 'common_packager' ) if arg_0 : return arg_0 with settings ( warn_only = True ) : with hide ( 'running' , 'stdout' , 'stderr' , 'warnings' ) : arg_1 = _run ( 'cat \/etc\/fedora-release' ) if arg_1 . succeeded : arg_0 = YUM else : arg_1 = _run ( 'cat \/etc\/lsb-release' ) if arg_1 . succeeded : arg_0 = APT else : for arg_2 in PACKAGERS : arg_1 = _run ( 'which %s' % arg_2 ) if arg_1 . succeeded : arg_0 = arg_2 break if not arg_0 : raise Exception ( 'Unable to determine packager.' 
) set_rc ( 'common_packager' , arg_0 ) return arg_0","id_":253824,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/common.py#L2540-L2572","negative":"Obtain slices for the given dimensions, padding, and chunks.\n\n Given a plan for the number of chunks along each dimension and the amount of padding,\n calculate a list of slices required to generate those chunks.\n\n Parameters\n ----------\n plan: tuple or array-like\n Size of chunks (in number of elements) along each dimensions.\n Length must be equal to the number of dimensions.\n\n padding: tuple or array-like\n Size of overlap (in number of elements) between chunks along each dimension.\n Length must be equal to the number of dimensions.\n\n shape: tuple\n Dimensions of axes to be chunked."} {"query":"Get vcf entry from variant object","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_0 [ 'category' ] == 'snv' : arg_2 = 'TYPE' else : arg_2 = 'SVTYPE' arg_3 = ';' . join ( [ 'END=' + str ( arg_0 [ 'end' ] ) , arg_2 + '=' + arg_0 [ 'sub_category' ] . upper ( ) ] ) arg_4 = \"{0}\\t{1}\\t{2}\\t{3}\\t{4}\\t{5}\\t{6}\\t{7}\" . format ( arg_0 [ 'chromosome' ] , arg_0 [ 'position' ] , arg_0 [ 'dbsnp_id' ] , arg_0 [ 'reference' ] , arg_0 [ 'alternative' ] , arg_0 [ 'quality' ] , ';' . join ( arg_0 [ 'filters' ] ) , arg_3 ) if arg_1 : arg_4 += \"\\tGT\" for arg_5 in arg_0 [ 'samples' ] : arg_4 += \"\\t\" + arg_5 [ 'genotype_call' ] return arg_4","id_":253825,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/commands\/export\/variant.py#L138-L175","negative":"Whether a connection can be established between those two meshes."} {"query":"Generate multiple records . Refer to definition for generateRecord","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . verbosity > 0 : print 'Generating' , len ( arg_1 ) , 'records...' for arg_2 in arg_1 : arg_0 . generateRecord ( arg_2 )","id_":253826,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/generators\/data_generator.py#L166-L171","negative":"Sets the player's paused state."} {"query":"Get details about specific user","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_2 : arg_2 [ 'compact' ] = True arg_3 = make_get_request ( arg_0 , 'users\/{}' . format ( arg_1 ) , params_data = arg_2 ) arg_4 = arg_3 . json ( ) if arg_3 . status_code == 200 : return arg_4 [ 'result' ] else : raise UserNotFoundException ( message = arg_4 [ 'message' ] , error_code = arg_4 [ 'error_code' ] , request_id = arg_4 [ 'request_id' ] )","id_":253827,"task_name":"https:\/\/github.com\/freelancer\/freelancer-sdk-python\/blob\/e09034936d6f13b3909a9464ee329c81c1834941\/freelancersdk\/resources\/users\/users.py#L31-L47","negative":"Parse a FIQL formatted string into an ``Expression``.\n\n Args:\n fiql_str (string): The FIQL formatted string we want to parse.\n\n Returns:\n Expression: An ``Expression`` object representing the parsed FIQL\n string.\n\n Raises:\n FiqlFormatException: Unable to parse string due to incorrect\n formatting.\n\n Example:\n\n >>> expression = parse_str_to_expression(\n ... \"name==bar,dob=gt=1990-01-01\")"} {"query":"issue . xml from Elsevier assume the existence of a local DTD . This procedure install the DTDs next to the issue . 
xml file and normalize it using xmllint in order to resolve all namespaces and references .","positive":"def Func ( arg_0 , arg_1 ) : if exists ( join ( arg_1 , 'resolved_issue.xml' ) ) : return arg_2 = open ( join ( arg_1 , 'issue.xml' ) ) . read ( ) arg_3 = [ 'si510.dtd' , 'si520.dtd' , 'si540.dtd' ] arg_4 = 0 for arg_5 in arg_3 : if arg_5 in arg_2 : arg_0 . _extract_correct_dtd_package ( arg_5 . split ( '.' ) [ 0 ] , arg_1 ) arg_4 = 1 if not arg_4 : arg_6 = \"It looks like the path \" + arg_1 arg_6 += \" does not contain an si510, si520 or si540 in issue.xml file\" arg_0 . logger . error ( arg_6 ) raise ValueError ( arg_6 ) arg_7 = [ \"xmllint\" , \"--format\" , \"--loaddtd\" , join ( arg_1 , 'issue.xml' ) , \"--output\" , join ( arg_1 , 'resolved_issue.xml' ) ] arg_8 , arg_8 , arg_9 = run_shell_command ( arg_7 ) if arg_9 : arg_6 = \"Error in cleaning %s: %s\" % ( join ( arg_1 , 'issue.xml' ) , arg_9 ) arg_0 . logger . error ( arg_6 ) raise ValueError ( arg_6 )","id_":253828,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/elsevier_package.py#L231-L261","negative":"Generate a new random masterkey, encrypt it with the password and\n store it in the store.\n\n :param str password: Password to use for en-\/de-cryption"} {"query":"Handle stdout stderr and stdin .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . log . debug ( \"stream: %s\" , arg_1 . get ( 'content' , '' ) ) if not arg_0 . _hidden and arg_0 . _is_from_this_session ( arg_1 ) : arg_2 = arg_1 [ 'content' ] [ 'data' ] . expandtabs ( 8 ) arg_0 . _append_plain_text ( arg_2 , before_prompt = True ) arg_0 . _control . moveCursor ( QtGui . QTextCursor . End )","id_":253829,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/frontend_widget.py#L488-L499","negative":"Renders a javascript snippet suitable for use as a mapbox-gl heatmap paint entry\n\n Returns:\n A dict that can be converted to a mapbox-gl javascript paint snippet"} {"query":"Load a private key from a file","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 ) : arg_1 = _path_string ( arg_1 ) if arg_2 is arg_3 : arg_2 = FILETYPE_PEM elif not isinstance ( arg_2 , integer_types ) : raise TypeError ( \"filetype must be an integer\" ) arg_4 = _lib . SSL_CTX_use_PrivateKey_file ( arg_0 . _context , arg_1 , arg_2 ) if not arg_4 : arg_0 . _raise_passphrase_exception ( )","id_":253830,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/SSL.py#L968-L989","negative":"Init a uniform noise variable."} {"query":"Run the report .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_0 . Func_files ( arg_0 . annotate_file , arg_1 , arg_2 )","id_":253831,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/coverage\/annotate.py#L37-L43","negative":"Sets the player's paused state."} {"query":"Create input output queues call function in a subprocess or a thread .","positive":"def Func ( arg_0 , * arg_1 , arg_2 = False , arg_3 = True , ** arg_4 ) : if arg_2 : arg_5 , arg_6 = multiprocessing . Process , multiprocessing . Queue else : arg_5 , arg_6 = threading . Thread , queue . 
Queue arg_7 , arg_8 = arg_6 ( ) , arg_6 ( ) arg_1 = arg_7 , arg_8 , arg_0 , arg_1 arg_9 = arg_5 ( target = _Func_locally , arg_1 = arg_1 , kwargs = arg_4 , arg_3 = arg_3 ) arg_9 . start ( ) return arg_9 , arg_7 , arg_8","id_":253832,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/util\/threads\/sub.py#L12-L38","negative":"Given an email address, check the email_remapping table to see if the email\n should be sent to a different address. This function also handles overriding\n the email domain if ignore_vcs_email_domain is set or the domain was missing"} {"query":"Adds the session cookie to headers .","positive":"def Func ( arg_0 ) : if arg_0 . data : arg_1 = arg_0 . create_cookie ( ) arg_2 = len ( arg_1 ) if arg_2 > 4093 : raise SessionError ( 'Cookie too long! The cookie size {0} ' 'is more than 4093 bytes.' . format ( arg_2 ) ) arg_0 . adapter . set_header ( 'Set-Cookie' , arg_1 ) arg_0 . _data = { }","id_":253833,"task_name":"https:\/\/github.com\/authomatic\/authomatic\/blob\/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e\/authomatic\/core.py#L394-L410","negative":"Init openstack neutron mq\n\n 1. Check if enable listening neutron notification\n 2. Create consumer\n\n :param mq: class ternya.mq.MQ"} {"query":"Returns a PngImageFile instance of the chart","positive":"def Func ( arg_0 ) : try : try : import Image except ImportError : from PIL import Image except ImportError : raise ImportError ( 'You must install PIL to fetch Func objects' ) try : from cStringIO import StringIO except ImportError : from StringIO import StringIO return Image . open ( StringIO ( arg_0 . urlopen ( ) . read ( ) ) )","id_":253834,"task_name":"https:\/\/github.com\/appknox\/google-chartwrapper\/blob\/3769aecbef6c83b6cd93ee72ece478ffe433ac57\/GChartWrapper\/GChart.py#L611-L628","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Returns a sqlite connection object","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . Funcection ( arg_0 . sqlite_conn_id ) arg_1 = sqlite3 . connect ( arg_1 . host ) return arg_1","id_":253835,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/sqlite_hook.py#L35-L41","negative":"issue a command to read the archive records after a known time stamp."} {"query":"Verify a list of registers .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 in arg_1 . children : arg_0 . 
verify_reg ( arg_3 , arg_2 )","id_":253836,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/qasm\/qasmparser.py#L181-L187","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"Return the current statevector in JSON Result spec format","positive":"def Func ( arg_0 ) : arg_1 = np . reshape ( arg_0 . _statevector , 2 ** arg_0 . _number_of_qubits ) arg_1 = np . stack ( [ arg_1 . real , arg_1 . imag ] , axis = 1 ) arg_1 [ arg_2 ( arg_1 ) < arg_0 . _chop_threshold ] = 0.0 return arg_1","id_":253837,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/providers\/basicaer\/qasm_simulator.py#L325-L332","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."} {"query":"Updates the list of known servers to the provided list . Replaces all previous server addresses with the new list .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { \"address\" : arg_1 } return arg_0 . request ( \"servers\" , arg_2 = arg_2 , method = \"post\" ) . status_code","id_":253838,"task_name":"https:\/\/github.com\/jrxFive\/python-nomad\/blob\/37df37e4de21e6f8ac41c6154e7f1f44f1800020\/nomad\/api\/agent.py#L73-L85","negative":"Read a varint from file, parse it, and return the decoded integer."} {"query":"Find local maxima in an array x .","positive":"def Func ( arg_0 , arg_1 = 0 ) : arg_2 = [ ( 0 , 0 ) ] * arg_0 . ndim arg_2 [ arg_1 ] = ( 1 , 1 ) arg_3 = np . pad ( arg_0 , arg_2 , mode = 'edge' ) arg_4 = [ slice ( None ) ] * arg_0 . ndim arg_4 [ arg_1 ] = slice ( 0 , - 2 ) arg_5 = [ slice ( None ) ] * arg_0 . ndim arg_5 [ arg_1 ] = slice ( 2 , arg_3 . shape [ arg_1 ] ) return ( arg_0 > arg_3 [ tuple ( arg_4 ) ] ) & ( arg_0 >= arg_3 [ tuple ( arg_5 ) ] )","id_":253839,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/util\/utils.py#L780-L835","negative":"Handle CLI command"} {"query":"Return a Python AST Node for a quote expression .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 ) -> GeneratedPyAST : assert arg_2 . op == NodeOp . QUOTE return _const_node_to_py_ast ( arg_0 , arg_2 . expr )","id_":253840,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/generator.py#L1495-L1498","negative":"Generate the dataset dictionary"} {"query":"Checks if the bin files referenced exist","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . buffers : if not arg_1 . is_separate_file : continue arg_2 = arg_0 . path . parent \/ arg_1 . uri if not os . path . exists ( arg_2 ) : raise FileNotFoundError ( \"Buffer {} referenced in {} not found\" . format ( arg_2 , arg_0 . path ) )","id_":253841,"task_name":"https:\/\/github.com\/Contraz\/demosys-py\/blob\/6466128a3029c4d09631420ccce73024025bd5b6\/demosys\/loaders\/scene\/gltf.py#L340-L348","negative":"Handles HTTP error codes for the given request\n\n Raises:\n AuthenticationError on the appropriate 4** errors\n ServerError if the response is not an ok (2**)\n\n Arguments:\n r -- The request result"} {"query":"determines NetJSON channel_width radio attribute","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . pop ( 'htmode' ) if arg_2 == 'NONE' : return 20 arg_3 = arg_2 . replace ( 'VHT' , '' ) . 
replace ( 'HT' , '' ) if '+' in arg_3 or '-' in arg_3 : arg_1 [ 'htmode' ] = arg_2 arg_3 = arg_3 [ 0 : - 1 ] return int ( arg_3 )","id_":253842,"task_name":"https:\/\/github.com\/openwisp\/netjsonconfig\/blob\/c23ce9732720856e2f6dc54060db71a8182c7d4b\/netjsonconfig\/backends\/openwrt\/converters\/radios.py#L115-L127","negative":"Drops all views."} {"query":"Rejects the SenderLink and destroys the handle .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = arg_0 . _sender_links . get ( arg_1 ) if not arg_3 : raise Exception ( \"Invalid link_handle: %s\" % arg_1 ) arg_3 . reject ( arg_2 ) arg_3 . destroy ( )","id_":253843,"task_name":"https:\/\/github.com\/kgiusti\/pyngus\/blob\/5392392046989f1bb84ba938c30e4d48311075f1\/pyngus\/connection.py#L670-L679","negative":"Adds all parameters to `traj`"} {"query":"Initialize the Service Discovery process .","positive":"def Func ( arg_0 ) : from . . iq import Iq arg_1 , arg_2 = arg_0 . address arg_3 = Iq ( to_jid = arg_1 , stanza_type = \"get\" ) arg_4 = arg_0 . disco_class ( arg_2 ) arg_3 . add_content ( arg_4 . xmlnode ) arg_0 . stream . set_response_handlers ( arg_3 , arg_0 . __response , arg_0 . __error , arg_0 . __timeout ) arg_0 . stream . send ( arg_3 )","id_":253844,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/disco.py#L826-L835","negative":"Add members found in prior versions up till the next major release\n\n These members are to be considered deprecated. When a new major\n release is made, these members are removed."} {"query":"List the contents of the directory .","positive":"def Func ( arg_0 ) : return [ File ( arg_1 , parent = arg_0 ) for arg_1 in os . Funcdir ( arg_0 . path ) ]","id_":253845,"task_name":"https:\/\/github.com\/snare\/scruffy\/blob\/0fedc08cfdb6db927ff93c09f25f24ce5a04c541\/scruffy\/file.py#L373-L377","negative":"initialize the merger model with a coalescent time\n\n Args:\n - Tc: a float or an iterable, if iterable another argument T of same shape is required\n - T: an array like of same shape as Tc that specifies the time pivots corresponding to Tc\n Returns:\n - None"} {"query":"Construct the spark - sql command to execute . Verbose output is enabled as default .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ \"spark-sql\" ] if arg_0 . _conf : for arg_3 in arg_0 . _conf . split ( \",\" ) : arg_2 += [ \"--conf\" , arg_3 ] if arg_0 . _total_executor_cores : arg_2 += [ \"--total-executor-cores\" , str ( arg_0 . _total_executor_cores ) ] if arg_0 . _executor_cores : arg_2 += [ \"--executor-cores\" , str ( arg_0 . _executor_cores ) ] if arg_0 . _executor_memory : arg_2 += [ \"--executor-memory\" , arg_0 . _executor_memory ] if arg_0 . _keytab : arg_2 += [ \"--keytab\" , arg_0 . _keytab ] if arg_0 . _principal : arg_2 += [ \"--principal\" , arg_0 . _principal ] if arg_0 . _num_executors : arg_2 += [ \"--num-executors\" , str ( arg_0 . _num_executors ) ] if arg_0 . _sql : arg_4 = arg_0 . _sql . strip ( ) if arg_4 . endswith ( \".sql\" ) or arg_4 . endswith ( \".hql\" ) : arg_2 += [ \"-f\" , arg_4 ] else : arg_2 += [ \"-e\" , arg_4 ] if arg_0 . _master : arg_2 += [ \"--master\" , arg_0 . _master ] if arg_0 . _name : arg_2 += [ \"--name\" , arg_0 . _name ] if arg_0 . _verbose : arg_2 += [ \"--verbose\" ] if arg_0 . _yarn_queue : arg_2 += [ \"--queue\" , arg_0 . _yarn_queue ] arg_2 += arg_1 arg_0 . log . 
debug ( \"Spark-Sql cmd: %s\" , arg_2 ) return arg_2","id_":253846,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/spark_sql_hook.py#L91-L134","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Called to update the state of the iterator . This methods receives the set of task ids from the previous set of tasks together with the launch information to allow the output values to be parsed using the output_extractor . This data is then used to determine the next desired point in the parameter space by calling the _update_state method .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = os . path . join ( arg_2 [ 'root_directory' ] , 'streams' ) arg_4 = '%s_*_tid_*{tid}.o.{tid}*' % arg_2 [ 'batch_name' ] arg_5 = os . listdir ( arg_3 ) try : arg_6 = [ ] for arg_7 in arg_1 : arg_8 = fnmatch . filter ( arg_5 , arg_4 . format ( arg_7 = arg_7 ) ) if len ( arg_8 ) != 1 : arg_0 . warning ( \"No unique output file for tid %d\" % arg_7 ) arg_9 = open ( os . path . join ( arg_3 , arg_8 [ 0 ] ) , 'r' ) . read ( ) arg_6 . append ( arg_0 . output_extractor ( arg_9 ) ) arg_0 . _next_val = arg_0 . _Func_state ( arg_6 ) arg_0 . trace . append ( ( arg_6 , arg_0 . _next_val ) ) except : arg_0 . warning ( \"Cannot load required output files. Cannot continue.\" ) arg_0 . _next_val = StopIteration","id_":253847,"task_name":"https:\/\/github.com\/ioam\/lancet\/blob\/1fbbf88fa0e8974ff9ed462e3cb11722ddebdd6e\/lancet\/dynamic.py#L80-L105","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"If confidence > = verbose category passes filter and is not suppressed .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if IsErrorSuppressedByNolint ( arg_0 , arg_2 ) : return False if arg_1 < _cpplint_state . verbose_level : return False arg_3 = False for arg_4 in _Filters ( ) : if arg_4 . startswith ( '-' ) : if arg_0 . startswith ( arg_4 [ 1 : ] ) : arg_3 = True elif arg_4 . startswith ( '+' ) : if arg_0 . startswith ( arg_4 [ 1 : ] ) : arg_3 = False else : assert False if arg_3 : return False return True","id_":253848,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L1355-L1380","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Get a mapping from drugs to their list of gene .","positive":"def Func ( arg_0 : arg_1 [ 'bio2bel_drugbank.manager' ] = None ) -> Mapping [ str , List [ str ] ] : if arg_0 is None : import bio2bel_drugbank arg_0 = bio2bel_drugbank . Manager ( ) if not arg_0 . is_populated ( ) : arg_0 . populate ( ) return arg_0 . 
get_drug_to_hgnc_symbols ( )","id_":253849,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/epicom\/algorithm.py#L21-L30","negative":"Decode a CONNACK control packet."} {"query":"Runs a network before the actual experiment .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_0 . _execute_network_run ( arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , pre_run = True )","id_":253850,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/brian2\/network.py#L276-L303","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Method to check if an image has changed since it was last downloaded . By making a head request this check can be done quicker that downloading and processing the whole file .","positive":"def Func ( arg_0 ) : arg_1 = urllib_request . Request ( arg_0 . url ) arg_1 . get_method = lambda : 'HEAD' arg_3 = urllib_request . urlopen ( arg_1 ) arg_4 = arg_3 . info ( ) if 'Last-Modified' in arg_4 : arg_5 = arg_4 [ 'Last-Modified' ] if arg_5 == arg_0 . image_last_modified : return False arg_0 . image_last_modified = arg_5 return True","id_":253851,"task_name":"https:\/\/github.com\/zorg\/zorg-network-camera\/blob\/e2d15725e50370e2df0c38be6b039215873e4278\/zorg_network_camera\/adaptor.py#L42-L67","negative":"Configure session for particular device\n\n Returns:\n tensorflow.Session"} {"query":"Initialize an empty repository with datapackage . json","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = False , arg_4 = None , arg_5 = False ) : arg_6 = plugins_get_mgr ( ) arg_7 = arg_6 . get ( what = 'repomanager' , name = 'git' ) arg_8 = None if arg_2 == 'git+s3' : arg_8 = arg_6 . get ( what = 'backend' , name = 's3' ) arg_9 = arg_7 . Func ( arg_0 , arg_1 , arg_3 , arg_8 ) ( arg_10 , arg_11 ) = tempfile . mkstemp ( ) with open ( arg_11 , 'w' ) as fd : fd . write ( \".dgit\" ) try : arg_12 = bootstrap_datapackage ( arg_9 , arg_3 , arg_4 , arg_5 ) except Exception as e : arg_7 . drop ( arg_9 , [ ] ) os . unlink ( arg_11 ) raise e arg_9 . run ( 'add_files' , [ { 'relativepath' : 'datapackage.json' , 'localfullpath' : arg_12 , } , { 'relativepath' : '.gitignore' , 'localfullpath' : arg_11 , } , ] ) os . unlink ( arg_12 ) os . unlink ( arg_11 ) arg_13 = [ '-a' , '-m' , 'Bootstrapped the repo' ] arg_9 . run ( 'commit' , arg_13 ) return arg_9","id_":253852,"task_name":"https:\/\/github.com\/pingali\/dgit\/blob\/ecde01f40b98f0719dbcfb54452270ed2f86686d\/dgitcore\/datasets\/common.py#L350-L412","negative":"Add a device notification.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr adr: local or remote AmsAddr\n :param string data_name: PLC storage address\n :param pyads.structs.NotificationAttrib pNoteAttrib: notification attributes\n :param callback: Callback function to handle notification\n :param user_handle: User Handle\n :rtype: (int, int)\n :returns: notification handle, user handle"} {"query":"Preparse the packaging system for installations .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . packager if arg_1 == APT : arg_0 . sudo ( 'DEBIAN_FRONTEND=noninteractive apt-get -yq Func' ) elif arg_1 == YUM : arg_0 . 
sudo ( 'yum Func' ) else : raise Exception ( 'Unknown packager: %s' % ( arg_1 , ) )","id_":253853,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/packager.py#L42-L52","negative":"Whether a connection can be established between those two meshes."} {"query":"The current position of the cursor .","positive":"def Func ( arg_0 ) -> Position : return Position ( arg_0 . _index , arg_0 . _lineno , arg_0 . _col_offset )","id_":253854,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/parsing\/stream.py#L40-L42","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"this factory also requires the editor token","positive":"def Func ( arg_0 ) : arg_1 = dropbox_factory ( arg_0 ) if is_equal ( arg_1 . editor_token , arg_0 . matchdict [ 'editor_token' ] . encode ( 'utf-8' ) ) : return arg_1 else : raise HTTPNotFound ( 'invalid editor token' )","id_":253855,"task_name":"https:\/\/github.com\/ZeitOnline\/briefkasten\/blob\/ce6b6eeb89196014fe21d68614c20059d02daa11\/application\/briefkasten\/__init__.py#L65-L71","negative":"Creates a network from a specification dict."} {"query":"Table Branch Halfword causes a PC - relative forward branch using a table of single halfword offsets . A base register provides a pointer to the table and a second register supplies an index into the table . The branch length is twice the value of the halfword returned from the table .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 . get_mem_base_addr ( ) if arg_1 . mem . base in ( 'PC' , 'R15' ) : arg_2 = arg_0 . PC arg_3 = arg_0 . read_int ( arg_2 + arg_1 . get_mem_offset ( ) , 16 ) arg_3 = Operators . ZEXTEND ( arg_3 , arg_0 . address_bit_size ) arg_0 . 
PC += ( arg_3 << 1 )","id_":253856,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/arm.py#L1100-L1121","negative":"Sets general options used by plugins and streams originating\n from this session object.\n\n :param key: key of the option\n :param value: value to set the option to\n\n\n **Available options**:\n\n ======================== =========================================\n hds-live-edge ( float) Specify the time live HDS\n streams will start from the edge of\n stream, default: ``10.0``\n\n hds-segment-attempts (int) How many attempts should be done\n to download each HDS segment, default: ``3``\n\n hds-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hds-segment-timeout (float) HDS segment connect and read\n timeout, default: ``10.0``\n\n hds-timeout (float) Timeout for reading data from\n HDS streams, default: ``60.0``\n\n hls-live-edge (int) How many segments from the end\n to start live streams on, default: ``3``\n\n hls-segment-attempts (int) How many attempts should be done\n to download each HLS segment, default: ``3``\n\n hls-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hls-segment-timeout (float) HLS segment connect and read\n timeout, default: ``10.0``\n\n hls-timeout (float) Timeout for reading data from\n HLS streams, default: ``60.0``\n\n http-proxy (str) Specify a HTTP proxy to use for\n all HTTP requests\n\n https-proxy (str) Specify a HTTPS proxy to use for\n all HTTPS requests\n\n http-cookies (dict or str) A dict or a semi-colon (;)\n delimited str of cookies to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-headers (dict or str) A dict or semi-colon (;)\n delimited str of headers to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-query-params (dict or str) A dict or a ampersand (&)\n delimited string of query parameters to\n add to each HTTP request,\n e.g. ``foo=bar&baz=qux``\n\n http-trust-env (bool) Trust HTTP settings set in the\n environment, such as environment\n variables (HTTP_PROXY, etc) and\n ~\/.netrc authentication\n\n http-ssl-verify (bool) Verify SSL certificates,\n default: ``True``\n\n http-ssl-cert (str or tuple) SSL certificate to use,\n can be either a .pem file (str) or a\n .crt\/.key pair (tuple)\n\n http-timeout (float) General timeout used by all HTTP\n requests except the ones covered by\n other options, default: ``20.0``\n\n http-stream-timeout (float) Timeout for reading data from\n HTTP streams, default: ``60.0``\n\n subprocess-errorlog (bool) Log errors from subprocesses to\n a file located in the temp directory\n\n subprocess-errorlog-path (str) Log errors from subprocesses to\n a specific file\n\n ringbuffer-size (int) The size of the internal ring\n buffer used by most stream types,\n default: ``16777216`` (16MB)\n\n rtmp-proxy (str) Specify a proxy (SOCKS) that RTMP\n streams will use\n\n rtmp-rtmpdump (str) Specify the location of the\n rtmpdump executable used by RTMP streams,\n e.g. ``\/usr\/local\/bin\/rtmpdump``\n\n rtmp-timeout (float) Timeout for reading data from\n RTMP streams, default: ``60.0``\n\n ffmpeg-ffmpeg (str) Specify the location of the\n ffmpeg executable use by Muxing streams\n e.g. 
``\/usr\/local\/bin\/ffmpeg``\n\n ffmpeg-verbose (bool) Log stderr from ffmpeg to the\n console\n\n ffmpeg-verbose-path (str) Specify the location of the\n ffmpeg stderr log file\n\n ffmpeg-video-transcode (str) The codec to use if transcoding\n video when muxing with ffmpeg\n e.g. ``h264``\n\n ffmpeg-audio-transcode (str) The codec to use if transcoding\n audio when muxing with ffmpeg\n e.g. ``aac``\n\n stream-segment-attempts (int) How many attempts should be done\n to download each segment, default: ``3``.\n General option used by streams not\n covered by other options.\n\n stream-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``.\n General option used by streams not\n covered by other options.\n\n stream-segment-timeout (float) Segment connect and read\n timeout, default: ``10.0``.\n General option used by streams not\n covered by other options.\n\n stream-timeout (float) Timeout for reading data from\n stream, default: ``60.0``.\n General option used by streams not\n covered by other options.\n\n locale (str) Locale setting, in the RFC 1766 format\n eg. en_US or es_ES\n default: ``system locale``.\n\n user-input-requester (UserInputRequester) instance of UserInputRequester\n to collect input from the user at runtime. Must be\n set before the plugins are loaded.\n default: ``UserInputRequester``.\n ======================== ========================================="} {"query":"Handle a DELETE request .","positive":"def Func ( arg_0 , arg_1 = '0' , arg_2 = None , arg_3 = None ) : arg_4 = arg_0 . get_thing ( arg_1 ) if arg_4 is None : arg_0 . set_status ( 404 ) return if arg_4 . remove_action ( arg_2 , arg_3 ) : arg_0 . set_status ( 204 ) else : arg_0 . set_status ( 404 )","id_":253857,"task_name":"https:\/\/github.com\/mozilla-iot\/webthing-python\/blob\/65d467c89ed79d0bbc42b8b3c8f9e5a320edd237\/webthing\/server.py#L554-L570","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Start logging all API requests to the provided destination .","positive":"def Func ( arg_0 , arg_1 = None ) : assert_is_type ( arg_1 , None , str , type ( sys . stdout ) ) if arg_1 is None : arg_1 = os . path . join ( tempfile . mkdtemp ( ) , \"h2o-connection.log\" ) arg_0 . _print ( \"Now logging all API requests to file %r\" % arg_1 ) arg_0 . _is_logging = True arg_0 . _logging_dest = arg_1","id_":253858,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/backend\/connection.py#L503-L515","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. 
seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Returns a non - reference dtype based on this dtype .","positive":"def Func ( arg_0 ) : arg_0 = tf . as_dtype ( arg_0 ) if hasattr ( arg_0 , 'Func' ) : return arg_0 . Func return arg_0","id_":253859,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/internal\/dtype_util.py#L52-L57","negative":"Loads the user's LSI profile, or provides a default."} {"query":"Export all the records into a csv file in numenta format .","positive":"def Func ( arg_0 , arg_1 = 'myOutput' ) : arg_2 = arg_0 . fields [ 0 ] . numRecords assert ( all ( arg_3 . numRecords == arg_2 for arg_3 in arg_0 . fields ) ) import csv with open ( arg_1 + '.csv' , 'wb' ) as f : arg_4 = csv . writer ( f ) arg_4 . writerow ( arg_0 . getAllFieldNames ( ) ) arg_4 . writerow ( arg_0 . getAllDataTypes ( ) ) arg_4 . writerow ( arg_0 . getAllFlags ( ) ) arg_4 . writerows ( arg_0 . getAllRecords ( ) ) if arg_0 . verbosity > 0 : print '******' , arg_2 , 'records exported in numenta format to file:' , arg_1 , '******\\n'","id_":253860,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/generators\/data_generator.py#L339-L363","negative":"True if something has been written to the storage.\n Note that if a slot has been erased from the storage this function may\n lose any meaning."} {"query":"Wrap a sqlalchemy . orm . query . Query object into a concurrent . futures . Future so that it can be yielded .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . _pool : arg_0 . _pool = ThreadPoolExecutor ( max_workers = arg_0 . _max_workers ) arg_3 = arg_0 . _pool . submit ( arg_1 ) arg_4 = Future ( ) IOLoop . current ( ) . add_future ( arg_3 , lambda f : chain_future ( f , arg_4 ) ) return arg_4","id_":253861,"task_name":"https:\/\/github.com\/siddhantgoel\/tornado-sqlalchemy\/blob\/3e622b5a2be57b505599b98156540b52a8a5cf4e\/tornado_sqlalchemy\/__init__.py#L47-L80","negative":"Main function for pyssim."} {"query":"Adds given functions as commands to given parser .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None ) : if DEST_FUNCTION in arg_0 . _defaults : _require_support_for_default_command_with_subparsers ( ) arg_3 = arg_3 or { } if arg_5 : warnings . warn ( 'argument `title` is deprecated in Func(),' ' use `parser_kwargs` instead' , DeprecationWarning ) arg_3 [ 'description' ] = arg_5 if arg_7 : warnings . warn ( 'argument `help` is deprecated in Func(),' ' use `parser_kwargs` instead' , DeprecationWarning ) arg_3 [ 'help' ] = arg_7 if arg_6 : warnings . warn ( 'argument `description` is deprecated in Func(),' ' use `parser_kwargs` instead' , DeprecationWarning ) arg_3 [ 'description' ] = arg_6 arg_8 = get_subparsers ( arg_0 , create = True ) if arg_2 : arg_9 = { 'help' : arg_3 . get ( 'title' ) , } arg_10 = arg_8 . add_parser ( arg_2 , ** arg_9 ) arg_8 = arg_10 . add_subparsers ( ** arg_3 ) else : assert not arg_3 , ( '`parser_kwargs` only makes sense ' 'with `namespace`.' ) for arg_11 in arg_1 : arg_12 , arg_13 = _extract_command_meta_from_func ( arg_11 ) if arg_4 : arg_13 . update ( arg_4 ) arg_14 = arg_8 . 
add_parser ( arg_12 , ** arg_13 ) set_default_command ( arg_14 , arg_11 )","id_":253862,"task_name":"https:\/\/github.com\/neithere\/argh\/blob\/dcd3253f2994400a6a58a700c118c53765bc50a4\/argh\/assembling.py#L321-L459","negative":"Create URL with supplied path and `opts` parameters dict.\n\n Parameters\n ----------\n path : str\n opts : dict\n Dictionary specifying URL parameters. Non-imgix parameters are\n added to the URL unprocessed. For a complete list of imgix\n supported parameters, visit https:\/\/docs.imgix.com\/apis\/url .\n (default {})\n\n Returns\n -------\n str\n imgix URL"} {"query":"Decorator to log the execution time of a function","positive":"def Func ( arg_0 ) : def decorator ( arg_1 ) : @ wraps ( arg_1 ) def wrapper ( * arg_2 , ** arg_3 ) : arg_4 = time . time ( ) arg_5 = arg_1 ( * arg_2 , ** arg_3 ) arg_6 = time . time ( ) _Func ( arg_0 , arg_1 . __name__ , arg_4 , arg_6 ) return arg_5 return wrapper return decorator","id_":253863,"task_name":"https:\/\/github.com\/ToucanToco\/toucan-data-sdk\/blob\/c3ca874e1b64f4bdcc2edda750a72d45d1561d8a\/toucan_data_sdk\/utils\/decorators.py#L123-L136","negative":"A list of row indices to remove. There are two caveats. First, this is\n a potentially slow operation. Second, pattern indices will shift if\n patterns before them are removed."} {"query":"Combines the disjoint keys in multiple dictionaries . For intersecting keys dictionaries towards the end of the sequence are given precedence .","positive":"def Func ( * arg_0 ) : if not arg_0 : return { } else : arg_1 = OrderedDict if isinstance ( arg_0 [ 0 ] , OrderedDict ) else dict return arg_1 ( it . chain . from_iterable ( arg_2 . items ( ) for arg_2 in arg_0 ) )","id_":253864,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_dict.py#L383-L412","negative":"Returns a list of PIDs currently running on the system."} {"query":"Checks a remote file upload to status .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : arg_3 = { 'limit' : arg_1 , 'id' : arg_2 } arg_4 = { key : value for key , value in arg_3 . items ( ) if value } return arg_0 . _get ( 'remotedl\/status' , arg_4 = arg_4 )","id_":253865,"task_name":"https:\/\/github.com\/mohan3d\/PyOpenload\/blob\/7f9353915ca5546926ef07be9395c6de60e761b1\/openload\/openload.py#L290-L331","negative":"Convert this unnormalized batch to an instance of Batch.\n\n As this method is intended to be called before augmentation, it\n assumes that none of the ``*_aug`` attributes is yet set.\n It will produce an AssertionError otherwise.\n\n The newly created Batch's ``*_unaug`` attributes will match the ones\n in this batch, just in normalized form.\n\n Returns\n -------\n imgaug.augmentables.batches.Batch\n The batch, with ``*_unaug`` attributes being normalized."} {"query":"Disallow users other than the user whose email is being reset .","positive":"def Func ( arg_0 , arg_1 , * arg_2 , ** arg_3 ) : arg_4 = arg_1 . data . get ( 'email' ) if arg_1 . user . is_authenticated ( ) and arg_4 != arg_1 . user . email : raise PermissionDenied ( ) return super ( ResendConfirmationEmail , arg_0 ) . Func ( arg_1 , * arg_2 , ** arg_3 )","id_":253866,"task_name":"https:\/\/github.com\/incuna\/django-user-management\/blob\/6784e33191d4eff624d2cf2df9ca01db4f23c9c6\/user_management\/api\/views.py#L277-L287","negative":"Get a single publication."} {"query":"Edit the SSH Key","positive":"def Func ( arg_0 ) : arg_1 = { \"name\" : arg_0 . name , \"public_key\" : arg_0 . 
public_key , } arg_2 = arg_0 . get_data ( \"account\/keys\/%s\" % arg_0 . id , type = PUT , params = arg_1 ) if arg_2 : arg_0 . id = arg_2 [ 'ssh_key' ] [ 'id' ]","id_":253867,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/SSHKey.py#L73-L89","negative":"Build extra args map"} {"query":"Indicate that processing of a Tuple has succeeded","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , HeronTuple ) : Log . error ( \"Only HeronTuple type is supported in Func()\" ) return if arg_0 . Funcing_enabled : arg_2 = tuple_pb2 . AckTuple ( ) arg_2 . Funcedtuple = int ( arg_1 . id ) arg_4 = 0 for arg_5 in arg_1 . roots : arg_6 = arg_2 . roots . add ( ) arg_6 . CopyFrom ( arg_5 ) arg_4 += arg_5 . ByteSize ( ) super ( BoltInstance , arg_0 ) . admit_control_tuple ( arg_2 , arg_4 , True ) arg_7 = ( time . time ( ) - arg_1 . creation_time ) * system_constants . SEC_TO_NS arg_0 . pplan_helper . context . invoke_hook_bolt_Func ( arg_1 , arg_7 ) arg_0 . bolt_metrics . Funced_tuple ( arg_1 . stream , arg_1 . component , arg_7 )","id_":253868,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/basics\/bolt_instance.py#L247-L269","negative":"Fit MeanShift clustering algorithm to data.\n\n Parameters\n ----------\n data : array-like\n A dataset formatted by `classifier.fitting_data`.\n bandwidth : float\n The bandwidth value used during clustering.\n If none, determined automatically. Note:\n the data are scaled before clutering, so\n this is not in the same units as the data.\n bin_seeding : bool\n Whether or not to use 'bin_seeding'. See\n documentation for `sklearn.cluster.MeanShift`.\n **kwargs\n passed to `sklearn.cluster.MeanShift`.\n\n Returns\n -------\n Fitted `sklearn.cluster.MeanShift` object."} {"query":"Close and upload local log file to remote storage Wasb .","positive":"def Func ( arg_0 ) : if arg_0 . Funcd : return super ( ) . Func ( ) if not arg_0 . upload_on_Func : return arg_1 = os . path . join ( arg_0 . local_base , arg_0 . log_relative_path ) arg_2 = os . path . join ( arg_0 . remote_base , arg_0 . log_relative_path ) if os . path . exists ( arg_1 ) : with open ( arg_1 , 'r' ) as logfile : arg_3 = logfile . read ( ) arg_0 . wasb_write ( arg_3 , arg_2 , append = True ) if arg_0 . delete_local_copy : shutil . rmtree ( os . path . dirname ( arg_1 ) ) arg_0 . Funcd = True","id_":253869,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/utils\/log\/wasb_task_handler.py#L68-L95","negative":"Bayesian Estimation Supersedes the T-Test\n\n This model runs a Bayesian hypothesis comparing if y1 and y2 come\n from the same distribution. Returns are assumed to be T-distributed.\n\n In addition, computes annual volatility and Sharpe of in and\n out-of-sample periods.\n\n This model replicates the example used in:\n Kruschke, John. (2012) Bayesian estimation supersedes the t\n test. Journal of Experimental Psychology: General.\n\n Parameters\n ----------\n y1 : array-like\n Array of returns (e.g. in-sample)\n y2 : array-like\n Array of returns (e.g. 
out-of-sample)\n samples : int, optional\n Number of posterior samples to draw.\n\n Returns\n -------\n model : pymc.Model object\n PyMC3 model containing all random variables.\n trace : pymc3.sampling.BaseTrace object\n A PyMC3 trace object that contains samples for each parameter\n of the posterior.\n\n See Also\n --------\n plot_stoch_vol : plotting of tochastic volatility model"} {"query":"Get the next sibling in the children list of the parent node .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : return XMLElement ( lib . lsl_Func ( arg_0 . e ) ) else : return XMLElement ( lib . lsl_Func_n ( arg_0 . e , str . encode ( arg_1 ) ) )","id_":253870,"task_name":"https:\/\/github.com\/labstreaminglayer\/liblsl-Python\/blob\/1ff6fe2794f8dba286b7491d1f7a4c915b8a0605\/pylsl\/pylsl.py#L924-L933","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Returns the mean value .","positive":"def Func ( arg_0 ) : if arg_0 . counter . value > 0 : return arg_0 . sum . value \/ arg_0 . counter . value return 0.0","id_":253871,"task_name":"https:\/\/github.com\/cyberdelia\/metrology\/blob\/7599bea7de1fd59374c06e2f8041a217e3cf9c01\/metrology\/instruments\/histogram.py#L92-L96","negative":"Unlock a message for processing by other receivers on a given\n subscription. This operation deletes the lock object, causing the\n message to be unlocked. A message must have first been locked by a\n receiver before this operation is called.\n\n topic_name:\n Name of the topic.\n subscription_name:\n Name of the subscription.\n sequence_number:\n The sequence number of the message to be unlocked as returned in\n BrokerProperties['SequenceNumber'] by the Peek Message operation.\n lock_token:\n The ID of the lock as returned by the Peek Message operation in\n BrokerProperties['LockToken']"} {"query":"Return true if the IP address is in decimal notation .","positive":"def Func ( arg_0 ) : try : arg_1 = int ( str ( arg_0 ) ) except ValueError : return False if arg_1 > 4294967295 or arg_1 < 0 : return False return True","id_":253872,"task_name":"https:\/\/github.com\/alberanid\/python-iplib\/blob\/488b56fe57ad836b27feec9e76f51883db28faa6\/iplib.py#L150-L158","negative":"Creates the variational distribution for LDA.\n\n Args:\n activation: Activation function to use.\n num_topics: The number of topics.\n layer_sizes: The number of hidden units per layer in the encoder.\n\n Returns:\n lda_variational: A function that takes a bag-of-words Tensor as\n input and returns a distribution over topics."} {"query":"Copy to destination directory recursively . If symlinks is true symbolic links in the source tree are represented as symbolic links in the new tree but the metadata of the original links is NOT copied ; if false or omitted the contents and metadata of the linked files are copied to the new tree .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if isinstance ( arg_1 , Directory ) : arg_1 = arg_1 . get_name ( ) shutil . Functree ( arg_0 . 
dirname , arg_1 )","id_":253873,"task_name":"https:\/\/github.com\/bjoernricks\/python-quilt\/blob\/fae88237f601848cc34d073584d9dcb409f01777\/quilt\/utils.py#L180-L190","negative":"Return the RSSI signal strength in decibels."} {"query":"Returns the index of the lowest of the passed values . Catches nans etc .","positive":"def Func ( arg_0 ) : if np . all ( np . isnan ( arg_0 ) ) : raise ValueError ( 'All err_vals are nans!' ) return np . nanargmin ( arg_0 )","id_":253874,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/opt\/optimize.py#L427-L433","negative":"Clears out the current store and gets a cookie. Set the cross site\n request forgery token for each subsequent request.\n\n :return: A response having cleared the current store.\n :rtype: requests.Response"} {"query":"Transform a QuantumChannel to the Operator representation .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if arg_0 == 'Operator' : return arg_1 if arg_0 == 'Stinespring' : return _stinespringFunc ( arg_1 , arg_2 , arg_3 ) if arg_0 != 'Kraus' : arg_1 = _to_kraus ( arg_0 , arg_1 , arg_2 , arg_3 ) return _krausFunc ( arg_1 , arg_2 , arg_3 )","id_":253875,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/channel\/transformations.py#L115-L124","negative":"Copy a path from inside a Dusty container to a path on the\n local filesystem. The path on the local filesystem must be\n wrist-accessible by the user specified in mac_username."} {"query":"Start producing .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _consumer = arg_1 arg_0 . _current_deferred = defer . Deferred ( ) arg_0 . _sent = 0 arg_0 . _paused = False if not hasattr ( arg_0 , \"_chunk_headers\" ) : arg_0 . _build_chunk_headers ( ) if arg_0 . _data : arg_6 = \"\" for arg_7 in arg_0 . _data : arg_6 += arg_0 . _chunk_headers [ arg_7 ] arg_6 += arg_0 . _data [ arg_7 ] arg_6 += \"\\r\\n\" arg_0 . _send_to_consumer ( arg_6 ) if arg_0 . _files : arg_0 . _files_iterator = arg_0 . _files . iterkeys ( ) arg_0 . _files_sent = 0 arg_0 . _files_length = len ( arg_0 . _files ) arg_0 . _current_file_path = None arg_0 . _current_file_handle = None arg_0 . _current_file_length = None arg_0 . _current_file_sent = 0 arg_15 = arg_0 . _produce ( ) if arg_15 : return arg_15 else : return defer . succeed ( None ) return arg_0 . _current_deferred","id_":253876,"task_name":"https:\/\/github.com\/mariano\/pyfire\/blob\/42e3490c138abc8e10f2e9f8f8f3b40240a80412\/pyfire\/twistedx\/producer.py#L42-L80","negative":"Revoke the token and remove the cookie."} {"query":"Drop a node from the network","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _connections . pop ( arg_1 , None ) if arg_2 is not None : arg_0 . _preventConnectNodes . add ( arg_1 ) arg_2 . disconnect ( ) arg_0 . _preventConnectNodes . remove ( arg_1 ) if isinstance ( arg_1 , TCPNode ) : arg_0 . _nodes . discard ( arg_1 ) arg_0 . _nodeAddrToNode . pop ( arg_1 . address , None ) else : arg_0 . _readonlyNodes . discard ( arg_1 ) arg_0 . _lastConnectAttempt . 
pop ( arg_1 , None )","id_":253877,"task_name":"https:\/\/github.com\/bakwc\/PySyncObj\/blob\/be3b0aaa932d5156f5df140c23c962430f51b7b8\/pysyncobj\/transport.py#L511-L530","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Find the id of a group given its name by iterating on the list of subscriptions","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . subscriptions ( arg_0 . auth ) for arg_2 in arg_1 : for arg_3 in arg_2 : if arg_3 [ 'group_name' ] == arg_0 . group_name : return arg_3 [ 'group_id' ] arg_4 = \"Group id not found for group name %s\" % arg_0 . group_name raise BackendError ( cause = arg_4 )","id_":253878,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/groupsio.py#L229-L240","negative":"Pickle the Dataset instance to the provided file."} {"query":"Returns all potential loop fusion options for the psy object provided","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = arg_1 . invokes . invoke_list if arg_0 . _dependent_invokes : raise RuntimeError ( \"dependent invokes assumes fusion in one invoke might \" \"affect fusion in another invoke. This is not yet \" \"implemented\" ) else : for arg_4 , arg_5 in enumerate ( arg_3 ) : print \"invoke {0}\" . format ( arg_4 ) for arg_6 in arg_5 . schedule . loops ( ) : if arg_6 . loop_type == \"outer\" : arg_7 = arg_6 . parent . children arg_8 = arg_7 . index ( arg_6 ) arg_9 = [ ] arg_0 . 
_recurse ( arg_7 , arg_8 , arg_9 , arg_2 , arg_5 ) return arg_2","id_":253879,"task_name":"https:\/\/github.com\/rupertford\/melody\/blob\/d50459880a87fdd1802c6893f6e12b52d51b3b91\/examples\/PSyclone\/psyclone.py#L129-L155","negative":"Can compress an HDF5 to reduce file size.\n\n The properties on how to compress the new file are taken from a given\n trajectory in the file.\n Simply calls ``ptrepack`` from the command line.\n (Se also https:\/\/pytables.github.io\/usersguide\/utilities.html#ptrepackdescr)\n\n Currently only supported under Linux, no guarantee for Windows usage.\n\n :param filename:\n\n Name of the file to compact\n\n :param name:\n\n The name of the trajectory from which the compression properties are taken\n\n :param index:\n\n Instead of a name you could also specify an index, i.e -1 for the last trajectory\n in the file.\n\n :param keep_backup:\n\n If a back up version of the original file should be kept.\n The backup file is named as the original but `_backup` is appended to the end.\n\n :return:\n\n The return\/error code of ptrepack"} {"query":"Perform continuous integration tasks .","positive":"def Func ( arg_0 ) : arg_1 = [ '' ] if os . environ . get ( 'TRAVIS' , '' ) . lower ( ) == 'true' : arg_1 += [ 'test.pytest' ] else : arg_1 += [ 'test.tox' ] arg_0 . run ( \"invoke --echo --pty clean --all build --docs check --reports{}\" . format ( ' ' . join ( arg_1 ) ) )","id_":253880,"task_name":"https:\/\/github.com\/jhermann\/pygments-markdown-lexer\/blob\/e651a9a3f664285b01451eb39232b1ad9af65956\/tasks.py#L65-L75","negative":"Get the information of a list of bugs.\n\n :param from_date: retrieve bugs that where updated from that date;\n dates are converted to UTC\n :param offset: starting position for the search; i.e to return 11th\n element, set this value to 10.\n :param max_bugs: maximum number of bugs to reteurn per query"} {"query":"Reads the body to match from a disk file .","positive":"def Func ( arg_0 , arg_1 ) : with open ( arg_1 , 'r' ) as f : arg_0 . body ( str ( f . read ( ) ) )","id_":253881,"task_name":"https:\/\/github.com\/h2non\/pook\/blob\/e64094e41e4d89d98d2d29af7608ef27dc50cf19\/pook\/mock.py#L424-L435","negative":"compat_convertHashedIndexes - Reindex all fields for the provided objects, where the field value is hashed or not.\n\t\t\tIf the field is unhashable, do not allow.\n\n\t\t\tNOTE: This works one object at a time. It is intended to be used while your application is offline,\n\t\t\t as it doesn't make sense to be changing your model while applications are actively using it.\n\n\t\t\t@param objs \n\t\t\t@param conn - Specific Redis connection or None to reuse."} {"query":"Calculates a graph s concordance as well as its statistical probability .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 [ arg_6 ] = None , arg_7 : arg_5 [ arg_8 ] = None , arg_9 : arg_5 [ arg_6 ] = None , arg_10 : arg_11 = False , arg_12 : arg_3 = 'shuffle_node_data' , ) -> Tuple [ arg_6 , List [ arg_6 ] , arg_6 ] : if arg_12 == 'random_by_edges' : arg_13 = partial ( random_by_edges , arg_9 = arg_9 ) elif arg_12 == 'shuffle_node_data' : arg_13 = partial ( shuffle_node_data , arg_2 = arg_2 , arg_9 = arg_9 ) elif arg_12 == 'shuffle_relations' : arg_13 = partial ( shuffle_relations , arg_9 = arg_9 ) else : raise ValueError ( 'Invalid permute_type: {}' . format ( arg_12 ) ) arg_0 : arg_1 = arg_0 . 
copy ( ) collapse_to_genes ( arg_0 ) collapse_all_variants ( arg_0 ) arg_14 = calculate_concordance ( arg_0 , arg_2 , arg_4 = arg_4 ) arg_15 = [ ] for arg_16 in range ( arg_7 or 500 ) : arg_17 = arg_13 ( arg_0 ) arg_18 = calculate_concordance ( arg_17 , arg_2 , arg_4 = arg_4 , arg_10 = arg_10 ) arg_15 . append ( arg_18 ) return arg_14 , arg_15 , one_sided ( arg_14 , arg_15 )","id_":253882,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/analysis\/concordance.py#L191-L233","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Remove any metadata associated with the provided CoreBluetooth object .","positive":"def Func ( arg_0 , arg_1 ) : with arg_0 . _lock : if arg_1 in arg_0 . _metadata : del arg_0 . _metadata [ arg_1 ]","id_":253883,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_BluefruitLE\/blob\/34fc6f596371b961628369d78ce836950514062f\/Adafruit_BluefruitLE\/corebluetooth\/metadata.py#L81-L86","negative":"An integer-valued dimension bounded between `min` <= x <= `max`.\n Note that the right endpoint of the interval includes `max`.\n\n When `warp` is None, the base measure associated with this dimension\n is a categorical distribution with each weight on each of the integers\n in [min, max]. With `warp == 'log'`, the base measure is a uniform\n distribution on the log of the variable, with bounds at `log(min)` and\n `log(max)`. This is appropriate for variables that are \"naturally\" in\n log-space. Other `warp` functions are not supported (yet), but may be\n at a later time. Please note that this functionality is not supported\n for `hyperopt_tpe`."} {"query":"Internal CLOCK_CHANNEL consumer to process task runs","positive":"def Func ( arg_0 ) : arg_1 = arrow . get ( arg_0 [ 'time' ] ) arg_2 = arrow . get ( ) if ( arg_2 - arg_1 ) > timezone . timedelta ( seconds = ( TICK_FREQ + 1 ) ) : pass else : Task . run_tasks ( )","id_":253884,"task_name":"https:\/\/github.com\/phoikoi\/sisy\/blob\/840c5463ab65488d34e99531f230e61f755d2d69\/src\/sisy\/consumers.py#L19-L27","negative":"Return True if this new candidate representation satisfies all our overlap\n rules. Since we know that neighboring representations differ by at most\n one bit, we compute running overlaps."} {"query":"Print a record .","positive":"def Func ( arg_0 , arg_1 = 1 , arg_2 = None ) : if arg_2 is None : arg_2 = [ ] if arg_1 == 1 : arg_3 = record_xml_output ( arg_0 , arg_2 ) else : return '' return arg_3","id_":253885,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/bibrecord.py#L1315-L1329","negative":"Adds all parameters to `traj`"} {"query":"Unescapes a string that may contain commas tabs newlines and dashes","positive":"def Func ( arg_0 ) : assert isinstance ( arg_0 , basestring ) arg_0 = arg_0 . replace ( '\\t' , ',' ) arg_0 = arg_0 . replace ( '\\\\,' , ',' ) arg_0 = arg_0 . replace ( '\\\\n' , '\\n' ) arg_0 = arg_0 . 
replace ( '\\\\\\\\' , '\\\\' ) return arg_0","id_":253886,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/utils.py#L159-L174","negative":"Asynchronously request a URL and get the encoded text content of the\n body.\n\n Parameters\n ----------\n url : `str`\n URL to download.\n session : `aiohttp.ClientSession`\n An open aiohttp session.\n\n Returns\n -------\n content : `str`\n Content downloaded from the URL."} {"query":"Creates a position","positive":"def Func ( arg_0 , arg_1 = { } ) : arg_2 = \"\/2\/positions\/\" arg_3 = arg_1 arg_4 = arg_0 . _post_resource ( arg_2 , arg_3 ) return arg_0 . position_from_json ( arg_4 [ \"position\" ] )","id_":253887,"task_name":"https:\/\/github.com\/uw-it-cte\/uw-restclients-wheniwork\/blob\/0d3ca09d5bbe808fec12e5f943596570d33a1731\/uw_wheniwork\/positions.py#L31-L41","negative":"Does google-lint on a single file.\n\n Args:\n filename: The name of the file to parse.\n\n vlevel: The level of errors to report. Every error of confidence\n >= verbose_level will be reported. 0 is a good default.\n\n extra_check_functions: An array of additional check functions that will be\n run on each source line. Each function takes 4\n arguments: filename, clean_lines, line, error"} {"query":"Perform the query and returns a single object matching the given keyword arguments .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = arg_2 . copy ( ) if 'course_id' in arg_2 : try : arg_4 = str ( CourseKey . from_string ( arg_2 [ 'course_id' ] ) ) except InvalidKeyError : pass else : try : return arg_0 . get ( * arg_1 , ** arg_2 ) except DataSharingConsent . DoesNotExist : arg_2 [ 'course_id' ] = parse_course_key ( arg_4 ) try : return arg_0 . get ( * arg_1 , ** arg_2 ) except DataSharingConsent . DoesNotExist : return ProxyDataSharingConsent ( ** arg_3 )","id_":253888,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/consent\/models.py#L38-L66","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Get a site s publish profile as a string","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return arg_0 . _perform_get ( arg_0 . _get_publishxml_path ( arg_1 , arg_2 ) , None ) . body . decode ( \"utf-8\" )","id_":253889,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/websitemanagementservice.py#L251-L261","negative":"A list of row indices to remove. There are two caveats. First, this is\n a potentially slow operation. Second, pattern indices will shift if\n patterns before them are removed."} {"query":"Determine the path from the intersphinx inventory entry","positive":"def Func ( arg_0 ) : arg_1 = arg_0 [ 2 ] . split ( \"#\" ) if len ( arg_1 ) > 1 : arg_2 = \"#\" . 
join ( ( arg_1 [ 0 ] , arg_1 [ - 1 ] ) ) else : arg_2 = arg_0 [ 2 ] return arg_2","id_":253890,"task_name":"https:\/\/github.com\/hynek\/doc2dash\/blob\/659a66e237eb0faa08e81094fc4140623b418952\/src\/doc2dash\/parsers\/intersphinx.py#L121-L133","negative":"This method is called before first step of simulation."} {"query":"Very lightweight parsing of a vcf line to get position .","positive":"def Func ( arg_0 ) : if not arg_0 : return None arg_1 = arg_0 . strip ( ) . split ( '\\t' ) arg_2 = dict ( ) arg_2 [ 'chrom' ] = CHROM_INDEX [ arg_1 [ 0 ] ] arg_2 [ 'pos' ] = int ( arg_1 [ 1 ] ) return arg_2","id_":253891,"task_name":"https:\/\/github.com\/madprime\/vcf2clinvar\/blob\/d5bbf6df2902c6cabe9ef1894cfac527e90fa32a\/vcf2clinvar\/common.py#L149-L163","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Check that a name is both nonlocal and global .","positive":"def Func ( arg_0 , arg_1 ) : def same_scope ( arg_2 ) : return arg_2 . scope ( ) is arg_1 arg_3 = itertools . chain . from_iterable arg_4 = set ( arg_3 ( child . names for child in arg_1 . nodes_of_class ( astroid . Nonlocal ) if same_scope ( child ) ) ) if not arg_4 : return arg_5 = set ( arg_3 ( child . names for child in arg_1 . nodes_of_class ( astroid . Global ) if same_scope ( child ) ) ) for arg_6 in arg_4 . intersection ( arg_5 ) : arg_0 . add_message ( \"nonlocal-and-global\" , args = ( arg_6 , ) , arg_1 = arg_1 )","id_":253892,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/base.py#L659-L685","negative":"Loads a certificate, public key or private key into a Certificate,\n PublicKey or PrivateKey object via CryptoAPI\n\n :param key_object:\n An asn1crypto.x509.Certificate, asn1crypto.keys.PublicKeyInfo or\n asn1crypto.keys.PrivateKeyInfo object\n\n :param key_info:\n An asn1crypto.keys.PublicKeyInfo or asn1crypto.keys.PrivateKeyInfo\n object\n\n :param container:\n The class of the object to hold the key_handle\n\n :raises:\n ValueError - when any of the parameters contain an invalid value\n TypeError - when any of the parameters are of the wrong type\n oscrypto.errors.AsymmetricKeyError - when the key is incompatible with the OS crypto library\n OSError - when an error is returned by the OS crypto library\n\n :return:\n A PrivateKey, PublicKey or Certificate object, based on container"} {"query":"Returns cache entry parameter value by its name .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return arg_0 . cache [ arg_1 ] . get ( arg_2 , False )","id_":253893,"task_name":"https:\/\/github.com\/idlesign\/django-sitetree\/blob\/61de4608e6e415247c75fe8691027d7c4ed0d1e7\/sitetree\/sitetreeapp.py#L341-L348","negative":"Run according to options in sys.argv and diff classifiers."} {"query":"Determine the set of elements present in a list of chemical compounds .","positive":"def Func ( arg_0 ) : arg_1 = [ parse_compound ( compound ) . count ( ) . keys ( ) for compound in arg_0 ] return set ( ) . 
union ( * arg_1 )","id_":253894,"task_name":"https:\/\/github.com\/Ex-Mente\/auxi.0\/blob\/2dcdae74154f136f8ca58289fe5b20772f215046\/auxi\/tools\/chemistry\/stoichiometry.py#L330-L344","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"Return a random taxation ID number for a company .","positive":"def Func ( ) : arg_0 = [ 2 , 4 , 10 , 3 , 5 , 9 , 4 , 6 , 8 ] arg_1 = [ random . randint ( 1 , 9 ) for _ in range ( 10 ) ] arg_2 = [ v * arg_0 [ i ] for i , v in enumerate ( arg_1 [ : - 1 ] ) ] arg_1 [ 9 ] = sum ( arg_2 ) % 11 % 10 return \"\" . join ( map ( str , arg_1 ) )","id_":253895,"task_name":"https:\/\/github.com\/pilosus\/ForgeryPy3\/blob\/e15f2e59538deb4cbfceaac314f5ea897f2d5450\/forgery_py\/forgery\/russian_tax.py#L72-L78","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Save the vocabulary to a file so the model can be reloaded .","positive":"def Func ( arg_0 = None , arg_1 = 'vocab.txt' ) : if arg_0 is None : arg_0 = [ ] arg_2 = os . getcwd ( ) arg_3 = len ( arg_0 ) with open ( os . path . join ( arg_2 , arg_1 ) , \"w\" ) as f : for arg_4 in xrange ( arg_3 ) : f . write ( \"%s %d\\n\" % ( tf . compat . as_text ( arg_0 [ arg_4 ] [ 0 ] ) , arg_0 [ arg_4 ] [ 1 ] ) ) tl . logging . info ( \"%d vocab saved to %s in %s\" % ( arg_3 , arg_1 , arg_2 ) )","id_":253896,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/nlp.py#L795-L830","negative":"Return real, effective and saved user ids."} {"query":"Deletes all objects and containers in the account .","positive":"def Func ( arg_0 , arg_1 = False , arg_2 = False ) : if not arg_1 : raise ReturnCode ( 'called Func without setting yes_empty_account=True' ) arg_3 = None while True : with arg_0 . client_manager . with_client ( ) as client : arg_4 , arg_5 , arg_6 , arg_7 = client . get_account ( arg_3 = arg_3 , arg_6 = arg_0 . headers , query = arg_0 . query , cdn = arg_0 . cdn ) if arg_4 \/\/ 100 != 2 : if arg_4 == 404 and arg_0 . ignore_404 : return raise ReturnCode ( 'listing account: %s %s' % ( arg_4 , arg_5 ) ) if not arg_7 : if arg_2 and arg_3 : arg_3 = None continue break for arg_8 in arg_7 : cli_delete ( arg_0 , arg_8 [ 'name' ] , arg_0 . headers , recursive = True ) arg_3 = arg_8 [ 'name' ]","id_":253897,"task_name":"https:\/\/github.com\/gholt\/swiftly\/blob\/5bcc1c65323b1caf1f85adbefd9fc4988c072149\/swiftly\/cli\/delete.py#L38-L80","negative":"Limit to framerate, should be called after\n rendering has completed\n\n :param start_time: When execution started"} {"query":"Return nearest parent permission for path .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = pathlib . PurePosixPath ( arg_1 ) arg_2 = filter ( lambda p : p . is_parent ( arg_1 ) , arg_0 . permissions ) arg_3 = min ( arg_2 , key = lambda p : len ( arg_1 . relative_to ( p . path ) . parts ) , default = Permission ( ) , ) return arg_3","id_":253898,"task_name":"https:\/\/github.com\/aio-libs\/aioftp\/blob\/b45395b1aba41301b898040acade7010e6878a08\/aioftp\/server.py#L145-L161","negative":"Delete previous webhooks. If local ngrok tunnel, create a webhook."} {"query":"Add a new tokenized line from the file to the token buffer .","positive":"def Func ( arg_0 ) : if len ( arg_0 . tokens ) == 0 : arg_0 . __tokenize ( arg_0 . dxfile . 
readline ( ) )","id_":253899,"task_name":"https:\/\/github.com\/MDAnalysis\/GridDataFormats\/blob\/3eeb0432f8cf856912436e4f3e7aba99d3c916be\/gridData\/OpenDX.py#L981-L993","negative":"Determines JSONAPI type for provided GUID"} {"query":"Convert this exception to a dictionary .","positive":"def Func ( arg_0 ) : arg_1 = { } arg_1 [ 'reason' ] = arg_0 . msg arg_1 [ 'type' ] = arg_0 . __class__ . __name__ arg_1 [ 'params' ] = arg_0 . params return arg_1","id_":253900,"task_name":"https:\/\/github.com\/iotile\/typedargs\/blob\/0a5091a664b9b4d836e091e9ba583e944f438fd8\/typedargs\/exceptions.py#L64-L77","negative":"Decimal adjusts AL after subtraction.\n\n Adjusts the result of the subtraction of two packed BCD values to create a packed BCD result.\n The AL register is the implied source and destination operand. If a decimal borrow is detected,\n the CF and AF flags are set accordingly. This instruction is not valid in 64-bit mode.\n\n The SF, ZF, and PF flags are set according to the result.::\n\n IF (AL AND 0FH) > 9 OR AF = 1\n THEN\n AL = AL - 6;\n CF = CF OR BorrowFromLastSubtraction; (* CF OR borrow from AL = AL - 6 *)\n AF = 1;\n ELSE\n AF = 0;\n FI;\n IF ((AL > 99H) or OLD_CF = 1)\n THEN\n AL = AL - 60H;\n CF = 1;\n\n :param cpu: current CPU."} {"query":"Assume that we re dealing with a human DRB allele which NetMHCIIpan treats differently because there is little population diversity in the DR - alpha gene","positive":"def Func ( arg_0 , arg_1 ) : if \"DRB\" not in arg_1 . gene : raise ValueError ( \"Unexpected allele %s\" % arg_1 ) return \"%s_%s%s\" % ( arg_1 . gene , arg_1 . allele_family , arg_1 . allele_code )","id_":253901,"task_name":"https:\/\/github.com\/openvax\/mhctools\/blob\/b329b4dccd60fae41296816b8cbfe15d6ca07e67\/mhctools\/netmhcii_pan.py#L48-L59","negative":"Update this with a new set of paths to DAG definition files.\n\n :param new_file_paths: list of paths to DAG definition files\n :type new_file_paths: list[unicode]\n :return: None"} {"query":"returns a Token object with the given access token or refresh token","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : if arg_1 : return arg_0 . query . filter_by ( arg_1 = arg_1 ) . first ( ) elif arg_2 : return arg_0 . query . filter_by ( arg_2 = arg_2 ) . first ( ) return None","id_":253902,"task_name":"https:\/\/github.com\/lepture\/flask-oauthlib\/blob\/9e6f152a5bb360e7496210da21561c3e6d41b0e1\/flask_oauthlib\/contrib\/oauth2.py#L247-L257","negative":"Checks that input is a `float` matrix."} {"query":"calculate pvalues for all categories in the graph","positive":"def Func ( arg_0 , arg_1 , arg_2 = 20000 , ** arg_3 ) : arg_4 = len ( arg_0 ) arg_0 = set ( arg_0 ) arg_5 = [ ] if isinstance ( arg_2 , set ) : arg_6 = len ( arg_2 ) arg_0 = arg_0 . intersection ( arg_2 ) elif isinstance ( arg_2 , int ) : arg_6 = arg_2 else : raise ValueError ( \"background should be set or int object\" ) arg_7 = sorted ( arg_1 . keys ( ) ) for arg_8 in arg_7 : arg_9 = arg_1 . get ( arg_8 ) arg_10 = len ( arg_9 ) arg_11 = arg_0 . intersection ( set ( arg_9 ) ) arg_12 = len ( arg_11 ) if arg_12 < 1 : continue arg_5 . append ( ( arg_8 , hypergeom . sf ( arg_12 - 1 , arg_6 , arg_10 , arg_4 ) , arg_12 , arg_10 , arg_11 ) ) return zip ( * arg_5 )","id_":253903,"task_name":"https:\/\/github.com\/zqfang\/GSEApy\/blob\/673e9ec1391e3b14d3e8a4353117151fd2cb9345\/gseapy\/stats.py#L10-L77","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. 
All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Update only drafts .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : super ( Deposit , arg_0 ) . Func ( * arg_1 , ** arg_2 )","id_":253904,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-deposit\/blob\/f243ea1d01ab0a3bc92ade3262d1abdd2bc32447\/invenio_deposit\/api.py#L487-L494","negative":"Adds all parameters to `traj`"} {"query":"return the module name and the frame id in the module","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . frame ( ) arg_2 , arg_3 = \"\" , [ ] while arg_1 : if isinstance ( arg_1 , Module ) : arg_2 = arg_1 . name else : arg_3 . append ( getattr ( arg_1 , \"name\" , \"\" ) ) try : arg_1 = arg_1 . parent . frame ( ) except AttributeError : arg_1 = None arg_3 . reverse ( ) return arg_2 , \".\" . join ( arg_3 )","id_":253905,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/utils\/utils.py#L30-L44","negative":"Return list of GATT descriptors that have been discovered for this\n characteristic."} {"query":"Parse a Supybot IRC log file .","positive":"def Func ( arg_0 ) : with open ( arg_0 , 'r' , errors = 'surrogateescape' , newline = os . linesep ) as f : arg_1 = SupybotParser ( f ) try : for arg_2 in arg_1 . parse ( ) : yield arg_2 except ParseError as e : arg_3 = \"file: %s; reason: %s\" % ( arg_0 , str ( e ) ) raise ParseError ( arg_3 = arg_3 )","id_":253906,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/supybot.py#L184-L208","negative":"Set renewal, rebinding times."} {"query":"custom command line action to check file exist","positive":"def Func ( arg_0 ) : class CheckPathAction ( argparse . Action ) : def __call__ ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = None ) : if type ( arg_3 ) is list : arg_3 = arg_3 [ 0 ] arg_5 = arg_3 if arg_4 == 'None' : if not os . path . isdir ( arg_3 ) : arg_6 = os . path . expanduser ( \"~\" ) if not arg_3 . startswith ( arg_6 ) and not arg_3 . startswith ( os . getcwd ( ) ) : if os . path . isdir ( os . path . join ( arg_6 , arg_3 ) ) : arg_3 = os . path . join ( arg_6 , arg_3 ) elif os . path . isdir ( os . path . join ( os . getcwd ( ) , arg_3 ) ) : arg_3 = os . path . join ( os . getcwd ( ) , arg_3 ) else : arg_3 = None else : arg_3 = None elif arg_4 == '--template-name' : if not os . path . isdir ( arg_3 ) : if not os . path . isdir ( os . path . join ( arg_2 . target , arg_3 ) ) : arg_3 = None if not arg_3 : logger . error ( \"Could not to find path %s. Please provide \" \"correct path to %s option\" , arg_5 , arg_4 ) exit ( 1 ) setattr ( arg_2 , arg_0 . 
dest , arg_3 ) return CheckPathAction","id_":253907,"task_name":"https:\/\/github.com\/xnuinside\/clifier\/blob\/3d704a30dc985bea3b876216accc53c19dc8b0df\/clifier\/clifier.py#L110-L141","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Initialize the slots of the LeanMinHash .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . seed = arg_1 arg_0 . hashvalues = arg_0 . _parse_hashvalues ( arg_2 )","id_":253908,"task_name":"https:\/\/github.com\/ekzhu\/datasketch\/blob\/b3e4129987890a2beb04f2c0b6dc618ae35f2e14\/datasketch\/lean_minhash.py#L51-L60","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Run the Hot Gym example .","positive":"def Func ( arg_0 ) : arg_1 = FileRecordStream ( streamID = _INPUT_FILE_PATH ) arg_0 = min ( arg_0 , arg_1 . getDataRowCount ( ) ) arg_2 = createNetwork ( arg_1 ) arg_2 . regions [ \"sensor\" ] . setParameter ( \"predictedField\" , \"consumption\" ) arg_2 . regions [ \"SP\" ] . setParameter ( \"learningMode\" , 1 ) arg_2 . regions [ \"TM\" ] . setParameter ( \"learningMode\" , 1 ) arg_2 . regions [ \"classifier\" ] . setParameter ( \"learningMode\" , 1 ) arg_2 . regions [ \"SP\" ] . setParameter ( \"inferenceMode\" , 1 ) arg_2 . regions [ \"TM\" ] . setParameter ( \"inferenceMode\" , 1 ) arg_2 . regions [ \"classifier\" ] . setParameter ( \"inferenceMode\" , 1 ) arg_3 = [ ] arg_4 = 1 for arg_5 in range ( 0 , arg_0 , arg_4 ) : arg_2 . run ( arg_4 ) arg_6 = getPredictionResults ( arg_2 , \"classifier\" ) arg_7 = arg_6 [ 1 ] [ \"predictedValue\" ] arg_8 = arg_6 [ 1 ] [ \"predictionConfidence\" ] arg_9 = arg_6 [ 5 ] [ \"predictedValue\" ] arg_10 = arg_6 [ 5 ] [ \"predictionConfidence\" ] arg_11 = ( arg_7 , arg_8 * 100 , arg_9 , arg_10 * 100 ) print \"1-step: {:16} ({:4.4}%)\\t 5-step: {:16} ({:4.4}%)\" . format ( * arg_11 ) arg_3 . append ( arg_11 ) return arg_3","id_":253909,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/docs\/examples\/network\/complete-network-example.py#L113-L150","negative":"Returns participation data for the given sis_course_id.\n\n https:\/\/canvas.instructure.com\/doc\/api\/analytics.html#method.analytics_api.course_participation"} {"query":"Parses a string containing only 0 s and 1 s and return a Python list object .","positive":"def Func ( arg_0 ) : assert isinstance ( arg_0 , basestring ) arg_1 = [ int ( c ) for c in arg_0 if c in ( \"0\" , \"1\" ) ] if len ( arg_1 ) != len ( arg_0 ) : raise ValueError ( \"The provided string %s is malformed. The string should \" \"have only 0's and 1's.\" ) return arg_1","id_":253910,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/data\/utils.py#L178-L191","negative":"Generate a tag for the alignment of the geometry of the bulge and disk of a bulge-disk system, to customize \\ \n phase names based on the bulge-disk model. This adds together the bulge_disk tags generated in the 3 functions\n above"} {"query":"Try loading given config file .","positive":"def Func ( arg_0 , arg_1 ) : if not os . path . exists ( arg_1 ) : raise ValueError ( \"Config file not found.\" ) try : arg_2 = configparser . ConfigParser ( ) arg_2 . read ( arg_1 ) arg_3 = arg_0 ( arg_1 , arg_2 ) if not arg_3 . 
check_config_sanity ( ) : raise ValueError ( \"Error in config file.\" ) else : return arg_3 except configparser . Error : raise ValueError ( \"Config file is invalid.\" )","id_":253911,"task_name":"https:\/\/github.com\/buckket\/twtxt\/blob\/6c8ad8ef3cbcf0dd335a12285d8b6bbdf93ce851\/twtxt\/config.py#L36-L54","negative":"Initialize all ephemerals used by derived classes."} {"query":"Get the value of the Wigner function from measurement results .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : arg_4 = len ( arg_1 ) arg_5 = 2 ** arg_4 arg_6 = [ 0.5 + 0.5 * np . sqrt ( 3 ) , 0.5 - 0.5 * np . sqrt ( 3 ) ] arg_7 = 1 for arg_8 in range ( arg_4 ) : arg_7 = np . kron ( arg_7 , arg_6 ) arg_9 = [ 0 ] * len ( arg_2 ) arg_10 = 0 arg_11 = [ marginal_counts ( arg_0 . get_counts ( circ ) , arg_1 ) for circ in arg_2 ] for arg_12 in arg_11 : arg_13 = [ 0 ] * arg_5 for arg_8 in range ( arg_5 ) : if bin ( arg_8 ) [ 2 : ] . zfill ( arg_4 ) in arg_12 : arg_13 [ arg_8 ] = float ( arg_12 [ bin ( arg_8 ) [ 2 : ] . zfill ( arg_4 ) ] ) if arg_3 is None : arg_3 = np . sum ( arg_13 ) for arg_8 in range ( arg_5 ) : arg_9 [ arg_10 ] = arg_9 [ arg_10 ] + ( arg_13 [ arg_8 ] \/ arg_3 ) * arg_7 [ arg_8 ] arg_10 += 1 return arg_9","id_":253912,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/tools\/qcvv\/tomography.py#L967-L1008","negative":"Replicate a VM image to multiple target locations. This operation\n is only for publishers. You have to be registered as image publisher\n with Microsoft Azure to be able to call this.\n\n vm_image_name:\n Specifies the name of the VM Image that is to be used for\n replication\n regions:\n Specified a list of regions to replicate the image to\n Note: The regions in the request body are not additive. If a VM\n Image has already been replicated to Regions A, B, and C, and\n a request is made to replicate to Regions A and D, the VM\n Image will remain in Region A, will be replicated in Region D,\n and will be unreplicated from Regions B and C\n offer:\n Specifies the publisher defined name of the offer. The allowed\n characters are uppercase or lowercase letters, digit,\n hypen(-), period (.).The maximum allowed length is 64 characters.\n sku:\n Specifies the publisher defined name of the Sku. The allowed\n characters are uppercase or lowercase letters, digit,\n hypen(-), period (.). The maximum allowed length is 64 characters.\n version:\n Specifies the publisher defined version of the image.\n The allowed characters are digit and period.\n Format: ..\n Example: '1.0.0' or '1.1.0' The 3 version number to\n follow standard of most of the RPs. See http:\/\/semver.org"} {"query":"When removing an object with foreign fields back - references from other objects to the current object should be deleted . This function identifies foreign fields of the specified object whose values are not None and which specify back - reference keys then removes back - references from linked objects to the specified object .","positive":"def Func ( arg_0 ) : for arg_1 in _collect_refs ( arg_0 ) : arg_1 [ 'value' ] . _remove_backref ( arg_1 [ 'field_instance' ] . 
_backref_field_name , arg_0 , arg_1 [ 'field_name' ] , strict = False )","id_":253913,"task_name":"https:\/\/github.com\/cos-archives\/modular-odm\/blob\/8a34891892b8af69b21fdc46701c91763a5c1cf9\/modularodm\/storedobject.py#L1317-L1333","negative":"Apply updates to the next tuple metrics"} {"query":"Scale in the number of active blocks by the specified number .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = dict ( zip ( arg_0 . engines , arg_0 . provider . status ( arg_0 . engines ) ) ) arg_3 = [ engine for engine in arg_2 if arg_2 [ engine ] == \"RUNNING\" ] [ : arg_1 ] if arg_0 . provider : arg_4 = arg_0 . provider . cancel ( arg_3 ) else : logger . error ( \"No execution provider available\" ) arg_4 = None return arg_4","id_":253914,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/executors\/ipp.py#L254-L269","negative":"main execution loop. query weather data and post to online service."} {"query":"Decorator declaring the wrapped function to be a new - style task .","positive":"def Func ( * arg_0 , ** arg_1 ) : arg_2 = bool ( not arg_0 or arg_1 ) arg_3 = arg_1 . pop ( \"task_class\" , WrappedCallableTask ) arg_4 , arg_0 = arg_0 [ 0 ] , ( ) def arg_5 ( arg_4 ) : return arg_3 ( arg_4 , * arg_0 , ** arg_1 ) arg_5 . is_Func = True arg_5 . wrapped = arg_4 return arg_5 if arg_2 else arg_5 ( arg_4 )","id_":253915,"task_name":"https:\/\/github.com\/chrisspen\/burlap\/blob\/a92b0a8e5206850bb777c74af8421ea8b33779bd\/burlap\/decorators.py#L14-L44","negative":"\\\n Writes an image src http string to disk as a temporary file\n and returns the LocallyStoredImage object\n that has the info you should need on the image"} {"query":"Performs a backwards search from the terminal node back to the start node","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = arg_4 ( 'inf' ) , arg_5 = True ) : arg_6 = [ ] arg_7 = set ( ) arg_8 = '.' . join ( arg_2 ) arg_9 = arg_2 [ - 1 ] arg_10 = arg_0 . _get_candidate_dict ( arg_9 , None , use_upper_bound = False ) arg_11 = arg_1 . v_full_name arg_12 = len ( arg_2 ) for arg_13 in arg_10 : arg_14 = arg_10 [ arg_13 ] if arg_9 != arg_14 . v_name or arg_14 . v_full_name in arg_7 : continue if arg_13 . startswith ( arg_11 ) : if arg_11 != '' : arg_15 = arg_13 [ len ( arg_11 ) + 1 : ] else : arg_15 = arg_13 arg_16 = arg_15 . split ( '.' ) if len ( arg_16 ) > arg_3 : break if len ( arg_2 ) == 1 or arg_15 . endswith ( arg_8 ) : arg_6 . append ( arg_14 ) arg_7 . add ( arg_14 . v_full_name ) elif arg_5 : arg_17 = set ( arg_16 ) arg_18 = True for arg_19 in arg_2 : if arg_19 not in arg_17 : arg_18 = False break if arg_18 : arg_20 = 0 arg_21 = len ( arg_16 ) for arg_22 in range ( arg_21 ) : if arg_22 + arg_12 - arg_20 > arg_21 : break if arg_2 [ arg_20 ] == arg_16 [ arg_22 ] : arg_20 += 1 if arg_20 == len ( arg_2 ) : arg_6 . append ( arg_14 ) arg_7 . add ( arg_14 . 
v_full_name ) break return arg_6","id_":253916,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L2059-L2138","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."} {"query":"Find the best Jaccard match from query to candidates","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = - 1 arg_4 = - 1 for arg_5 in arg_2 : arg_6 = __jaccard ( arg_0 , arg_1 [ arg_5 ] ) if arg_6 > arg_3 : arg_3 , arg_4 = arg_6 , arg_5 return arg_4","id_":253917,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/util\/matching.py#L49-L59","negative":"Shorthand access to the color table scheme selector method."} {"query":"Print an if statement if needed .","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . control is None : return arg_1 return \"if(%s==%d) \" % ( arg_0 . control [ 0 ] . name , arg_0 . control [ 1 ] ) + arg_1","id_":253918,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/circuit\/instruction.py#L262-L266","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Returns True if left and right are equal","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ \"diff\" ] arg_2 . append ( \"-q\" ) arg_2 . append ( arg_0 . left . get_name ( ) ) arg_2 . append ( arg_0 . right . get_name ( ) ) try : Process ( arg_2 ) . run ( arg_1 = arg_1 , suppress_output = True ) except SubprocessError as e : if e . get_returncode ( ) == 1 : return False else : raise e return True","id_":253919,"task_name":"https:\/\/github.com\/bjoernricks\/python-quilt\/blob\/fae88237f601848cc34d073584d9dcb409f01777\/quilt\/patch.py#L183-L198","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Get a set of records from Presto","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : try : return super ( ) . Func ( arg_0 . _strip_sql ( arg_1 ) , arg_2 ) except DatabaseError as e : raise PrestoException ( arg_0 . _get_pretty_exception_message ( e ) )","id_":253920,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/presto_hook.py#L80-L88","negative":"Check the spacing of a single equals sign."} {"query":"Return list of evenly spaced integers over an interval .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = True , arg_4 = False , arg_5 = 10 ) : assert type ( arg_0 ) is int and type ( arg_1 ) is int and type ( arg_2 ) is int , \"start, stop and num need to be intergers\" assert arg_2 >= 2 , \"num has to be atleast 2\" if arg_4 : arg_0 = math . log ( arg_0 , arg_5 ) arg_1 = math . 
log ( arg_1 , arg_5 ) if arg_3 : arg_6 = float ( ( arg_1 - arg_0 ) ) \/ float ( arg_2 - 1 ) else : arg_6 = float ( ( arg_1 - arg_0 ) ) \/ float ( arg_2 ) arg_7 = 0 while arg_7 < arg_2 : if arg_4 : yield int ( round ( arg_5 ** ( arg_0 + arg_7 * arg_6 ) ) ) else : yield int ( round ( arg_0 + arg_7 * arg_6 ) ) arg_7 += 1","id_":253921,"task_name":"https:\/\/github.com\/RRZE-HPC\/kerncraft\/blob\/c60baf8043e4da8d8d66da7575021c2f4c6c78af\/kerncraft\/kerncraft.py#L24-L52","negative":"Construct user from ``ConversationParticipantData`` message.\n\n Args:\n conv_part_id: ``ConversationParticipantData`` message.\n self_user_id (~hangups.user.UserID or None): The ID of the current\n user. If ``None``, assume ``conv_part_id`` is the current user.\n\n Returns:\n :class:`~hangups.user.User` object."} {"query":"Return a random int in the range [ 0 n ) .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = _int_bit_length ( arg_1 ) arg_3 = arg_0 . getrandbits ( arg_2 ) while arg_3 >= arg_1 : arg_3 = arg_0 . getrandbits ( arg_2 ) return arg_3","id_":253922,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/lib\/_random.py#L90-L99","negative":"Called when builder group collect files\n Resolves absolute url if relative passed\n\n :type asset: static_bundle.builders.Asset\n :type builder: static_bundle.builders.StandardBuilder"} {"query":"Function for registering a path pattern .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None ) : if not arg_4 : arg_4 = { } with arg_0 . _lock : arg_0 . _data_store . append ( { 'pattern' : arg_1 , 'function' : arg_2 , 'method' : arg_3 , 'type_cast' : arg_4 , } )","id_":253923,"task_name":"https:\/\/github.com\/linuxwhatelse\/mapper\/blob\/3481715b2a36d2da8bf5e9c6da80ceaed0d7ca59\/mapper.py#L108-L131","negative":"Handle marking messages as read and keeping client active."} {"query":"Execute the workflow .","positive":"def Func ( arg_0 ) : arg_0 . generate_workflow_description ( ) if arg_0 . batch_values : arg_0 . id = arg_0 . workflow . launch_batch_workflow ( arg_0 . definition ) else : arg_0 . id = arg_0 . workflow . launch ( arg_0 . definition ) return arg_0 . id","id_":253924,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/simpleworkflows.py#L487-L513","negative":"Remove all binary files in the adslib directory."} {"query":"Renders the message subject for the given context .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . subject_template . render ( unescape ( arg_1 ) ) return arg_2 . strip ( )","id_":253925,"task_name":"https:\/\/github.com\/disqus\/django-mailviews\/blob\/9993d5e911d545b3bc038433986c5f6812e7e965\/mailviews\/messages.py#L149-L162","negative":"Handle global keybindings."} {"query":"Checks for the correctness of various spacing around function calls .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_1 . elided [ arg_2 ] arg_5 = arg_4 for arg_6 in ( r'\\bif\\s*\\((.*)\\)\\s*{' , r'\\bfor\\s*\\((.*)\\)\\s*{' , r'\\bwhile\\s*\\((.*)\\)\\s*[{;]' , r'\\bswitch\\s*\\((.*)\\)\\s*{' ) : arg_7 = Search ( arg_6 , arg_4 ) if arg_7 : arg_5 = arg_7 . 
group ( 1 ) break if ( not Search ( r'\\b(if|for|while|switch|return|new|delete|catch|sizeof)\\b' , arg_5 ) and not Search ( r' \\([^)]+\\)\\([^)]*(\\)|,$)' , arg_5 ) and not Search ( r' \\([^)]+\\)\\[[^\\]]+\\]' , arg_5 ) ) : if Search ( r'\\w\\s*\\(\\s(?!\\s*\\\\$)' , arg_5 ) : arg_3 ( arg_0 , arg_2 , 'whitespace\/parens' , 4 , 'Extra space after ( in function call' ) elif Search ( r'\\(\\s+(?!(\\s*\\\\)|\\()' , arg_5 ) : arg_3 ( arg_0 , arg_2 , 'whitespace\/parens' , 2 , 'Extra space after (' ) if ( Search ( r'\\w\\s+\\(' , arg_5 ) and not Search ( r'_{0,2}asm_{0,2}\\s+_{0,2}volatile_{0,2}\\s+\\(' , arg_5 ) and not Search ( r'#\\s*define|typedef|using\\s+\\w+\\s*=' , arg_5 ) and not Search ( r'\\w\\s+\\((\\w+::)*\\*\\w+\\)\\(' , arg_5 ) and not Search ( r'\\bcase\\s+\\(' , arg_5 ) ) : if Search ( r'\\boperator_*\\b' , arg_4 ) : arg_3 ( arg_0 , arg_2 , 'whitespace\/parens' , 0 , 'Extra space before ( in function call' ) else : arg_3 ( arg_0 , arg_2 , 'whitespace\/parens' , 4 , 'Extra space before ( in function call' ) if Search ( r'[^)]\\s+\\)\\s*[^{\\s]' , arg_5 ) : if Search ( r'^\\s+\\)' , arg_5 ) : arg_3 ( arg_0 , arg_2 , 'whitespace\/parens' , 2 , 'Closing ) should be moved to the previous line' ) else : arg_3 ( arg_0 , arg_2 , 'whitespace\/parens' , 2 , 'Extra space before )' )","id_":253926,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L3051-L3125","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."} {"query":"Runtime batch shape of models represented by this component .","positive":"def Func ( arg_0 ) : arg_1 = tf . constant ( [ ] , dtype = tf . int32 ) for arg_2 in arg_0 . parameters : arg_1 = tf . broadcast_dynamic_shape ( arg_1 , arg_2 . prior . Func ( ) ) return arg_1","id_":253927,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/sts\/structural_time_series.py#L96-L109","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Let SSL know where we can find trusted certificates for the certificate chain . Note that the certificates have to be in PEM format .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_1 is None : arg_1 = _ffi . NULL else : arg_1 = _path_string ( arg_1 ) if arg_2 is None : arg_2 = _ffi . NULL else : arg_2 = _path_string ( arg_2 ) arg_3 = _lib . SSL_CTX_Func ( arg_0 . _context , arg_1 , arg_2 ) if not arg_3 : _raise_current_error ( )","id_":253928,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/SSL.py#L745-L775","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"Show a tip message","positive":"def Func ( arg_0 ) : arg_1 = ( \"Close the main window to exit & save.\\n\" \"Drag & Drop \/ Click the controls from the ToolBox to create new ones.\\n\" \"Left click on the created controls to select them.\\n\" \"Double click to edit the default property.\\n\" \"Right click to pop-up the context menu.\\n\" ) arg_2 = STT . SuperToolTip ( arg_1 ) arg_2 . SetHeader ( \"Welcome to gui2py designer!\" ) arg_2 . SetDrawHeaderLine ( True ) arg_2 . ApplyStyle ( \"Office 2007 Blue\" ) arg_2 . SetDropShadow ( True ) arg_2 . SetHeaderBitmap ( images . designer . 
GetBitmap ( ) ) arg_2 . SetEndDelay ( 15000 ) arg_3 = CustomToolTipWindow ( arg_0 , arg_2 ) arg_3 . CalculateBestSize ( ) arg_3 . CalculateBestPosition ( arg_0 ) arg_3 . DropShadow ( arg_2 . GetDropShadow ( ) ) if arg_2 . GetUseFade ( ) : arg_4 = lambda : arg_3 . StartAlpha ( True ) else : arg_4 = lambda : arg_3 . Show ( ) wx . CallLater ( 1000 , arg_4 ) wx . CallLater ( 30000 , arg_3 . Destroy )","id_":253929,"task_name":"https:\/\/github.com\/reingart\/gui2py\/blob\/aca0a05f6fcde55c94ad7cc058671a06608b01a4\/gui\/tools\/designer.py#L650-L676","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Generate a square lattice with auxiliary nodes for spanning detection","positive":"def Func ( arg_0 ) : arg_1 = nx . grid_2d_graph ( arg_0 + 2 , arg_0 ) for arg_2 in range ( arg_0 ) : arg_1 . node [ ( 0 , arg_2 ) ] [ 'span' ] = 0 arg_1 [ ( 0 , arg_2 ) ] [ ( 1 , arg_2 ) ] [ 'span' ] = 0 arg_1 . node [ ( arg_0 + 1 , arg_2 ) ] [ 'span' ] = 1 arg_1 [ ( arg_0 + 1 , arg_2 ) ] [ ( arg_0 , arg_2 ) ] [ 'span' ] = 1 return arg_1","id_":253930,"task_name":"https:\/\/github.com\/andsor\/pypercolate\/blob\/92478c1fc4d4ff5ae157f7607fd74f6f9ec360ac\/percolate\/percolate.py#L931-L965","negative":"Remove cards from watchlist.\n\n :params trade_id: Trade id."} {"query":"Include a hidden input to stored the serialized upload value .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None ) : arg_4 = arg_3 or { } arg_4 . update ( { 'name' : arg_1 , 'value' : arg_2 , } ) return Func_to_string ( arg_0 . template_name , arg_4 )","id_":253931,"task_name":"https:\/\/github.com\/rochapps\/django-secure-input\/blob\/6da714475613870f2891b2ccf3317f55b3e81107\/secure_input\/widgets.py#L11-L15","negative":"This function gets the Fortney mass-radius relation for planets.\n\n Parameters\n ----------\n\n age : float\n This should be one of: 0.3, 1.0, 4.5 [in Gyr].\n\n planetdist : float\n This should be one of: 0.02, 0.045, 0.1, 1.0, 9.5 [in AU]\n\n coremass : int\n This should be one of: 0, 10, 25, 50, 100 [in Mearth]\n\n mass : {'massjupiter','massearth'}\n Sets the mass units.\n\n radius : str\n Sets the radius units. Only 'radiusjupiter' is used for now.\n\n Returns\n -------\n\n dict\n A dict of the following form is returned::\n\n {'mass': an array containing the masses to plot),\n 'radius': an array containing the radii to plot}\n\n These can be passed to a plotting routine to make mass-radius plot for\n the specified age, planet-star distance, and core-mass."} {"query":"Update values of a network .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : LOG . info ( \"Func %s for tenant %s\" % ( arg_1 , arg_0 . tenant_id ) ) with arg_0 . session . begin ( ) : arg_3 = db_api . network_find ( arg_0 , arg_1 = arg_1 , scope = db_api . ONE ) if not arg_3 : raise n_exc . NetworkNotFound ( net_id = arg_1 ) arg_4 = arg_2 [ \"network\" ] utils . pop_param ( arg_4 , \"network_plugin\" ) if not arg_0 . is_admin and \"ipam_strategy\" in arg_4 : utils . pop_param ( arg_4 , \"ipam_strategy\" ) arg_3 = db_api . network_update ( arg_0 , arg_3 , ** arg_4 ) return v . 
_make_network_dict ( arg_3 )","id_":253932,"task_name":"https:\/\/github.com\/openstack\/quark\/blob\/1112e6a66917d3e98e44cb7b33b107fd5a74bb2e\/quark\/plugin_modules\/networks.py#L154-L176","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Get results of the provided hql in target schema .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'default' , arg_3 = None , arg_4 = None ) : arg_5 = arg_0 . _Func ( arg_1 , arg_2 , arg_3 = arg_3 , arg_4 = arg_4 ) arg_6 = next ( arg_5 ) arg_7 = { 'data' : list ( arg_5 ) , 'header' : arg_6 } return arg_7","id_":253933,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/hive_hooks.py#L834-L856","negative":"set current cursor position"} {"query":"Attach a method to a parsing class and register it as a parser hook .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False ) : if not hasattr ( arg_0 , '_Funcs' ) : raise TypeError ( \"%s didn't seems to be a BasicParser subsclasse\" % arg_0 . __name__ ) arg_3 = arg_0 . _Funcs arg_4 = arg_0 . _rules def wrapper ( arg_5 ) : nonlocal arg_1 add_method ( arg_0 ) ( arg_5 ) if arg_1 is None : arg_1 = arg_5 . __name__ if not arg_2 and ( arg_1 in arg_3 or arg_1 in arg_4 ) : raise TypeError ( \"%s is already define has rule or Func\" % arg_1 ) if '.' not in arg_1 : arg_1 = '.' . join ( [ arg_0 . __module__ , arg_0 . __name__ , arg_1 ] ) set_one ( arg_3 , arg_1 , arg_5 ) return arg_5 return wrapper","id_":253934,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/meta.py#L135-L157","negative":"Define the return schema of an API.\n\n 'return_values' is a dictionary mapping\n HTTP return code => documentation\n In addition to validating that the status code of the response belongs to\n one of the accepted status codes, it also validates that the returned\n object is JSON (derived from JsonResponse)\n\n In debug and test modes, failure to validate the fields will result in a\n 400 Bad Request response.\n In production mode, failure to validate will just log a\n warning, unless overwritten by a 'strict' setting.\n\n For example:\n\n @api_returns({\n 200: 'Operation successful',\n 403: 'User does not have persion',\n 404: 'Resource not found',\n 404: 'User not found',\n })\n def add(request, *args, **kwargs):\n if not request.user.is_superuser:\n return JsonResponseForbidden() # 403\n\n return HttpResponse() # 200"} {"query":"Get a room by name .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . 
get_rooms ( ) for arg_3 in arg_2 or [ ] : if arg_3 [ \"name\" ] == arg_1 : return arg_0 . get_room ( arg_3 [ \"id\" ] ) raise RoomNotFoundException ( \"Room %s not found\" % arg_1 )","id_":253935,"task_name":"https:\/\/github.com\/mariano\/pyfire\/blob\/42e3490c138abc8e10f2e9f8f8f3b40240a80412\/pyfire\/campfire.py#L90-L103","negative":"Dump a certificate revocation list to a buffer.\n\n :param type: The file type (one of ``FILETYPE_PEM``, ``FILETYPE_ASN1``, or\n ``FILETYPE_TEXT``).\n :param CRL crl: The CRL to dump.\n\n :return: The buffer with the CRL.\n :rtype: bytes"} {"query":"Return text cast to the correct type or the selected type","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_1 = arg_0 . code return arg_0 . cast [ arg_1 ] ( arg_0 . text )","id_":253936,"task_name":"https:\/\/github.com\/MDAnalysis\/GridDataFormats\/blob\/3eeb0432f8cf856912436e4f3e7aba99d3c916be\/gridData\/OpenDX.py#L565-L569","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Save a snapshot of the page .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_1 = _prepare_path ( arg_1 , \"html\" ) with open ( arg_1 , \"wb\" ) as f : f . write ( encode_string ( arg_0 . body ) ) return arg_1","id_":253937,"task_name":"https:\/\/github.com\/elliterate\/capybara.py\/blob\/0c6ae449cc37e4445ec3cd6af95674533beedc6c\/capybara\/session.py#L583-L603","negative":"Returns the Groupsio argument parser."} {"query":"Replaces instances of pattern in a string with a replacement .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_0 not in arg_3 : arg_3 [ arg_0 ] = sre_compile . compile ( arg_0 ) return arg_3 [ arg_0 ] . sub ( arg_1 , arg_2 )","id_":253938,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L767-L782","negative":"Attempts to find the Teradata install directory with the defaults\n for a given platform. Should always return `None` when the defaults\n are not present and the TERADATA_HOME environment variable wasn't\n explicitly set to the correct install location."} {"query":"Get all the items for this label . Returns a list of dictionaries . Each dictionary has the values for an item .","positive":"def Func ( arg_0 , arg_1 = None ) : return arg_0 . fetch_json ( uri_path = arg_0 . 
base_uri + '\/checkItems' , arg_1 = arg_1 or { } )","id_":253939,"task_name":"https:\/\/github.com\/its-rigs\/Trolly\/blob\/483dc94c352df40dc05ead31820b059b2545cf82\/trolly\/label.py#L29-L37","negative":"Called when there is an error in the websocket"} {"query":"Converts size string into megabytes","positive":"def Func ( arg_0 ) : if arg_0 . upper ( ) . endswith ( \"KB\" ) : return float ( arg_0 . rstrip ( \"KB\" ) ) \/ 1024 elif arg_0 . upper ( ) . endswith ( \" B\" ) : return float ( arg_0 . rstrip ( \"B\" ) ) \/ 1024 \/ 1024 elif arg_0 . upper ( ) . endswith ( \"MB\" ) : return float ( arg_0 . rstrip ( \"MB\" ) ) elif arg_0 . upper ( ) . endswith ( \"GB\" ) : return float ( arg_0 . rstrip ( \"GB\" ) ) * 1024 elif arg_0 . upper ( ) . endswith ( \"TB\" ) : return float ( arg_0 . rstrip ( \"TB\" ) ) * 1024 * 1024 else : return float ( arg_0 )","id_":253940,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/inspect.py#L360-L391","negative":"Sends a POST request to initialize the live reports\n\n Parameters\n ----------\n report_id : str\n Hash of the report JSON as retrieved from :func:`~_get_report_hash`"} {"query":"Keyword arguments for recreating the Shape from the vertices .","positive":"def Func ( arg_0 ) : return dict ( color = arg_0 . color , velocity = arg_0 . velocity , colors = arg_0 . colors )","id_":253941,"task_name":"https:\/\/github.com\/hsharrison\/pyglet2d\/blob\/46f610b3c76221bff19e5c0cf3d35d7875ce37a0\/src\/pyglet2d.py#L186-L190","negative":"The estimated signal-to-noise_maps mappers of the image."} {"query":"Resolve a Basilisp symbol as a Var or Python name .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 . Symbol ) -> Union [ MaybeClass , MaybeHostForm , VarRef ] : if arg_2 . ns is None and arg_2 . name . endswith ( \".\" ) : try : arg_5 , arg_6 = arg_2 . name [ : - 1 ] . rsplit ( \".\" , maxsplit = 1 ) arg_2 = arg_3 . symbol ( arg_6 , arg_5 = arg_5 ) except ValueError : arg_2 = arg_3 . symbol ( arg_2 . name [ : - 1 ] ) if arg_2 . ns is not None : return __resolve_namespaced_symbol ( arg_0 , arg_2 ) else : return __resolve_bare_symbol ( arg_0 , arg_2 )","id_":253942,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/parser.py#L1911-L1929","negative":"Indicates the start of a new sequence. Clears any predictions and makes sure\n synapses don't grow to the currently active cells in the next time step."} {"query":"Check if address is valid IP address and return it in a normalized form .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = socket . getaddrinfo ( arg_1 , 0 , arg_0 , socket . SOCK_STREAM , 0 , socket . AI_NUMERICHOST ) except socket . gaierror , err : logger . debug ( \"gaierror: {0} for {1!r}\" . format ( err , arg_1 ) ) raise ValueError ( \"Bad IP address\" ) if not arg_2 : logger . debug ( \"getaddrinfo result empty\" ) raise ValueError ( \"Bad IP address\" ) arg_3 = arg_2 [ 0 ] [ 4 ] logger . debug ( \" got address: {0!r}\" . format ( arg_3 ) ) try : return socket . getnameinfo ( arg_3 , socket . NI_NUMERICHOST ) [ 0 ] except socket . gaierror , err : logger . debug ( \"gaierror: {0} for {1!r}\" . 
format ( err , arg_3 ) ) raise ValueError ( \"Bad IP address\" )","id_":253943,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/jid.py#L65-L90","negative":"Not accurate false due to spikes are observed"} {"query":"Format a 3d vector field in certain ways see coords for a description of each formatting method .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'broadcast' ) : if arg_2 == 'meshed' : return np . meshgrid ( * arg_1 , indexing = 'ij' ) elif arg_2 == 'vector' : arg_1 = np . meshgrid ( * arg_1 , indexing = 'ij' ) return np . rollaxis ( np . array ( np . broadcast_arrays ( * arg_1 ) ) , 0 , arg_0 . dim + 1 ) elif arg_2 == 'flat' : return arg_1 else : return [ arg_4 [ arg_0 . _coord_slicers [ arg_3 ] ] for arg_3 , arg_4 in enumerate ( arg_1 ) ]","id_":253944,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/util.py#L375-L388","negative":"Clone throttles without memory"} {"query":"Take an existing Skype token and refresh it to extend the expiry time without other credentials .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . sendToken ( arg_1 ) return arg_0 . getToken ( arg_2 )","id_":253945,"task_name":"https:\/\/github.com\/Terrance\/SkPy\/blob\/0f9489c94e8ec4d3effab4314497428872a80ad1\/skpy\/conn.py#L646-L661","negative":"Does this filename match any of the patterns?"} {"query":"Turn metadata into JSON","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_2 . setdefault ( 'indent' , 4 ) arg_1 = json . dumps ( arg_1 , ** arg_2 ) return u ( arg_1 )","id_":253946,"task_name":"https:\/\/github.com\/eyeseast\/python-frontmatter\/blob\/c318e583c48599eb597e0ad59c5d972258c3febc\/frontmatter\/default_handlers.py#L238-L242","negative":"Return the maximum file descriptor value."} {"query":"Parse a notebook filename .","positive":"def Func ( arg_0 ) : if arg_0 . endswith ( u'.ipynb' ) : arg_1 = u'json' elif arg_0 . endswith ( u'.json' ) : arg_1 = u'json' elif arg_0 . endswith ( u'.py' ) : arg_1 = u'py' else : arg_0 = arg_0 + u'.ipynb' arg_1 = u'json' arg_2 = arg_0 . split ( '.' ) [ 0 ] return arg_0 , arg_2 , arg_1","id_":253947,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/nbformat\/v2\/__init__.py#L43-L77","negative":"Return list of GATT descriptors that have been discovered for this\n characteristic."} {"query":"Add a link from A to B of given distance in one direction only .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_0 . dict . setdefault ( arg_1 , { } ) [ arg_2 ] = arg_3","id_":253948,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/search.py#L439-L441","negative":"Download and extract the tarball, and download each individual photo."} {"query":"Return the course and course run metadata for the given course run ID .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = parse_course_key ( arg_1 ) arg_3 = arg_0 . 
get_course_details ( arg_2 ) arg_4 = None if arg_3 : arg_4 = None arg_5 = [ arg_4 for arg_4 in arg_3 [ 'course_runs' ] if arg_4 [ 'key' ] == arg_1 ] if arg_5 : arg_4 = arg_5 [ 0 ] return arg_3 , arg_4","id_":253949,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/api_client\/discovery.py#L236-L259","negative":"Associate the given client data with the item at position n."} {"query":"Attempt to set the virtualenv activate command if it hasn t been specified .","positive":"def Func ( ) : try : arg_0 = options . virtualenv . activate_cmd except AttributeError : arg_0 = None if arg_0 is None : arg_1 = path ( os . environ . get ( 'VIRTUAL_ENV' , '' ) ) if not arg_1 : arg_1 = options . paved . cwd else : arg_1 = path ( arg_1 ) arg_0 = arg_1 \/ 'bin' \/ 'activate' if arg_0 . exists ( ) : info ( 'Using default virtualenv at %s' % arg_0 ) options . setdotted ( 'virtualenv.activate_cmd' , 'source %s' % arg_0 )","id_":253950,"task_name":"https:\/\/github.com\/eykd\/paved\/blob\/f04f8a4248c571f3d5ce882b325884a3e5d80203\/paved\/util.py#L14-L33","negative":"Remove all binary files in the adslib directory."} {"query":"Returns True if the considered string is a valid RIPEMD160 hash .","positive":"def Func ( arg_0 ) : if not arg_0 or not isinstance ( arg_0 , str ) : return False if not len ( arg_0 ) == 40 : return False for arg_1 in arg_0 : if ( arg_1 < '0' or arg_1 > '9' ) and ( arg_1 < 'A' or arg_1 > 'F' ) and ( arg_1 < 'a' or arg_1 > 'f' ) : return False return True","id_":253951,"task_name":"https:\/\/github.com\/ellmetha\/neojsonrpc\/blob\/e369b633a727482d5f9e310f0c3337ae5f7265db\/neojsonrpc\/utils.py#L23-L32","negative":"Bind the server unless it is already bound, this is a read-only node, or the last attempt was too recently.\n\n :raises TransportNotReadyError if the bind attempt fails"} {"query":"Removes a single node from the tree .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : arg_3 = arg_1 . v_full_name arg_4 = deque ( arg_3 . split ( '.' ) ) arg_0 . _remove_along_branch ( arg_0 . _root_instance , arg_4 , arg_2 )","id_":253952,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L856-L868","negative":"This method is called before first step of simulation."} {"query":"Generates triangle wave SamplePulse .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 , arg_4 : arg_5 = None , arg_6 : arg_5 = 0 , arg_7 : arg_8 = None ) -> SamplePulse : if arg_4 is None : arg_4 = arg_0 return _sampled_Func_pulse ( arg_0 , arg_2 , arg_4 , arg_6 = arg_6 , arg_7 = arg_7 )","id_":253953,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/pulse_lib\/discrete.py#L94-L110","negative":"Reassemble a Binder object coming out of the database."} {"query":"This function will look at the local directory and pick out files that have the correct start name and summarize the results into one giant dict .","positive":"def Func ( ) : global g_summary_dict_all arg_0 = [ x for x in listdir ( g_test_root_dir ) if isfile ( join ( g_test_root_dir , x ) ) ] for arg_1 in arg_0 : for arg_2 in g_file_start : if ( arg_2 in arg_1 ) and ( os . path . getsize ( arg_1 ) > 10 ) : arg_3 = os . path . join ( g_test_root_dir , arg_1 ) try : arg_4 = json . 
load ( open ( arg_3 , 'r' ) ) for arg_5 in range ( len ( arg_4 [ \"TestName\" ] ) ) : addFailedTests ( g_summary_dict_all , arg_4 , arg_5 ) except : continue break","id_":253954,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/scripts\/summarizeIntermittens.py#L69-L92","negative":"Enables GPIO interrupts."} {"query":"Sort the fields inside the record by indicators .","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 in arg_0 . items ( ) : arg_0 [ arg_1 ] = _fields_sort_by_indicators ( arg_2 )","id_":253955,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/bibrecord.py#L1722-L1725","negative":"Shift the model result and return the new instance.\n\n Queues up the T(i+1) prediction value and emits a T(i)\n input\/prediction pair, if possible. E.g., if the previous T(i-1)\n iteration was learn-only, then we would not have a T(i) prediction in our\n FIFO and would not be able to emit a meaningful input\/prediction pair.\n\n :param modelResult: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult`\n instance to shift.\n :return: A :class:`~.nupic.frameworks.opf.opf_utils.ModelResult` instance that\n has been shifted"} {"query":"Get the last n lines from the history database .","positive":"def Func ( arg_0 , arg_1 = 10 , arg_2 = True , arg_3 = False , arg_4 = False ) : arg_0 . writeout_cache ( ) if not arg_4 : arg_1 += 1 arg_5 = arg_0 . _run_sql ( \"ORDER BY session DESC, line DESC LIMIT ?\" , ( arg_1 , ) , arg_2 = arg_2 , arg_3 = arg_3 ) if not arg_4 : return reversed ( list ( arg_5 ) [ 1 : ] ) return reversed ( list ( arg_5 ) )","id_":253956,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/history.py#L230-L255","negative":"Return True if we should retry, False otherwise."} {"query":"Returns true if the specified error category is suppressed on this line .","positive":"def Func ( arg_0 , arg_1 ) : return ( _global_error_suppressions . get ( arg_0 , False ) or arg_1 in _error_suppressions . get ( arg_0 , set ( ) ) or arg_1 in _error_suppressions . get ( None , set ( ) ) )","id_":253957,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L739-L754","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Draw this line segment as a binary image mask .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 1 , arg_3 = 0 , arg_4 = False ) : arg_5 = arg_0 . draw_heatmap_array ( arg_1 , alpha_lines = 1.0 , alpha_points = 1.0 , arg_2 = arg_2 , arg_3 = arg_3 , antialiased = False , arg_4 = arg_4 ) return arg_5 > 0.5","id_":253958,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/lines.py#L580-L613","negative":"Reject request."} {"query":"Full URL to the dataset contents .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . download_location return arg_1 . base_uri + arg_1 . location + arg_1 . 
access_credential","id_":253959,"task_name":"https:\/\/github.com\/Azure\/Azure-MachineLearning-ClientLibrary-Python\/blob\/d1211b289747671898eb063013e0dc53d3c80acd\/azureml\/__init__.py#L411-L414","negative":"Call the timeout handlers due.\n\n :Return: (next_event_timeout, sources_handled) tuple.\n next_event_timeout is number of seconds until the next timeout\n event, sources_handled is number of handlers called."} {"query":"Filter the annotation array down to only those Annotation objects matching the query .","positive":"def Func ( arg_0 , ** arg_1 ) : arg_2 = AnnotationArray ( ) for arg_3 in arg_0 : if arg_3 . Func ( ** arg_1 ) : arg_2 . append ( arg_3 ) return arg_2","id_":253960,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/core.py#L1504-L1530","negative":"Upload given file into DKV and save it under give key as raw object.\n\n :param dest_key: name of destination key in DKV\n :param file_path: path to file to upload\n :return: key name if object was uploaded successfully"} {"query":"Add a list of vectors to Bloch sphere .","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_1 [ 0 ] , ( list , np . ndarray ) ) : for arg_2 in arg_1 : arg_0 . vectors . append ( arg_2 ) else : arg_0 . vectors . append ( arg_1 )","id_":253961,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/visualization\/bloch.py#L331-L342","negative":"Print a status message about the logger."} {"query":"Build nbextension cmdclass dict for the setuptools . setup method .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : import warnings from setuptools . command . install import install from setuptools . command . develop import arg_6 from os . path import dirname , join , exists , realpath from traceback import extract_stack try : from notebook . nbextensions import install_nbextension from notebook . services . config import ConfigManager except ImportError : try : from IPython . html . nbextensions import install_nbextension from IPython . html . services . config import ConfigManager except ImportError : warnings . warn ( \"No jupyter notebook found in your environment. \" \"Hence jupyter nbextensions were not installed. \" \"If you would like to have them,\" \"please issue 'pip install jupyter'.\" ) return { } if arg_2 is None : arg_2 = not _is_root ( ) arg_3 = extract_stack ( ) [ - 2 ] [ 0 ] arg_4 = realpath ( arg_3 ) if not exists ( arg_4 ) : raise Exception ( 'Could not find path of setup file.' ) arg_5 = join ( dirname ( arg_4 ) , arg_0 ) def run_nbextension_install ( arg_6 ) : import sys arg_7 = hasattr ( sys , 'real_prefix' ) if arg_7 : install_nbextension ( arg_5 , symlink = arg_6 , sys_prefix = arg_7 ) else : install_nbextension ( arg_5 , symlink = arg_6 , arg_2 = arg_2 ) if arg_1 is not None : print ( \"Enabling the extension ...\" ) arg_8 = ConfigManager ( ) arg_8 . update ( 'notebook' , { \"load_extensions\" : { arg_1 : True } } ) class InstallCommand ( install ) : def run ( arg_9 ) : print ( \"Installing Python module...\" ) install . run ( arg_9 ) print ( \"Installing nbextension ...\" ) run_nbextension_install ( False ) class DevelopCommand ( arg_6 ) : def run ( arg_9 ) : print ( \"Installing Python module...\" ) arg_6 . 
run ( arg_9 ) print ( \"Installing nbextension ...\" ) run_nbextension_install ( True ) return { 'install' : InstallCommand , 'develop' : DevelopCommand , }","id_":253962,"task_name":"https:\/\/github.com\/jdfreder\/jupyter-pip\/blob\/9f04c6096f1169b08aeaf6221616a5fb48111044\/jupyterpip\/__init__.py#L12-L113","negative":"Create required links from a sensor region to a classifier region."} {"query":"Initialize attributes that are not saved with the checkpoint .","positive":"def Func ( arg_0 ) : arg_0 . _firstComputeCall = True arg_0 . _accuracy = None arg_0 . _protoScores = None arg_0 . _categoryDistances = None arg_0 . _knn = knn_classifier . KNNClassifier ( ** arg_0 . knnParams ) for arg_6 in ( '_partitions' , '_useAuxiliary' , '_doSphering' , '_scanInfo' , '_protoScores' ) : if not hasattr ( arg_0 , arg_6 ) : setattr ( arg_0 , arg_6 , None )","id_":253963,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/regions\/knn_classifier_region.py#L605-L619","negative":"A helper function to look up license object information\n\n Use names from: https:\/\/api.github.com\/licenses"} {"query":"Cancels the item if it was not yet completed and removes any children that are LIKELY .","positive":"def Func ( arg_0 ) : if arg_0 . _is_finished ( ) : for arg_1 in arg_0 . children : arg_1 . Func ( ) return arg_0 . _set_state ( arg_0 . CANCELLED ) arg_0 . _drop_children ( ) arg_0 . task_spec . _on_Func ( arg_0 )","id_":253964,"task_name":"https:\/\/github.com\/knipknap\/SpiffWorkflow\/blob\/f0af7f59a332e0619e4f3c00a7d4a3d230760e00\/SpiffWorkflow\/task.py#L571-L582","negative":"This will output the nginx HTTP config string for specific port spec"} {"query":"Read the current ADS - state and the machine - state .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = _adsDLL . AdsSyncReadStateReqEx arg_3 = ctypes . pointer ( arg_1 . amsAddrStruct ( ) ) arg_4 = ctypes . c_int ( ) arg_5 = ctypes . pointer ( arg_4 ) arg_6 = ctypes . c_int ( ) arg_7 = ctypes . pointer ( arg_6 ) arg_8 = arg_2 ( arg_0 , arg_3 , arg_5 , arg_7 ) if arg_8 : raise ADSError ( arg_8 ) return ( arg_4 . value , arg_6 . value )","id_":253965,"task_name":"https:\/\/github.com\/stlehmann\/pyads\/blob\/44bd84394db2785332ac44b2948373916bea0f02\/pyads\/pyads_ex.py#L228-L260","negative":"Rename this conversation.\n\n Hangouts only officially supports renaming group conversations, so\n custom names for one-to-one conversations may or may not appear in all\n first party clients.\n\n Args:\n name (str): New name.\n\n Raises:\n .NetworkError: If conversation cannot be renamed."} {"query":"Return earliest start time in this collection .","positive":"def Func ( arg_0 , * arg_1 : arg_2 [ arg_3 ] ) -> int : arg_4 = list ( itertools . chain ( * ( arg_0 . _table [ chan ] for chan in arg_1 if chan in arg_0 . _table ) ) ) if arg_4 : return min ( ( arg_5 . begin for arg_5 in arg_4 ) ) return 0","id_":253966,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/timeslots.py#L179-L189","negative":"r\"\"\"\n Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always\n uses utf-8 URLs internally because this is what browsers and HTTP do as\n well. 
In some places where it accepts an URL it also accepts a unicode IRI\n and converts it into a URI.\n\n Examples for IRI versus URI:\n\n >>> iri_to_uri(u'http:\/\/\u2603.net\/')\n 'http:\/\/xn--n3h.net\/'\n >>> iri_to_uri(u'http:\/\/\u00fcser:p\u00e4ssword@\u2603.net\/p\u00e5th')\n 'http:\/\/%C3%BCser:p%C3%A4ssword@xn--n3h.net\/p%C3%A5th'\n\n There is a general problem with IRI and URI conversion with some\n protocols that appear in the wild that are in violation of the URI\n specification. In places where Werkzeug goes through a forced IRI to\n URI conversion it will set the `safe_conversion` flag which will\n not perform a conversion if the end result is already ASCII. This\n can mean that the return value is not an entirely correct URI but\n it will not destroy such invalid URLs in the process.\n\n As an example consider the following two IRIs::\n\n magnet:?xt=uri:whatever\n itms-services:\/\/?action=download-manifest\n\n The internal representation after parsing of those URLs is the same\n and there is no way to reconstruct the original one. If safe\n conversion is enabled however this function becomes a noop for both of\n those strings as they both can be considered URIs.\n\n .. versionadded:: 0.6\n\n .. versionchanged:: 0.9.6\n The `safe_conversion` parameter was added.\n\n :param iri: The IRI to convert.\n :param charset: The charset for the URI.\n :param safe_conversion: indicates if a safe conversion should take place.\n For more information see the explanation above."} {"query":"Mute newly added handlers to the root level right after calling executor . status","positive":"def Func ( arg_0 ) : if arg_0 . logger_flag is True : return arg_1 = logging . getLogger ( ) for arg_2 in arg_1 . handlers : if arg_2 not in arg_0 . prior_loghandlers : arg_2 . setLevel ( logging . ERROR ) arg_0 . logger_flag = True","id_":253967,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/dataflow\/strategy.py#L141-L153","negative":"Get a temp filename for atomic download."} {"query":"Return my probabilities ; must be down to one variable .","positive":"def Func ( arg_0 ) : assert len ( arg_0 . vars ) == 1 return ProbDist ( arg_0 . vars [ 0 ] , dict ( ( arg_1 , arg_2 ) for ( ( arg_1 , ) , arg_2 ) in arg_0 . cpt . items ( ) ) )","id_":253968,"task_name":"https:\/\/github.com\/hobson\/aima\/blob\/3572b2fb92039b4a1abe384be8545560fbd3d470\/aima\/probability.py#L354-L358","negative":"send the registration_request"} {"query":"get version 1 of the google compute and storage service","positive":"def Func ( arg_0 , arg_1 = 'v1' ) : arg_0 . _bucket_service = storage . Client ( ) arg_3 = GoogleCredentials . get_application_default ( ) arg_0 . _storage_service = discovery_build ( 'storage' , arg_1 , credentials = arg_3 ) arg_0 . _compute_service = discovery_build ( 'compute' , arg_1 , credentials = arg_3 )","id_":253969,"task_name":"https:\/\/github.com\/singularityhub\/sregistry-cli\/blob\/abc96140a1d15b5e96d83432e1e0e1f4f8f36331\/sregistry\/main\/google_storage\/__init__.py#L91-L101","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. 
plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Open and manage a BCon wrapper to a Bloomberg API session","positive":"def Func ( ** arg_0 ) : arg_1 = BCon ( ** arg_0 ) arg_1 . start ( ) try : yield arg_1 finally : arg_1 . stop ( )","id_":253970,"task_name":"https:\/\/github.com\/matthewgilbert\/pdblp\/blob\/aaef49ad6fca9af6ee44739d6e7e1cc3e7b0f8e2\/pdblp\/pdblp.py#L40-L54","negative":"will get the axis mode for the current series"} {"query":"Verify a certificate in a context .","positive":"def Func ( arg_0 ) : arg_0 . _cleanup ( ) arg_0 . _init ( ) arg_1 = _lib . X509_verify_cert ( arg_0 . _store_ctx ) arg_0 . _cleanup ( ) if arg_1 <= 0 : raise arg_0 . _exception_from_context ( )","id_":253971,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/crypto.py#L1746-L1766","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"Identify clusters using Meanshift algorithm .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , ** arg_3 ) : if arg_1 is None : arg_1 = cl . estimate_bandwidth ( arg_0 ) arg_4 = cl . MeanShift ( arg_1 = arg_1 , arg_2 = arg_2 , ** arg_3 ) arg_4 . fit ( arg_0 ) arg_5 = arg_4 . labels_ return arg_5 , [ np . nan ]","id_":253972,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/latools\/filtering\/clustering.py#L5-L33","negative":"quit command when several threads are involved."} {"query":"Otsu threshold on data .","positive":"def Func ( arg_0 , arg_1 = 255 ) : arg_2 , arg_3 = np . histogram ( arg_0 . ravel ( ) , arg_1 = arg_1 ) arg_4 = arg_2 . astype ( 'float' ) \/ arg_2 . sum ( ) arg_5 = 0.5 * ( arg_3 [ 1 : ] + arg_3 [ : - 1 ] ) arg_6 = np . array ( [ arg_4 [ : i + 1 ] . sum ( ) for i in range ( arg_4 . size ) ] ) arg_7 = np . array ( [ sum ( arg_5 [ : i + 1 ] * arg_4 [ : i + 1 ] ) for i in range ( arg_4 . size ) ] ) arg_8 = arg_7 [ - 1 ] arg_9 = ( arg_8 * arg_6 - arg_7 ) ** 2 \/ ( arg_6 * ( 1 - arg_6 ) + 1e-15 ) arg_10 = arg_9 . argmax ( ) return 0.5 * ( arg_3 [ arg_10 ] + arg_3 [ arg_10 + 1 ] )","id_":253973,"task_name":"https:\/\/github.com\/peri-source\/peri\/blob\/61beed5deaaf978ab31ed716e8470d86ba639867\/peri\/initializers.py#L181-L219","negative":"Play the video and block whilst the video is playing"} {"query":"Returns the unnormalized log density of an LKJ distribution .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : with tf . name_scope ( arg_2 or 'log_unnorm_prob_lkj' ) : arg_1 = tf . convert_to_tensor ( value = arg_1 , arg_2 = 'x' ) if arg_0 . input_output_cholesky : arg_3 = 2.0 * tf . reduce_sum ( input_tensor = tf . math . log ( tf . linalg . diag_part ( arg_1 ) ) , axis = [ - 1 ] ) else : arg_4 , arg_3 = tf . linalg . slogdet ( arg_1 ) arg_5 = ( arg_0 . concentration - 1. 
) * arg_3 return arg_5","id_":253974,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/lkj.py#L371-L410","negative":"This method is called before first step of simulation."} {"query":"Gets session data lazily .","positive":"def Func ( arg_0 ) : if not arg_0 . _Func : arg_0 . _Func = arg_0 . _get_Func ( ) if arg_0 . _Func is None : arg_0 . _Func = { } return arg_0 . _Func","id_":253975,"task_name":"https:\/\/github.com\/authomatic\/authomatic\/blob\/90a9ce60cc405ae8a2bf5c3713acd5d78579a04e\/authomatic\/core.py#L423-L432","negative":"Init openstack neutron mq\n\n 1. Check if enable listening neutron notification\n 2. Create consumer\n\n :param mq: class ternya.mq.MQ"} {"query":"Load a . py based config file by filename and path .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = PyFileConfigLoader ( arg_1 , arg_2 = arg_2 ) try : arg_4 = arg_3 . load_config ( ) except ConfigFileNotFound : raise except Exception : arg_1 = arg_3 . full_filename or arg_1 arg_0 . log . error ( \"Exception while loading config file %s\" , arg_1 , exc_info = True ) else : arg_0 . log . debug ( \"Loaded config file: %s\" , arg_3 . full_filename ) arg_0 . update_config ( arg_4 )","id_":253976,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/config\/application.py#L443-L460","negative":"Revoke the token and remove the cookie."} {"query":"Select the proper LockManager based on the current backend used by Celery .","positive":"def Func ( arg_0 ) : if arg_0 == 'RedisBackend' : arg_1 = _LockManagerRedis elif arg_0 == 'DatabaseBackend' : arg_1 = _LockManagerDB else : raise NotImplementedError return arg_1","id_":253977,"task_name":"https:\/\/github.com\/Robpol86\/Flask-Celery-Helper\/blob\/92bd3b02954422665260116adda8eb899546c365\/flask_celery.py#L139-L155","negative":"Lists the categories in the lexicon, except the\n optional categories.\n\n Returns:\n list: A list of strings of category names."} {"query":"Gets the max partition for a table .","positive":"def Func ( arg_0 , arg_1 = \"default\" , arg_2 = None , arg_3 = None , arg_4 = 'metastore_default' ) : from airflow . hooks . hive_hooks import HiveMetastoreHook if '.' in arg_0 : arg_1 , arg_0 = arg_0 . split ( '.' ) arg_5 = HiveMetastoreHook ( arg_4 = arg_4 ) return arg_5 . 
Func ( arg_1 = arg_1 , table_name = arg_0 , arg_2 = arg_2 , arg_3 = arg_3 )","id_":253978,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/macros\/hive.py#L23-L55","negative":"Enables uniform interface to value and batch jacobian calculation.\n\n Works in both eager and graph modes.\n\n Arguments:\n f: The scalar function to evaluate.\n x: The value at which to compute the value and the batch jacobian.\n\n Returns:\n A tuple (f(x), J(x)), where J(x) is the batch jacobian."} {"query":"Returns the icon resource filename that corresponds to the given typename .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = { 'CLASS' : ICON_CLASS , 'IMPORT' : ICON_NAMESPACE , 'STATEMENT' : ICON_VAR , 'FORFLOW' : ICON_VAR , 'FORSTMT' : ICON_VAR , 'WITHSTMT' : ICON_VAR , 'GLOBALSTMT' : ICON_VAR , 'MODULE' : ICON_NAMESPACE , 'KEYWORD' : ICON_KEYWORD , 'PARAM' : ICON_VAR , 'ARRAY' : ICON_VAR , 'INSTANCEELEMENT' : ICON_VAR , 'INSTANCE' : ICON_VAR , 'PARAM-PRIV' : ICON_VAR , 'PARAM-PROT' : ICON_VAR , 'FUNCTION' : ICON_FUNC , 'DEF' : ICON_FUNC , 'FUNCTION-PRIV' : ICON_FUNC_PRIVATE , 'FUNCTION-PROT' : ICON_FUNC_PROTECTED } arg_3 = None arg_1 = arg_1 . upper ( ) if hasattr ( arg_0 , \"string\" ) : arg_0 = arg_0 . string if arg_1 == \"FORFLOW\" or arg_1 == \"STATEMENT\" : arg_1 = \"PARAM\" if arg_1 == \"PARAM\" or arg_1 == \"FUNCTION\" : if arg_0 . startswith ( \"__\" ) : arg_1 += \"-PRIV\" elif arg_0 . startswith ( \"_\" ) : arg_1 += \"-PROT\" if arg_1 in arg_2 : arg_3 = arg_2 [ arg_1 ] elif arg_1 : _logger ( ) . warning ( \"Unimplemented completion icon_type: %s\" , arg_1 ) return arg_3","id_":253979,"task_name":"https:\/\/github.com\/pyQode\/pyqode.python\/blob\/821e000ea2e2638a82ce095a559e69afd9bd4f38\/pyqode\/python\/backend\/workers.py#L248-L296","negative":"write lines, one by one, separated by \\n to device"} {"query":"Conditionally push new vectors into a batch of first - in - first - out queues .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = tf . concat ( [ arg_0 [ 1 : ] , [ arg_2 ] ] , axis = 0 ) arg_4 = tf . broadcast_to ( arg_1 [ tf . newaxis , ... , tf . newaxis ] , distribution_util . prefer_static_shape ( arg_0 ) ) return tf . where ( arg_4 , arg_3 , arg_0 )","id_":253980,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/optimizer\/lbfgs.py#L403-L466","negative":"Enumerate all possible resonance forms and return them as a list.\n\n :param mol: The input molecule.\n :type mol: rdkit.Chem.rdchem.Mol\n :return: A list of all possible resonance forms of the molecule.\n :rtype: list of rdkit.Chem.rdchem.Mol"} {"query":"General method for setting the input channels for the status process","positive":"def Func ( arg_0 , arg_1 , arg_2 = \"mix\" ) : if not arg_1 : raise eh . ProcessError ( \"At least one status channel must be \" \"provided to include this process in the \" \"pipeline\" ) if len ( arg_1 ) == 1 : logger . debug ( \"Setting only one status channel: {}\" . format ( arg_1 [ 0 ] ) ) arg_0 . _context = { \"compile_channels\" : arg_1 [ 0 ] } else : arg_4 = arg_1 [ 0 ] if arg_2 == \"mix\" : arg_5 = \",\" . join ( arg_1 [ 1 : ] ) arg_6 = \"{}.mix({})\" . format ( arg_4 , arg_5 ) elif arg_2 == \"join\" : arg_6 = arg_4 for arg_7 in arg_1 [ 1 : ] : arg_6 += \".join({})\" . format ( arg_7 ) arg_6 += \".map{ ot -> [ ot[0], ot[1..-1] ] }\" logger . debug ( \"Status channel string: {}\" . format ( arg_6 ) ) arg_0 . 
_context = { \"compile_channels\" : arg_6 }","id_":253981,"task_name":"https:\/\/github.com\/assemblerflow\/flowcraft\/blob\/fc3f4bddded1efc76006600016dc71a06dd908c0\/flowcraft\/generator\/process.py#L622-L673","negative":"Initialize the bucket map assuming the given number of maxBuckets."} {"query":"Returns coordinates for point at t on the line . Calculates the coordinates of x and y for a point at t on a straight line . The t parameter is a number between 0 . 0 and 1 . 0 x0 and y0 define the starting point of the line x1 and y1 the ending point of the line .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_6 = arg_2 + arg_1 * ( arg_4 - arg_2 ) arg_7 = arg_3 + arg_1 * ( arg_5 - arg_3 ) return ( arg_6 , arg_7 )","id_":253982,"task_name":"https:\/\/github.com\/shoebot\/shoebot\/blob\/d554c1765c1899fa25727c9fc6805d221585562b\/shoebot\/data\/bezier.py#L428-L438","negative":"Set the rotation of this body using a rotation matrix.\n\n Parameters\n ----------\n rotation : sequence of 9 floats\n The desired rotation matrix for this body."} {"query":"Start scheduling jobs .","positive":"def Func ( arg_0 ) : if arg_0 . async_mode : arg_0 . _Funcr . start ( ) arg_0 . _listener . start ( ) else : arg_0 . _Funcr . Func ( )","id_":253983,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-kingarthur\/blob\/9d6a638bee68d5e5c511f045eeebf06340fd3252\/arthur\/scheduler.py#L280-L287","negative":"This is our parsing dispatcher\n\n :param stype: Search type category\n :param soup: The beautifulsoup object that contains the parsed html"} {"query":"Convert all tags in an HTML tree to XHTML by moving them to the XHTML namespace .","positive":"def Func ( arg_0 ) : try : arg_0 = arg_0 . getroot ( ) except AttributeError : pass arg_1 = \"{%s}\" % XHTML_NAMESPACE for arg_2 in arg_0 . iter ( etree . Element ) : arg_3 = arg_2 . tag if arg_3 [ 0 ] != '{' : arg_2 . tag = arg_1 + arg_3","id_":253984,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/lxml\/html\/__init__.py#L1544-L1556","negative":"Return a random sample from the distribution."} {"query":"Truncate small values of a complex array .","positive":"def Func ( arg_0 , arg_1 = 1e-10 ) : arg_2 = np . array ( arg_0 ) if np . isrealobj ( arg_2 ) : arg_2 [ arg_3 ( arg_2 ) < arg_1 ] = 0.0 else : arg_2 . real [ arg_3 ( arg_2 . real ) < arg_1 ] = 0.0 arg_2 . imag [ arg_3 ( arg_2 . imag ) < arg_1 ] = 0.0 return arg_2","id_":253985,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/tools\/qi\/qi.py#L290-L308","negative":"Handle the retrieval of the code"} {"query":"Sonify multi - level segmentations","positive":"def Func ( arg_0 , arg_1 = 22050 , arg_2 = None , ** arg_3 ) : arg_4 = [ 1 , 32. \/ 27 , 4. \/ 3 , 3. \/ 2 , 16. \/ 9 ] arg_5 = 0.1 arg_6 , arg_7 = hierarchy_flatten ( arg_0 ) if arg_2 is None : arg_2 = int ( arg_1 * ( max ( np . max ( arg_7 ) for arg_7 in arg_6 ) + 1. \/ arg_5 ) + 1 ) arg_8 = 0.0 for arg_9 , ( arg_10 , arg_11 ) in zip ( arg_6 , product ( range ( 3 , 3 + len ( arg_6 ) ) , arg_4 ) ) : arg_12 = mkclick ( 440.0 * arg_11 * arg_10 , arg_1 = arg_1 , duration = arg_5 ) arg_8 = arg_8 + filter_kwargs ( mir_eval . sonify . clicks , np . 
unique ( arg_9 ) , fs = arg_1 , arg_2 = arg_2 , arg_12 = arg_12 ) return arg_8","id_":253986,"task_name":"https:\/\/github.com\/marl\/jams\/blob\/b16778399b9528efbd71434842a079f7691a7a66\/jams\/sonify.py#L83-L103","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"} {"query":"Load Flickr25K dataset .","positive":"def Func ( arg_0 = 'sky' , arg_1 = \"data\" , arg_2 = 50 , arg_3 = False ) : arg_1 = os . path . join ( arg_1 , 'flickr25k' ) arg_4 = 'mirflickr25k.zip' arg_5 = 'http:\/\/press.liacs.nl\/mirflickr\/mirflickr25k\/' if folder_exists ( os . path . join ( arg_1 , \"mirflickr\" ) ) is False : logging . info ( \"[*] Flickr25k is nonexistent in {}\" . format ( arg_1 ) ) maybe_download_and_extract ( arg_4 , arg_1 , arg_5 , extract = True ) del_file ( os . path . join ( arg_1 , arg_4 ) ) arg_6 = os . path . join ( arg_1 , \"mirflickr\" ) arg_7 = load_file_list ( arg_1 = arg_6 , regx = '\\\\.jpg' , arg_3 = False ) arg_7 . sort ( key = natural_keys ) arg_8 = os . path . join ( arg_1 , \"mirflickr\" , \"meta\" , \"tags\" ) arg_9 = load_file_list ( arg_1 = arg_8 , regx = '\\\\.txt' , arg_3 = False ) arg_9 . sort ( key = natural_keys ) if arg_0 is None : logging . info ( \"[Flickr25k] reading all images\" ) else : logging . info ( \"[Flickr25k] reading images with tag: {}\" . format ( arg_0 ) ) arg_10 = [ ] for arg_11 , arg_12 in enumerate ( arg_9 ) : arg_13 = read_file ( os . path . join ( arg_8 , arg_9 [ arg_11 ] ) ) . split ( '\\n' ) if arg_0 is None or arg_0 in arg_13 : arg_10 . append ( arg_7 [ arg_11 ] ) arg_14 = visualize . read_images ( arg_10 , arg_6 , arg_2 = arg_2 , arg_3 = arg_3 ) return arg_14","id_":253987,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/files\/utils.py#L717-L784","negative":"Set renewal, rebinding times."} {"query":"Return an error code between 0 and 99 .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 99 ) : try : return Funcs_by_level [ arg_0 ] [ arg_1 ] except KeyError : pass if arg_1 . count ( '\"' ) == 2 and ' \"' in arg_1 and arg_1 . endswith ( '\".' ) : arg_3 = arg_1 [ : arg_1 . index ( ' \"' ) ] return Funcs_by_level [ arg_0 ] . get ( arg_3 , arg_2 ) return arg_2","id_":253988,"task_name":"https:\/\/github.com\/peterjc\/flake8-rst-docstrings\/blob\/b8b17d0317fc6728d5586553ab29a7d97e6417fd\/flake8_rst_docstrings.py#L201-L216","negative":"Configure the Python logging module for this file."} {"query":"Show current progress message to stderr . This function will remember the previous message so that next time it will clear the previous message before showing next one .","positive":"def Func ( arg_0 , * arg_1 ) : if not ( sys . stdout . isatty ( ) and sys . stderr . isatty ( ) ) : return arg_2 = ( arg_0 % arg_1 ) if Func . prev_message : sys . stderr . write ( ' ' * len ( Func . prev_message ) + '\\r' ) sys . stderr . write ( arg_2 + '\\r' ) Func . 
prev_message = arg_2","id_":253989,"task_name":"https:\/\/github.com\/bloomreach\/s4cmd\/blob\/bb51075bf43703e7cd95aa39288cf7732ec13a6d\/s4cmd.py#L151-L164","negative":"Check the spacing of a single equals sign."} {"query":"Build a filter that keeps edges whose data dictionaries are super - dictionaries to the given dictionary .","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 = True ) -> EdgePredicate : @ edge_predicate def annotation_dict_filter ( arg_4 : arg_5 ) -> arg_3 : return subdict_matches ( arg_4 , arg_0 , arg_2 = arg_2 ) return annotation_dict_filter","id_":253990,"task_name":"https:\/\/github.com\/pybel\/pybel-tools\/blob\/3491adea0ac4ee60f57275ef72f9b73da6dbfe0c\/src\/pybel_tools\/filters\/edge_filters.py#L45-L57","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Set the red green blue color of the bulb .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = '\\x58\\x01\\x03\\x01\\xFF\\x00{0}{1}{2}' . format ( chr ( arg_1 & 0xFF ) , chr ( arg_2 & 0xFF ) , chr ( arg_3 & 0xFF ) ) arg_0 . _color . write_value ( arg_4 )","id_":253991,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_BluefruitLE\/blob\/34fc6f596371b961628369d78ce836950514062f\/Adafruit_BluefruitLE\/services\/colorific.py#L47-L54","negative":"Returns a list of the dicom files within root_path\n\n Parameters\n ----------\n root_path: str\n Path to the directory to be recursively searched for DICOM files.\n\n Returns\n -------\n dicoms: set\n Set of DICOM absolute file paths"} {"query":"Stores a python dictionary as pytable","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , ** arg_5 ) : if arg_1 in arg_3 : raise ValueError ( 'Dictionary `%s` already exists in `%s`. Appending is not supported (yet).' ) if arg_1 in arg_3 : raise ValueError ( 'Dict `%s` already exists in `%s`. Appending is not supported (yet).' ) arg_6 = { } for arg_7 in arg_2 : arg_8 = arg_2 [ arg_7 ] arg_6 [ arg_7 ] = [ arg_8 ] arg_9 = ObjectTable ( data = arg_6 ) arg_0 . _prm_write_into_pytable ( arg_1 , arg_9 , arg_3 , arg_4 , ** arg_5 ) arg_10 = arg_3 . _f_get_child ( arg_1 ) arg_0 . _all_set_attributes_to_recall_natives ( arg_6 , arg_10 , HDF5StorageService . DATA_PREFIX ) setattr ( arg_10 . _v_attrs , HDF5StorageService . STORAGE_TYPE , HDF5StorageService . DICT ) arg_0 . _hdf5file . 
flush ( )","id_":253992,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/storageservice.py#L4110-L4157","negative":"Synchronize content metadata using the Degreed course content API.\n\n Args:\n serialized_data: JSON-encoded object containing content metadata.\n http_method: The HTTP method to use for the API request.\n\n Raises:\n ClientError: If Degreed API request fails."} {"query":"Add a read_range primitive","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_1 . parser_tree = parsing . Range ( arg_0 . value ( arg_2 ) . strip ( \"'\" ) , arg_0 . value ( arg_3 ) . strip ( \"'\" ) ) return True","id_":253993,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/dsl.py#L591-L595","negative":"An agent that keeps track of what locations are clean or dirty."} {"query":"Decrypts file from entered links","positive":"def Func ( arg_0 ) : if ENCRYPTION_DISABLED : print ( 'For decryption please install gpg' ) exit ( ) try : arg_1 = re . findall ( r'(.*\/(.*))#(.{30})' , arg_0 ) [ 0 ] arg_2 = urllib . request . Request ( arg_1 [ 0 ] , data = None , headers = { 'User-Agent' : 'Mozilla\/5.0 (Macintosh; Intel Mac OS X 10_9_3) ' ' AppleWebKit\/537.36 (KHTML, like Gecko) Chrome\/35.0.1916.47 Safari\/537.36' } ) arg_3 = urllib . request . urlopen ( arg_2 ) arg_4 = arg_3 . read ( ) arg_5 , arg_6 = os . pipe ( ) arg_7 = 'gpg --batch --decrypt --passphrase-fd {}' . format ( arg_5 ) arg_8 = Popen ( shlex . split ( arg_7 ) , stdout = PIPE , stdin = PIPE , arg_10 = PIPE , pass_fds = ( arg_5 , ) ) os . close ( arg_5 ) open ( arg_6 , 'w' ) . write ( arg_1 [ 2 ] ) arg_9 , arg_10 = arg_8 . communicate ( arg_4 ) with open ( arg_1 [ 1 ] , 'wb' ) as decrypted_file : decrypted_file . write ( arg_9 ) return arg_1 [ 1 ] + ' is decrypted and saved.' except IndexError : return 'Please enter valid link.'","id_":253994,"task_name":"https:\/\/github.com\/lc-guy\/limf\/blob\/ad380feb70ef8e579a91ca09c807efec9e8af565\/limf\/decrypter.py#L14-L46","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Return client credentials based on the current request .","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 . client_id is not None : return arg_1 . client_id , arg_1 . client_secret arg_2 = arg_1 . headers . get ( 'Authorization' ) if isinstance ( arg_2 , dict ) : return arg_2 [ 'username' ] , arg_2 [ 'password' ] return None , None","id_":253995,"task_name":"https:\/\/github.com\/lepture\/flask-oauthlib\/blob\/9e6f152a5bb360e7496210da21561c3e6d41b0e1\/flask_oauthlib\/provider\/oauth2.py#L636-L661","negative":"A list of row indices to remove. There are two caveats. First, this is\n a potentially slow operation. Second, pattern indices will shift if\n patterns before them are removed."} {"query":"Given a stream a callback and an optional transform sets up the subscription","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = \"\" ) : if arg_0 . status == \"disconnected\" or arg_0 . status == \"disconnecting\" or arg_0 . status == \"connecting\" : arg_0 . connect ( ) if arg_0 . status is not \"connected\" : return False logging . debug ( \"Subscribing to %s\" , arg_1 ) arg_0 . send ( { \"cmd\" : \"Func\" , \"arg\" : arg_1 , \"transform\" : arg_3 } ) with arg_0 . subscription_lock : arg_0 . 
subscriptions [ arg_1 + \":\" + arg_3 ] = arg_2 return True","id_":253996,"task_name":"https:\/\/github.com\/connectordb\/connectordb-python\/blob\/2092b0cb30898139a247176bcf433d5a4abde7cb\/connectordb\/_websocket.py#L110-L121","negative":"Returns a boolean as to whether the slot pool has room for this\n task to run"} {"query":"Display a specific structural variant .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = controllers . Func ( store , arg_0 , arg_1 , arg_2 ) return arg_3","id_":253997,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/variants\/views.py#L299-L302","negative":"Stop streaming samples from device and delete samples buffer"} {"query":"Reassemble a Binder object coming out of the database .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = cnxepub . Binder ( arg_0 , arg_2 = arg_2 ) for arg_4 in arg_1 [ 'contents' ] : arg_5 = _node_to_model ( arg_4 , parent = arg_3 ) if arg_5 . metadata [ 'title' ] != arg_4 [ 'title' ] : arg_3 . set_title_for_node ( arg_5 , arg_4 [ 'title' ] ) return arg_3","id_":253998,"task_name":"https:\/\/github.com\/openstax\/cnx-publishing\/blob\/f55b4a2c45d8618737288f1b74b4139d5ac74154\/cnxpublishing\/db.py#L921-L928","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Read the data encoding the CreateKeyPair response payload and decode it into its constituent parts .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . KMIPVersion . KMIP_1_0 ) : super ( CreateKeyPairResponsePayload , arg_0 ) . Func ( arg_1 , arg_2 = arg_2 ) arg_6 = utils . BytearrayStream ( arg_1 . Func ( arg_0 . length ) ) if arg_0 . is_tag_next ( arg_3 . Tags . PRIVATE_KEY_UNIQUE_IDENTIFIER , arg_6 ) : arg_0 . _private_key_unique_identifier = primitives . TextString ( tag = arg_3 . Tags . PRIVATE_KEY_UNIQUE_IDENTIFIER ) arg_0 . _private_key_unique_identifier . Func ( arg_6 , arg_2 = arg_2 ) else : raise exceptions . InvalidKmipEncoding ( \"The CreateKeyPair response payload encoding is missing the \" \"private key unique identifier.\" ) if arg_0 . is_tag_next ( arg_3 . Tags . PUBLIC_KEY_UNIQUE_IDENTIFIER , arg_6 ) : arg_0 . _public_key_unique_identifier = primitives . TextString ( tag = arg_3 . Tags . PUBLIC_KEY_UNIQUE_IDENTIFIER ) arg_0 . _public_key_unique_identifier . Func ( arg_6 , arg_2 = arg_2 ) else : raise exceptions . InvalidKmipEncoding ( \"The CreateKeyPair response payload encoding is missing the \" \"public key unique identifier.\" ) if arg_2 < arg_3 . KMIPVersion . KMIP_2_0 : if arg_0 . is_tag_next ( arg_3 . Tags . PRIVATE_KEY_TEMPLATE_ATTRIBUTE , arg_6 ) : arg_0 . _private_key_template_attribute = objects . TemplateAttribute ( tag = arg_3 . Tags . PRIVATE_KEY_TEMPLATE_ATTRIBUTE ) arg_0 . _private_key_template_attribute . Func ( arg_6 , arg_2 = arg_2 ) if arg_0 . is_tag_next ( arg_3 . Tags . PUBLIC_KEY_TEMPLATE_ATTRIBUTE , arg_6 ) : arg_0 . _public_key_template_attribute = objects . TemplateAttribute ( tag = arg_3 . Tags . PUBLIC_KEY_TEMPLATE_ATTRIBUTE ) arg_0 . _public_key_template_attribute . Func ( arg_6 , arg_2 = arg_2 ) arg_0 . 
is_oversized ( arg_6 )","id_":253999,"task_name":"https:\/\/github.com\/OpenKMIP\/PyKMIP\/blob\/b51c5b044bd05f8c85a1d65d13a583a4d8fc1b0e\/kmip\/core\/messages\/payloads\/create_key_pair.py#L484-L568","negative":"Given the request and response headers, return `True` if an HTTP\n \"Not Modified\" response could be returned instead."} {"query":"simple example how to load an ABF file and plot every sweep .","positive":"def Func ( arg_0 ) : arg_1 = io . AxonIO ( filename = arg_0 ) arg_2 = arg_1 . read_block ( lazy = False , cascade = True ) print ( arg_0 + \"\\nplotting %d sweeps...\" % len ( arg_2 . segments ) ) plt . figure ( figsize = ( 12 , 10 ) ) plt . title ( arg_0 ) for arg_3 in range ( len ( arg_2 . segments ) ) : arg_4 = arg_2 . segments [ arg_3 ] . analogsignals [ 0 ] plt . plot ( arg_4 . times - arg_4 . times [ 0 ] , arg_4 . magnitude , alpha = .5 ) plt . ylabel ( arg_4 . dimensionality ) plt . xlabel ( \"seconds\" ) plt . show ( ) plt . close ( )","id_":254000,"task_name":"https:\/\/github.com\/swharden\/SWHLab\/blob\/a86c3c65323cec809a4bd4f81919644927094bf5\/doc\/misc\/neo demo.py#L9-L22","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Find matching q - value for each score in scores","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = find_nearest_matches ( np . float32 ( arg_1 . cutoff . values ) , np . float32 ( arg_0 ) ) return arg_1 . pvalue . iloc [ arg_2 ] . values , arg_1 . svalue . iloc [ arg_2 ] . values , arg_1 . pep . iloc [ arg_2 ] . values , arg_1 . qvalue . iloc [ arg_2 ] . values","id_":254001,"task_name":"https:\/\/github.com\/PyProphet\/pyprophet\/blob\/f546ad171750cd7685afbde6785fe71f82cadb35\/pyprophet\/stats.py#L56-L59","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Split one long analysis file into multiple smaller ones .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = 0 , arg_4 = None , arg_5 = 0 , arg_6 = 0 ) : if arg_1 is None : arg_1 = os . path . join ( os . path . dirname ( arg_0 ) , 'split' ) if not os . path . exists ( arg_1 ) : os . mkdir ( arg_1 ) with open ( arg_0 , 'r' ) as f : arg_7 = f . readlines ( ) arg_8 = os . path . splitext ( arg_0 ) [ - 1 ] arg_9 = arg_7 [ : arg_3 ] arg_10 = [ ] for arg_11 , arg_12 in enumerate ( arg_7 ) : if re . search ( arg_2 , arg_12 ) : arg_10 . append ( arg_11 ) arg_10 . append ( len ( arg_7 ) ) arg_13 = { } for arg_11 in range ( len ( arg_10 ) - 1 ) : arg_14 = re . search ( arg_4 , arg_7 [ arg_10 [ arg_11 ] ] ) if arg_14 : arg_15 = arg_14 . groups ( ) [ 0 ] . strip ( ) else : arg_15 = 'no_name_{:}' . format ( arg_11 ) arg_13 [ arg_15 ] = arg_9 + arg_7 [ arg_10 [ arg_11 ] : arg_10 [ arg_11 + 1 ] ] [ arg_6 : arg_5 ] print ( 'Writing files to: {:}' . format ( arg_1 ) ) for arg_16 , arg_17 in arg_13 . items ( ) : arg_15 = ( arg_16 + arg_8 ) . replace ( ' ' , '_' ) with open ( os . path . join ( arg_1 , arg_15 ) , 'w' ) as f : f . writelines ( arg_17 ) print ( ' {:}' . format ( arg_15 ) ) print ( 'Done.' ) return arg_1","id_":254002,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/latools\/preprocessing\/split.py#L11-L90","negative":"Randomly resolve ambiguous bases. 
This is applied to each boot\n replicate so that over reps the random resolutions don't matter.\n Sites are randomly resolved, so best for unlinked SNPs since \n otherwise linked SNPs are losing their linkage information... \n though it's not like we're using it anyways."} {"query":"Return a new Streamlet by outer right join_streamlet with this streamlet","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : from heronpy . streamlet . impl . joinbolt import JoinStreamlet , JoinBolt arg_4 = JoinStreamlet ( JoinBolt . OUTER_RIGHT , arg_2 , arg_3 , arg_0 , arg_1 ) arg_0 . _add_child ( arg_4 ) arg_1 . _add_child ( arg_4 ) return arg_4","id_":254003,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heronpy\/streamlet\/streamlet.py#L168-L176","negative":"Get a rate for a given currency and date.\n\n :type date: datetime.date\n\n >>> from datetime import date\n >>> c = CurrencyConverter()\n >>> c._get_rate('USD', date=date(2014, 3, 28))\n 1.375...\n >>> c._get_rate('BGN', date=date(2010, 11, 21))\n Traceback (most recent call last):\n RateNotFoundError: BGN has no rate for 2010-11-21"} {"query":"Deletes a transfer job . This is a soft delete . After a transfer job is deleted the job and all the transfer executions are subject to garbage collection . Transfer jobs become eligible for garbage collection 30 days after soft delete .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return ( arg_0 . get_conn ( ) . transferJobs ( ) . patch ( jobName = arg_1 , body = { PROJECT_ID : arg_2 , TRANSFER_JOB : { STATUS1 : GcpTransferJobsStatus . DELETED } , TRANSFER_JOB_FIELD_MASK : STATUS1 , } , ) . execute ( num_retries = arg_0 . num_retries ) )","id_":254004,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/gcp_transfer_hook.py#L204-L232","negative":"This method is called before first step of simulation."} {"query":"write connection info to JSON dict in self . connection_file","positive":"def Func ( arg_0 ) : if arg_0 . _connection_file_written : return arg_0 . connection_file , arg_2 = Func ( arg_0 . connection_file , ip = arg_0 . ip , key = arg_0 . session . key , arg_4 = arg_0 . stdin_port , arg_5 = arg_0 . iopub_port , arg_3 = arg_0 . shell_port , arg_6 = arg_0 . hb_port ) arg_0 . shell_port = arg_2 [ 'shell_port' ] arg_0 . stdin_port = arg_2 [ 'stdin_port' ] arg_0 . iopub_port = arg_2 [ 'iopub_port' ] arg_0 . hb_port = arg_2 [ 'hb_port' ] arg_0 . _connection_file_written = True","id_":254005,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/zmq\/kernelmanager.py#L760-L774","negative":"This method will remove any stored records within the range from start to\n end. Noninclusive of end.\n\n parameters\n ------------\n start - integer representing the ROWID of the start of the deletion range,\n end - integer representing the ROWID of the end of the deletion range,\n if None, it will default to end."} {"query":"Log the message msg to the destination self . _logging_dest .","positive":"def Func ( arg_0 , arg_1 ) : if is_type ( arg_0 . _logging_dest , str ) : with open ( arg_0 . _logging_dest , \"at\" , encoding = \"utf-8\" ) as f : f . write ( arg_1 ) else : arg_0 . _logging_dest . 
write ( arg_1 )","id_":254006,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/backend\/connection.py#L680-L692","negative":"Get the context for this view."} {"query":"Notify all subscribers of a property change .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = json . dumps ( { 'messageType' : 'propertyStatus' , 'data' : { arg_1 . name : arg_1 . get_value ( ) , } } ) for arg_3 in list ( arg_0 . subscribers ) : try : arg_3 . write_message ( arg_2 ) except tornado . websocket . WebSocketClosedError : pass","id_":254007,"task_name":"https:\/\/github.com\/mozilla-iot\/webthing-python\/blob\/65d467c89ed79d0bbc42b8b3c8f9e5a320edd237\/webthing\/thing.py#L423-L440","negative":"Save the model in the given directory.\n\n :param saveModelDir: (string)\n Absolute directory path for saving the model. This directory should\n only be used to store a saved model. If the directory does not exist,\n it will be created automatically and populated with model data. A\n pre-existing directory will only be accepted if it contains previously\n saved model data. If such a directory is given, the full contents of\n the directory will be deleted and replaced with current model data."} {"query":"Checks if a next message is possible .","positive":"def Func ( arg_0 ) : if arg_0 . is_initial : return True if arg_0 . before : if arg_0 . before_cursor : return True else : return False else : if arg_0 . after_cursor : return True else : return False","id_":254008,"task_name":"https:\/\/github.com\/agsimeonov\/cbexchange\/blob\/e3762f77583f89cf7b4f501ab3c7675fc7d30ab3\/cbexchange\/client.py#L158-L176","negative":"Unregister an extension code. For testing only."} {"query":"Setup our resultsPerChoice history based on the passed in resultsPerChoice .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _resultsPerChoice = [ [ ] ] * len ( arg_0 . choices ) for ( arg_3 , arg_4 ) in arg_1 : arg_5 = arg_0 . choices . index ( arg_3 ) arg_0 . _resultsPerChoice [ arg_5 ] = list ( arg_4 )","id_":254009,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/swarming\/hypersearch\/permutation_helpers.py#L352-L369","negative":"Whether a connection can be established between those two meshes."} {"query":"Enables uniform interface to value and batch jacobian calculation .","positive":"def Func ( arg_0 , arg_1 ) : if tf . executing_eagerly ( ) : with tf . GradientTape ( ) as tape : tape . watch ( arg_1 ) arg_2 = arg_0 ( arg_1 ) arg_3 = tape . batch_jacobian ( arg_2 , arg_1 ) else : arg_2 = arg_0 ( arg_1 ) arg_3 = gradients . batch_jacobian ( arg_2 , arg_1 ) return arg_2 , arg_3","id_":254010,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/mixture_same_family.py#L542-L562","negative":"Generate a tag for the alignment of the geometry of the bulge and disk of a bulge-disk system, to customize \\ \n phase names based on the bulge-disk model. This adds together the bulge_disk tags generated in the 3 functions\n above"} {"query":"Return a django - style database configuration based on url .","positive":"def Func ( arg_0 ) : return { arg_1 . upper ( ) : arg_2 for arg_1 , arg_2 in parse_database_url ( arg_0 ) . _asdict ( ) . 
items ( ) }","id_":254011,"task_name":"https:\/\/github.com\/5monkeys\/django-bananas\/blob\/cfd318c737f6c4580036c13d2acf32bca96654bf\/bananas\/url.py#L162-L183","negative":"Wrapper function for TUN and serial port monitoring\n\n Wraps the necessary functions to loop over until self._isRunning\n threading.Event() is set(). This checks for data on the TUN\/serial\n interfaces and then sends data over the appropriate interface. This\n function is automatically run when Threading.start() is called on the\n Monitor class."} {"query":"Read complete DSMR telegram s from the serial interface and parse it into CosemObject s and MbusObject s .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = serial_asyncio . open_serial_connection ( ** arg_0 . serial_settings ) arg_3 , arg_4 = yield from arg_2 while True : arg_5 = yield from arg_3 . Funcline ( ) arg_0 . telegram_buffer . append ( arg_5 . decode ( 'ascii' ) ) for arg_6 in arg_0 . telegram_buffer . get_all ( ) : try : arg_1 . put_nowait ( arg_0 . telegram_parser . parse ( arg_6 ) ) except ParseError as e : logger . warning ( 'Failed to parse telegram: %s' , e )","id_":254012,"task_name":"https:\/\/github.com\/ndokter\/dsmr_parser\/blob\/c04b0a5add58ce70153eede1a87ca171876b61c7\/dsmr_parser\/clients\/serial_.py#L51-L78","negative":"Increases or decreases the brightness in the layer.\n \n The given value is a percentage to increase\n or decrease the image brightness,\n for example 0.8 means brightness at 80%."} {"query":"Returns a dictionary of envs or file inputs for an operation .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_1 [ 'request' ] [ 'ephemeralPipeline' ] [ 'inputParameters' ] arg_4 = arg_1 [ 'request' ] [ 'pipelineArgs' ] [ 'inputs' ] arg_5 = [ arg [ 'name' ] for arg in arg_3 if ( 'localCopy' in arg ) == arg_2 ] return { arg_6 : arg_4 [ arg_6 ] for arg_6 in arg_5 if arg_6 in arg_4 }","id_":254013,"task_name":"https:\/\/github.com\/DataBiosphere\/dsub\/blob\/443ce31daa6023dc2fd65ef2051796e19d18d5a7\/dsub\/providers\/google.py#L1079-L1102","negative":"Return flake8 badge color.\n\n Parameters\n ----------\n score : float\n A flake8 score\n\n Returns\n -------\n str\n Badge color"} {"query":"Creates the basic network architecture transforming word embeddings to intermediate outputs","positive":"def Func ( arg_0 , arg_1 ) : if arg_0 . word_dropout > 0.0 : arg_2 = kl . Dropout ( arg_0 . word_dropout ) ( arg_1 ) else : arg_2 = arg_1 for arg_3 in range ( arg_0 . word_lstm_layers - 1 ) : arg_2 = kl . Bidirectional ( kl . LSTM ( arg_0 . word_lstm_units [ arg_3 ] , return_sequences = True , dropout = arg_0 . lstm_dropout ) ) ( arg_2 ) arg_2 = kl . Bidirectional ( kl . LSTM ( arg_0 . word_lstm_units [ - 1 ] , return_sequences = True , dropout = arg_0 . lstm_dropout ) ) ( arg_2 ) arg_4 = kl . TimeDistributed ( kl . Dense ( arg_0 . tags_number_ , activation = \"softmax\" , activity_regularizer = arg_0 . regularizer ) , name = \"p\" ) ( arg_2 ) return arg_4 , arg_2","id_":254014,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/models\/morpho_tagger\/network.py#L199-L219","negative":"The speed limit for a boid.\n \n Boids can momentarily go very fast,\n something that is impossible for real animals."} {"query":"These arguments are redundant with just using a project and we should encourage that as you don t have to learn any dumb flags!","positive":"def Func ( arg_0 ) : arg_0 . 
add_argument ( '-a' , '--animation' , default = None , help = 'Default animation type if no animation is specified' ) if deprecated . allowed ( ) : arg_0 . add_argument ( '--dimensions' , '--dim' , default = None , help = 'DEPRECATED: x, (x, y) or (x, y, z) dimensions for project' ) arg_0 . add_argument ( '--shape' , default = None , help = 'x, (x, y) or (x, y, z) dimensions for project' ) arg_0 . add_argument ( '-l' , '--layout' , default = None , help = 'Default layout class if no layout is specified' ) arg_0 . add_argument ( '--numbers' , '-n' , default = 'python' , choices = NUMBER_TYPES , help = NUMBERS_HELP ) arg_0 . add_argument ( '-p' , '--path' , default = None , help = PATH_HELP )","id_":254015,"task_name":"https:\/\/github.com\/ManiacalLabs\/BiblioPixel\/blob\/fd97e6c651a4bbcade64733847f4eec8f7704b7c\/bibliopixel\/main\/project_flags.py#L71-L106","negative":"Get a single publication."} {"query":"Reimplemented to use the run magic .","positive":"def Func ( arg_0 , arg_1 , arg_2 = False ) : if sys . platform == 'win32' : arg_1 = os . path . normpath ( arg_1 ) . replace ( '\\\\' , '\/' ) if ' ' in arg_1 or \"'\" in arg_1 or '\"' in arg_1 : arg_1 = '\"%s\"' % arg_1 . replace ( '\"' , '\\\\\"' ) arg_0 . execute ( '%%run %s' % arg_1 , arg_2 = arg_2 )","id_":254016,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/ipython_widget.py#L287-L311","negative":"Given configuration initiate a SigningService instance\n\n :param config: The signing service configuration\n :param entity_id: The entity identifier\n :return: A SigningService instance"} {"query":"Determine the name type whose regex the a function s name should match .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 , arg_3 = _get_properties ( arg_1 ) if not arg_0 . is_method ( ) : return \"function\" if arg_0 . decorators : arg_4 = arg_0 . decorators . nodes else : arg_4 = [ ] for arg_5 in arg_4 : if isinstance ( arg_5 , astroid . Name ) or ( isinstance ( arg_5 , astroid . Attribute ) and arg_5 . attrname in arg_3 ) : arg_6 = utils . safe_infer ( arg_5 ) if arg_6 and arg_6 . qname ( ) in arg_2 : return \"attr\" elif isinstance ( arg_5 , astroid . Attribute ) and arg_5 . attrname in ( \"setter\" , \"deleter\" , ) : return \"attr\" return \"method\"","id_":254017,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/base.py#L305-L340","negative":"Sets the player's paused state."} {"query":"Perform global inhibition . Performing global inhibition entails picking the top numActive columns with the highest overlap score in the entire region . At most half of the columns in a local neighborhood are allowed to be active . Columns with an overlap score below the stimulusThreshold are always inhibited .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = int ( arg_2 * arg_0 . _numColumns ) arg_4 = numpy . argsort ( arg_1 , kind = 'mergesort' ) arg_5 = len ( arg_4 ) - arg_3 while arg_5 < len ( arg_4 ) : arg_6 = arg_4 [ arg_5 ] if arg_1 [ arg_6 ] >= arg_0 . _stimulusThreshold : break else : arg_5 += 1 return arg_4 [ arg_5 : ] [ : : - 1 ]","id_":254018,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/spatial_pooler.py#L1584-L1615","negative":"Returns protobuf mapcontainer. 
Read from translation file."} {"query":"Run all parsing functions .","positive":"def Func ( arg_0 ) : for arg_1 in arg_0 . soup . findAll ( 'span' ) : arg_0 . create_italic ( arg_1 ) arg_0 . create_strong ( arg_1 ) arg_0 . create_underline ( arg_1 ) arg_0 . unwrap_span ( arg_1 ) for arg_1 in arg_0 . soup . findAll ( 'a' ) : arg_0 . remove_comments ( arg_1 ) arg_0 . check_next ( arg_1 ) if arg_0 . soup . body : for arg_1 in arg_0 . soup . body . findAll ( ) : arg_0 . remove_empty ( arg_1 ) arg_0 . remove_inline_comment ( arg_1 ) arg_0 . Func_attrs ( arg_1 ) for arg_2 , arg_3 in arg_0 . tokens : arg_0 . find_token ( arg_1 , arg_2 , arg_3 ) arg_0 . remove_blacklisted_tags ( arg_1 )","id_":254019,"task_name":"https:\/\/github.com\/nprapps\/copydoc\/blob\/e1ab09b287beb0439748c319cf165cbc06c66624\/copydoc.py#L58-L80","negative":"Clip input array with a vector list.\n\n Parameters\n ----------\n array : array\n input raster data\n array_affine : Affine\n Affine object describing the raster's geolocation\n geometries : iterable\n iterable of dictionaries, where every entry has a 'geometry' and\n 'properties' key.\n inverted : bool\n invert clip (default: False)\n clip_buffer : integer\n buffer (in pixels) geometries before clipping\n\n Returns\n -------\n clipped array : array"} {"query":"Remove values common with another Set","positive":"def Func ( arg_0 , arg_1 : arg_2 ) -> arg_2 : arg_3 = list ( arg_0 . _hsig . keys ( ) ) for arg_4 in arg_3 : if arg_4 in arg_1 : del arg_0 . _hsig [ arg_4 ] return arg_0","id_":254020,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/type_system\/scope.py#L217-L223","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Return a list of statements","positive":"def Func ( arg_0 ) : if len ( arg_0 . rows ) == 0 : return [ ] arg_1 = Statement ( arg_0 . rows [ 0 ] ) arg_1 . startline = arg_0 . rows [ 0 ] . linenumber arg_1 . endline = arg_0 . rows [ 0 ] . linenumber Func = [ ] for arg_5 in arg_0 . rows [ 1 : ] : if len ( arg_5 ) > 0 and arg_5 [ 0 ] == \"...\" : arg_1 += arg_5 [ 1 : ] arg_1 . endline = arg_5 . linenumber else : if len ( arg_1 ) > 0 : Func . append ( arg_1 ) arg_1 = Statement ( arg_5 ) arg_1 . startline = arg_5 . linenumber arg_1 . endline = arg_5 . linenumber if len ( arg_1 ) > 0 : Func . append ( arg_1 ) while ( len ( Func [ - 1 ] ) == 0 or ( ( len ( Func [ - 1 ] ) == 1 ) and len ( Func [ - 1 ] [ 0 ] ) == 0 ) ) : Func . pop ( ) return Func","id_":254021,"task_name":"https:\/\/github.com\/boakley\/robotframework-lint\/blob\/3e3578f4e39af9af9961aa0a715f146b74474091\/rflint\/parser\/tables.py#L35-L71","negative":"Returns the profile with the received ID as a dict\n\n If a local copy of the profile exists, it'll be returned. If not, it'll\n be downloaded from the web. The results are cached, so any subsequent\n calls won't hit the filesystem or the web.\n\n Args:\n profile_id (str): The ID of the profile you want.\n\n Raises:\n RegistryError: If there was some problem opening the profile file\n or its format was incorrect."} {"query":"Adds the given stream to the query construction . 
Additionally you can choose the interpolator to use for this stream as well as a special name for the column in the returned dataset . If no column name is given the full stream path will be used .","positive":"def Func ( arg_0 , arg_1 , arg_2 = \"closest\" , arg_3 = None , arg_4 = None , arg_5 = None , arg_6 = None , arg_7 = None , arg_8 = None , arg_9 = None , arg_10 = None ) : arg_11 = query_maker ( arg_3 , arg_4 , arg_6 , arg_7 , arg_8 , arg_9 ) param_stream ( arg_0 . cdb , arg_11 , arg_1 ) arg_11 [ \"interpolator\" ] = arg_2 if arg_10 is None : if isinstance ( arg_1 , six . string_types ) : arg_10 = arg_1 elif isinstance ( arg_1 , Stream ) : arg_10 = arg_1 . path else : raise Exception ( \"Could not find a name for the column! use the 'colname' parameter.\" ) if arg_10 in arg_0 . query [ \"dataset\" ] or arg_10 is \"x\" : raise Exception ( \"The column name either exists, or is labeled 'x'. Use the colname parameter to change the column name.\" ) arg_0 . query [ \"dataset\" ] [ arg_10 ] = arg_11","id_":254022,"task_name":"https:\/\/github.com\/connectordb\/connectordb-python\/blob\/2092b0cb30898139a247176bcf433d5a4abde7cb\/connectordb\/query\/dataset.py#L165-L202","negative":"Check that a binary operator is surrounded by exactly one space."} {"query":"Return content of a free form text block as a string .","positive":"def Func ( arg_0 ) : arg_1 = re . compile ( '((.|\\n)+)<\/text>' , re . UNICODE ) arg_2 = arg_1 . match ( arg_0 ) if arg_2 : return arg_2 . group ( 1 ) else : return None","id_":254023,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/tagvaluebuilders.py#L52-L61","negative":"Add process to events with default priority on current time"} {"query":"Gets contents of secret file","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = Funcs_dir ( ) arg_3 = os . path . join ( arg_2 , arg_0 ) try : with open ( arg_3 , \"r\" ) as secret_file : return secret_file . read ( ) except OSError : return arg_1","id_":254024,"task_name":"https:\/\/github.com\/5monkeys\/django-bananas\/blob\/cfd318c737f6c4580036c13d2acf32bca96654bf\/bananas\/secrets.py#L8-L22","negative":"initialize the merger model with a coalescent time\n\n Args:\n - Tc: a float or an iterable, if iterable another argument T of same shape is required\n - T: an array like of same shape as Tc that specifies the time pivots corresponding to Tc\n Returns:\n - None"} {"query":"Prepare sys . path for running the linter checks .","positive":"def Func ( arg_0 ) : arg_1 = list ( arg_5 . path ) arg_2 = [ ] for arg_3 in arg_0 : arg_4 = _get_python_path ( arg_3 ) if arg_4 in arg_2 : continue else : arg_2 . append ( arg_4 ) arg_5 . path [ : ] = arg_2 + [ \".\" ] + arg_5 . path try : yield finally : arg_5 . path [ : ] = arg_1","id_":254025,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/lint.py#L1396-L1416","negative":"Overrides Django's default to_python to allow correct\r\n translation to instance."} {"query":"ensure that a SavedSearch object exists","positive":"def Func ( arg_0 ) : def wrapper ( arg_1 , * arg_2 , ** arg_3 ) : if not arg_1 . savedsearch : arg_1 . 
savedsearch = SavedSearch ( arg_1 ) return arg_0 ( arg_1 , * arg_2 , ** arg_3 ) return wrapper","id_":254026,"task_name":"https:\/\/github.com\/urschrei\/pyzotero\/blob\/b378966b30146a952f7953c23202fb5a1ddf81d9\/pyzotero\/zotero.py#L210-L218","negative":"Sets general options used by plugins and streams originating\n from this session object.\n\n :param key: key of the option\n :param value: value to set the option to\n\n\n **Available options**:\n\n ======================== =========================================\n hds-live-edge ( float) Specify the time live HDS\n streams will start from the edge of\n stream, default: ``10.0``\n\n hds-segment-attempts (int) How many attempts should be done\n to download each HDS segment, default: ``3``\n\n hds-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hds-segment-timeout (float) HDS segment connect and read\n timeout, default: ``10.0``\n\n hds-timeout (float) Timeout for reading data from\n HDS streams, default: ``60.0``\n\n hls-live-edge (int) How many segments from the end\n to start live streams on, default: ``3``\n\n hls-segment-attempts (int) How many attempts should be done\n to download each HLS segment, default: ``3``\n\n hls-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hls-segment-timeout (float) HLS segment connect and read\n timeout, default: ``10.0``\n\n hls-timeout (float) Timeout for reading data from\n HLS streams, default: ``60.0``\n\n http-proxy (str) Specify a HTTP proxy to use for\n all HTTP requests\n\n https-proxy (str) Specify a HTTPS proxy to use for\n all HTTPS requests\n\n http-cookies (dict or str) A dict or a semi-colon (;)\n delimited str of cookies to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-headers (dict or str) A dict or semi-colon (;)\n delimited str of headers to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-query-params (dict or str) A dict or a ampersand (&)\n delimited string of query parameters to\n add to each HTTP request,\n e.g. ``foo=bar&baz=qux``\n\n http-trust-env (bool) Trust HTTP settings set in the\n environment, such as environment\n variables (HTTP_PROXY, etc) and\n ~\/.netrc authentication\n\n http-ssl-verify (bool) Verify SSL certificates,\n default: ``True``\n\n http-ssl-cert (str or tuple) SSL certificate to use,\n can be either a .pem file (str) or a\n .crt\/.key pair (tuple)\n\n http-timeout (float) General timeout used by all HTTP\n requests except the ones covered by\n other options, default: ``20.0``\n\n http-stream-timeout (float) Timeout for reading data from\n HTTP streams, default: ``60.0``\n\n subprocess-errorlog (bool) Log errors from subprocesses to\n a file located in the temp directory\n\n subprocess-errorlog-path (str) Log errors from subprocesses to\n a specific file\n\n ringbuffer-size (int) The size of the internal ring\n buffer used by most stream types,\n default: ``16777216`` (16MB)\n\n rtmp-proxy (str) Specify a proxy (SOCKS) that RTMP\n streams will use\n\n rtmp-rtmpdump (str) Specify the location of the\n rtmpdump executable used by RTMP streams,\n e.g. ``\/usr\/local\/bin\/rtmpdump``\n\n rtmp-timeout (float) Timeout for reading data from\n RTMP streams, default: ``60.0``\n\n ffmpeg-ffmpeg (str) Specify the location of the\n ffmpeg executable use by Muxing streams\n e.g. 
``\/usr\/local\/bin\/ffmpeg``\n\n ffmpeg-verbose (bool) Log stderr from ffmpeg to the\n console\n\n ffmpeg-verbose-path (str) Specify the location of the\n ffmpeg stderr log file\n\n ffmpeg-video-transcode (str) The codec to use if transcoding\n video when muxing with ffmpeg\n e.g. ``h264``\n\n ffmpeg-audio-transcode (str) The codec to use if transcoding\n audio when muxing with ffmpeg\n e.g. ``aac``\n\n stream-segment-attempts (int) How many attempts should be done\n to download each segment, default: ``3``.\n General option used by streams not\n covered by other options.\n\n stream-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``.\n General option used by streams not\n covered by other options.\n\n stream-segment-timeout (float) Segment connect and read\n timeout, default: ``10.0``.\n General option used by streams not\n covered by other options.\n\n stream-timeout (float) Timeout for reading data from\n stream, default: ``60.0``.\n General option used by streams not\n covered by other options.\n\n locale (str) Locale setting, in the RFC 1766 format\n eg. en_US or es_ES\n default: ``system locale``.\n\n user-input-requester (UserInputRequester) instance of UserInputRequester\n to collect input from the user at runtime. Must be\n set before the plugins are loaded.\n default: ``UserInputRequester``.\n ======================== ========================================="} {"query":"Provides an overview of the duplicate credentials .","positive":"def Func ( ) : arg_0 = Credential . search ( ) arg_0 . aggs . bucket ( 'password_count' , 'terms' , field = 'secret' , order = { '_count' : 'desc' } , size = 20 ) . metric ( 'username_count' , 'cardinality' , field = 'username' ) . metric ( 'host_count' , 'cardinality' , field = 'host_ip' ) . metric ( 'top_hits' , 'top_hits' , docvalue_fields = [ 'username' ] , size = 100 ) arg_1 = arg_0 . execute ( ) print_line ( \"{0:65} {1:5} {2:5} {3:5} {4}\" . format ( \"Secret\" , \"Count\" , \"Hosts\" , \"Users\" , \"Usernames\" ) ) print_line ( \"-\" * 100 ) for arg_2 in arg_1 . aggregations . password_count . buckets : arg_3 = [ ] for arg_4 in arg_2 . top_hits : arg_3 . append ( arg_4 . username [ 0 ] ) arg_3 = list ( set ( arg_3 ) ) print_line ( \"{0:65} {1:5} {2:5} {3:5} {4}\" . format ( arg_2 . key , arg_2 . doc_count , arg_2 . host_count . value , arg_2 . username_count . value , arg_3 ) )","id_":254027,"task_name":"https:\/\/github.com\/mwgielen\/jackal\/blob\/7fe62732eb5194b7246215d5277fb37c398097bf\/jackal\/scripts\/credentials.py#L23-L40","negative":"Logs all elements of this streamlet. This returns nothing"} {"query":"Perform the stringprep mapping step of SASLprep . Operates in - place on a list of unicode characters provided in chars .","positive":"def Func ( arg_0 ) : arg_1 = 0 while arg_1 < len ( arg_0 ) : arg_2 = arg_0 [ arg_1 ] if stringprep . in_table_c12 ( arg_2 ) : arg_0 [ arg_1 ] = \"\\u0020\" elif stringprep . in_table_b1 ( arg_2 ) : del arg_0 [ arg_1 ] continue arg_1 += 1","id_":254028,"task_name":"https:\/\/github.com\/horazont\/aiosasl\/blob\/af58bf30f688757e58af6e87892d35a8ce798482\/aiosasl\/stringprep.py#L130-L143","negative":"Send a Gauge metric with the specified value"} {"query":"Re - enable the FTDI drivers for the current platform .","positive":"def Func ( ) : logger . debug ( 'Enabling FTDI driver.' ) if sys . platform == 'darwin' : logger . debug ( 'Detected Mac OSX' ) _check_running_as_root ( ) subprocess . 
check_call ( 'kextload -b com.apple.driver.AppleUSBFTDI' , shell = True ) subprocess . check_call ( 'kextload \/System\/Library\/Extensions\/FTDIUSBSerialDriver.kext' , shell = True ) elif sys . platform . startswith ( 'linux' ) : logger . debug ( 'Detected Linux' ) _check_running_as_root ( ) subprocess . check_call ( 'modprobe -q ftdi_sio' , shell = True ) subprocess . check_call ( 'modprobe -q usbserial' , shell = True )","id_":254029,"task_name":"https:\/\/github.com\/adafruit\/Adafruit_Python_GPIO\/blob\/a92a23d6b5869663b2bc1ccf78bb11585076a9c4\/Adafruit_GPIO\/FT232H.py#L72-L86","negative":"Rename this conversation.\n\n Hangouts only officially supports renaming group conversations, so\n custom names for one-to-one conversations may or may not appear in all\n first party clients.\n\n Args:\n name (str): New name.\n\n Raises:\n .NetworkError: If conversation cannot be renamed."} {"query":"Add a message to the Django messages store indicating that we failed to retrieve price information about an item .","positive":"def Func ( arg_0 , arg_1 ) : messages . warning ( arg_0 , _ ( '{strong_start}We could not gather price information for {em_start}{item}{em_end}.{strong_end} ' '{span_start}If you continue to have these issues, please contact ' '{link_start}{platform_name} support{link_end}.{span_end}' ) . format ( arg_1 = arg_1 , em_start = '<em>' , em_end = '<\/em>' , link_start = '<a href=\"{support_link}\">' . format ( support_link = get_configuration_value ( 'ENTERPRISE_SUPPORT_URL' , settings . ENTERPRISE_SUPPORT_URL ) , ) , platform_name = get_configuration_value ( 'PLATFORM_NAME' , settings . PLATFORM_NAME ) , link_end = '<\/a>' , span_start = '<span>' , span_end = '<\/span>' , strong_start = '<strong>' , strong_end = '<\/strong>' , ) )","id_":254030,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/messages.py#L47-L74","negative":"Process current member with 'op' operation."} {"query":"Returns the state of a TaskInstance at the command line . >>> airflow task_state tutorial sleep 2015 - 01 - 01 success","positive":"def Func ( arg_0 ) : arg_1 = get_dag ( arg_0 ) arg_2 = arg_1 . get_task ( task_id = arg_0 . task_id ) arg_3 = TaskInstance ( arg_2 , arg_0 . execution_date ) print ( arg_3 . current_state ( ) )","id_":254031,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/bin\/cli.py#L554-L563","negative":"Unregister an extension code. For testing only."} {"query":"Check if line contains a redundant override or final virt - specifier .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_1 . elided [ arg_2 ] arg_5 = arg_4 . rfind ( ')' ) if arg_5 >= 0 : arg_6 = arg_4 [ arg_5 : ] else : if arg_2 > 1 and arg_1 . elided [ arg_2 - 1 ] . rfind ( ')' ) >= 0 : arg_6 = arg_4 else : return if Search ( r'\\boverride\\b' , arg_6 ) and Search ( r'\\bfinal\\b' , arg_6 ) : arg_3 ( arg_0 , arg_2 , 'readability\/inheritance' , 4 , ( '\"override\" is redundant since function is ' 'already declared as \"final\"' ) )","id_":254032,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/third_party\/python\/cpplint\/cpplint.py#L5837-L5863","negative":"Get the current position in the music in seconds"} {"query":"Render a given resource .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = getattr ( arg_0 , 'render_' + nativeString ( arg_1 . method ) , None ) if arg_2 is None : try : arg_3 = arg_0 . 
allowedMethods except AttributeError : arg_3 = _computeAllowedMethods ( arg_0 ) raise UnsupportedMethod ( arg_3 ) return arg_2 ( arg_1 )","id_":254033,"task_name":"https:\/\/github.com\/jonathanj\/txspinneret\/blob\/717008a2c313698984a23e3f3fc62ea3675ed02d\/txspinneret\/resource.py#L27-L40","negative":"Unregister an extension code. For testing only."} {"query":"Handle registration form received .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . lock . acquire ( ) try : arg_0 . __register = Register ( arg_1 . get_query ( ) ) arg_0 . registration_callback ( arg_1 , arg_0 . __register . get_form ( ) ) finally : arg_0 . lock . release ( )","id_":254034,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/ext\/legacyauth.py#L399-L417","negative":"Adds all parameters to `traj`"} {"query":"Find the port chain a device is plugged on .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . bus arg_2 = arg_0 . address for arg_3 in os . listdir ( USB_SYS_PREFIX ) : arg_4 = re . match ( USB_PORTS_STR + '$' , arg_3 ) if arg_4 : arg_5 = readattr ( arg_3 , 'busnum' ) if arg_5 : arg_6 = float ( arg_5 ) else : arg_6 = None arg_7 = readattr ( arg_3 , 'devnum' ) if arg_7 : arg_8 = float ( arg_7 ) else : arg_8 = None if arg_6 == arg_1 and arg_8 == arg_2 : return str ( arg_4 . groups ( ) [ 1 ] )","id_":254035,"task_name":"https:\/\/github.com\/padelt\/temper-python\/blob\/cbdbace7e6755b1d91a2603ab63c9cb778078f79\/temperusb\/temper.py#L49-L75","negative":"Mark all message instances for a user as read.\n\n :param user: user instance for the recipient"} {"query":"computes the ideal conversion ratio for the given alphabet . A ratio is considered ideal when the number of bits in one output encoding chunk that don t add up to one input encoding chunk is minimal .","positive":"def Func ( arg_0 ) : arg_1 , arg_2 = min ( [ ( i , i * 8 \/ math . log ( arg_0 , 2 ) ) for i in range ( 1 , 7 ) ] , key = lambda k : k [ 1 ] % 1 ) return arg_1 , int ( arg_2 )","id_":254036,"task_name":"https:\/\/github.com\/thusoy\/pwm\/blob\/fff7d755c34f3a7235a8bf217ffa2ff5aed4926f\/pwm\/encoding.py#L22-L33","negative":"Wrapper method that calls the appropriate main updating methods of\n the inspection.\n\n It is meant to be used inside a loop (like while), so that it can\n continuously update the class attributes from the trace and log files.\n It already implements checks to parse these files only when they\n change, and they ignore entries that have been previously processes."} {"query":"Invert all instructions .","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 in enumerate ( arg_0 . instructions ) : arg_0 . instructions [ arg_1 ] = arg_2 . Func ( ) return arg_0","id_":254037,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/circuit\/instructionset.py#L45-L49","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Returns list of json compatible states of the RichMessage instance nested controls .","positive":"def Func ( arg_0 ) -> list : arg_1 = [ control . Func ( ) for control in arg_0 . controls ] return arg_1","id_":254038,"task_name":"https:\/\/github.com\/deepmipt\/DeepPavlov\/blob\/f3e4a69a3764d25d2f5bad4f1f1aebc872b00f9c\/deeppavlov\/core\/agent\/rich_content.py#L115-L124","negative":"Get base-64 encoded data as a string for the given image. 
Fallback to return\n fallback_image_file if cannot get the image data or img is None.\n @param {Image} img - The PIL Image to get src data for\n @param {String} fallback_image_file - The filename of the image file,\n to be used when image data capture fails\n @return {String} The base-64 encoded image data string, or path to the file\n itself if not supported."} {"query":"Reads the contents of the config file","positive":"def Func ( arg_0 ) -> str : arg_1 = None arg_2 = io . StringIO ( \"\" ) arg_0 . config . write ( arg_2 ) arg_2 . seek ( 0 ) arg_1 = arg_2 . read ( ) arg_2 . close ( ) return arg_1","id_":254039,"task_name":"https:\/\/github.com\/MisterY\/price-database\/blob\/b4fd366b7763891c690fe3000b8840e656da023e\/pricedb\/config.py#L100-L113","negative":"Fetch the events pages of a given group."} {"query":"List all course roles available to an account for the passed Canvas account ID including course roles inherited from parent accounts .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ ] arg_3 = { \"show_inherited\" : \"1\" } for arg_4 in arg_0 . get_roles_in_account ( arg_1 , arg_3 ) : if arg_4 . base_role_type != \"AccountMembership\" : arg_2 . append ( arg_4 ) return arg_2","id_":254040,"task_name":"https:\/\/github.com\/uw-it-aca\/uw-restclients-canvas\/blob\/9845faf33d49a8f06908efc22640c001116d6ea2\/uw_canvas\/roles.py#L29-L39","negative":"Replace this observation's data with a fresh container.\n\n Returns\n -------\n annotation_data : SortedKeyList\n The original annotation data container"} {"query":"Folds Stokes I noise diode data and integrates along coarse channels","positive":"def Func ( arg_0 , arg_1 , arg_2 = False , ** arg_3 ) : arg_4 = Waterfall ( arg_0 , max_load = 150 ) arg_5 = arg_4 . data if arg_2 == False and arg_5 . shape [ 1 ] > 1 : arg_5 = arg_5 [ : , 0 , : ] + arg_5 [ : , 1 , : ] arg_5 = np . expand_dims ( arg_5 , axis = 1 ) if arg_2 == True : arg_5 = arg_5 [ : , 0 , : ] arg_5 = np . expand_dims ( arg_5 , axis = 1 ) arg_6 = arg_4 . header [ 'tsamp' ] arg_7 , arg_8 = foldcal ( arg_5 , arg_6 , ** arg_3 ) arg_9 = arg_4 . populate_freqs ( ) arg_10 = integrate_chans ( arg_8 , arg_9 , arg_1 ) arg_11 = integrate_chans ( arg_7 , arg_9 , arg_1 ) if np . sum ( arg_10 ) < np . sum ( arg_11 ) : arg_12 = arg_10 arg_10 = arg_11 arg_11 = arg_12 return arg_11 , arg_10","id_":254041,"task_name":"https:\/\/github.com\/UCBerkeleySETI\/blimpy\/blob\/b8822d3e3e911944370d84371a91fa0c29e9772e\/blimpy\/calib_utils\/fluxcal.py#L92-L137","negative":"Gets a list of snapshots for a cluster\n\n :param cluster_identifier: unique identifier of a cluster\n :type cluster_identifier: str"} {"query":"Replace multiple values in a string","positive":"def Func ( arg_0 , Func ) : for arg_2 in Func : arg_0 = arg_0 . replace ( * arg_2 ) return arg_0","id_":254042,"task_name":"https:\/\/github.com\/dbcli\/cli_helpers\/blob\/3ebd891ac0c02bad061182dbcb54a47fb21980ae\/cli_helpers\/utils.py#L64-L68","negative":"Get a single publication."} {"query":"Search vndb . org for a term and return matching results from type .","positive":"async def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = \"\" if arg_1 not in [ 'v' , 'r' , 'p' , 's' , 'c' , 'g' , 'i' , 'u' ] : raise VNDBBadStype ( arg_1 ) else : if arg_1 in [ 'v' , 'p' , 's' , 'c' , 'u' ] : arg_3 = '\/{}\/all' . format ( arg_1 ) elif arg_1 in [ 'g' , 'i' ] : arg_3 = '\/{}\/list' . format ( arg_1 ) elif arg_1 == 'r' : arg_3 = '\/r' async with arg_0 . session . get ( arg_0 . base_url + \"{}\" . format ( arg_3 ) , params = { \"q\" : arg_2 } , headers = arg_0 . 
headers ) as response : if response . status == 404 : raise aiohttp . HttpBadRequest ( \"VN Not Found\" ) elif 'q=' not in response . url : raise VNDBOneResult ( arg_2 , response . url . rsplit ( '\/' , 1 ) [ 1 ] ) arg_4 = await response . text ( ) if 'No Results' in arg_4 : raise VNDBNoResults ( arg_2 ) arg_5 = BeautifulSoup ( arg_4 , 'lxml' ) arg_6 = await arg_0 . parse_search ( arg_1 , arg_5 ) if arg_6 == [ ] : raise VNDBNoResults ( arg_2 ) return arg_6","id_":254043,"task_name":"https:\/\/github.com\/ccubed\/Shosetsu\/blob\/eba01c058100ec8806129b11a2859f3126a1b101\/Shosetsu\/VNDB.py#L17-L62","negative":"REST Conference Mute helper"} {"query":"Get the action description .","positive":"def Func ( arg_0 ) : arg_1 = { arg_0 . name : { 'href' : arg_0 . href_prefix + arg_0 . href , 'timeRequested' : arg_0 . time_requested , 'status' : arg_0 . status , } , } if arg_0 . input is not None : arg_1 [ arg_0 . name ] [ 'input' ] = arg_0 . input if arg_0 . time_completed is not None : arg_1 [ arg_0 . name ] [ 'timeCompleted' ] = arg_0 . time_completed return arg_1","id_":254044,"task_name":"https:\/\/github.com\/mozilla-iot\/webthing-python\/blob\/65d467c89ed79d0bbc42b8b3c8f9e5a320edd237\/webthing\/action.py#L28-L48","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Estimates the accuracy of the predictions from the constructed feature .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , ** arg_4 ) : if arg_0 . feature_map is None : raise ValueError ( 'The MDR model must be fit before Func can be called.' ) arg_5 = arg_0 . predict ( arg_1 ) if arg_3 is None : return accuracy_Func ( arg_2 , arg_5 ) else : return arg_3 ( arg_2 , arg_5 , ** arg_4 )","id_":254045,"task_name":"https:\/\/github.com\/EpistasisLab\/scikit-mdr\/blob\/768565deb10467d04a960d27e000ab38b7aa8a62\/mdr\/mdr.py#L210-L234","negative":"Query for null or blank field."} {"query":"Destroy nDestroy synapses on the specified segment but don t destroy synapses to the excludeCells .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_6 = sorted ( ( arg_10 for arg_10 in arg_1 . synapsesForSegment ( arg_3 ) if arg_10 . presynapticCell not in arg_5 ) , key = lambda s : s . _ordinal ) for arg_7 in xrange ( arg_4 ) : if len ( arg_6 ) == 0 : break arg_8 = None arg_9 = float ( \"inf\" ) for arg_10 in arg_6 : if arg_10 . permanence < arg_9 - EPSILON : arg_8 = arg_10 arg_9 = arg_10 . permanence arg_1 . destroySynapse ( arg_8 ) arg_6 . remove ( arg_8 )","id_":254046,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/temporal_memory.py#L696-L722","negative":"Load a configuration module and return a Config"} {"query":"Returns head coach data by game .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . _year_info_pq ( arg_1 , 'Coach' ) . text ( ) arg_3 = r'(\\S+?) \\((\\d+)-(\\d+)-(\\d+)\\)' arg_4 = [ ] arg_5 = True while arg_5 : arg_5 = re . search ( arg_3 , arg_2 ) arg_6 , arg_7 , arg_8 , arg_9 = arg_5 . groups ( ) arg_10 = arg_5 . end ( 4 ) + 1 arg_11 = arg_11 [ arg_10 : ] arg_12 = int ( arg_7 ) + int ( arg_8 ) + int ( arg_9 ) arg_4 . append ( ( arg_6 , arg_12 ) ) arg_13 = [ cID for cID , games in arg_4 for _ in range ( games ) ] return np . 
array ( arg_13 [ : : - 1 ] )","id_":254047,"task_name":"https:\/\/github.com\/mdgoldberg\/sportsref\/blob\/09f11ac856a23c96d666d1d510bb35d6f050b5c3\/sportsref\/nfl\/teams.py#L189-L212","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Decorator for methods that need many retries because of intermittent failures such as AWS calls via boto which has a non - back - off retry .","positive":"def Func ( arg_0 ) : def retry_func ( arg_1 , * arg_2 , ** arg_3 ) : arg_4 = 1 while True : try : return arg_0 ( arg_1 , * arg_2 , ** arg_3 ) break except OSError as exc : logger . error ( 'assuming OSError unrecoverable' ) raise except FailedExtraction as exc : logger . error ( 'FAIL(%d)' , arg_4 , exc_info = True ) raise except FailedVerification as exc : logger . warn ( 'FAIL(%d)' , arg_4 , exc_info = True ) if arg_4 >= arg_1 . config [ 'tries' ] : if arg_1 . config . get ( 'suppress_failures' ) : logger . warn ( 'suppressing failure and breaking out of this loop; data may be corrupt, downstream will have to cope' ) break else : raise except Exception as exc : logger . warn ( 'FAIL(%d): having I\/O trouble with S3' , arg_4 , exc_info = True ) if arg_4 >= arg_1 . config [ 'tries' ] : raise logger . warn ( 'RETRYING (%d left)' , arg_1 . config [ 'tries' ] - arg_4 ) time . sleep ( 3 * arg_4 ) arg_4 += 1 return retry_func","id_":254048,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/_s3_storage.py#L56-L95","negative":"This returns an array of each sector and performance for the current trading day. Performance is based on each sector ETF.\n\n https:\/\/iexcloud.io\/docs\/api\/#sector-performance\n 8am-5pm ET Mon-Fri\n\n Args:\n token (string); Access token\n version (string); API version\n\n Returns:\n DataFrame: result"} {"query":"Organization data validation","positive":"def Func ( arg_0 ) : if arg_0 is None : return False if 'id' in arg_0 and not arg_0 . get ( 'id' ) : return False if 'name' in arg_0 and not arg_0 . get ( 'name' ) : return False return True","id_":254049,"task_name":"https:\/\/github.com\/edx\/edx-organizations\/blob\/51000d5d359d880a6eb3a79345f60744f1982c00\/organizations\/validators.py#L23-L33","negative":"Prepare received data for representation.\n\n Args:\n data (dict): values to represent (ex. 
{'001' : 130})\n number_to_keep (int): number of elements to show individually.\n\n Returns:\n dict: processed data to show."} {"query":"May generate and add a PDFPage separately or use this to generate a default page .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_1 is None : arg_0 . page = PDFPage ( arg_0 . orientation_default , arg_0 . layout_default , arg_0 . margins ) else : arg_0 . page = arg_1 arg_0 . page . _set_index ( len ( arg_0 . pages ) ) arg_0 . pages . append ( arg_0 . page ) arg_2 = arg_0 . font arg_0 . set_font ( font = arg_2 ) arg_0 . session . _reset_colors ( )","id_":254050,"task_name":"https:\/\/github.com\/katerina7479\/pypdflite\/blob\/ac2501f30d6619eae9dea5644717575ca9263d0a\/pypdflite\/pdfdocument.py#L120-L131","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"Read one byte in stream","positive":"def Func ( arg_0 ) -> bool : if arg_0 . read_eof ( ) : return False arg_0 . _stream . incpos ( ) return True","id_":254051,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/parsing\/base.py#L265-L270","negative":"Test whether FILENAME matches PATTERN, including case.\n\n This is a version of fnmatch() which doesn't case-normalize\n its arguments."} {"query":"Get letters from string only .","positive":"def Func ( arg_0 ) : arg_1 = \"\" for arg_2 in arg_0 : if arg_2 . isalpha ( ) : arg_1 += arg_2 return arg_1","id_":254052,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/utils.py#L476-L482","negative":"Accept a publication request at form value 'epub"} {"query":"Returns the column of the cursor in the input buffer excluding the contribution by the prompt or - 1 if there is no such column .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _get_input_buffer_cursor_prompt ( ) if arg_1 is None : return - 1 else : arg_2 = arg_0 . _control . textCursor ( ) return arg_2 . columnNumber ( ) - len ( arg_1 )","id_":254053,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/frontend\/qt\/console\/console_widget.py#L1451-L1460","negative":"Set a decrypted value by key in a giraffez configuration file.\n\n :param str key: The key used to lookup the encrypted value\n :param value: Value to set at the given key, can be any value that is\n YAML serializeable."} {"query":"Apply a specified mutant to the source code","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 , arg_4 = filename_and_mutation_id_from_pk ( int ( arg_0 ) ) update_line_numbers ( arg_3 ) arg_5 = Context ( arg_4 = arg_4 , arg_3 = arg_3 , arg_1 = arg_1 , ) mutate_file ( arg_2 = arg_2 , arg_5 = arg_5 , ) if arg_5 . number_of_performed_mutations == 0 : raise RuntimeError ( 'No mutations performed.' )","id_":254054,"task_name":"https:\/\/github.com\/boxed\/mutmut\/blob\/dd3bbe9aba3168ed21b85fbfe0b654b150239697\/mutmut\/__main__.py#L133-L160","negative":"Launches GBDX workflow.\n\n Args:\n workflow (dict): Dictionary specifying workflow tasks.\n\n Returns:\n Workflow id (str)."} {"query":"Output a link tag to a css stylesheet .","positive":"def Func ( arg_0 ) : if not arg_0 . startswith ( 'http:\/\/' ) and not arg_0 [ : 1 ] == '\/' : arg_0 = settings . STATIC_URL + arg_0 return '<link href=\"{src}\" rel=\"stylesheet\">' . 
format ( src = arg_0 )","id_":254055,"task_name":"https:\/\/github.com\/theduke\/django-baseline\/blob\/7be8b956e53c70b35f34e1783a8fe8f716955afb\/django_baseline\/templatetags\/helpers.py#L72-L81","negative":"Inform the widget about the encoding of the underlying character stream."} {"query":"Extract arguments for model from the environment and return as a tuple that is ready to be passed to the model .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = inspect . getfullargspec ( arg_1 ) if arg_3 . varargs : logger . warning ( \"ABI: A vararg model must be a unary function.\" ) arg_4 = len ( arg_3 . args ) - len ( arg_2 ) if inspect . ismethod ( arg_1 ) : arg_4 -= 1 def resolve_argument ( arg_5 ) : if isinstance ( arg_5 , str ) : return arg_0 . _cpu . read_register ( arg_5 ) else : return arg_0 . _cpu . read_int ( arg_5 ) arg_6 = arg_0 . get_arguments ( ) arg_7 = map ( resolve_argument , arg_6 ) from . . models import isvariadic if isvariadic ( arg_1 ) : arg_8 = arg_2 + ( arg_7 , ) else : arg_8 = arg_2 + tuple ( islice ( arg_7 , arg_4 ) ) return arg_8","id_":254056,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/abstractcpu.py#L301-L339","negative":"write lines, one by one, separated by \\n to device"} {"query":"Extract Packed Floating - Point Values","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_3 = arg_3 . read ( ) arg_1 . write ( Operators . EXTRACT ( arg_2 . read ( ) , arg_3 * 128 , ( arg_3 + 1 ) * 128 ) )","id_":254057,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L5781-L5790","negative":"Make preparations before running Tank"} {"query":"Calculate the concurrence .","positive":"def Func ( arg_0 ) : arg_1 = np . array ( arg_0 ) if arg_1 . ndim == 1 : arg_1 = outer ( arg_0 ) if len ( arg_0 ) != 4 : raise Exception ( \"Concurrence is only defined for more than two qubits\" ) arg_2 = np . fliplr ( np . diag ( [ - 1 , 1 , 1 , - 1 ] ) ) arg_3 = arg_1 . dot ( arg_2 ) . dot ( arg_1 . conj ( ) ) . dot ( arg_2 ) arg_4 = la . eigh ( arg_3 , eigvals_only = True ) arg_4 = np . sqrt ( np . maximum ( arg_4 , 0 ) ) return max ( 0.0 , arg_4 [ - 1 ] - np . sum ( arg_4 [ 0 : - 1 ] ) )","id_":254058,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/tools\/qi\/qi.py#L377-L398","negative":"Call the restful endpoint to merge two RAMON objects into one.\n\n Arguments:\n token (str): The token to inspect\n channel (str): The channel to inspect\n ids (int[]): the list of the IDs to merge\n delete (bool : False): Whether to delete after merging.\n\n Returns:\n json: The ID as returned by ndstore"} {"query":"Discard deposit changes .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_1 = arg_1 or arg_0 . pid with db . session . begin_nested ( ) : before_record_update . send ( current_app . _get_current_object ( ) , arg_3 = arg_0 ) arg_2 , arg_3 = arg_0 . fetch_published ( ) arg_0 . model . json = deepcopy ( arg_3 . model . json ) arg_0 . model . json [ '$schema' ] = arg_0 . build_deposit_schema ( arg_3 ) flag_modified ( arg_0 . model , 'json' ) db . session . merge ( arg_0 . model ) after_record_update . send ( current_app . _get_current_object ( ) , arg_3 = arg_0 ) return arg_0 . __class__ ( arg_0 . model . json , arg_4 = arg_0 . 
model )","id_":254059,"task_name":"https:\/\/github.com\/inveniosoftware\/invenio-deposit\/blob\/f243ea1d01ab0a3bc92ade3262d1abdd2bc32447\/invenio_deposit\/api.py#L414-L453","negative":"DRF view to list all catalogs.\n\n Arguments:\n request (HttpRequest): Current request\n\n Returns:\n (Response): DRF response object containing course catalogs."} {"query":"Get the frequencies for FFT bins","positive":"def Func ( arg_0 , arg_1 = 22050 , ** arg_2 ) : arg_3 = 2 * ( arg_0 - 1 ) arg_4 = core . fft_frequencies ( arg_1 = arg_1 , arg_3 = arg_3 ) arg_5 = arg_4 [ - 1 ] arg_4 -= 0.5 * ( arg_4 [ 1 ] - arg_4 [ 0 ] ) arg_4 = np . append ( np . maximum ( 0 , arg_4 ) , [ arg_5 ] ) return arg_4","id_":254060,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/display.py#L923-L932","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Renders a javascript snippet suitable for use as a mapbox - gl heatmap paint entry","positive":"def Func ( arg_0 ) : arg_1 = { 'heatmap-radius' : VectorStyle . get_style_value ( arg_0 . radius ) , 'heatmap-opacity' : VectorStyle . get_style_value ( arg_0 . opacity ) , 'heatmap-color' : VectorStyle . get_style_value ( arg_0 . color ) , 'heatmap-intensity' : VectorStyle . get_style_value ( arg_0 . intensity ) , 'heatmap-weight' : VectorStyle . get_style_value ( arg_0 . weight ) } return arg_1","id_":254061,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/vector_styles.py#L278-L293","negative":"Load the configuration.\n\n :param under_test:\n Tell us if we only have to load the configuration file (True)\n or load the configuration file and initate the output directory\n if it does not exist (False).\n :type under_test: bool\n\n :param custom:\n A dict with the configuration index (from .PyFunceble.yaml) to update.\n :type custom: dict\n\n .. warning::\n If :code:`custom` is given, the given :code:`dict` overwrite\n the last value of the given configuration indexes."} {"query":"Returns the SQL literal of the cell as a string .","positive":"def Func ( arg_0 , arg_1 = None ) : if arg_0 is None : return None if isinstance ( arg_0 , datetime ) : return arg_0 . isoformat ( ) return str ( arg_0 )","id_":254062,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/hooks\/dbapi_hook.py#L256-L272","negative":"Read the file and perform any transforms to get a loaded image"} {"query":"Sets the document data license . Raises value error if malformed value CardinalityError if already defined .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if not arg_0 . doc_data_lics_set : arg_0 . doc_data_lics_set = True if validations . validate_data_lics ( arg_2 ) : arg_1 . data_license = document . License . from_identifier ( arg_2 ) return True else : raise SPDXValueError ( 'Document::DataLicense' ) else : raise CardinalityError ( 'Document::DataLicense' )","id_":254063,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/tagvaluebuilders.py#L92-L105","negative":"Stop the timer\n\n Returns:\n The time the timer was stopped"} {"query":"Given a spec for an app returns the value of the build field for docker - compose . If the path is relative it is expanded and added to the path of the app s repo .","positive":"def Func ( arg_0 ) : if os . path . isabs ( arg_0 [ 'build' ] ) : return arg_0 [ 'build' ] return os . 
path . join ( Repo ( arg_0 [ 'repo' ] ) . local_path , arg_0 [ 'build' ] )","id_":254064,"task_name":"https:\/\/github.com\/gamechanger\/dusty\/blob\/dc12de90bb6945023d6f43a8071e984313a1d984\/dusty\/compiler\/compose\/__init__.py#L70-L75","negative":"Cycles through notifications with latest results from data feeds."} {"query":"Unpack a directory using the same interface as for archives","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 ) : if not arg_11 . path . isdir ( arg_0 ) : raise UnrecognizedFormat ( \"%s is not a directory\" % ( arg_0 , ) ) arg_4 = { arg_0 : ( '' , arg_1 ) } for arg_5 , arg_6 , arg_7 in arg_11 . walk ( arg_0 ) : arg_8 , arg_9 = arg_4 [ arg_5 ] for arg_10 in arg_6 : arg_4 [ arg_11 . path . join ( arg_5 , arg_10 ) ] = arg_8 + arg_10 + '\/' , arg_11 . path . join ( arg_9 , arg_10 ) for arg_14 in arg_7 : arg_15 = arg_8 + arg_14 arg_16 = arg_11 . path . join ( arg_9 , arg_14 ) arg_16 = arg_2 ( arg_8 + arg_14 , arg_16 ) if not arg_16 : continue ensure_directory ( arg_16 ) arg_14 = arg_11 . path . join ( arg_5 , arg_14 ) shutil . copyfile ( arg_14 , arg_16 ) shutil . copystat ( arg_14 , arg_16 )","id_":254065,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/distribute-0.6.31-py2.7.egg\/setuptools\/archive_util.py#L83-L105","negative":"Get an access token from the provider token URI.\n\n :param code: Authorization code.\n :type code: str\n :return: Dict containing access token, refresh token, etc.\n :rtype: dict"} {"query":"Build a mongo query across multiple cases . Translate query options from a form into a complete mongo query dictionary .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = 'snv' , arg_3 = [ 'clinical' ] ) : arg_1 = arg_1 or { } arg_4 = { } LOG . debug ( \"Building a mongo query for %s\" % arg_1 ) if arg_1 . get ( 'hgnc_symbols' ) : arg_4 [ 'hgnc_symbols' ] = { '$in' : arg_1 [ 'hgnc_symbols' ] } arg_4 [ 'variant_type' ] = { '$in' : arg_3 } arg_4 [ 'category' ] = arg_2 arg_5 = arg_1 . get ( 'rank_score' ) or 15 arg_4 [ 'rank_score' ] = { '$gte' : arg_5 } LOG . debug ( \"Querying %s\" % arg_4 ) return arg_4","id_":254066,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/adapter\/mongo\/query.py#L11-L50","negative":"Set a smoothing Gaussian kernel given its FWHM in mm."} {"query":"Calculate the mol mass of the compound","positive":"def Func ( arg_0 ) : try : return sum ( [ arg_2 * elements_and_molecular_Funcs [ arg_1 ] for arg_1 , arg_2 in arg_0 . elements . items ( ) ] ) except KeyError as e : warn ( \"The element %s does not appear in the periodic table\" % e )","id_":254067,"task_name":"https:\/\/github.com\/opencobra\/cobrapy\/blob\/9d1987cdb3a395cf4125a3439c3b002ff2be2009\/cobra\/core\/formula.py#L83-L95","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"_rem_id_from_keys - Remove primary key from table internal","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_2 is None : arg_2 = arg_0 . _get_connection ( ) arg_2 . srem ( arg_0 . _get_ids_key ( ) , arg_1 )","id_":254068,"task_name":"https:\/\/github.com\/kata198\/indexedredis\/blob\/f9c85adcf5218dac25acb06eedc63fc2950816fa\/IndexedRedis\/__init__.py#L1261-L1268","negative":"Returns a list of two actions per gcs bucket to mount."} {"query":"terminates the underlying x3270 subprocess . 
Once called this Emulator instance must no longer be used .","positive":"def Func ( arg_0 ) : if not arg_0 . is_Funcd : log . debug ( \"terminal client Funcd\" ) try : arg_0 . exec_command ( b\"Quit\" ) except BrokenPipeError : pass except socket . error as e : if e . errno != errno . ECONNRESET : raise arg_0 . app . close ( ) arg_0 . is_Funcd = True","id_":254069,"task_name":"https:\/\/github.com\/py3270\/py3270\/blob\/c3e91b519f3a18b4be4799a00a96341957a8831f\/py3270\/__init__.py#L315-L335","negative":"Provide a label for a list of labels.\n\n The items in the list of labels are assumed to be either instances of\n :class:`Label`, or dicts with at least the key `label` in them. These will\n be passed to the :func:`dict_to_label` function.\n\n This method tries to find a label by looking if there's\n a pref label for the specified language. If there's no pref label,\n it looks for an alt label. It disregards hidden labels.\n\n While matching languages, preference will be given to exact matches. But,\n if no exact match is present, an inexact match will be attempted. This might\n be because a label in language `nl-BE` is being requested, but only `nl` or\n even `nl-NL` is present. Similarly, when requesting `nl`, a label with\n language `nl-NL` or even `nl-Latn-NL` will also be considered,\n providing no label is present that has an exact match with the\n requested language.\n\n If language 'any' was specified, all labels will be considered,\n regardless of language.\n\n To find a label without a specified language, pass `None` as language.\n\n If a language or None was specified, and no label could be found, this\n method will automatically try to find a label in some other language.\n\n Finally, if no label could be found, None is returned.\n\n :param string language: The preferred language to receive the label in. This\n should be a valid IANA language tag.\n :param boolean sortLabel: Should sortLabels be considered or not? If True,\n sortLabels will be preferred over prefLabels. Bear in mind that these\n are still language dependent. So, it's possible to have a different\n sortLabel per language.\n :rtype: A :class:`Label` or `None` if no label could be found."} {"query":"Create a node for spdx . file .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = URIRef ( 'http:\/\/www.spdx.org\/files#{id}' . format ( id = str ( arg_1 . spdx_id ) ) ) arg_3 = ( arg_2 , RDF . type , arg_0 . spdx_namespace . File ) arg_0 . graph . add ( arg_3 ) arg_4 = ( arg_2 , arg_0 . spdx_namespace . fileName , Literal ( arg_1 . name ) ) arg_0 . graph . add ( arg_4 ) if arg_1 . has_optional_field ( 'comment' ) : arg_5 = ( arg_2 , RDFS . comment , Literal ( arg_1 . comment ) ) arg_0 . graph . add ( arg_5 ) if arg_1 . has_optional_field ( 'type' ) : arg_6 = arg_0 . spdx_namespace [ arg_0 . FILE_TYPES [ arg_1 . type ] ] arg_7 = ( arg_2 , arg_0 . spdx_namespace . fileType , arg_6 ) arg_0 . graph . add ( arg_7 ) arg_0 . graph . add ( ( arg_2 , arg_0 . spdx_namespace . checksum , arg_0 . create_checksum_node ( arg_1 . chk_sum ) ) ) arg_8 = arg_0 . license_or_special ( arg_1 . conc_lics ) arg_9 = ( arg_2 , arg_0 . spdx_namespace . licenseConcluded , arg_8 ) arg_0 . graph . add ( arg_9 ) arg_10 = map ( arg_0 . license_or_special , arg_1 . licenses_in_file ) for arg_11 in arg_10 : arg_12 = ( arg_2 , arg_0 . spdx_namespace . licenseInfoInFile , arg_11 ) arg_0 . graph . add ( arg_12 ) if arg_1 . has_optional_field ( 'license_comment' ) : arg_5 = ( arg_2 , arg_0 . spdx_namespace . licenseComments , Literal ( arg_1 . 
license_comment ) ) arg_0 . graph . add ( arg_5 ) arg_13 = arg_0 . to_special_value ( arg_1 . copyright ) arg_14 = ( arg_2 , arg_0 . spdx_namespace . copyrightText , arg_13 ) arg_0 . graph . add ( arg_14 ) if arg_1 . has_optional_field ( 'notice' ) : arg_15 = ( arg_2 , arg_0 . spdx_namespace . noticeText , arg_1 . notice ) arg_0 . graph . add ( arg_15 ) arg_16 = map ( lambda c : Literal ( c ) , arg_1 . contributors ) arg_17 = [ ( arg_2 , arg_0 . spdx_namespace . fileContributor , node ) for node in arg_16 ] for arg_12 in arg_17 : arg_0 . graph . add ( arg_12 ) return arg_2","id_":254070,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/writers\/rdf.py#L209-L258","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Set s the package s description . Raises CardinalityError if description already set . Raises OrderError if no package previously defined .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . assert_package_exists ( ) if not arg_0 . package_desc_set : arg_0 . package_desc_set = True arg_1 . package . description = arg_2 else : raise CardinalityError ( 'Package::Description' )","id_":254071,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/rdfbuilders.py#L275-L285","negative":"Replace target with replacement"} {"query":"Returns the number of keys in the current database","positive":"def Func ( arg_0 ) : arg_1 = 0 for arg_2 , arg_3 in iteritems ( arg_0 . redises ) : if arg_2 . find ( '_slave' ) == - 1 : continue arg_1 += arg_3 . dbsize ( ) return arg_1","id_":254072,"task_name":"https:\/\/github.com\/salimane\/rediscluster-py\/blob\/4fe4d928cd6fe3e7564f7362e3996898bda5a285\/rediscluster\/cluster_client.py#L497-L507","negative":"Run the actual simulation."} {"query":"Derivative of the covariance matrix over the parameters of L .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . L arg_0 . _grad_Lu [ : ] = 0 for arg_3 in range ( len ( arg_0 . _tril1 [ 0 ] ) ) : arg_4 = arg_0 . _tril1 [ 0 ] [ arg_3 ] arg_5 = arg_0 . _tril1 [ 1 ] [ arg_3 ] arg_0 . _grad_Lu [ arg_4 , : , arg_3 ] = arg_1 [ : , arg_5 ] arg_0 . _grad_Lu [ : , arg_4 , arg_3 ] += arg_1 [ : , arg_5 ] arg_6 = len ( arg_0 . _tril1 [ 0 ] ) for arg_3 in range ( len ( arg_0 . _diag [ 0 ] ) ) : arg_4 = arg_0 . _diag [ 0 ] [ arg_3 ] arg_5 = arg_0 . _diag [ 1 ] [ arg_3 ] arg_0 . _grad_Lu [ arg_4 , : , arg_6 + arg_3 ] = arg_1 [ arg_4 , arg_5 ] * arg_1 [ : , arg_5 ] arg_0 . _grad_Lu [ : , arg_4 , arg_6 + arg_3 ] += arg_1 [ arg_4 , arg_5 ] * arg_1 [ : , arg_5 ] return { \"Lu\" : arg_0 . _grad_Lu }","id_":254073,"task_name":"https:\/\/github.com\/limix\/glimix-core\/blob\/cddd0994591d100499cc41c1f480ddd575e7a980\/glimix_core\/cov\/_free.py#L203-L228","negative":"Make a call to the meter via JSON RPC"} {"query":"Constructs the Continuous MDR feature map from the provided training data .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . feature_map = defaultdict ( lambda : arg_0 . default_label ) arg_0 . overall_mean_trait_value = np . mean ( arg_2 ) arg_0 . mdr_matrix_values = defaultdict ( list ) for arg_6 in range ( arg_1 . shape [ 0 ] ) : arg_7 = tuple ( arg_1 [ arg_6 ] ) arg_0 . mdr_matrix_values [ arg_7 ] . append ( arg_2 [ arg_6 ] ) for arg_7 in arg_0 . mdr_matrix_values : arg_8 = np . mean ( arg_0 . mdr_matrix_values [ arg_7 ] ) if arg_8 > arg_0 . overall_mean_trait_value : arg_0 . 
feature_map [ arg_7 ] = 1 elif arg_8 == arg_0 . overall_mean_trait_value : arg_0 . feature_map [ arg_7 ] = arg_0 . tie_break else : arg_0 . feature_map [ arg_7 ] = 0 arg_0 . feature_map = dict ( arg_0 . feature_map ) arg_0 . mdr_matrix_values = dict ( arg_0 . mdr_matrix_values ) return arg_0","id_":254074,"task_name":"https:\/\/github.com\/EpistasisLab\/scikit-mdr\/blob\/768565deb10467d04a960d27e000ab38b7aa8a62\/mdr\/continuous_mdr.py#L57-L93","negative":"Attach an observer.\n\n Args:\n observer (func): A function to be called when new messages arrive\n\n Returns:\n :class:`Stream`. Current instance to allow chaining"} {"query":"Searches files satisfying query","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0 ) : if arg_2 > 0 : print ( \"Funcing \" + arg_1 ) arg_1 = arg_1 . lower ( ) arg_3 = ng ( arg_1 , arg_0 . slb ) arg_4 = set ( ) for arg_5 in arg_3 : if arg_5 in arg_0 . ngrams . keys ( ) : for arg_6 in arg_0 . ngrams [ arg_5 ] : arg_4 . add ( arg_6 ) arg_0 . qocument = arg_4 arg_7 = { } for arg_6 in arg_4 : for arg_8 in arg_0 . D [ arg_6 ] . keys ( ) : if not arg_8 in arg_7 . keys ( ) : arg_7 [ arg_8 ] = 0 arg_7 [ arg_8 ] = arg_7 [ arg_8 ] + arg_0 . D [ arg_6 ] [ arg_8 ] arg_9 = sorted ( arg_7 . items ( ) , key = operator . itemgetter ( 1 ) , reverse = True ) return [ arg_0 . elements [ arg_10 [ 0 ] ] for arg_10 in arg_9 ]","id_":254075,"task_name":"https:\/\/github.com\/tallero\/trovotutto\/blob\/7afcfacf2bb3b642654153630c1ab7447ab10fae\/trovotutto\/__init__.py#L201-L229","negative":"Returns a DataFrame of offensive team splits for a season.\n\n :year: int representing the season.\n :returns: Pandas DataFrame of split data."} {"query":"reload - Reload all objects in this list . Updates in - place . To just fetch all these objects again use refetch","positive":"def Func ( arg_0 ) : if len ( arg_0 ) == 0 : return [ ] arg_1 = [ ] for arg_2 in arg_0 : arg_3 = None try : arg_3 = arg_2 . Func ( ) except Exception as e : arg_3 = e arg_1 . append ( arg_3 ) return arg_1","id_":254076,"task_name":"https:\/\/github.com\/kata198\/indexedredis\/blob\/f9c85adcf5218dac25acb06eedc63fc2950816fa\/IndexedRedis\/IRQueryableList.py#L110-L131","negative":"Check if hdl process has event depenency on signal"} {"query":"Get all data contained in hashed category hashroot as dict","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . keys ( arg_1 + \"\/*\" ) arg_2 . sort ( ) arg_3 = len ( arg_2 ) and arg_2 [ - 1 ] or '' if arg_3 . endswith ( 'xx' ) : arg_2 = [ arg_3 ] + arg_2 [ : - 1 ] arg_4 = { } for arg_5 in arg_2 : try : arg_4 . update ( arg_0 [ arg_5 ] ) except KeyError : print \"Corrupt\" , arg_5 , \"deleted - hset is not threadsafe!\" del arg_0 [ arg_5 ] arg_0 . uncache ( arg_5 ) return arg_4","id_":254077,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/pickleshare.py#L123-L144","negative":"Generic IOP reader that interpolates the iop to the common wavelengths defined in the constructor\n\n :param file_name: filename and path of the csv file\n :returns interpolated iop"} {"query":"Create a project","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 ) : arg_6 = { 'title' : arg_1 , 'description' : arg_2 , 'currency' : arg_3 , 'budget' : arg_4 , 'jobs' : arg_5 } arg_7 = make_post_request ( arg_0 , 'projects' , arg_8 = arg_6 ) arg_8 = arg_7 . json ( ) if arg_7 . status_code == 200 : arg_6 = arg_8 [ 'result' ] arg_9 = Project ( arg_6 ) arg_9 . url = urljoin ( arg_0 . 
url , 'projects\/%s' % arg_9 . seo_url ) return arg_9 else : raise ProjectNotCreatedException ( message = arg_8 [ 'message' ] , error_code = arg_8 [ 'error_code' ] , request_id = arg_8 [ 'request_id' ] , )","id_":254078,"task_name":"https:\/\/github.com\/freelancer\/freelancer-sdk-python\/blob\/e09034936d6f13b3909a9464ee329c81c1834941\/freelancersdk\/resources\/projects\/projects.py#L38-L62","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Get all providers registered .","positive":"def Func ( arg_0 , ** arg_1 ) : if 'ids' in arg_1 : arg_2 = [ arg_0 . concept_scheme_uri_map . get ( id , id ) for id in arg_1 [ 'ids' ] ] arg_3 = [ arg_0 . providers [ k ] for k in arg_0 . providers . keys ( ) if k in arg_2 ] else : arg_3 = list ( arg_0 . providers . values ( ) ) if 'subject' in arg_1 : arg_3 = [ p for p in arg_3 if arg_1 [ 'subject' ] in p . metadata [ 'subject' ] ] return arg_3","id_":254079,"task_name":"https:\/\/github.com\/koenedaele\/skosprovider\/blob\/7304a37953978ca8227febc2d3cc2b2be178f215\/skosprovider\/registry.py#L92-L123","negative":"Update reportnumbers."} {"query":"Fit all the transforms one after the other and transform the data then use fit_transform on transformed data using the final estimator .","positive":"def Func ( arg_0 , arg_1 , ** arg_2 ) : arg_3 , arg_2 = arg_0 . _pre_transform ( arg_1 , ** arg_2 ) if hasattr ( arg_0 . steps [ - 1 ] [ - 1 ] , 'Func' ) : return arg_0 . steps [ - 1 ] [ - 1 ] . Func ( arg_3 , ** arg_2 ) else : return arg_0 . steps [ - 1 ] [ - 1 ] . fit ( arg_3 , ** arg_2 ) . transform ( arg_3 )","id_":254080,"task_name":"https:\/\/github.com\/lensacom\/sparkit-learn\/blob\/0498502107c1f7dcf33cda0cdb6f5ba4b42524b7\/splearn\/pipeline.py#L115-L123","negative":"Read attribute from sysfs and return as string"} {"query":"Default exception handling that kicks in when an exception occurs that is not caught . In debug mode the exception will be re - raised immediately otherwise it is logged and the handler for a 500 internal server error is used . If no such handler exists a default 500 internal server error message is displayed .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 , arg_4 = sys . exc_info ( ) got_request_exception . send ( arg_0 , exception = arg_1 ) arg_5 = arg_0 . error_handler_spec [ None ] . get ( 500 ) if arg_0 . propagate_exceptions : if arg_3 is arg_1 : reraise ( arg_2 , arg_3 , arg_4 ) else : raise arg_1 arg_0 . log_exception ( ( arg_2 , arg_3 , arg_4 ) ) if arg_5 is None : return InternalServerError ( ) return arg_5 ( arg_1 )","id_":254081,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/flask\/app.py#L1383-L1410","negative":"Replaces all occurrences of 'old' with 'new'"} {"query":"Send a JSON POST request with the given request headers additional URL query parameters and the given JSON in the request body . The extra query parameters are merged with any which already exist in the URL . The json and data parameters may not both be given .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , arg_3 = None , ** arg_4 ) : if len ( arg_4 ) > 1 : raise InvalidArgumentsError ( \"Too many extra args ({} > 1)\" . format ( len ( arg_4 ) ) ) if arg_4 : arg_5 = next ( iter ( arg_4 ) ) if arg_5 not in ( \"json\" , \"data\" ) : raise InvalidArgumentsError ( \"Invalid kwarg: \" + arg_5 ) arg_6 = arg_0 . session . Func ( arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , ** arg_4 ) arg_6 . 
raise_for_status ( ) return _to_json ( arg_6 )","id_":254082,"task_name":"https:\/\/github.com\/oasis-open\/cti-taxii-client\/blob\/b4c037fb61d8b8892af34423e2c67c81218d6f8e\/taxii2client\/__init__.py#L951-L980","negative":"This function hides the error message when all values are correct."} {"query":"Returns an ogr . Geometry instance optionally created from a geojson str or dict . The spatial reference may also be provided .","positive":"def Func ( * arg_0 , ** arg_1 ) : arg_2 = arg_1 . pop ( 'geojson' , None ) or len ( arg_0 ) and arg_0 [ 0 ] try : arg_3 = arg_1 . pop ( 'srs' , None ) or arg_2 . srs . wkt except AttributeError : arg_3 = SpatialReference ( 4326 ) if hasattr ( arg_2 , 'keys' ) : arg_4 = ogr . CreateFuncFromJson ( json . dumps ( arg_2 ) ) elif hasattr ( arg_2 , 'startswith' ) : arg_5 = arg_2 [ 0 ] if arg_2 else ' ' arg_6 = arg_5 if isinstance ( arg_5 , int ) else ord ( arg_5 ) if arg_6 in ( 0 , 1 ) : arg_4 = ogr . CreateFuncFromWkb ( arg_2 ) elif arg_2 . startswith ( '{' ) : arg_4 = ogr . CreateFuncFromJson ( arg_2 ) elif arg_2 . startswith ( ' 0 )","id_":254092,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/crypto.py#L1156-L1182","negative":"Helper function to get offset argument.\n Raises exception if argument is missing.\n Returns the offset argument."} {"query":"Send a request to MatchMaker and return its response","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = None ) : arg_6 = Headers ( ) arg_6 = { 'X-Auth-Token' : arg_1 } if arg_3 : arg_6 [ 'Content-Type' ] = arg_3 if arg_4 : arg_6 [ 'Accept' ] = arg_4 arg_7 = arg_5 or { 'timestamp' : datetime . datetime . now ( ) . timestamp ( ) } arg_8 = None try : LOG . info ( 'Sending {} request to MME url {}. Data sent: {}' . format ( arg_2 , arg_0 , arg_7 ) ) arg_9 = requests . request ( arg_2 = arg_2 , arg_0 = arg_0 , arg_6 = arg_6 , arg_5 = json . dumps ( arg_7 ) ) arg_8 = arg_9 . json ( ) LOG . info ( 'MME server response was:{}' . format ( arg_8 ) ) if isinstance ( arg_8 , str ) : arg_8 = { 'message' : arg_8 , } elif isinstance ( arg_8 , list ) : return arg_8 arg_8 [ 'status_code' ] = arg_9 . status_code except Exception as err : LOG . info ( 'An error occurred while sending HTTP request to server ({})' . format ( err ) ) arg_8 = { 'message' : str ( err ) } return arg_8","id_":254093,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/utils\/matchmaker.py#L10-L58","negative":"Unsigned divide.\n\n Divides (unsigned) the value in the AX register, DX:AX register pair,\n or EDX:EAX or RDX:RAX register pair (dividend) by the source operand\n (divisor) and stores the result in the AX (AH:AL), DX:AX, EDX:EAX or\n RDX:RAX registers. The source operand can be a general-purpose register\n or a memory location. The action of this instruction depends of the\n operand size (dividend\/divisor). Division using 64-bit operand is\n available only in 64-bit mode. Non-integral results are truncated\n (chopped) towards 0. The reminder is always less than the divisor in\n magnitude. 
Overflow is indicated with the #DE (divide error) exception\n rather than with the CF flag::\n\n IF SRC = 0\n THEN #DE; FI;(* divide error *)\n IF OperandSize = 8 (* word\/byte operation *)\n THEN\n temp = AX \/ SRC;\n IF temp > FFH\n THEN #DE; (* divide error *) ;\n ELSE\n AL = temp;\n AH = AX MOD SRC;\n FI;\n ELSE IF OperandSize = 16 (* doubleword\/word operation *)\n THEN\n temp = DX:AX \/ SRC;\n IF temp > FFFFH\n THEN #DE; (* divide error *) ;\n ELSE\n AX = temp;\n DX = DX:AX MOD SRC;\n FI;\n FI;\n ELSE If OperandSize = 32 (* quadword\/doubleword operation *)\n THEN\n temp = EDX:EAX \/ SRC;\n IF temp > FFFFFFFFH\n THEN #DE; (* divide error *) ;\n ELSE\n EAX = temp;\n EDX = EDX:EAX MOD SRC;\n FI;\n FI;\n ELSE IF OperandSize = 64 (*Doublequadword\/quadword operation*)\n THEN\n temp = RDX:RAX \/ SRC;\n IF temp > FFFFFFFFFFFFFFFFH\n THEN #DE; (* Divide error *)\n ELSE\n RAX = temp;\n RDX = RDX:RAX MOD SRC;\n FI;\n FI;\n FI;\n\n :param cpu: current CPU.\n :param src: source operand."} {"query":"Return the collections of handlers handling the exception in arguments .","positive":"def Func ( arg_0 : arg_1 . node_classes . NodeNG , arg_4 = arg_5 ) -> List [ arg_1 . ExceptHandler ] : arg_6 = find_try_except_wrapper_node ( arg_0 ) if isinstance ( arg_6 , arg_1 . TryExcept ) : return [ arg_7 for arg_7 in arg_6 . handlers if error_of_type ( arg_7 , arg_4 ) ] return None","id_":254094,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/utils.py#L856-L874","negative":"Adds all parameters to `traj`"} {"query":"Converts given J H Ks mags to an SDSS r magnitude value .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : return convert_constants ( arg_0 , arg_1 , arg_2 , SDSSR_JHK , SDSSR_JH , SDSSR_JK , SDSSR_HK , SDSSR_J , SDSSR_H , SDSSR_K )","id_":254095,"task_name":"https:\/\/github.com\/waqasbhatti\/astrobase\/blob\/2922a14619d183fb28005fa7d02027ac436f2265\/astrobase\/magnitudes.py#L320-L340","negative":"the last block proposal node voted on"} {"query":"Generates a frequency count of each rating on the scale ratings is a list of scores Returns a list of frequencies","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None ) : arg_0 = [ int ( arg_5 ) for arg_5 in arg_0 ] if arg_1 is None : arg_1 = min ( arg_0 ) if arg_2 is None : arg_2 = max ( arg_0 ) arg_3 = int ( arg_2 - arg_1 + 1 ) arg_4 = [ 0 for x in range ( arg_3 ) ] for arg_5 in arg_0 : arg_4 [ arg_5 - arg_1 ] += 1 return arg_4","id_":254096,"task_name":"https:\/\/github.com\/edx\/ease\/blob\/a7890ed403da94d03726b0639cd8ebda45af6bbb\/ease\/util_functions.py#L410-L425","negative":"Attempt to detect if a device at this address is present on the I2C\n bus. Will send out the device's address for writing and verify an ACK\n is received. Returns true if the ACK is received, and false if not."} {"query":"Verify SSH variables and construct exported variables","positive":"def Func ( ) : arg_0 = arg_1 . Func_to_dict ( PREFIX , REQUIRED ) if \"KEY\" in arg_0 : arg_0 [ \"KEY\" ] = arg_1 . util . expand_path ( arg_0 [ \"KEY\" ] ) if arg_1 . ENV . get ( \"SSH_PORT\" ) is None : arg_1 . ENV [ \"SSH_PORT\" ] = \"22\" arg_1 . warn ( \"cij.ssh.Func: SSH_PORT was not set, assigned: %r\" % ( arg_1 . ENV . get ( \"SSH_PORT\" ) ) ) if arg_1 . ENV . get ( \"SSH_CMD_TIME\" ) is None : arg_1 . ENV [ \"SSH_CMD_TIME\" ] = \"1\" arg_1 . warn ( \"cij.ssh.Func: SSH_CMD_TIME was not set, assigned: %r\" % ( arg_1 . ENV . 
get ( \"SSH_CMD_TIME\" ) ) ) return 0","id_":254097,"task_name":"https:\/\/github.com\/refenv\/cijoe\/blob\/21d7b2ed4ff68e0a1457e7df2db27f6334f1a379\/modules\/cij\/ssh.py#L32-L51","negative":"Register a file reader for use in parse_config_file.\n\n Registered file readers will be used to try reading files passed to\n `parse_config_file`. All file readers (beginning with the default `open`) will\n be tried until one of them succeeds at opening the file.\n\n This function may also be be used used as a decorator. For example:\n\n @register_file_reader(IOError)\n def exotic_data_source(filename):\n ...\n\n Args:\n *args: (When used as a decorator, only the existence check is supplied.)\n - file_reader_fn: The file reader function to register. This should be a\n function that can be used as a context manager to open a file and\n provide a file-like object, similar to Python's built-in `open`.\n - is_readable_fn: A function taking the file path and returning a boolean\n indicating whether the file can be read by `file_reader_fn`.\n\n Returns:\n `None`, or when used as a decorator, a function that will perform the\n registration using the supplied readability predicate."} {"query":"Delete ContentMetadataItemTransmision models associated with the given content metadata items .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = apps . get_model ( 'integrated_channel' , 'ContentMetadataItemTransmission' ) arg_2 . objects . filter ( enterprise_customer = arg_0 . enterprise_configuration . enterprise_customer , integrated_channel_code = arg_0 . enterprise_configuration . channel_code ( ) , content_id__in = arg_1 ) . delete ( )","id_":254098,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/integrated_channels\/integrated_channel\/transmitters\/content_metadata.py#L218-L231","negative":"Compute the average size of the largest cluster\n\n Helper function for :func:`microcanonical_averages`\n\n Parameters\n ----------\n\n max_cluster_size : 1-D :py:class:`numpy.ndarray` of int\n Each entry is the ``max_cluster_size`` field of the output of\n :func:`sample_states`:\n The size of the largest cluster (absolute number of sites).\n\n alpha: float\n Significance level.\n\n Returns\n -------\n\n ret : dict\n Largest cluster statistics\n\n ret['max_cluster_size'] : float\n Average size of the largest cluster (absolute number of sites)\n\n ret['max_cluster_size_ci'] : 1-D :py:class:`numpy.ndarray` of float, size 2\n Lower and upper bounds of the normal confidence interval of the average\n size of the largest cluster (absolute number of sites)\n\n See Also\n --------\n\n sample_states : largest cluster detection\n\n microcanonical_averages : largest cluster statistics"} {"query":"Enable event loop integration with Tk .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_0 . _current_gui = arg_4 if arg_1 is None : import Tkinter arg_1 = Tkinter . Tk ( ) arg_1 . withdraw ( ) arg_0 . _apps [ arg_4 ] = arg_1 return arg_1","id_":254099,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/inputhook.py#L305-L327","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Create the Certificate","positive":"def Func ( arg_0 ) : arg_1 = { \"name\" : arg_0 . name , \"type\" : arg_0 . type , \"dns_names\" : arg_0 . dns_names , \"private_key\" : arg_0 . private_key , \"leaf_certificate\" : arg_0 . 
leaf_certificate , \"certificate_chain\" : arg_0 . certificate_chain } arg_2 = arg_0 . get_data ( \"certificates\/\" , arg_7 = POST , arg_1 = arg_1 ) if arg_2 : arg_0 . id = arg_2 [ 'certificate' ] [ 'id' ] arg_0 . not_after = arg_2 [ 'certificate' ] [ 'not_after' ] arg_0 . sha1_fingerprint = arg_2 [ 'certificate' ] [ 'sha1_fingerprint' ] arg_0 . Funcd_at = arg_2 [ 'certificate' ] [ 'Funcd_at' ] arg_0 . type = arg_2 [ 'certificate' ] [ 'type' ] arg_0 . dns_names = arg_2 [ 'certificate' ] [ 'dns_names' ] arg_0 . state = arg_2 [ 'certificate' ] [ 'state' ] return arg_0","id_":254100,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Certificate.py#L83-L107","negative":"Return HgResource object for path.\n\n See DAVProvider.get_resource_inst()"} {"query":"Integrate the mass profile's convergence profile to compute the total mass within a circle of specified radius . This is centred on the mass profile .","positive":"def Func ( arg_0 , arg_1 : arg_2 . Length , arg_4 = 'angular' , arg_5 = None , arg_6 = None ) : arg_0 . check_units_of_radius_and_critical_surface_density ( arg_1 = arg_1 , arg_6 = arg_6 ) arg_7 = arg_0 . new_profile_with_units_converted ( unit_length = arg_1 . unit_length , arg_4 = 'angular' , arg_5 = arg_5 , arg_6 = arg_6 ) arg_8 = arg_2 . Mass ( value = quad ( arg_7 . mass_integral , a = 0.0 , b = arg_1 , args = ( 1.0 , ) ) [ 0 ] , arg_4 = 'angular' ) return arg_8 . convert ( arg_4 = arg_4 , arg_6 = arg_6 )","id_":254101,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/model\/profiles\/mass_profiles.py#L170-L199","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Perform a droplet action .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : arg_3 = arg_0 . get_data ( \"droplets\/%s\/actions\/\" % arg_0 . id , type = POST , arg_1 = arg_1 ) if arg_2 : return arg_3 else : arg_3 = arg_3 [ u'action' ] arg_4 = Action ( token = arg_0 . token ) for arg_5 in arg_3 . keys ( ) : setattr ( arg_4 , arg_5 , arg_3 [ arg_5 ] ) return arg_4","id_":254102,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Droplet.py#L204-L230","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lengths of the boundaries and labels\n match."} {"query":"File size in bytes .","positive":"def Func ( arg_0 ) : try : return arg_0 . _stat . st_Func except : arg_0 . _stat = arg_0 . stat ( ) return arg_0 . Func","id_":254103,"task_name":"https:\/\/github.com\/MacHu-GWU\/pathlib_mate-project\/blob\/f9fb99dd7cc9ea05d1bec8b9ce8f659e8d97b0f1\/pathlib_mate\/mate_attr_accessor.py#L110-L118","negative":"Build transition noise distribution for a ConstrainedSeasonalSSM."} {"query":"Return the list containing the names of the modules available in the given folder .","positive":"def Func ( arg_0 ) : if arg_0 == '' : arg_0 = '.' if os . path . isdir ( arg_0 ) : arg_1 = os . listdir ( arg_0 ) elif arg_0 . endswith ( '.egg' ) : try : arg_1 = [ f for f in zipimporter ( arg_0 ) . _files ] except : arg_1 = [ ] else : arg_1 = [ ] if not arg_1 : return [ ] arg_2 = os . path . isfile arg_3 = os . path . join arg_4 = os . path . basename def is_importable_file ( arg_0 ) : arg_5 , arg_6 = os . path . splitext ( arg_0 ) return import_re . match ( arg_0 ) and py3compat .
isidentifier ( arg_5 ) arg_1 = [ arg_7 for arg_7 in arg_1 if arg_2 ( arg_3 ( arg_0 , arg_7 , '__init__.py' ) ) or is_importable_file ( arg_7 ) ] return [ arg_4 ( arg_7 ) . split ( '.' ) [ 0 ] for arg_7 in arg_1 ]","id_":254104,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/completerlib.py#L59-L96","negative":"Update the rate limit and the time to reset\n from the response headers.\n\n :param: response: the response object"} {"query":"Read auth from pip config .","positive":"def Func ( arg_0 = '~\/.pypirc' ) : arg_1 = ConfigParser ( ) if arg_1 . read ( os . path . expanduser ( arg_0 ) ) : try : arg_2 = arg_1 . get ( 'pypi' , 'username' ) arg_3 = arg_1 . get ( 'pypi' , 'password' ) return arg_2 , arg_3 except ConfigError : notify . warning ( \"No PyPI credentials in '{}',\" \" will fall back to '~\/.netrc'...\" . format ( arg_0 ) ) return None","id_":254105,"task_name":"https:\/\/github.com\/jhermann\/rituals\/blob\/1534f50d81e19bbbe799e2eba0acdefbce047c06\/src\/rituals\/acts\/documentation.py#L50-L61","negative":"Overriding the default JSONEncoder.default for NDB support."} {"query":"Check if input dimension corresponds to qubit subsystems .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 is None : arg_1 = arg_2 elif np . product ( arg_1 ) != arg_2 : raise QiskitError ( \"dimensions do not match size.\" ) if isinstance ( arg_1 , ( int , np . integer ) ) : arg_3 = int ( np . log2 ( arg_1 ) ) if 2 ** arg_3 == arg_2 : return arg_3 * ( 2 , ) return ( arg_1 , ) return tuple ( arg_1 )","id_":254106,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/base_operator.py#L315-L326","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Get a list of all the cards in the game","positive":"def Func ( arg_0 , arg_1 : arg_2 = None ) : arg_3 = arg_0 . api . CARDS return arg_0 . _get_model ( arg_3 , arg_1 = arg_1 )","id_":254107,"task_name":"https:\/\/github.com\/cgrok\/clashroyale\/blob\/2618f4da22a84ad3e36d2446e23436d87c423163\/clashroyale\/official_api\/client.py#L434-L443","negative":"Simple and fast implementation of the covariance AR estimate\n\n This code is 10 times faster than :func:`modcovar_marple` and more importantly\n only 10 lines of code, compared to a 200 loc for :func:`modcovar_marple`\n\n :param X: Array of complex data samples\n :param int order: Order of linear prediction model\n\n :return:\n * P - Real linear prediction variance at order IP\n * A - Array of complex linear prediction coefficients\n\n\n .. plot::\n :include-source:\n :width: 80%\n\n from spectrum import modcovar, marple_data, arma2psd, cshift\n from pylab import log10, linspace, axis, plot \n\n a, p = modcovar(marple_data, 15)\n PSD = arma2psd(a)\n PSD = cshift(PSD, len(PSD)\/2) # switch positive and negative freq\n plot(linspace(-0.5, 0.5, 4096), 10*log10(PSD\/max(PSD)))\n axis([-0.5,0.5,-60,0])\n\n .. 
seealso:: :class:`~spectrum.modcovar.pmodcovar`\n\n :validation: the AR parameters are the same as those returned by\n a completely different function :func:`modcovar_marple`.\n\n\n :References: Mathworks"} {"query":"Create simulation model and connect it with interfaces of original unit and decorate it with agents","positive":"def Func ( arg_0 : arg_1 , arg_2 : arg_3 [ arg_4 ] = None , arg_5 = arg_6 ( ) , arg_7 : arg_8 = None , arg_9 = None ) : if arg_2 is None : arg_2 = toSimModel ( arg_0 , arg_5 = arg_5 , arg_7 = arg_7 ) else : toSimModel ( arg_0 ) if arg_9 : arg_9 ( arg_0 , arg_2 ) reconnectUnitSignalsToModel ( arg_0 , arg_2 ) arg_10 = arg_2 ( ) arg_11 = autoAddAgents ( arg_0 ) return arg_0 , arg_10 , arg_11","id_":254108,"task_name":"https:\/\/github.com\/Nic30\/hwt\/blob\/8cbb399e326da3b22c233b98188a9d08dec057e6\/hwt\/simulator\/shortcuts.py#L21-L55","negative":"Get LanguageTool directory."} {"query":"Compute the hamming distance between two images","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = average_hash ( arg_0 ) arg_3 = average_hash ( arg_1 ) return hash_Func ( arg_2 , arg_3 )","id_":254109,"task_name":"https:\/\/github.com\/bunchesofdonald\/photohash\/blob\/1839a37a884e8c31cb94e661bd76f8125b0dfcb6\/photohash\/photohash.py#L37-L42","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"Compute the inverse constant - Q transform .","positive":"def Func ( arg_0 , arg_1 = 22050 , arg_2 = 512 , arg_3 = None , arg_4 = 12 , arg_5 = 0.0 , arg_6 = 1 , arg_7 = 1 , arg_8 = 0.01 , arg_9 = 'hann' , arg_10 = True , arg_11 = None , arg_12 = arg_13 . Deprecated ( ) , arg_15 = 'fft' ) : if arg_3 is None : arg_3 = note_to_hz ( 'C1' ) arg_16 = len ( arg_0 ) arg_17 = cqt_frequencies ( arg_16 , arg_3 , arg_4 = arg_4 , arg_5 = arg_5 ) [ - arg_4 : ] arg_18 = min ( arg_16 , arg_4 ) arg_19 , arg_20 , arg_21 = __cqt_filter_fft ( arg_1 , np . min ( arg_17 ) , arg_18 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 = arg_8 , arg_9 = arg_9 ) if arg_2 > min ( arg_21 ) : warnings . warn ( 'hop_length={} exceeds minimum CQT filter length={:.3f}.\\n' 'This will probably cause unpleasant acoustic artifacts. ' 'Consider decreasing your hop length or increasing the frequency resolution of your CQT.' . format ( arg_2 , min ( arg_21 ) ) ) arg_19 = arg_19 . todense ( ) * arg_20 \/ arg_21 [ : , np . newaxis ] arg_22 = arg_19 . H arg_23 = int ( np . ceil ( float ( arg_16 ) \/ arg_4 ) ) arg_24 = None for arg_25 in range ( arg_23 - 1 , - 1 , - 1 ) : arg_26 = slice ( - ( arg_25 + 1 ) * arg_4 - 1 , - ( arg_25 ) * arg_4 - 1 ) arg_27 = arg_0 [ arg_26 ] arg_28 = arg_22 [ : , - arg_27 . shape [ 0 ] : ] arg_29 = arg_2 \/\/ 2 ** arg_25 if arg_10 : arg_30 = np . sqrt ( arg_21 [ - arg_27 . shape [ 0 ] : , np . newaxis ] ) \/ arg_20 else : arg_30 = arg_21 [ - arg_27 . shape [ 0 ] : , np . newaxis ] * np . sqrt ( 2 ** arg_25 ) \/ arg_20 arg_31 = arg_28 . dot ( arg_27 \/ arg_30 ) arg_32 = istft ( arg_31 , arg_9 = 'ones' , arg_2 = arg_29 ) if arg_24 is None : arg_24 = arg_32 else : arg_24 = audio . resample ( arg_24 , 1 , 2 , arg_10 = True , arg_15 = arg_15 , fix = False ) arg_24 [ : len ( arg_32 ) ] += arg_32 if arg_11 : arg_24 = arg_13 . 
fix_length ( arg_24 , arg_11 ) return arg_24","id_":254110,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/core\/constantq.py#L538-L703","negative":"Cycles through notifications with latest results from data feeds."} {"query":"Registers its metrics to a given metrics collector with a given interval","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : for arg_3 , arg_4 in arg_0 . metrics . items ( ) : arg_1 . register_metric ( arg_3 , arg_4 , arg_2 )","id_":254111,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/instance\/src\/python\/utils\/metrics\/metrics_helper.py#L42-L45","negative":"Releases renderer resources associated with this image."} {"query":"Median absolute error regression loss","positive":"def Func ( arg_0 , arg_1 ) : ModelBase . _check_targets ( arg_0 , arg_1 ) return ( arg_1 - arg_0 ) . abs ( ) . median ( )","id_":254112,"task_name":"https:\/\/github.com\/h2oai\/h2o-3\/blob\/dd62aaa1e7f680a8b16ee14bc66b0fb5195c2ad8\/h2o-py\/h2o\/model\/regression.py#L69-L78","negative":"Get analog data."} {"query":"Shorthand access to the color table scheme selector method .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_0 . color_scheme_table . set_active_scheme ( * arg_1 , ** arg_2 ) arg_0 . Colors = arg_0 . color_scheme_table . active_colors if hasattr ( arg_0 , 'pdb' ) and arg_0 . pdb is not None : arg_0 . pdb . Func ( * arg_1 , ** arg_2 )","id_":254113,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/ultratb.py#L380-L389","negative":"Returns a list of the dicom files within root_path\n\n Parameters\n ----------\n root_path: str\n Path to the directory to be recursively searched for DICOM files.\n\n Returns\n -------\n dicoms: set\n Set of DICOM absolute file paths"} {"query":"Retrieves a list of all servers bound to the specified data center .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 1 ) : arg_3 = arg_0 . _perform_request ( '\/datacenters\/%s\/servers?depth=%s' % ( arg_1 , str ( arg_2 ) ) ) return arg_3","id_":254114,"task_name":"https:\/\/github.com\/profitbricks\/profitbricks-sdk-python\/blob\/2c804b141688eccb07d6ae56601d5c60a62abebd\/profitbricks\/client.py#L1267-L1281","negative":"Returns mappable data for a random subset of voxels.\n\n May be useful as a baseline in predictive analyses--e.g., to compare\n performance of a more principled feature selection method with simple\n random selection.\n\n Args:\n dataset: A Dataset instance\n n_voxels: An integer specifying the number of random voxels to select.\n\n Returns:\n A 2D numpy array with (randomly-selected) voxels in rows and mappables\n in columns."} {"query":"Embed hyperlinks to documentation into example code","positive":"def Func ( arg_0 , arg_1 ) : if arg_1 is not None : return if not arg_0 . builder . config . plot_gallery : return if arg_0 . builder . name not in [ 'html' , 'readthedocs' ] : return print ( 'Embedding documentation hyperlinks in examples..' ) arg_2 = arg_0 . config . 
sphinx_gallery_conf arg_3 = arg_2 [ 'gallery_dirs' ] if not isinstance ( arg_3 , list ) : arg_3 = [ arg_3 ] for arg_4 in arg_3 : _Func ( arg_0 , arg_2 , arg_4 )","id_":254115,"task_name":"https:\/\/github.com\/cokelaer\/spectrum\/blob\/bad6c32e3f10e185098748f67bb421b378b06afe\/doc\/sphinxext\/sphinx_gallery\/docs_resolv.py#L408-L436","negative":"Fetch the events pages of a given group."} {"query":"Parse an RFC 2822 addr - spec .","positive":"def Func ( arg_0 ) : arg_1 = [ ] arg_0 . gotonext ( ) while arg_0 . pos < len ( arg_0 . field ) : if arg_0 . field [ arg_0 . pos ] == '.' : arg_1 . append ( '.' ) arg_0 . pos += 1 elif arg_0 . field [ arg_0 . pos ] == '\"' : arg_1 . append ( '\"%s\"' % arg_0 . getquote ( ) ) elif arg_0 . field [ arg_0 . pos ] in arg_0 . atomends : break else : arg_1 . append ( arg_0 . getatom ( ) ) arg_0 . gotonext ( ) if arg_0 . pos >= len ( arg_0 . field ) or arg_0 . field [ arg_0 . pos ] != '@' : return '' . join ( arg_1 ) arg_1 . append ( '@' ) arg_0 . pos += 1 arg_0 . gotonext ( ) return '' . join ( arg_1 ) + arg_0 . getdomain ( )","id_":254116,"task_name":"https:\/\/github.com\/google\/grumpy\/blob\/3ec87959189cfcdeae82eb68a47648ac25ceb10b\/third_party\/stdlib\/rfc822.py#L642-L664","negative":"Handle global keybindings."} {"query":"Fetch jobIDs for jobs in the table with optional fields given a specific clientInfo","positive":"def Func ( arg_0 , arg_1 , arg_2 = [ ] ) : arg_3 = [ arg_0 . _jobs . pubToDBNameDict [ x ] for x in arg_2 ] arg_4 = ',' . join ( [ 'job_id' ] + arg_3 ) with ConnectionFactory . get ( ) as conn : arg_5 = 'SELECT %s FROM %s ' 'WHERE client_info = %%s ' ' AND status != %%s' % ( arg_4 , arg_0 . jobsTableName ) conn . cursor . execute ( arg_5 , [ arg_1 , arg_0 . STATUS_COMPLETED ] ) arg_6 = conn . cursor . fetchall ( ) return arg_6","id_":254117,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/database\/client_jobs_dao.py#L1952-L1968","negative":"End the blockade event and return to a steady state"} {"query":"Return a list of task runs for a given project ID .","positive":"def Func ( arg_0 , arg_1 = 100 , arg_2 = 0 , arg_3 = None ) : if arg_3 is not None : arg_4 = dict ( arg_1 = arg_1 , arg_3 = arg_3 ) else : arg_4 = dict ( arg_1 = arg_1 , arg_2 = arg_2 ) print ( OFFSET_WARNING ) arg_4 [ 'project_id' ] = arg_0 try : arg_5 = _pybossa_req ( 'get' , 'taskrun' , arg_4 = arg_4 ) if type ( arg_5 ) . __name__ == 'list' : return [ TaskRun ( arg_6 ) for arg_6 in arg_5 ] else : raise TypeError except : raise","id_":254118,"task_name":"https:\/\/github.com\/Scifabric\/pybossa-client\/blob\/998d7cb0207ff5030dc800f0c2577c5692316c2c\/pbclient\/__init__.py#L551-L580","negative":"Decorator for methods accepting old_path and new_path."} {"query":"Logical compare .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_1 . read ( ) & arg_2 . read ( ) arg_0 . SF = ( arg_3 & ( 1 << ( arg_1 . size - 1 ) ) ) != 0 arg_0 . ZF = arg_3 == 0 arg_0 . PF = arg_0 . _calculate_parity_flag ( arg_3 ) arg_0 . CF = False arg_0 . OF = False","id_":254119,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/native\/cpu\/x86.py#L905-L934","negative":"Revoke the token and remove the cookie."} {"query":"Processes a level of segmentation and converts it into times .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : assert arg_0 [ 0 ] == 0 and arg_0 [ - 1 ] == arg_2 - 1 assert len ( arg_0 ) - 1 == len ( arg_1 ) arg_5 = np . 
concatenate ( ( [ 0 ] , arg_3 [ arg_0 ] , [ arg_4 ] ) ) arg_6 = np . max ( arg_1 ) + 1 arg_1 = np . concatenate ( ( [ arg_6 ] , arg_1 , [ arg_6 ] ) ) arg_5 , arg_1 = remove_empty_segments ( arg_5 , arg_1 ) assert np . allclose ( [ arg_5 [ 0 ] ] , [ 0 ] ) and np . allclose ( [ arg_5 [ - 1 ] ] , [ arg_4 ] ) return arg_5 , arg_1","id_":254120,"task_name":"https:\/\/github.com\/urinieto\/msaf\/blob\/9dbb57d77a1310465a65cc40f1641d083ca74385\/msaf\/utils.py#L193-L231","negative":"Set a property value or remove a property.\n\n value == None means 'remove property'.\n Raise HTTP_FORBIDDEN if property is read-only, or not supported.\n\n When dry_run is True, this function should raise errors, as in a real\n run, but MUST NOT change any data.\n\n This default implementation\n\n - raises HTTP_FORBIDDEN, if trying to modify a locking property\n - raises HTTP_FORBIDDEN, if trying to modify an immutable {DAV:}\n property\n - handles Windows' Win32LastModifiedTime to set the getlastmodified\n property, if enabled\n - stores everything else as dead property, if a property manager is\n present.\n - raises HTTP_FORBIDDEN, else\n\n Removing a non-existing prop is NOT an error.\n\n Note: RFC 4918 states that {DAV:}displayname 'SHOULD NOT be protected'\n\n A resource provider may override this method, to update supported custom\n live properties."} {"query":"Return a Droplet by its ID .","positive":"def Func ( arg_0 , arg_1 ) : return Droplet . get_object ( api_token = arg_0 . token , arg_1 = arg_1 )","id_":254121,"task_name":"https:\/\/github.com\/koalalorenzo\/python-digitalocean\/blob\/d0221b57856fb1e131cafecf99d826f7b07a947c\/digitalocean\/Manager.py#L88-L92","negative":"Add members found in prior versions up till the next major release\n\n These members are to be considered deprecated. When a new major\n release is made, these members are removed."} {"query":"Decorator . Abortable worker . If wrapped task will be cancelled by dispatcher decorator will send ftp codes of successful interrupt .","positive":"def Func ( arg_0 ) : @ functools . wraps ( arg_0 ) async def wrapper ( arg_1 , arg_2 , arg_3 ) : try : await arg_0 ( arg_1 , arg_2 , arg_3 ) except asyncio . CancelledError : arg_2 . response ( \"426\" , \"transfer aborted\" ) arg_2 . response ( \"226\" , \"abort successful\" ) return wrapper","id_":254122,"task_name":"https:\/\/github.com\/aio-libs\/aioftp\/blob\/b45395b1aba41301b898040acade7010e6878a08\/aioftp\/server.py#L697-L717","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"A helper function to compute validation related metrics","positive":"def Func ( arg_0 ) : if ( arg_0 . _validation_iterator is None ) or ( arg_0 . _Func is None ) : raise AttributeError ( 'Validation is not setup.' ) arg_1 = 0.0 arg_2 = [ 0.0 ] * len ( arg_0 . _Func ) arg_0 . _sess . run ( arg_0 . _validation_iterator . initializer ) while True : try : arg_3 = arg_0 . _sess . run ( arg_0 . _Func ) for arg_4 , arg_5 in enumerate ( arg_3 ) : arg_2 [ arg_4 ] += arg_5 arg_1 += 1.0 except tf . errors . OutOfRangeError : break for arg_4 , arg_5 in enumerate ( arg_2 ) : arg_2 [ arg_4 ] = arg_2 [ arg_4 ] \/ arg_1 return zip ( arg_0 . 
_Func , arg_2 )","id_":254123,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/distributed.py#L187-L206","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Send email using backend specified in EMAIL_BACKEND .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = False , arg_5 = None , arg_6 = None , arg_7 = 'mixed' , arg_8 = 'utf-8' , ** arg_9 ) : arg_10 , arg_11 = configuration . conf . get ( 'email' , 'EMAIL_BACKEND' ) . rsplit ( '.' , 1 ) arg_12 = importlib . import_module ( arg_10 ) arg_13 = getattr ( arg_12 , arg_11 ) arg_0 = get_email_address_list ( arg_0 ) arg_0 = \", \" . join ( arg_0 ) return arg_13 ( arg_0 , arg_1 , arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = arg_7 , arg_8 = arg_8 , ** arg_9 )","id_":254124,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/utils\/email.py#L36-L50","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"Generates a URL - safe token for the given user action time tuple .","positive":"def Func ( arg_0 , arg_1 , arg_2 = '' , arg_3 = None ) : arg_4 = hmac . new ( _helpers . _to_bytes ( arg_0 , encoding = 'utf-8' ) ) arg_4 . update ( _helpers . _to_bytes ( str ( arg_1 ) , encoding = 'utf-8' ) ) arg_4 . update ( DELIMITER ) arg_4 . update ( _helpers . _to_bytes ( arg_2 , encoding = 'utf-8' ) ) arg_4 . update ( DELIMITER ) arg_3 = _helpers . _to_bytes ( str ( arg_3 or int ( time . time ( ) ) ) , encoding = 'utf-8' ) arg_4 . update ( arg_3 ) arg_5 = arg_4 . digest ( ) arg_6 = base64 . urlsafe_b64encode ( arg_5 + DELIMITER + arg_3 ) return arg_6","id_":254125,"task_name":"https:\/\/github.com\/googleapis\/oauth2client\/blob\/50d20532a748f18e53f7d24ccbe6647132c979a9\/oauth2client\/contrib\/xsrfutil.py#L33-L57","negative":"Returns a list of the dicom files within root_path\n\n Parameters\n ----------\n root_path: str\n Path to the directory to be recursively searched for DICOM files.\n\n Returns\n -------\n dicoms: set\n Set of DICOM absolute file paths"} {"query":"Write data to the log file if active","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'input' ) : if arg_0 . log_active and arg_1 : arg_3 = arg_0 . logfile . write if arg_2 == 'input' : if arg_0 . timestamp : arg_3 ( str_to_unicode ( time . strftime ( '# %a, %d %b %Y %H:%M:%S\\n' , time . localtime ( ) ) ) ) arg_3 ( arg_1 ) elif arg_2 == 'output' and arg_0 . log_output : arg_4 = u'\\n' . join ( [ u'#[Out]# %s' % s for s in arg_1 . splitlines ( ) ] ) arg_3 ( u'%s\\n' % arg_4 ) arg_0 . logfile . flush ( )","id_":254126,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/core\/logger.py#L188-L203","negative":"Returns the maximum delay for the InferenceElements in the inference\n dictionary\n\n Parameters:\n -----------------------------------------------------------------------\n inferences: A dictionary where the keys are InferenceElements"} {"query":"Parses arguments for the command .","positive":"def Func ( arg_0 , arg_1 = None ) : arg_2 = vars ( arg_0 . create_argument_parser ( ) . parse_args ( arg_1 ) ) arg_3 = None if arg_0 . requires_seed : arg_4 = arg_2 . pop ( 'seed_file' ) arg_3 = ( arg_0 . seed_from_filepath ( arg_4 ) if arg_4 else arg_0 . 
prompt_for_seed ( ) ) arg_2 [ 'api' ] = Iota ( adapter = arg_2 . pop ( 'uri' ) , arg_3 = arg_3 , testnet = arg_2 . pop ( 'testnet' ) , ) return arg_2","id_":254127,"task_name":"https:\/\/github.com\/iotaledger\/iota.lib.py\/blob\/97cdd1e241498446b46157b79b2a1ea2ec6d387a\/iota\/bin\/__init__.py#L80-L107","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."} {"query":"Run Gene Set Enrichment Analysis .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 'GSEA_' , arg_4 = 15 , arg_5 = 500 , arg_6 = 1000 , arg_7 = 1 , arg_8 = 'gene_set' , arg_9 = 'log2_ratio_of_classes' , arg_10 = False , arg_11 = 1 , arg_12 = ( 6.5 , 6 ) , arg_13 = 'pdf' , arg_14 = 20 , arg_15 = False , arg_16 = None , arg_17 = False ) : arg_18 = GSEA ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 , arg_9 , arg_10 , arg_11 , arg_12 , arg_13 , arg_14 , arg_15 , arg_16 , arg_17 ) arg_18 . run ( ) return arg_18","id_":254128,"task_name":"https:\/\/github.com\/zqfang\/GSEApy\/blob\/673e9ec1391e3b14d3e8a4353117151fd2cb9345\/gseapy\/gsea.py#L858-L933","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Show entire demo on screen block by block","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . title arg_2 = arg_0 . title arg_3 = arg_0 . nblocks arg_4 = arg_0 . _silent arg_5 = arg_0 . marquee for arg_6 , arg_7 in enumerate ( arg_0 . src_blocks_colored ) : if arg_4 [ arg_6 ] : print >> io . stdout , arg_5 ( '<%s> SILENT block # %s (%s remaining)' % ( arg_2 , arg_6 , arg_3 - arg_6 - 1 ) ) else : print >> io . stdout , arg_5 ( '<%s> block # %s (%s remaining)' % ( arg_2 , arg_6 , arg_3 - arg_6 - 1 ) ) print >> io . stdout , arg_7 , sys . stdout . flush ( )","id_":254129,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/lib\/demo.py#L395-L411","negative":"An intelligent wrapper for ``open``.\n\n Parameters\n ----------\n name_or_fdesc : string-type or open file descriptor\n If a string type, refers to the path to a file on disk.\n\n If an open file descriptor, it is returned as-is.\n\n mode : string\n The mode with which to open the file.\n See ``open`` for details.\n\n fmt : string ['auto', 'jams', 'json', 'jamz']\n The encoding for the input\/output stream.\n\n If `auto`, the format is inferred from the filename extension.\n\n Otherwise, use the specified coding.\n\n\n See Also\n --------\n open\n gzip.open"} {"query":"Comparison for x coordinate","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _is_coordinate ( arg_1 ) if arg_0 . x > arg_1 . x : return True else : return False","id_":254130,"task_name":"https:\/\/github.com\/katerina7479\/pypdflite\/blob\/ac2501f30d6619eae9dea5644717575ca9263d0a\/pypdflite\/pdfobjects\/pdfcursor.py#L112-L118","negative":"Downloads a MP4 or WebM file that is associated with the video at the URL passed.\n\n :param str url: URL of the video to be downloaded\n :return str: Filename of the file in local storage"} {"query":"Get 6D data with euler rotations .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = None ) : arg_4 = [ ] arg_5 = arg_4 . append for arg_6 in range ( arg_1 . body_count ) : arg_3 , arg_7 = QRTPacket . 
_get_exact ( RT6DBodyPosition , arg_2 , arg_3 ) arg_3 , arg_8 = QRTPacket . _get_exact ( RT6DBodyEuler , arg_2 , arg_3 ) arg_5 ( ( arg_7 , arg_8 ) ) return arg_4","id_":254131,"task_name":"https:\/\/github.com\/qualisys\/qualisys_python_sdk\/blob\/127d7eeebc2b38b5cafdfa5d1d0198437fedd274\/qtm\/packet.py#L430-L442","negative":"Returns the nth rest sequence of coll, or coll if i is 0."} {"query":"Try to create a Distribution path_or_module . o path_or_module may be a module object .","positive":"def Func ( arg_0 , arg_1 = None ) : if isinstance ( arg_0 , ModuleType ) : try : return Installed ( arg_0 , arg_1 ) except ( ValueError , IOError ) : pass try : __import__ ( arg_0 ) except ImportError : pass else : try : return Installed ( arg_0 , arg_1 ) except ( ValueError , IOError ) : pass if os . path . isfile ( arg_0 ) : try : return SDist ( arg_0 , arg_1 ) except ( ValueError , IOError ) : pass try : return BDist ( arg_0 , arg_1 ) except ( ValueError , IOError ) : pass try : return Wheel ( arg_0 , arg_1 ) except ( ValueError , IOError ) : pass if os . path . isdir ( arg_0 ) : try : return Develop ( arg_0 , arg_1 ) except ( ValueError , IOError ) : pass","id_":254132,"task_name":"https:\/\/github.com\/tnkteja\/myhelp\/blob\/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb\/virtualEnvironment\/lib\/python2.7\/site-packages\/pkginfo\/utils.py#L10-L57","negative":"Try to merge processes as much as possible\n\n :param processes: list of processes instances"} {"query":"Get the decryption for col .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : if arg_2 is None : arg_2 = arg_0 if arg_1 != arg_0 . model . _meta . db_table or arg_2 != arg_0 : return DecryptedCol ( arg_1 , arg_0 , arg_2 ) else : return arg_0 . cached_col","id_":254133,"task_name":"https:\/\/github.com\/incuna\/django-pgcrypto-fields\/blob\/406fddf0cbe9091ba71b97206d0f4719c0450ac1\/pgcrypto\/mixins.py#L106-L117","negative":"main execution loop. query weather data and post to online service."} {"query":"Check HTTP response for known errors","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 . status == 401 : raise trolly . Unauthorised ( arg_1 , arg_2 ) if arg_2 . status != 200 : raise trolly . ResourceUnavailable ( arg_1 , arg_2 )","id_":254134,"task_name":"https:\/\/github.com\/its-rigs\/Trolly\/blob\/483dc94c352df40dc05ead31820b059b2545cf82\/trolly\/client.py#L48-L56","negative":"URL reconstruction according to PEP 333.\n @see https:\/\/www.python.org\/dev\/peps\/pep-3333\/#url-reconstruction"} {"query":"Check self . data . Raise InvalidConfig on error","positive":"def Func ( arg_0 ) : if ( arg_0 . data . get ( 'content-type' ) or arg_0 . data . get ( 'body' ) ) and arg_0 . data . get ( 'method' , '' ) . lower ( ) not in CONTENT_TYPE_METHODS : raise InvalidConfig ( extra_body = 'The body\/content-type option only can be used with the {} methods. The device is {}. ' 'Check the configuration file.' . format ( ', ' . join ( CONTENT_TYPE_METHODS ) , arg_0 . name ) ) arg_0 . data [ 'content-type' ] = CONTENT_TYPE_ALIASES . get ( arg_0 . data . get ( 'content-type' ) , arg_0 . data . get ( 'content-type' ) ) arg_2 = CONTENT_TYPE_ALIASES [ 'form' ] if arg_0 . data . get ( 'body' ) and ( arg_0 . data . get ( 'content-type' ) or arg_2 ) == arg_2 : try : arg_0 . data [ 'body' ] = json . loads ( arg_0 . data [ 'body' ] ) except JSONDecodeError : raise InvalidConfig ( extra_body = 'Invalid JSON body on {} device.' . format ( arg_0 .
name ) )","id_":254135,"task_name":"https:\/\/github.com\/Nekmo\/amazon-dash\/blob\/0e2bdc24ff8ea32cecb2f5f54f5cc1c0f99c197b\/amazon_dash\/execute.py#L179-L199","negative":"Creates the variational distribution for LDA.\n\n Args:\n activation: Activation function to use.\n num_topics: The number of topics.\n layer_sizes: The number of hidden units per layer in the encoder.\n\n Returns:\n lda_variational: A function that takes a bag-of-words Tensor as\n input and returns a distribution over topics."} {"query":"Updates the current configuration with the values in conf_dict .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : for arg_3 in arg_1 : if arg_3 . startswith ( '_' ) : continue arg_4 = arg_1 [ arg_3 ] if arg_4 is Namespace : continue if arg_2 : arg_3 = arg_2 + '.' + arg_3 if isinstance ( arg_4 , Namespace ) : for arg_3 , arg_4 in arg_4 . iteritems ( arg_3 ) : arg_0 . set ( arg_3 , arg_4 ) elif callable ( arg_4 ) : arg_4 = arg_4 ( ) if arg_4 is not None : arg_0 . set ( arg_3 , arg_4 ) else : arg_0 . set ( arg_3 , arg_4 )","id_":254136,"task_name":"https:\/\/github.com\/shakefu\/pyconfig\/blob\/000cb127db51e03cb4070aae6943e956193cbad5\/pyconfig\/__init__.py#L87-L115","negative":"Regenerates the primary or secondary access key for the specified\n storage account.\n\n service_name:\n Name of the storage service account.\n key_type:\n Specifies which key to regenerate. Valid values are:\n Primary, Secondary"} {"query":"check if aws conn exists already or create one and return it","positive":"def Func ( arg_0 ) : if not arg_0 . conn : arg_0 . conn = arg_0 . get_client_type ( 'athena' ) return arg_0 . conn","id_":254137,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/aws_athena_hook.py#L43-L51","negative":"Remove all binary files in the adslib directory."} {"query":"Get the default local IP address .","positive":"def Func ( ) : arg_0 = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) try : arg_0 . connect ( ( '10.255.255.255' , 1 ) ) arg_1 = arg_0 . getsockname ( ) [ 0 ] except ( socket . error , IndexError ) : arg_1 = '127.0.0.1' finally : arg_0 . close ( ) return arg_1","id_":254138,"task_name":"https:\/\/github.com\/mozilla-iot\/webthing-python\/blob\/65d467c89ed79d0bbc42b8b3c8f9e5a320edd237\/webthing\/utils.py#L17-L32","negative":"Return a circuit with a barrier before last measurements."} {"query":"Completed human transaction","positive":"def Func ( arg_0 ) : arg_1 = [ ] for arg_2 in arg_0 . transactions : if arg_2 . depth == 0 : arg_1 . append ( arg_2 ) return tuple ( arg_1 )","id_":254139,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/platforms\/evm.py#L2058-L2064","negative":"Configure the Outstation's database of input point definitions.\n\n Configure two Analog points (group\/variation 30.1) at indexes 1 and 2.\n Configure two Binary points (group\/variation 1.2) at indexes 1 and 2."} {"query":"Parse and sort an Accept header .","positive":"def Func ( arg_0 ) : def sort ( arg_1 ) : return float ( arg_1 [ 1 ] . get ( 'q' , 1 ) ) return OrderedDict ( sorted ( _splitHeaders ( arg_0 ) , key = sort , reverse = True ) )","id_":254140,"task_name":"https:\/\/github.com\/jonathanj\/txspinneret\/blob\/717008a2c313698984a23e3f3fc62ea3675ed02d\/txspinneret\/util.py#L13-L24","negative":"Decrypt the encrypted masterkey"} {"query":"Print operational metrics for the scheduler test .","positive":"def Func ( arg_0 ) : arg_1 = settings . 
Session ( ) arg_2 = TaskInstance arg_3 = ( arg_1 . query ( arg_2 ) . filter ( arg_2 . dag_id . in_ ( DAG_IDS ) ) . all ( ) ) arg_4 = [ x for x in arg_3 if x . state == State . SUCCESS ] arg_5 = [ ( arg_8 . dag_id , arg_8 . task_id , arg_8 . execution_date , ( arg_8 . queued_dttm - arg_0 . start_date ) . total_seconds ( ) , ( arg_8 . start_date - arg_0 . start_date ) . total_seconds ( ) , ( arg_8 . end_date - arg_0 . start_date ) . total_seconds ( ) , arg_8 . duration ) for arg_8 in arg_4 ] arg_6 = pd . DataFrame ( arg_5 , columns = [ 'dag_id' , 'task_id' , 'execution_date' , 'queue_delay' , 'start_delay' , 'land_time' , 'duration' ] ) print ( 'Performance Results' ) print ( '###################' ) for arg_7 in DAG_IDS : print ( 'DAG {}' . format ( arg_7 ) ) print ( arg_6 [ arg_6 [ 'dag_id' ] == arg_7 ] ) print ( '###################' ) if len ( arg_3 ) > len ( arg_4 ) : print ( \"WARNING!! The following task instances haven't completed\" ) print ( pd . DataFrame ( [ ( arg_8 . dag_id , arg_8 . task_id , arg_8 . execution_date , arg_8 . state ) for arg_8 in filter ( lambda x : x . state != State . SUCCESS , arg_3 ) ] , columns = [ 'dag_id' , 'task_id' , 'execution_date' , 'state' ] ) ) arg_1 . commit ( )","id_":254141,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/scripts\/perf\/scheduler_ops_metrics.py#L65-L101","negative":"Decorator. Abortable worker. If wrapped task will be cancelled by\n dispatcher, decorator will send ftp codes of successful interrupt.\n\n ::\n\n >>> @worker\n ... async def worker(self, connection, rest):\n ... ..."} {"query":"Asserts that val contains the given sequence of items in order .","positive":"def Func ( arg_0 , * arg_1 ) : if len ( arg_1 ) == 0 : raise ValueError ( 'one or more args must be given' ) else : try : for arg_2 in xrange ( len ( arg_0 . val ) - len ( arg_1 ) + 1 ) : for arg_3 in xrange ( len ( arg_1 ) ) : if arg_0 . val [ arg_2 + arg_3 ] != arg_1 [ arg_3 ] : break else : return arg_0 except TypeError : raise TypeError ( 'val is not iterable' ) arg_0 . _err ( 'Expected <%s> to contain sequence %s, but did not.' % ( arg_0 . val , arg_0 . _fmt_items ( arg_1 ) ) )","id_":254142,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L314-L328","negative":"Connect to the stream\n\n Returns\n -------\n asyncio.coroutine\n The streaming response"} {"query":"Read the state file if it exists .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = open ( arg_1 , 'r' ) arg_3 = json . load ( arg_2 ) arg_0 . vpc_id = arg_3 [ 'vpcID' ] arg_0 . sg_id = arg_3 [ 'sgID' ] arg_0 . sn_ids = arg_3 [ 'snIDs' ] arg_0 . instances = arg_3 [ 'instances' ] except Exception as e : logger . debug ( \"Caught exception while reading state file: {0}\" . format ( e ) ) raise e logger . debug ( \"Done reading state from the local state file.\" )","id_":254143,"task_name":"https:\/\/github.com\/Parsl\/parsl\/blob\/d7afb3bc37f50dcf224ae78637944172edb35dac\/parsl\/providers\/aws\/aws.py#L183-L205","negative":"Remove rows with NAs from the H2OFrame.\n\n :returns: new H2OFrame with all rows from the original frame containing any NAs removed."} {"query":"Adds all necessary parameters to the traj container .","positive":"def Func ( arg_0 ) : arg_0 . f_add_parameter ( 'steps' , 10000 , comment = 'Number of time steps to simulate' ) arg_0 . f_add_parameter ( 'dt' , 0.01 , comment = 'Step size' ) arg_0 . 
f_add_parameter ( ArrayParameter , 'initial_conditions' , np . array ( [ 0.0 , 0.0 , 0.0 ] ) , comment = 'Our initial conditions, as default we will start from' ' origin!' ) arg_0 . f_add_parameter ( 'diff_name' , 'diff_lorenz' , comment = 'Name of our differential equation' ) if arg_0 . diff_name == 'diff_lorenz' : arg_0 . f_add_parameter ( 'func_params.sigma' , 10.0 ) arg_0 . f_add_parameter ( 'func_params.beta' , 8.0 \/ 3.0 ) arg_0 . f_add_parameter ( 'func_params.rho' , 28.0 ) elif arg_0 . diff_name == 'diff_roessler' : arg_0 . f_add_parameter ( 'func_params.a' , 0.1 ) arg_0 . f_add_parameter ( 'func_params.c' , 14.0 ) else : raise ValueError ( 'I don\\'t know what %s is.' % arg_0 . diff_name )","id_":254144,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/examples\/example_06_parameter_presetting.py#L14-L50","negative":"write lines, one by one, separated by \\n to device"} {"query":"Decorator used to change the return value from PJFFactory . fuzzed it makes the structure printable","positive":"def Func ( arg_0 ) : def func_wrapper ( arg_1 , arg_2 , arg_3 ) : if arg_3 : arg_4 = \"\\\\x%02x\" else : arg_4 = \"\\\\u%04x\" arg_5 = re . compile ( r\"(\\\\\\\\x[a-fA-F0-9]{2})\" ) arg_6 = re . compile ( r\"(\\\\u[a-fA-F0-9]{4})\" ) def encode_decode_all ( arg_7 , arg_8 = True ) : if type ( arg_7 ) == dict : for arg_9 in arg_7 : if type ( arg_7 [ arg_9 ] ) in [ dict , list ] : if arg_8 : arg_7 [ arg_9 ] = encode_decode_all ( arg_7 [ arg_9 ] ) else : arg_7 [ arg_9 ] = encode_decode_all ( arg_7 [ arg_9 ] , arg_8 = False ) elif type ( arg_7 [ arg_9 ] ) == str : if arg_8 : arg_7 [ arg_9 ] = decode ( arg_7 [ arg_9 ] ) else : arg_7 [ arg_9 ] = encode ( arg_7 [ arg_9 ] ) elif type ( arg_7 ) == list : arg_10 = [ ] for arg_11 in arg_7 : if type ( arg_11 ) == str : if arg_8 : arg_10 . append ( decode ( arg_11 ) ) else : arg_10 . append ( encode ( arg_11 ) ) elif type ( arg_11 ) in [ dict , list ] : if arg_8 : arg_10 . append ( encode_decode_all ( arg_11 ) ) else : arg_10 . append ( encode_decode_all ( arg_11 , arg_8 = False ) ) else : arg_10 . append ( arg_11 ) return arg_10 else : if arg_8 : return decode ( arg_7 ) else : return encode ( arg_7 ) return arg_7 def decode ( arg_12 ) : arg_13 = \"\" . join ( arg_4 % ord ( c ) if c not in p else c for c in arg_12 ) if sys . version_info >= ( 3 , 0 ) : return str ( arg_13 ) else : for arg_14 in arg_6 . findall ( arg_13 ) : arg_13 = arg_13 . replace ( arg_14 , arg_14 . decode ( \"unicode_escape\" ) ) return unicode ( arg_13 ) def encode ( arg_12 ) : for arg_14 in arg_5 . findall ( arg_12 ) : if sys . version_info >= ( 3 , 0 ) : arg_12 = arg_12 . replace ( arg_14 , bytes ( str ( arg_14 ) . replace ( \"\\\\\\\\x\" , \"\\\\x\" ) , \"utf-8\" ) . decode ( \"unicode_escape\" ) ) else : arg_12 = arg_12 . replace ( arg_14 , str ( arg_14 ) . replace ( \"\\\\\\\\x\" , \"\\\\x\" ) . decode ( \"string_escape\" ) ) return arg_12 if arg_2 : return encode_decode_all ( \"{0}\" . format ( json . dumps ( encode_decode_all ( arg_0 ( arg_1 ) ) , arg_2 = 5 ) ) , arg_8 = False ) else : return encode_decode_all ( \"{0}\" . format ( json . 
dumps ( encode_decode_all ( arg_0 ( arg_1 ) ) ) ) , arg_8 = False ) return func_wrapper","id_":254145,"task_name":"https:\/\/github.com\/mseclab\/PyJFuzz\/blob\/f777067076f62c9ab74ffea6e90fd54402b7a1b4\/pyjfuzz\/core\/pjf_encoder.py#L35-L106","negative":"Checks to see if Spark worker and HDFS datanode are still running."} {"query":"Check that the server is returning a valid Content - Type","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_2 . replace ( ' ' , '' ) . split ( ';' ) arg_4 = arg_1 . replace ( ' ' , '' ) . split ( ';' ) return ( all ( arg_5 in arg_4 for arg_5 in arg_3 ) and ( arg_4 [ 0 ] == 'application\/vnd.oasis.taxii+json' or arg_4 [ 0 ] == 'application\/vnd.oasis.stix+json' ) )","id_":254146,"task_name":"https:\/\/github.com\/oasis-open\/cti-taxii-client\/blob\/b4c037fb61d8b8892af34423e2c67c81218d6f8e\/taxii2client\/__init__.py#L902-L917","negative":"Returns the filename which is effectively used by the application. If\n overridden by an environment variable, it will return that filename."} {"query":"returns a fully qualified app domain name","positive":"def Func ( arg_0 , arg_1 = None ) : from . generic import get_site_proto arg_1 = arg_1 or get_site_proto ( ) arg_2 = '{proto}:\/\/{domain}' . format ( arg_1 = arg_1 , arg_0 = arg_0 ) return arg_2","id_":254147,"task_name":"https:\/\/github.com\/un33k\/django-toolware\/blob\/973f3e003dc38b812897dab88455bee37dcaf931\/toolware\/utils\/convert.py#L38-L43","negative":"Create a tar file based on the list of files passed"} {"query":"Return a datetime object from an iso 8601 representation . Return None if string is non conforming .","positive":"def Func ( arg_0 ) : arg_1 = DATE_ISO_REGEX . match ( arg_0 ) if arg_1 : arg_2 = datetime . datetime ( year = int ( arg_1 . group ( DATE_ISO_YEAR_GRP ) ) , month = int ( arg_1 . group ( DATE_ISO_MONTH_GRP ) ) , day = int ( arg_1 . group ( DATE_ISO_DAY_GRP ) ) , hour = int ( arg_1 . group ( DATE_ISO_HOUR_GRP ) ) , second = int ( arg_1 . group ( DATE_ISO_SEC_GRP ) ) , minute = int ( arg_1 . group ( DATE_ISO_MIN_GRP ) ) ) return arg_2 else : return None","id_":254148,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/utils.py#L48-L63","negative":"Process current member with 'op' operation."} {"query":"Wait for a particular UI event to occur ; this can be built upon in NativeUIElement for specific convenience methods .","positive":"def Func ( arg_0 , arg_1 , arg_2 , ** arg_3 ) : arg_4 = arg_0 . _matchOther arg_5 = None arg_6 = None arg_7 = None if 'callback' in arg_3 : arg_4 = arg_3 [ 'callback' ] del arg_3 [ 'callback' ] if 'args' in arg_3 : if not isinstance ( arg_3 [ 'args' ] , tuple ) : arg_8 = 'Notification callback args not given as a tuple' raise TypeError ( arg_8 ) arg_6 = arg_3 [ 'args' ] del arg_3 [ 'args' ] if 'kwargs' in arg_3 : if not isinstance ( arg_3 [ 'kwargs' ] , dict ) : arg_8 = 'Notification callback kwargs not given as a dict' raise TypeError ( arg_8 ) arg_7 = arg_3 [ 'kwargs' ] del arg_3 [ 'kwargs' ] if arg_3 : if arg_7 : arg_7 . update ( arg_3 ) else : arg_7 = arg_3 else : arg_6 = ( arg_5 , ) arg_7 = arg_3 return arg_0 . 
_setNotification ( arg_1 , arg_2 , arg_4 , arg_6 , arg_7 )","id_":254149,"task_name":"https:\/\/github.com\/alex-kostirin\/pyatomac\/blob\/3f46f6feb4504315eec07abb18bb41be4d257aeb\/atomac\/AXClasses.py#L606-L654","negative":"Get the context for this view."} {"query":"Load the configuration .","positive":"def Func ( arg_0 = False , arg_1 = None ) : if \"config_loaded\" not in INTERN : Load ( CURRENT_DIRECTORY ) if not arg_0 : DirectoryStructure ( ) INTERN . update ( { \"config_loaded\" : True } ) if arg_1 and isinstance ( arg_1 , dict ) : CONFIGURATION . update ( arg_1 )","id_":254150,"task_name":"https:\/\/github.com\/funilrys\/PyFunceble\/blob\/cdf69cbde120199171f7158e1c33635753e6e2f5\/PyFunceble\/__init__.py#L475-L515","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."} {"query":"Encode body of request to bytes and update content - type if required .","positive":"def Func ( arg_0 ) : if isinstance ( arg_0 . body , text_type ) : arg_1 = arg_0 . headers . get ( 'content-type' , 'text\/plain' ) . split ( ';' ) if len ( arg_1 ) == 2 : arg_2 , arg_3 = arg_1 arg_3 = arg_3 . split ( '=' ) [ 1 ] arg_0 . body = arg_0 . body . encode ( arg_3 ) else : arg_2 = arg_1 [ 0 ] if ( arg_2 == 'application\/x-www-form-urlencoded' or 'x-amz-' in arg_2 ) : arg_0 . body = arg_0 . body . encode ( ) else : arg_0 . body = arg_0 . body . encode ( 'utf-8' ) arg_0 . headers [ 'content-type' ] = arg_2 + '; charset=utf-8'","id_":254151,"task_name":"https:\/\/github.com\/sam-washington\/requests-aws4auth\/blob\/1201e470c6d5847b7fe42e937a55755e1895e72c\/requests_aws4auth\/aws4auth.py#L469-L495","negative":"Make preparations before running Tank"} {"query":"Assumes that start and stop are already in buffer coordinates . value is a byte iterable . value_len is fractional .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : assert arg_2 >= arg_1 and arg_4 >= 0 arg_5 = arg_2 - arg_1 if arg_5 < arg_4 : arg_0 . _insert_zeros ( arg_2 , arg_2 + arg_4 - arg_5 ) arg_0 . _copy_to_range ( arg_1 , arg_3 , arg_4 ) elif arg_5 > arg_4 : arg_0 . _del_range ( arg_2 - ( arg_5 - arg_4 ) , arg_2 ) arg_0 . _copy_to_range ( arg_1 , arg_3 , arg_4 ) else : arg_0 . _copy_to_range ( arg_1 , arg_3 , arg_4 )","id_":254152,"task_name":"https:\/\/github.com\/Infinidat\/infi.instruct\/blob\/93b1ab725cfd8d13227960dbf9e3ca1e7f9eebe8\/src\/infi\/instruct\/buffer\/io_buffer.py#L167-L181","negative":"Given a lib2to3 node, return its string representation."} {"query":"Run a dataset query against Citrination .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _validate_search_query ( arg_1 ) return arg_0 . _execute_search_query ( arg_1 , DatasetSearchResult )","id_":254153,"task_name":"https:\/\/github.com\/CitrineInformatics\/python-citrination-client\/blob\/409984fc65ce101a620f069263f155303492465c\/citrination_client\/search\/client.py#L72-L86","negative":"Sequentially update the actors, the world, and the messaging system. \n The theater terminates once all of the actors indicate that they are done."} {"query":"A decorator for a function implementing dispatch_fn for dispatch_key .","positive":"def Func ( arg_0 , arg_1 = None ) : def apply_decorator ( arg_2 ) : if arg_1 is None : arg_0 . __multi_default__ = arg_2 else : arg_0 . 
__multi__ [ arg_1 ] = arg_2 return arg_2 return apply_decorator","id_":254154,"task_name":"https:\/\/github.com\/dialoguemd\/multi-method\/blob\/8b405d4c5ad74a2a36a4ecf88283262defa2e737\/dialogue\/multi_method\/__init__.py#L30-L45","negative":"Get the context for this view."} {"query":"Resolves environment from . cpenv file ... recursively walks up the tree in attempt to find a . cpenv file","positive":"def Func ( arg_0 , arg_1 ) : if not os . path . exists ( arg_1 ) : raise ResolveError if os . path . isfile ( arg_1 ) : arg_1 = os . path . dirname ( arg_1 ) for arg_2 , arg_3 , arg_3 in walk_up ( arg_1 ) : if is_redirecting ( arg_2 ) : arg_4 = redirect_to_env_paths ( unipath ( arg_2 , '.cpenv' ) ) arg_5 = Resolver ( * arg_4 ) return arg_5 . resolve ( ) raise ResolveError","id_":254155,"task_name":"https:\/\/github.com\/cpenv\/cpenv\/blob\/afbb569ae04002743db041d3629a5be8c290bd89\/cpenv\/resolver.py#L207-L223","negative":"Set a smoothing Gaussian kernel given its FWHM in mm."} {"query":"Send a password reset to the user s email address .","positive":"def Func ( arg_0 ) : arg_1 = Site . objects . get_current ( ) arg_0 . password_reset_notification ( user = arg_0 , arg_1 = arg_1 ) . notify ( )","id_":254156,"task_name":"https:\/\/github.com\/incuna\/django-user-management\/blob\/6784e33191d4eff624d2cf2df9ca01db4f23c9c6\/user_management\/models\/mixins.py#L163-L166","negative":"Subtract the arg from the value."} {"query":"Triggers specific class methods using a simple reflection mechanism based on the given input dictionary params .","positive":"def Func ( arg_0 , arg_1 ) : for arg_2 in sorted ( arg_1 ) : arg_3 = arg_1 [ arg_2 ] arg_4 = arg_0 if arg_2 . startswith ( 'response_' ) or arg_2 . startswith ( 'reply_' ) : arg_2 = arg_2 . replace ( 'response_' , '' ) . replace ( 'reply_' , '' ) if hasattr ( arg_0 , '_response' ) : arg_4 = arg_0 . _response arg_5 = getattr ( arg_4 , arg_2 , None ) arg_6 = arg_2 in dir ( arg_4 ) arg_7 = ismethod ( arg_5 ) and not isfunction ( arg_5 ) if not arg_7 and not arg_6 : raise PookInvalidArgument ( 'Unsupported argument: {}' . format ( arg_2 ) ) if arg_7 : arg_5 ( arg_3 ) else : setattr ( arg_4 , arg_2 , arg_3 )","id_":254157,"task_name":"https:\/\/github.com\/h2non\/pook\/blob\/e64094e41e4d89d98d2d29af7608ef27dc50cf19\/pook\/helpers.py#L5-L43","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Gets the response and generates the _Response object","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . _httprequest . status ( ) arg_2 = arg_0 . _httprequest . status_text ( ) arg_3 = arg_0 . _httprequest . get_all_response_headers ( ) arg_4 = [ ] for arg_5 in arg_3 . split ( '\\n' ) : if ( arg_5 . startswith ( '\\t' ) or arg_5 . startswith ( ' ' ) ) and arg_4 : arg_4 [ - 1 ] += arg_5 else : arg_4 . append ( arg_5 ) arg_6 = [ ] for arg_5 in arg_4 : if ':' in arg_5 : arg_7 = arg_5 . find ( ':' ) arg_6 . append ( ( arg_5 [ : arg_7 ] . lower ( ) , arg_5 [ arg_7 + 1 : ] . strip ( ) ) ) arg_8 = arg_0 . _httprequest . 
response_body ( ) arg_9 = len ( arg_8 ) return _Response ( arg_1 , arg_2 , arg_9 , arg_6 , arg_8 )","id_":254158,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/_http\/winhttp.py#L449-L474","negative":"Sets niceness of a process"} {"query":"Return icon for index .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . sourceModel ( ) if not arg_2 : return None return arg_2 . Func ( arg_0 . mapToSource ( arg_1 ) )","id_":254159,"task_name":"https:\/\/github.com\/4degrees\/riffle\/blob\/e5a0d908df8c93ff1ee7abdda8875fd1667df53d\/source\/riffle\/model.py#L553-L559","negative":"Stop the heartbeating and cancel all related callbacks."} {"query":"Waits for an asynchronous operation to complete .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 'Succeeded' , arg_3 = 30 , arg_4 = 5 , arg_5 = arg_6 , arg_7 = arg_8 , arg_9 = arg_10 ) : arg_11 = arg_3 \/\/ arg_4 + 1 arg_12 = time . time ( ) for arg_13 in range ( int ( arg_11 ) ) : arg_14 = arg_0 . get_operation_status ( arg_1 ) arg_15 = time . time ( ) - arg_12 if arg_14 . status == arg_2 : if arg_7 is not None : arg_7 ( arg_15 ) return arg_14 elif arg_14 . error : if arg_9 is not None : arg_16 = AzureAsyncOperationHttpError ( _ERROR_ASYNC_OP_FAILURE , arg_14 . status , arg_14 ) arg_9 ( arg_15 , arg_16 ) return arg_14 else : if arg_5 is not None : arg_5 ( arg_15 ) time . sleep ( arg_4 ) if arg_9 is not None : arg_16 = AzureAsyncOperationHttpError ( _ERROR_ASYNC_OP_TIMEOUT , arg_14 . status , arg_14 ) arg_9 ( arg_15 , arg_16 ) return arg_14","id_":254160,"task_name":"https:\/\/github.com\/Azure\/azure-sdk-for-python\/blob\/d7306fde32f60a293a7567678692bdad31e4b667\/azure-servicemanagement-legacy\/azure\/servicemanagement\/servicemanagementclient.py#L264-L318","negative":"Query for null or blank field."} {"query":"Perform inference for a single step . Given an SDR input and a weight matrix return a predicted distribution .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = arg_2 [ arg_1 ] . sum ( axis = 0 ) arg_3 = arg_3 - numpy . max ( arg_3 ) arg_4 = numpy . exp ( arg_3 ) arg_5 = arg_4 \/ numpy . sum ( arg_4 ) return arg_5","id_":254161,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/algorithms\/sdr_classifier.py#L365-L380","negative":"Rename this conversation.\n\n Hangouts only officially supports renaming group conversations, so\n custom names for one-to-one conversations may or may not appear in all\n first party clients.\n\n Args:\n name (str): New name.\n\n Raises:\n .NetworkError: If conversation cannot be renamed."} {"query":"Returns a link dictionary .","positive":"def Func ( arg_0 , arg_1 = True ) : if arg_1 : return arg_0 . _links . copy ( ) else : return arg_0 . _links","id_":254162,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/naturalnaming.py#L3220-L3234","negative":"Return the number of bytes transmitted in 1 second."} {"query":"Parse a Confluence summary JSON list .","positive":"def Func ( arg_0 ) : arg_1 = json . 
loads ( arg_0 ) arg_2 = arg_1 [ 'results' ] for arg_3 in arg_2 : yield arg_3","id_":254163,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-perceval\/blob\/41c908605e88b7ebc3a536c643fa0f212eaf9e0e\/perceval\/backends\/core\/confluence.py#L191-L205","negative":"Inform the widget about the encoding of the underlying character stream."} {"query":"Handle marking messages as read and keeping client active .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . _coroutine_queue . put ( arg_0 . _client . set_active ( ) ) arg_0 . _coroutine_queue . put ( arg_0 . _conversation . update_read_timestamp ( ) ) return super ( ) . Func ( arg_1 , arg_2 )","id_":254164,"task_name":"https:\/\/github.com\/tdryer\/hangups\/blob\/85c0bf0a57698d077461283895707260f9dbf931\/hangups\/ui\/__main__.py#L912-L920","negative":"source record and index must have been set"} {"query":"Decorator so that functions can be written to work on Series but may still be called with DataFrames .","positive":"def Func ( arg_0 ) : def wrapper ( arg_1 , * arg_2 , ** arg_3 ) : if arg_1 . ndim == 1 : return arg_0 ( arg_1 , * arg_2 , ** arg_3 ) elif arg_1 . ndim == 2 : return arg_1 . apply ( arg_0 , * arg_2 , ** arg_3 ) return wrapper","id_":254165,"task_name":"https:\/\/github.com\/quantopian\/pyfolio\/blob\/712716ab0cdebbec9fabb25eea3bf40e4354749d\/pyfolio\/utils.py#L99-L111","negative":"Find a webhook by name."} {"query":"Standardizes continuous features and expands categorical features .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 , arg_5 , arg_6 , arg_7 , arg_8 ) : arg_9 = [ ] for arg_10 , arg_11 in enumerate ( arg_2 ) : arg_12 = [ ] for arg_13 , arg_14 in enumerate ( arg_11 ) : arg_15 = arg_4 [ arg_13 ] if arg_13 == arg_0 : arg_16 = arg_8 [ arg_14 ] arg_12 . append ( arg_16 ) elif arg_15 in arg_6 : pass elif arg_15 in arg_7 : for arg_17 in arg_3 [ arg_15 ] : if arg_14 == arg_17 : arg_18 = 1.0 else : arg_18 = - 1.0 arg_12 . append ( arg_18 ) else : arg_19 = float ( ( arg_14 - arg_5 [ arg_15 ] [ 'mean' ] ) \/ arg_5 [ arg_15 ] [ 'std_dev' ] ) arg_12 . append ( arg_19 ) arg_9 . append ( arg_12 ) arg_20 = [ ] for arg_15 in arg_4 : if arg_15 in arg_6 : pass elif ( arg_15 in arg_7 ) and ( arg_15 is not arg_1 ) : for arg_17 in arg_3 [ arg_15 ] : arg_21 = '{}_{}' . format ( arg_15 , arg_17 ) arg_20 . append ( arg_21 ) else : arg_20 . append ( arg_15 ) return arg_9 , arg_20","id_":254166,"task_name":"https:\/\/github.com\/algofairness\/BlackBoxAuditing\/blob\/b06c4faed5591cd7088475b2a203127bc5820483\/BlackBoxAuditing\/model_factories\/DecisionTree.py#L142-L190","negative":"Returns how the result count compares to the query options.\n\n The return value is negative if too few results were found, zero if enough were found, and\n positive if too many were found.\n\n Returns:\n int: -1, 0, or 1."} {"query":"Attempt to sum MultivariateNormal distributions .","positive":"def Func ( arg_0 ) : arg_1 = [ tensor for distribution in arg_0 for tensor in distribution . _graph_parents ] with tf . compat . v1 . name_scope ( 'Func' , values = arg_1 ) : if all ( [ isinstance ( arg_2 , tfd . MultivariateNormalDiag ) for arg_2 in arg_0 ] ) : return tfd . MultivariateNormalDiag ( loc = sum ( [ arg_2 . mean ( ) for arg_2 in arg_0 ] ) , scale_diag = tf . sqrt ( sum ( [ arg_2 . scale . diag ** 2 for arg_2 in arg_0 ] ) ) ) else : raise NotImplementedError ( 'Sums of distributions other than MultivariateNormalDiag are not ' 'currently implemented. (given: {})' . 
format ( arg_0 ) )","id_":254167,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/sts\/internal\/util.py#L164-L198","negative":"This method is called before first step of simulation."} {"query":"Emulate a single instruction .","positive":"def Func ( arg_0 , arg_1 ) : while True : arg_0 . reset ( ) for arg_2 in arg_0 . _should_be_mapped : arg_3 , arg_4 = arg_0 . _should_be_mapped [ arg_2 ] arg_0 . _emu . mem_map ( arg_2 , arg_3 , arg_4 ) for arg_5 , arg_6 in arg_0 . _should_be_written . items ( ) : for arg_7 , arg_8 in enumerate ( arg_6 , start = arg_5 ) : if issymbolic ( arg_8 ) : from . . native . cpu . abstractcpu import ConcretizeMemory raise ConcretizeMemory ( arg_0 . _cpu . memory , arg_7 , 8 , \"Concretizing for emulation\" ) arg_0 . _emu . mem_write ( arg_5 , b'' . join ( arg_6 ) ) arg_0 . _should_try_again = False arg_0 . _step ( arg_1 ) if not arg_0 . _should_try_again : break","id_":254168,"task_name":"https:\/\/github.com\/trailofbits\/manticore\/blob\/54c5a15b1119c523ae54c09972413e8b97f11629\/manticore\/utils\/fallback_emulator.py#L162-L194","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"yield all file paths under input_dir","positive":"def Func ( arg_0 ) : for arg_1 , arg_2 , arg_3 in os . walk ( arg_0 ) : for arg_4 in arg_3 : arg_5 = os . path . join ( arg_1 , arg_4 ) yield arg_5","id_":254169,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/examples\/john_smith_chunk_writer.py#L20-L25","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."} {"query":"The list from pypi is really a list of versions . We want a list of packages with the list of versions stored inline . This converts the list from pypi into one we can use .","positive":"def Func ( arg_0 ) : arg_1 = { } for arg_2 in arg_0 : arg_3 = arg_2 [ 'name' ] arg_4 = arg_2 [ 'summary' ] arg_5 = arg_2 [ 'version' ] arg_6 = arg_2 [ '_pypi_ordering' ] if arg_6 is None : arg_6 = 0 if arg_3 not in arg_1 . keys ( ) : arg_1 [ arg_3 ] = { 'name' : arg_3 , 'summary' : arg_4 , 'versions' : [ arg_5 ] , 'score' : arg_6 , } else : arg_1 [ arg_3 ] [ 'versions' ] . append ( arg_5 ) if arg_5 == highest_version ( arg_1 [ arg_3 ] [ 'versions' ] ) : arg_1 [ arg_3 ] [ 'summary' ] = arg_4 arg_1 [ arg_3 ] [ 'score' ] = arg_6 arg_7 = sorted ( arg_1 . values ( ) , key = lambda x : x [ 'score' ] , reverse = True , ) return arg_7","id_":254170,"task_name":"https:\/\/github.com\/AkihikoITOH\/capybara\/blob\/e86c2173ea386654f4ae061148e8fbe3f25e715c\/capybara\/virtualenv\/lib\/python2.7\/site-packages\/pip\/commands\/search.py#L64-L101","negative":"Check if hdl process has event depenency on signal"} {"query":"Format an experiment result memory object for measurement level 0 .","positive":"def Func ( arg_0 ) : arg_1 = _list_to_complex_array ( arg_0 ) if not 2 <= len ( arg_1 . shape ) <= 3 : raise QiskitError ( 'Level zero memory is not of correct shape.' 
) return arg_1","id_":254171,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/result\/postprocess.py#L86-L104","negative":"Return True if we should retry. False otherwise.\n\n Args:\n exception: An exception to test for transience.\n\n Returns:\n True if we should retry. False otherwise."} {"query":"Remove a listener from the emitter .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : with contextlib . suppress ( ValueError ) : arg_0 . _listeners [ arg_1 ] . remove ( arg_2 ) return True with contextlib . suppress ( ValueError ) : arg_0 . _once [ arg_1 ] . remove ( arg_2 ) return True return False","id_":254172,"task_name":"https:\/\/github.com\/asyncdef\/eventemitter\/blob\/148b700c5846d8fdafc562d4326587da5447223f\/eventemitter\/emitter.py#L95-L121","negative":"Sets the review comment. Raises CardinalityError if\n already set. OrderError if no reviewer defined before.\n Raises SPDXValueError if comment is not free form text."} {"query":"Given a dictionary of variable attribute data from get_details display the data in the terminal .","positive":"def Func ( arg_0 ) : arg_1 = ( arg_2 for arg_2 in list ( arg_0 . keys ( ) ) if arg_2 . startswith ( 'META_' ) ) for arg_2 in arg_1 : arg_3 = arg_2 [ 5 : ] . capitalize ( ) pprint ( '{0}: {1}' . format ( arg_3 , arg_0 . pop ( arg_2 ) ) ) pprint ( arg_0 )","id_":254173,"task_name":"https:\/\/github.com\/calebsmith\/django-template-debug\/blob\/f3d52638da571164d63e5c8331d409b0743c628f\/template_debug\/templatetags\/debug_tags.py#L26-L36","negative":"Remove all binary files in the adslib directory."} {"query":"We use VBox directly instead of Docker Machine because it shaves about 0 . 5 seconds off the runtime of this check .","positive":"def Func ( ) : arg_0 = check_output_demoted ( [ 'VBoxManage' , 'list' , 'vms' ] ) for arg_1 in arg_0 . splitlines ( ) : if '\"{}\"' . format ( constants . VM_MACHINE_NAME ) in arg_1 : return True return False","id_":254174,"task_name":"https:\/\/github.com\/gamechanger\/dusty\/blob\/dc12de90bb6945023d6f43a8071e984313a1d984\/dusty\/systems\/virtualbox\/__init__.py#L64-L71","negative":"Delete the specified InactivityAlert\n\n :param tag_id: The tag ID to delete\n :type tag_id: str\n\n :raises: This will raise a\n :class:`ServerException `\n if there is an error from Logentries"} {"query":"Asserts that val is iterable and contains duplicate items .","positive":"def Func ( arg_0 ) : try : if len ( arg_0 . val ) != len ( set ( arg_0 . val ) ) : return arg_0 except TypeError : raise TypeError ( 'val is not iterable' ) arg_0 . _err ( 'Expected <%s> to contain duplicates, but did not.' % arg_0 . val )","id_":254175,"task_name":"https:\/\/github.com\/ActivisionGameScience\/assertpy\/blob\/08d799cdb01f9a25d3e20672efac991c7bc26d79\/assertpy\/assertpy.py#L330-L337","negative":"setting baudrate if supported"} {"query":"A property factory that will dispatch the to a specific validator function that will validate the user s input to ensure critical parameters are of a specific type .","positive":"def Func ( arg_0 ) : def getter ( arg_1 ) : return arg_1 . __dict__ [ arg_0 ] def setter ( arg_1 , arg_2 ) : validate_input ( arg_1 . __class__ . __name__ , arg_0 , arg_2 ) arg_1 . __dict__ [ arg_0 ] = arg_2 return property ( fget = getter , fset = setter )","id_":254176,"task_name":"https:\/\/github.com\/trp07\/messages\/blob\/7789ebc960335a59ea5d319fceed3dd349023648\/messages\/_utils.py#L35-L49","negative":"Replace all the tracks in a playlist, overwriting its existing tracks. 
\n This powerful request can be useful for replacing tracks, re-ordering existing tracks, or clearing the playlist.\n\n Parameters\n ----------\n playlist : Union[str, PLaylist]\n The playlist to modify\n tracks : Sequence[Union[str, Track]]\n Tracks to place in the playlist"} {"query":"Draw all line strings onto a given image .","positive":"def Func ( arg_0 , arg_1 , arg_2 = ( 0 , 255 , 0 ) , arg_3 = None , arg_4 = None , arg_5 = 1.0 , arg_6 = None , arg_7 = None , arg_8 = 1 , arg_9 = None , arg_10 = None , arg_11 = True , arg_12 = False ) : for arg_13 in arg_0 . line_strings : arg_1 = arg_13 . Func ( arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = arg_7 , arg_8 = arg_8 , arg_9 = arg_9 , arg_10 = arg_10 , arg_11 = arg_11 , arg_12 = arg_12 ) return arg_1","id_":254177,"task_name":"https:\/\/github.com\/aleju\/imgaug\/blob\/786be74aa855513840113ea523c5df495dc6a8af\/imgaug\/augmentables\/lines.py#L1608-L1690","negative":"Handle the kernel's death by asking if the user wants to restart."} {"query":"\\ Received registration acknowledgement from the BotnetBot as well as the name of the command channel so join up and indicate that registration succeeded","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 ) : arg_0 . channel = arg_4 arg_0 . conn . join ( arg_0 . channel ) arg_0 . registered . set ( )","id_":254178,"task_name":"https:\/\/github.com\/coleifer\/irc\/blob\/f9d2bd6369aafe6cb0916c9406270ca8ecea2080\/botnet\/worker.py#L123-L134","negative":"Output profiler report."} {"query":"given a config dict with streamcorpus_pipeline as a key find all keys under streamcorpus_pipeline that end with _path and if the value of that key is a relative path convert it to an absolute path using the value provided by root_path","positive":"def Func ( arg_0 ) : if not 'streamcorpus_pipeline' in arg_0 : logger . critical ( 'bad config: %r' , arg_0 ) raise ConfigurationError ( 'missing \"streamcorpus_pipeline\" from config' ) arg_1 = arg_0 [ 'streamcorpus_pipeline' ] . pop ( 'root_path' , None ) if not arg_1 : arg_1 = os . getcwd ( ) if not arg_1 . startswith ( '\/' ) : arg_1 = os . path . join ( os . getcwd ( ) , arg_1 ) def recursive_abs_path ( arg_2 , arg_1 ) : for arg_3 , arg_4 in arg_2 . items ( ) : if isinstance ( arg_4 , basestring ) : if arg_3 . endswith ( 'path' ) : if re . match ( '^http.?:\/\/' , arg_4 ) : continue if not arg_4 . startswith ( '\/' ) : arg_2 [ arg_3 ] = os . path . join ( arg_1 , arg_4 ) elif isinstance ( arg_4 , dict ) : recursive_abs_path ( arg_4 , arg_1 ) recursive_abs_path ( arg_0 , arg_1 ) arg_0 [ 'root_path' ] = arg_1","id_":254179,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/run.py#L61-L95","negative":"Fill entire strip by giving individual RGB values instead of tuple"} {"query":"run child process to get OWPL output","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = dict ( INPUT_FILE = arg_1 , OUTPUT_FILE = arg_2 , PIPELINE_ROOT = arg_3 ) arg_5 = pipeline_cmd_templates [ arg_0 ] % arg_4 print arg_5 print 'creating %s' % arg_2 arg_6 = time . time ( ) arg_7 = subprocess . Popen ( arg_5 , stderr = subprocess . PIPE , shell = True ) arg_8 , arg_9 = arg_7 . communicate ( ) assert arg_7 . returncode == 0 and 'Exception' not in arg_9 , arg_9 arg_10 = time . 
time ( ) - arg_6 print 'created %s in %.1f sec' % ( arg_2 , arg_10 ) ''' postproc_cmd = postproc_cmd_templates[tagger_id] % params print postproc_cmd ## replace this with log.info() print 'creating %s' % tmp_ner_raw_path start_time = time.time() gpg_child = subprocess.Popen( postproc_cmd, stderr=subprocess.PIPE, shell=True) s_out, errors = gpg_child.communicate() assert gpg_child.returncode == 0 and 'Exception' not in errors, errors elapsed = time.time() - start_time ## replace this with log.info() print 'created %s in %.1f sec' % (tmp_ner_path, elapsed) '''","id_":254180,"task_name":"https:\/\/github.com\/trec-kba\/streamcorpus-pipeline\/blob\/8bb82ea1beb83c6b40ed03fa1659df2897c2292a\/streamcorpus_pipeline\/_run_lingpipe.py#L60-L100","negative":"Check the spacing of a single equals sign."} {"query":"Check if the node is being used as an iterator .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . parent if isinstance ( arg_1 , astroid . For ) : return True if isinstance ( arg_1 , astroid . Comprehension ) : if arg_1 . iter == arg_0 : return True elif isinstance ( arg_1 , astroid . Call ) : if isinstance ( arg_1 . func , astroid . Name ) : arg_2 = arg_1 . func . lookup ( arg_1 . func . name ) [ 0 ] if _is_builtin ( arg_2 ) and arg_1 . func . name in _ACCEPTS_ITERATOR : return True elif isinstance ( arg_1 . func , astroid . Attribute ) : if arg_1 . func . attrname in ATTRIBUTES_ACCEPTS_ITERATOR : return True arg_3 = utils . safe_infer ( arg_1 . func ) if arg_3 : if arg_3 . qname ( ) in _BUILTIN_METHOD_ACCEPTS_ITERATOR : return True arg_4 = arg_3 . root ( ) if arg_4 and arg_4 . name == \"itertools\" : return True elif isinstance ( arg_1 , astroid . Assign ) and isinstance ( arg_1 . targets [ 0 ] , ( astroid . List , astroid . Tuple ) ) : if len ( arg_1 . targets [ 0 ] . elts ) > 1 : return True elif ( isinstance ( arg_1 , astroid . Compare ) and len ( arg_1 . ops ) == 1 and arg_1 . ops [ 0 ] [ 0 ] == \"in\" ) : return True elif isinstance ( arg_1 , astroid . YieldFrom ) : return True if isinstance ( arg_1 , astroid . Starred ) : return True return False","id_":254181,"task_name":"https:\/\/github.com\/PyCQA\/pylint\/blob\/2bf5c61a3ff6ae90613b81679de42c0f19aea600\/pylint\/checkers\/python3.py#L96-L150","negative":"Uploads file to GDocs spreadsheet.\n Content type can be provided as argument, default is ods."} {"query":"Generates python code for a clause repeated 1 or more times .","positive":"def Func ( arg_0 , arg_1 : arg_2 . Rep0N ) -> [ ast . stmt ] : arg_4 = arg_0 . visit ( arg_1 . pt ) if isinstance ( arg_4 , ast . expr ) : return ( arg_0 . _clause ( arg_4 ) + arg_0 . visit_Rep0N ( arg_1 ) ) arg_0 . in_loop += 1 arg_4 = arg_0 . _clause ( arg_0 . visit ( arg_1 . pt ) ) arg_0 . in_loop -= 1 return arg_0 . _clause ( arg_0 . visit ( arg_1 . pt ) ) + [ ast . While ( ast . Name ( 'True' , ast . Load ( ) ) , arg_4 , [ ] ) ]","id_":254182,"task_name":"https:\/\/github.com\/LionelAuroux\/pyrser\/blob\/f153a97ef2b6bf915a1ed468c0252a9a59b754d5\/pyrser\/passes\/topython.py#L246-L260","negative":"Save the state of hooks in the sys module.\n\n This has to be called after self.user_module is created."} {"query":"Move the selected object","positive":"def Func ( arg_0 , arg_1 ) : if DEBUG : print \"move!\" if arg_0 . current and not arg_0 . overlay : arg_2 = arg_0 . current arg_3 , arg_4 = arg_0 . start arg_5 , arg_6 = wx . GetMousePosition ( ) arg_5 , arg_6 = ( arg_5 + arg_3 , arg_6 + arg_4 ) if arg_1 . 
ShiftDown ( ) : arg_5 = arg_5 \/ GRID_SIZE [ 0 ] * GRID_SIZE [ 0 ] arg_6 = arg_6 \/ GRID_SIZE [ 1 ] * GRID_SIZE [ 1 ] arg_7 , arg_8 = arg_2 . obj . pos arg_9 , arg_10 = ( arg_5 - arg_7 ) , ( arg_6 - arg_8 ) for arg_11 in arg_0 . selection : arg_5 , arg_6 = arg_11 . pos arg_5 = arg_5 + arg_9 arg_6 = arg_6 + arg_10 arg_11 . pos = ( wx . Point ( arg_5 , arg_6 ) ) elif arg_0 . overlay : arg_2 = arg_0 . current arg_12 = arg_1 . GetPosition ( ) if arg_1 . GetEventObject ( ) != arg_2 : arg_12 = arg_1 . GetEventObject ( ) . ClientToScreen ( arg_12 ) arg_12 = arg_2 . ScreenToClient ( arg_12 ) arg_13 = wx . RectPP ( arg_0 . pos , arg_12 ) arg_14 = wx . ClientDC ( arg_2 ) arg_15 = wx . DCOverlay ( arg_0 . overlay , arg_14 ) arg_15 . Clear ( ) arg_14 . SetPen ( wx . Pen ( \"blue\" , 2 ) ) if 'wxMac' in wx . PlatformInfo : arg_14 . SetBrush ( wx . Brush ( wx . Colour ( 0xC0 , 0xC0 , 0xC0 , 0x80 ) ) ) else : arg_14 . SetBrush ( wx . TRANSPARENT_BRUSH ) arg_14 . DrawRectangleRect ( arg_13 ) del arg_15","id_":254183,"task_name":"https:\/\/github.com\/reingart\/gui2py\/blob\/aca0a05f6fcde55c94ad7cc058671a06608b01a4\/gui\/tools\/designer.py#L165-L206","negative":"Use ``\\\\r`` to overdraw the current line with the given text.\n\n This function transparently handles tracking how much overdrawing is\n necessary to erase the previous line when used consistently.\n\n :param text: The text to be outputted\n :param newline: Whether to start a new line and reset the length count.\n :type text: :class:`~__builtins__.str`\n :type newline: :class:`~__builtins__.bool`"} {"query":"Wrap simpler AST generators to return a GeneratedPyAST .","positive":"def Func ( arg_0 ) : @ wraps ( arg_0 ) def wrapped_ast_generator ( arg_1 : arg_2 , arg_3 : arg_4 ) -> GeneratedPyAST : return GeneratedPyAST ( node = arg_0 ( arg_1 , arg_3 ) ) return wrapped_ast_generator","id_":254184,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/compiler\/generator.py#L317-L324","negative":"Attaches a bundle object\n\n :param bundle: :class:`flask_journey.BlueprintBundle` object\n :raises:\n - IncompatibleBundle if the bundle is not of type `BlueprintBundle`\n - ConflictingPath if a bundle already exists at bundle.path\n - MissingBlueprints if the bundle doesn't contain any blueprints"} {"query":"Creates a 2D quad VAO using 2 triangles with normals and texture coordinates .","positive":"def Func ( arg_0 , arg_1 , arg_2 = 0.0 , arg_3 = 0.0 ) -> VAO : arg_4 = numpy . array ( [ arg_2 - arg_0 \/ 2.0 , arg_3 + arg_1 \/ 2.0 , 0.0 , arg_2 - arg_0 \/ 2.0 , arg_3 - arg_1 \/ 2.0 , 0.0 , arg_2 + arg_0 \/ 2.0 , arg_3 - arg_1 \/ 2.0 , 0.0 , arg_2 - arg_0 \/ 2.0 , arg_3 + arg_1 \/ 2.0 , 0.0 , arg_2 + arg_0 \/ 2.0 , arg_3 - arg_1 \/ 2.0 , 0.0 , arg_2 + arg_0 \/ 2.0 , arg_3 + arg_1 \/ 2.0 , 0.0 , ] , dtype = numpy . float32 ) arg_5 = numpy . array ( [ 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , ] , dtype = numpy . float32 ) arg_6 = numpy . array ( [ 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 1.0 , 0.0 , 1.0 , 1.0 , ] , dtype = numpy . float32 ) arg_7 = VAO ( \"geometry:quad\" , mode = moderngl . TRIANGLES ) arg_7 . buffer ( arg_4 , '3f' , [ \"in_position\" ] ) arg_7 . buffer ( arg_5 , '3f' , [ \"in_normal\" ] ) arg_7 . 
buffer ( arg_6 , '2f' , [ \"in_uv\" ] ) return arg_7","id_":254185,"task_name":"https:\/\/github.com\/Contraz\/demosys-py\/blob\/6466128a3029c4d09631420ccce73024025bd5b6\/demosys\/geometry\/quad.py#L17-L64","negative":"Resets the builder's state to allow building new annotations."} {"query":"Sanitizes HTML removing not allowed tags and attributes .","positive":"def Func ( arg_0 , arg_1 = arg_2 , arg_3 = arg_4 ) : return clean ( arg_0 , tags = arg_1 , attributes = arg_3 , strip = True )","id_":254186,"task_name":"https:\/\/github.com\/idlesign\/django-yaturbo\/blob\/a5ac9053bb800ea8082dc0615b93398917c3290a\/yaturbo\/toolbox.py#L18-L28","negative":"Show an image.\n\n Args:\n img (str or ndarray): The image to be displayed.\n win_name (str): The window name.\n wait_time (int): Value of waitKey param."} {"query":"Like many_until but must consume at least one of these .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = [ arg_0 ( ) ] arg_3 , arg_4 = many_until ( arg_0 , arg_1 ) return ( arg_2 + arg_3 , arg_4 )","id_":254187,"task_name":"https:\/\/github.com\/brehaut\/picoparse\/blob\/5e07c8e687a021bba58a5a2a76696c7a7ff35a1c\/picoparse\/__init__.py#L431-L436","negative":"Prints a file on the device to console"} {"query":"Descend depth first into all child nodes","positive":"def Func ( arg_0 , arg_1 = True ) : if arg_1 : yield arg_0 for arg_2 in arg_0 . child_list : yield arg_2 yield from arg_2 . Func ( )","id_":254188,"task_name":"https:\/\/github.com\/datacamp\/pythonwhat\/blob\/ffbf7f8436a51f77c22f3bed75ba3bc37a5c666f\/pythonwhat\/probe.py#L146-L153","negative":"Returns a decorator to swallow a requests exception for modules that\n are not accessible without logging in, and turn it into an Unavailable\n exception."} {"query":"Decorator for multi to remove nodes for original test functions from root node","positive":"def Func ( arg_0 ) : @ wraps ( arg_0 ) def wrapper ( * arg_1 , ** arg_2 ) : arg_1 = ( arg_1 [ 0 ] if len ( arg_1 ) == 1 and isinstance ( arg_1 [ 0 ] , ( list , tuple ) ) else arg_1 ) for arg_3 in arg_1 : if isinstance ( arg_3 , Node ) and arg_3 . parent . name is \"root\" : arg_3 . parent . remove_child ( arg_3 ) arg_3 . update_child_calls ( ) return arg_0 ( * arg_1 , ** arg_2 ) return wrapper","id_":254189,"task_name":"https:\/\/github.com\/datacamp\/pythonwhat\/blob\/ffbf7f8436a51f77c22f3bed75ba3bc37a5c666f\/pythonwhat\/sct_syntax.py#L13-L27","negative":"Rely on pytz.localize to ensure new result honors DST."} {"query":"Temporarily write an alembic . ini file for use with alembic migration scripts .","positive":"def Func ( arg_0 , arg_1 ) : with TemporaryDirectory ( ) as tempdir : arg_2 = join ( tempdir , 'temp_alembic.ini' ) with open ( arg_2 , 'w' ) as f : f . write ( ALEMBIC_INI_TEMPLATE . format ( arg_0 = arg_0 , arg_1 = arg_1 , ) ) yield arg_2","id_":254190,"task_name":"https:\/\/github.com\/quantopian\/pgcontents\/blob\/ed36268b7917332d16868208e1e565742a8753e1\/pgcontents\/utils\/migrate.py#L17-L31","negative":"r'\\s+"} {"query":"Primitive string formatter .","positive":"def Func ( arg_0 , arg_1 = \"%d:%H:%M:%S\" ) : arg_2 = { \"%d\" : str ( arg_0 . days ) , \"%H\" : \"{0:02d}\" . format ( arg_0 . dhours ) , \"%h\" : str ( 24 * arg_0 . days + arg_0 . dhours ) , \"%M\" : \"{0:02d}\" . format ( arg_0 . dminutes ) , \"%S\" : \"{0:02d}\" . format ( arg_0 . dseconds ) , } arg_3 = arg_1 for arg_4 , arg_5 in arg_2 . items ( ) : arg_3 = arg_3 . 
replace ( arg_4 , arg_5 ) return arg_3","id_":254191,"task_name":"https:\/\/github.com\/Becksteinlab\/GromacsWrapper\/blob\/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9\/gromacs\/utilities.py#L792-L816","negative":"Read data synchronous from an ADS-device from data name.\n\n :param int port: local AMS port as returned by adsPortOpenEx()\n :param pyads.structs.AmsAddr address: local or remote AmsAddr\n :param string data_name: data name\n :param Type data_type: type of the data given to the PLC, according to\n PLCTYPE constants\n :param bool return_ctypes: return ctypes instead of python types if True\n (default: False)\n :rtype: data_type\n :return: value: **value**"} {"query":"Count non missing value for expression on an array which represents healpix data .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = None , arg_3 = 12 , arg_4 = 8 , arg_5 = None , arg_6 = None , arg_7 = arg_8 , arg_9 = False , arg_10 = None , arg_11 = None ) : import healpy as hp if arg_2 is None : if arg_0 . ucds . get ( \"source_id\" , None ) == 'meta.id;meta.main' : arg_2 = \"source_id\/34359738368\" if arg_2 is None : raise ValueError ( \"no healpix_expression given, and was unable to guess\" ) arg_12 = arg_3 - arg_4 arg_13 = 2 ** arg_4 arg_14 = hp . nside2npix ( arg_13 ) arg_15 = 4 ** arg_12 arg_16 = \"%s\/%s\" % ( arg_2 , arg_15 ) arg_5 = [ arg_16 ] + ( [ ] if arg_5 is None else _ensure_list ( arg_5 ) ) arg_7 = ( arg_14 , ) + _expand_shape ( arg_7 , len ( arg_5 ) - 1 ) arg_17 = 1. \/ arg_15 \/ 2 arg_6 = [ [ - arg_17 , arg_14 - arg_17 ] ] + ( [ ] if arg_6 is None else arg_6 ) return arg_0 . count ( arg_1 , arg_5 = arg_5 , arg_6 = arg_6 , arg_7 = arg_7 , arg_9 = arg_9 , arg_10 = arg_10 , arg_11 = arg_11 )","id_":254192,"task_name":"https:\/\/github.com\/vaexio\/vaex\/blob\/a45b672f8287afca2ada8e36b74b604b9b28dd85\/packages\/vaex-core\/vaex\/dataframe.py#L1657-L1690","negative":"Fetch the base Managed Policy.\n\n This includes the base policy and the latest version document.\n\n :param managed_policy:\n :param conn:\n :return:"} {"query":"Slices dist along its batch dimensions . Helper for tfd . Distribution .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : if not isinstance ( arg_3 , collections . Sequence ) : arg_3 = ( arg_3 , ) arg_4 , arg_5 = getattr ( arg_0 , PROVENANCE_ATTR , ( arg_0 , [ ] ) ) arg_5 += [ ( arg_3 , arg_2 ) ] arg_0 = _apply_slice_sequence ( arg_4 , arg_1 , arg_5 ) setattr ( arg_0 , PROVENANCE_ATTR , ( arg_4 , arg_5 ) ) return arg_0","id_":254193,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/internal\/slicing.py#L165-L191","negative":"Called when there is an error in the websocket"} {"query":"Clean up stats file if configured to do so .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_0 . available ( ) : return try : arg_0 . prof . close ( ) except AttributeError : pass if arg_0 . clean_stats_file : if arg_0 . fileno : try : os . close ( arg_0 . fileno ) except OSError : pass try : os . unlink ( arg_0 . 
pfile ) except OSError : pass return None","id_":254194,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/nose\/plugins\/prof.py#L127-L149","negative":"Unregisters the given model with Algolia engine.\n\n If the given model is not registered with Algolia engine, a\n RegistrationError will be raised."} {"query":"Format a timedelta object for display to users","positive":"def Func ( arg_0 ) : def get_total_seconds ( arg_1 ) : return ( arg_1 . microseconds + ( arg_1 . seconds + arg_1 . days * 24 * 3600 ) * 1e6 ) \/ 1e6 arg_2 = int ( get_total_seconds ( arg_0 ) ) arg_3 = [ ( 'year' , 60 * 60 * 24 * 365 ) , ( 'month' , 60 * 60 * 24 * 30 ) , ( 'day' , 60 * 60 * 24 ) , ( 'hour' , 60 * 60 ) , ( 'minute' , 60 ) , ( 'second' , 1 ) ] arg_4 = [ ] for arg_5 , arg_6 in arg_3 : if arg_2 > arg_6 : arg_7 , arg_2 = divmod ( arg_2 , arg_6 ) if arg_7 == 1 : arg_4 . append ( \"%s %s\" % ( arg_7 , arg_5 ) ) else : arg_4 . append ( \"%s %ss\" % ( arg_7 , arg_5 ) ) return \", \" . join ( arg_4 )","id_":254195,"task_name":"https:\/\/github.com\/msmbuilder\/osprey\/blob\/ea09da24e45820e1300e24a52fefa6c849f7a986\/osprey\/utils.py#L91-L120","negative":"Convert cylindrical polar velocities to Cartesian.\n\n :param x:\n :param y:\n :param azimuth: Optional expression for the azimuth in degrees , may lead to a better performance when given.\n :param vr:\n :param vazimuth:\n :param vx_out:\n :param vy_out:\n :param propagate_uncertainties: {propagate_uncertainties}"} {"query":"Get file information dict from the repository given its relative path .","positive":"def Func ( arg_0 , arg_1 ) : arg_1 = arg_0 . to_repo_relative_path ( path = arg_1 , split = False ) arg_2 = os . path . basename ( arg_1 ) arg_3 , arg_4 , arg_5 , arg_6 = arg_0 . is_repository_file ( arg_1 ) if not arg_3 : return None , \"file is not a registered repository file.\" if not arg_5 : return None , \"file is a registered repository file but info file missing\" arg_7 = os . path . join ( arg_0 . __path , os . path . dirname ( arg_1 ) , arg_0 . __fileInfo % arg_2 ) try : with open ( arg_7 , 'rb' ) as fd : arg_8 = pickle . load ( fd ) except Exception as err : return None , \"Unable to read file info from disk (%s)\" % str ( err ) return arg_8 , ''","id_":254196,"task_name":"https:\/\/github.com\/bachiraoun\/pyrep\/blob\/0449bf2fad3e3e8dda855d4686a8869efeefd433\/Repository.py#L1248-L1274","negative":"Return the generation index of the first generation in the given\n swarm that does not have numParticles particles in it, either still in the\n running state or completed. This does not include orphaned particles.\n\n Parameters:\n ---------------------------------------------------------------------\n swarmId: A string representation of the sorted list of encoders in this\n swarm. For example '__address_encoder.__gym_encoder'\n minNumParticles: minium number of partices required for a full\n generation.\n\n retval: generation index, or None if no particles at all."} {"query":"Get a list of identity providers choices for enterprise customer .","positive":"def Func ( ) : try : from third_party_auth . provider import arg_0 except ImportError as exception : LOGGER . warning ( \"Could not import Registry from third_party_auth.provider\" ) LOGGER . warning ( exception ) arg_0 = None arg_1 = [ ( \"\" , \"-\" * 7 ) ] if arg_0 : return arg_1 + [ ( arg_2 . provider_id , arg_2 . name ) for arg_2 in arg_0 . 
enabled ( ) ] return None","id_":254197,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/utils.py#L134-L151","negative":"Query for null or blank field."} {"query":"Encode input in the way that is appropriate to the observation space","positive":"def Func ( arg_0 , arg_1 ) : if isinstance ( arg_0 , Discrete ) : return tf . to_float ( tf . one_hot ( arg_1 , arg_0 . n ) ) elif isinstance ( arg_0 , Box ) : return tf . to_float ( arg_1 ) elif isinstance ( arg_0 , MultiDiscrete ) : arg_1 = tf . cast ( arg_1 , tf . int32 ) arg_2 = [ tf . to_float ( tf . one_hot ( arg_1 [ ... , i ] , arg_0 . nvec [ i ] ) ) for i in range ( arg_1 . shape [ - 1 ] ) ] return tf . concat ( arg_2 , axis = - 1 ) else : raise NotImplementedError","id_":254198,"task_name":"https:\/\/github.com\/openai\/baselines\/blob\/3301089b48c42b87b396e246ea3f56fa4bfc9678\/baselines\/common\/input.py#L43-L63","negative":"Initiate connection to APRS server and attempt to login\n\n blocking = False - Should we block until connected and logged-in\n retry = 30 - Retry interval in seconds"} {"query":"Creates the toolkit - specific control that represents the editor . parent is the toolkit - specific control that is the editor s parent .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . graph = arg_0 . editor_input . load ( ) arg_3 = View ( Item ( name = \"graph\" , editor = graph_tree_editor , show_label = False ) , id = \"godot.graph_editor\" , kind = \"live\" , resizable = True ) arg_4 = arg_0 . edit_traits ( arg_3 = arg_3 , arg_1 = arg_1 , kind = \"subpanel\" ) return arg_4","id_":254199,"task_name":"https:\/\/github.com\/rwl\/godot\/blob\/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f\/godot\/plugin\/tree_editor.py#L96-L109","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Compute marginal pdf for each individual observable .","positive":"def Func ( arg_0 ) : arg_1 = tf . broadcast_to ( arg_0 . _log_init , tf . concat ( [ arg_0 . batch_shape_tensor ( ) , [ arg_0 . _num_states ] ] , axis = 0 ) ) if arg_0 . _num_steps > 1 : arg_2 = arg_0 . _log_trans def forward_step ( arg_3 , arg_4 ) : return _log_vector_matrix ( arg_3 , arg_2 ) arg_5 = tf . zeros ( arg_0 . _num_steps - 1 , dtype = tf . float32 ) arg_6 = tf . scan ( forward_step , arg_5 , initializer = arg_1 , name = \"forward_log_probs\" ) arg_6 = tf . concat ( [ [ arg_1 ] , arg_6 ] , axis = 0 ) else : arg_6 = arg_1 [ tf . newaxis , ... ] return tf . exp ( arg_6 )","id_":254200,"task_name":"https:\/\/github.com\/tensorflow\/probability\/blob\/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5\/tensorflow_probability\/python\/distributions\/hidden_markov_model.py#L489-L517","negative":"Run a scan in the path setted."} {"query":"Normalize an image by rescale samplewise centering and samplewise centering in order .","positive":"def Func ( arg_0 , arg_1 = None , arg_2 = False , arg_3 = False , arg_4 = 2 , arg_5 = 1e-7 ) : if arg_1 : arg_0 *= arg_1 if arg_0 . shape [ arg_4 ] == 1 : if arg_2 : arg_0 = arg_0 - np . mean ( arg_0 ) if arg_3 : arg_0 = arg_0 \/ np . std ( arg_0 ) return arg_0 elif arg_0 . shape [ arg_4 ] == 3 : if arg_2 : arg_0 = arg_0 - np . mean ( arg_0 , axis = arg_4 , keepdims = True ) if arg_3 : arg_0 = arg_0 \/ ( np . std ( arg_0 , axis = arg_4 , keepdims = True ) + arg_5 ) return arg_0 else : raise Exception ( \"Unsupported channels %d\" % arg_0 . 
shape [ arg_4 ] )","id_":254201,"task_name":"https:\/\/github.com\/tensorlayer\/tensorlayer\/blob\/aa9e52e36c7058a7e6fd81d36563ca6850b21956\/tensorlayer\/prepro.py#L1911-L1965","negative":"Get distribution version.\n\n This method is enhanced compared to original distutils implementation.\n If the version string is set to a special value then instead of using\n the actual value the real version is obtained by querying versiontools.\n\n If versiontools package is not installed then the version is obtained\n from the standard section of the ``PKG-INFO`` file. This file is\n automatically created by any source distribution. This method is less\n useful as it cannot take advantage of version control information that\n is automatically loaded by versiontools. It has the advantage of not\n requiring versiontools installation and that it does not depend on\n ``setup_requires`` feature of ``setuptools``."} {"query":"Return the metadata for the specified course run .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . get_object ( ) arg_5 = arg_4 . get_course_run ( arg_3 ) if not arg_5 : raise Http404 arg_6 = arg_0 . get_serializer_context ( ) arg_6 [ 'enterprise_customer_catalog' ] = arg_4 arg_7 = serializers . CourseRunDetailSerializer ( arg_5 , arg_6 = arg_6 ) return Response ( arg_7 . data )","id_":254202,"task_name":"https:\/\/github.com\/edx\/edx-enterprise\/blob\/aea91379ab0a87cd3bc798961fce28b60ee49a80\/enterprise\/api\/v1\/views.py#L438-L453","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Get the known catalog edges formed between two resources .","positive":"def Func ( arg_0 , ** arg_1 ) : Func = arg_0 . _query ( 'edges' , ** arg_1 ) for arg_3 in Func : arg_4 = arg_3 [ 'source_type' ] + '[' + arg_3 [ 'source_title' ] + ']' arg_5 = arg_3 [ 'target_type' ] + '[' + arg_3 [ 'target_title' ] + ']' yield Edge ( source = arg_0 . resources [ arg_4 ] , target = arg_0 . resources [ arg_5 ] , relationship = arg_3 [ 'relationship' ] , node = arg_3 [ 'certname' ] )","id_":254203,"task_name":"https:\/\/github.com\/voxpupuli\/pypuppetdb\/blob\/cedeecf48014b4ad5b8e2513ca8230c814f45603\/pypuppetdb\/api.py#L500-L519","negative":"Annotate a set of records with stored fields.\n\n Args:\n records: A list or iterator (can be a Query object)\n chunk_size: The number of records to annotate at once (max 500).\n\n Returns:\n A generator that yields one annotated record at a time."} {"query":"Inverse of dict_to_list","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_2 = arg_0 . _keys if arg_2 is None else arg_2 arg_3 = arg_0 . _defaults ( arg_2 ) for arg_4 , arg_5 in zip ( arg_2 , arg_1 ) : arg_3 [ arg_4 ] = arg_5 return arg_3","id_":254204,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/parallel\/controller\/sqlitedb.py#L269-L276","negative":"Generates a 'code_verifier' as described in section 4.1 of RFC 7636.\n\n This is a 'high-entropy cryptographic random string' that will be\n impractical for an attacker to guess.\n\n Args:\n n_bytes: integer between 31 and 96, inclusive. default: 64\n number of bytes of entropy to include in verifier.\n\n Returns:\n Bytestring, representing urlsafe base64-encoded random data."} {"query":"Return the filename of module if it can be imported .","positive":"def Func ( arg_0 ) : arg_0 = arg_0 . split ( '.' ) arg_1 = '.' . 
join ( arg_0 [ : - 1 ] ) arg_0 = arg_0 [ - 1 ] try : if not arg_1 : arg_0 = __import__ ( arg_0 ) else : arg_1 = __import__ ( arg_1 , fromlist = [ arg_0 ] ) arg_0 = getattr ( arg_1 , arg_0 , None ) arg_2 = getattr ( arg_0 , '__file__' , None ) if not arg_2 : return Unparseable ( ) if arg_2 . endswith ( '.pyc' ) : arg_2 = arg_2 [ : - 1 ] if not os . path . exists ( arg_2 ) and os . path . isfile ( arg_2 ) : return Unparseable ( ) if arg_2 . endswith ( '__init__.py' ) : arg_2 = arg_2 [ : - 11 ] return arg_2 except ImportError : return","id_":254205,"task_name":"https:\/\/github.com\/shakefu\/pyconfig\/blob\/000cb127db51e03cb4070aae6943e956193cbad5\/pyconfig\/scripts.py#L259-L311","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."} {"query":"Transform Stinespring representation to Kraus representation .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = [ ] for arg_4 in arg_0 : if arg_4 is None : arg_3 . append ( None ) else : arg_5 = arg_4 . shape [ 0 ] \/\/ arg_2 arg_6 = np . eye ( arg_2 ) arg_7 = [ ] for arg_8 in range ( arg_5 ) : arg_9 = np . zeros ( arg_5 ) arg_9 [ arg_8 ] = 1 arg_7 . append ( np . kron ( arg_6 , arg_9 [ None , : ] ) . dot ( arg_4 ) ) arg_3 . append ( arg_7 ) return tuple ( arg_3 )","id_":254206,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/quantum_info\/operators\/channel\/transformations.py#L226-L241","negative":"Post processes the estimations from the algorithm, removing empty\n segments and making sure the lenghts of the boundaries and labels\n match."} {"query":"Check the status of the incoming response raise exception if status is not 200 .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_1 [ 'status' ] arg_3 = arg_1 [ 'msg' ] if arg_2 == 400 : raise BadRequestException ( arg_3 ) elif arg_2 == 403 : raise PermissionDeniedException ( arg_3 ) elif arg_2 == 404 : raise FileNotFoundException ( arg_3 ) elif arg_2 == 451 : raise UnavailableForLegalReasonsException ( arg_3 ) elif arg_2 == 509 : raise BandwidthUsageExceeded ( arg_3 ) elif arg_2 >= 500 : raise ServerErrorException ( arg_3 )","id_":254207,"task_name":"https:\/\/github.com\/mohan3d\/PyOpenload\/blob\/7f9353915ca5546926ef07be9395c6de60e761b1\/openload\/openload.py#L31-L55","negative":"update secrets will take a secrets credential file\n either located at .sregistry or the environment variable\n SREGISTRY_CLIENT_SECRETS and update the current client \n secrets as well as the associated API base."} {"query":"memoization decorator that respects args and kwargs","positive":"def Func ( arg_0 ) : arg_1 = { } @ functools . wraps ( arg_0 ) def arg_5 ( * arg_2 , ** arg_3 ) : arg_4 = _make_signature_key ( arg_2 , arg_3 ) if arg_4 not in arg_1 : arg_1 [ arg_4 ] = arg_0 ( * arg_2 , ** arg_3 ) return arg_1 [ arg_4 ] arg_5 . cache = arg_1 return arg_5","id_":254208,"task_name":"https:\/\/github.com\/Erotemic\/ubelt\/blob\/db802f3ad8abba025db74b54f86e6892b8927325\/ubelt\/util_memoize.py#L103-L150","negative":"Get the reason of this revocation.\n\n :return: The reason, or ``None`` if there is none.\n :rtype: bytes or NoneType\n\n .. seealso::\n\n :meth:`all_reasons`, which gives you a list of all supported\n reasons this method might return."} {"query":"returns 0 if the date range is violated otherwise it will return the quantity remaining under the stock limit .","positive":"def Func ( arg_0 , arg_1 , arg_2 = True ) : if arg_2 : if hasattr ( arg_0 . condition , \"remainder\" ) : return arg_0 . condition . 
remainder arg_3 = type ( arg_0 . condition ) . objects . filter ( pk = arg_0 . condition . id ) arg_3 = arg_0 . pre_filter ( arg_3 , arg_1 ) if len ( arg_3 ) > 0 : return arg_3 [ 0 ] . remainder else : return 0","id_":254209,"task_name":"https:\/\/github.com\/chrisjrn\/registrasion\/blob\/461d5846c6f9f3b7099322a94f5d9911564448e4\/registrasion\/controllers\/conditions.py#L125-L144","negative":"Replace all the tracks in a playlist, overwriting its existing tracks. \n This powerful request can be useful for replacing tracks, re-ordering existing tracks, or clearing the playlist.\n\n Parameters\n ----------\n playlist : Union[str, PLaylist]\n The playlist to modify\n tracks : Sequence[Union[str, Track]]\n Tracks to place in the playlist"} {"query":"Verifies the signature on this certificate signing request .","positive":"def Func ( arg_0 , arg_1 ) : if not isinstance ( arg_1 , PKey ) : raise TypeError ( \"pkey must be a PKey instance\" ) arg_2 = _lib . X509_REQ_Func ( arg_0 . _req , arg_1 . _pkey ) if arg_2 <= 0 : _raise_current_error ( ) return arg_2","id_":254210,"task_name":"https:\/\/github.com\/pyca\/pyopenssl\/blob\/1fbe064c50fd030948141d7d630673761525b0d0\/src\/OpenSSL\/crypto.py#L1029-L1048","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"} {"query":"Return the class corresponding to the given temporalImp string","positive":"def Func ( arg_0 ) : if arg_0 == 'py' : return backtracking_tm . BacktrackingTM elif arg_0 == 'cpp' : return backtracking_tm_cpp . BacktrackingTMCPP elif arg_0 == 'tm_py' : return backtracking_tm_shim . TMShim elif arg_0 == 'tm_cpp' : return backtracking_tm_shim . TMCPPShim elif arg_0 == 'monitored_tm_py' : return backtracking_tm_shim . MonitoredTMShim else : raise RuntimeError ( \"Invalid temporalImp '%s'. Legal values are: 'py', \" \"'cpp', 'tm_py', 'monitored_tm_py'\" % ( arg_0 ) )","id_":254211,"task_name":"https:\/\/github.com\/numenta\/nupic\/blob\/5922fafffdccc8812e72b3324965ad2f7d4bbdad\/src\/nupic\/regions\/tm_region.py#L45-L61","negative":"Put us back at the beginning of the file again."} {"query":"Populate a harmonic tensor from a time - frequency representation with time - varying frequencies .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 , arg_4 = 'linear' , arg_5 = 0 , arg_6 = 0 ) : arg_7 = [ slice ( None ) ] * arg_1 . ndim arg_8 = [ slice ( None ) ] * arg_1 . ndim arg_9 = [ slice ( None ) ] * arg_0 . ndim arg_10 = ( 1 + arg_6 ) % arg_1 . ndim for arg_11 in range ( arg_1 . shape [ arg_10 ] ) : arg_7 [ arg_10 ] = slice ( arg_11 , arg_11 + 1 ) arg_8 [ arg_10 ] = arg_11 arg_9 [ 1 + arg_10 ] = arg_7 [ arg_10 ] harmonics_1d ( arg_0 [ tuple ( arg_9 ) ] , arg_1 [ tuple ( arg_7 ) ] , arg_2 [ tuple ( arg_8 ) ] , arg_3 , arg_4 = arg_4 , arg_5 = arg_5 , arg_6 = arg_6 )","id_":254212,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/core\/harmonic.py#L331-L382","negative":"Drop a node from the network\n\n :param node: node to drop\n :type node: Node"} {"query":"Generates the instructions for a bot and its filters .","positive":"def Func ( arg_0 , arg_1 ) : return '\\n\\n' . join ( [ arg_0 . INSTRUCTIONS . strip ( ) , '*Supported methods:*' , 'If you send \"@{}: help\" to me I reply with these ' 'instructions.' . format ( arg_0 . user ) , 'If you send \"@{}: version\" to me I reply with my current ' 'version.' . format ( arg_0 . 
user ) , ] + [ arg_2 . description ( ) for arg_2 in arg_1 ] )","id_":254213,"task_name":"https:\/\/github.com\/textbook\/aslack\/blob\/9ac6a44e4464180109fa4be130ad7a980a9d1acc\/aslack\/slack_bot\/bot.py#L204-L229","negative":"Initialize all ephemerals used by derived classes."} {"query":"Handles mapping elements to diagram components","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 ) : arg_4 = arg_0 . diagram . diagram_canvas arg_5 = XDotParser ( ) for arg_6 in arg_3 . added : logger . debug ( \"Mapping new element [%s] to diagram node\" % arg_6 ) for arg_7 in arg_0 . nodes : arg_8 = arg_2 [ : - 6 ] if arg_7 . containment_trait == arg_8 : arg_9 = arg_7 . dot_node arg_10 = Dot ( ) arg_11 = Node ( str ( id ( arg_6 ) ) ) arg_0 . _style_node ( arg_11 , arg_9 ) arg_10 . add_node ( arg_11 ) arg_12 = graph_from_dot_data ( arg_10 . create ( arg_0 . program , \"xdot\" ) ) arg_13 = arg_5 . parse_nodes ( arg_12 ) for arg_14 in arg_13 : if arg_14 is not None : arg_14 . element = arg_6 for arg_15 in arg_7 . tools : arg_14 . tools . append ( arg_15 ( arg_14 ) ) arg_4 . add ( arg_14 ) arg_4 . request_redraw ( ) for arg_6 in arg_3 . removed : logger . debug ( \"Unmapping element [%s] from diagram\" % arg_6 ) for arg_16 in arg_4 . components : if arg_6 == arg_16 . element : arg_4 . remove ( arg_16 ) arg_4 . request_redraw ( ) break","id_":254214,"task_name":"https:\/\/github.com\/rwl\/godot\/blob\/013687c9e8983d2aa2ceebb8a76c5c4f1e37c90f\/godot\/mapping.py#L295-L329","negative":"Override configuration according to command line parameters\n\n return additional arguments"} {"query":"Return tuple with mantissa and exponent of number formatted in engineering notation .","positive":"def Func ( arg_0 ) : arg_1 = lambda x , p : ( x . ljust ( 3 + arg_4 , \"0\" ) [ : p ] , x [ p : ] . rstrip ( \"0\" ) ) arg_2 , arg_3 = to_scientific_tuple ( arg_0 ) arg_2 , arg_4 = arg_2 . replace ( \".\" , \"\" ) , arg_2 . startswith ( \"-\" ) arg_5 = \".\" . join ( filter ( None , arg_1 ( arg_2 , 1 + ( arg_3 % 3 ) + arg_4 ) ) ) arg_6 = int ( 3 * math . floor ( arg_3 \/ 3 ) ) return NumComp ( arg_5 , arg_6 )","id_":254215,"task_name":"https:\/\/github.com\/pmacosta\/peng\/blob\/976935377adaa3de26fc5677aceb2cdfbd6f93a7\/peng\/functions.py#L284-L305","negative":"Stop streaming frames."} {"query":"Estimates the accuracy of the predictions from the MDR ensemble","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , ** arg_4 ) : arg_5 = arg_0 . ensemble . predict ( arg_1 ) if arg_3 is None : return accuracy_Func ( arg_2 , arg_5 ) else : return arg_3 ( arg_2 , arg_5 , ** arg_4 )","id_":254216,"task_name":"https:\/\/github.com\/EpistasisLab\/scikit-mdr\/blob\/768565deb10467d04a960d27e000ab38b7aa8a62\/mdr\/mdr_ensemble.py#L128-L149","negative":"This function adds the given stream to the logger, but does not check with a ConnectorDB database\n to make sure that the stream exists. Use at your own risk."} {"query":"Display one case .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 , arg_3 = institute_and_Func ( store , arg_0 , arg_1 ) arg_4 = controllers . 
Func ( store , arg_2 , arg_3 ) return dict ( institute = arg_2 , Func = arg_3 , ** arg_4 )","id_":254217,"task_name":"https:\/\/github.com\/Clinical-Genomics\/scout\/blob\/90a551e2e1653a319e654c2405c2866f93d0ebb9\/scout\/server\/blueprints\/cases\/views.py#L69-L73","negative":"Returns any parameters needed for Akamai HD player verification.\n\n Algorithm originally documented by KSV, source:\n http:\/\/stream-recorder.com\/forum\/showpost.php?p=43761&postcount=13"}
{"query":"Intended to be overridden by subclasses . Raises NotImplementedError .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_3 = 'Tried to use unimplemented lens {}.' raise NotImplementedError ( arg_3 . format ( type ( arg_0 ) ) )","id_":254218,"task_name":"https:\/\/github.com\/ingolemo\/python-lenses\/blob\/a3a6ed0a31f6674451e542e7380a8aa16e6f8edf\/lenses\/optics\/base.py#L122-L126","negative":"Synchronize local po files with translations on GDocs Spreadsheet.\n Downloads two csv files, merges them and converts into po files\n structure. If new msgids appeared in po files, this method creates\n new ods with appended content and sends it to GDocs."}
{"query":"Adds parameters for a network simulation .","positive":"def Func ( arg_0 , arg_1 ) : arg_0 . _logger . info ( 'Adding Parameters of Components' ) for arg_2 in arg_0 . components : arg_2 . Func ( arg_1 ) if arg_0 . analysers : arg_0 . _logger . info ( 'Adding Parameters of Analysers' ) for arg_3 in arg_0 . analysers : arg_3 . Func ( arg_1 ) arg_0 . _logger . info ( 'Adding Parameters of Runner' ) arg_0 . network_runner . Func ( arg_1 )","id_":254219,"task_name":"https:\/\/github.com\/SmokinCaterpillar\/pypet\/blob\/97ad3e80d46dbdea02deeb98ea41f05a19565826\/pypet\/brian2\/network.py#L541-L563","negative":"Upload all po files to GDocs ignoring conflicts.\n This method looks for all msgids in po_files and sends them\n as ods to GDocs Spreadsheet."}
{"query":"r Return a datetime . tzinfo implementation for the given timezone","positive":"def Func ( arg_0 ) : if arg_0 . upper ( ) == 'UTC' : return utc try : arg_0 = ascii ( arg_0 ) except UnicodeEncodeError : raise UnknownTimeZoneError ( arg_0 ) arg_0 = _unmunge_zone ( arg_0 ) if arg_0 not in arg_1 : if arg_0 in all_Funcs_set : arg_1 [ arg_0 ] = build_tzinfo ( arg_0 ) else : raise UnknownTimeZoneError ( arg_0 ) return arg_1 [ arg_0 ]","id_":254220,"task_name":"https:\/\/github.com\/dsoprea\/pytzPure\/blob\/ec8f7803ca1025d363ba954905ae7717a0524a0e\/pytzpure\/__init__.py#L126-L183","negative":"Get summary and description of this notebook"}
{"query":"Retrieves info about the repos of the current organization .","positive":"def Func ( arg_0 , arg_1 = 'public' , arg_2 = 'llnl' ) : print 'Getting Func.' for arg_3 in arg_0 . org_retrieved . iter_Func ( type = arg_1 ) : arg_4 = arg_3 . to_json ( ) arg_0 . Func_json [ arg_3 . name ] = arg_4 arg_7 = my_repo . My_Repo ( ) arg_7 . name = arg_3 . full_name arg_0 . total_Func += 1 arg_7 . contributors = my_github . get_total_contributors ( arg_3 ) arg_0 . total_contributors += arg_7 . contributors arg_7 . forks = arg_3 . forks_count arg_0 . total_forks += arg_7 . forks arg_7 . stargazers = arg_3 . stargazers arg_0 . total_stars += arg_7 . stargazers arg_7 . pull_requests_open , arg_7 . pull_requests_closed = my_github . get_pull_reqs ( arg_3 ) arg_7 . pull_requests = ( arg_7 . pull_requests_open + arg_7 . pull_requests_closed ) arg_0 . total_pull_reqs += arg_7 . pull_requests_open arg_0 . total_pull_reqs += arg_7 . pull_requests_closed arg_0 . total_pull_reqs_open += arg_7 . pull_requests_open arg_0 . total_pull_reqs_closed += arg_7 . pull_requests_closed arg_7 . open_issues = arg_3 . open_issues_count arg_0 . total_open_issues += arg_7 . open_issues arg_7 . closed_issues = my_github . get_issues ( arg_3 , arg_2 = arg_2 ) arg_7 . issues = arg_7 . closed_issues + arg_7 . open_issues arg_0 . total_closed_issues += arg_7 . closed_issues arg_0 . total_issues += arg_7 . issues my_github . get_languages ( arg_3 , arg_7 ) arg_7 . readme = my_github . get_readme ( arg_3 ) arg_7 . commits = arg_0 . get_commits ( arg_3 = arg_3 , arg_2 = arg_2 ) arg_0 . total_commits += arg_7 . commits arg_0 . all_Func . append ( arg_7 )","id_":254221,"task_name":"https:\/\/github.com\/LLNL\/scraper\/blob\/881a316e4c04dfa5a9cf491b7c7f9f997a7c56ea\/scripts\/github_stats.py#L169-L207","negative":"Serialize a dataframe.\n\n Parameters\n ----------\n writer : file\n File-like object to write to. Must be opened in binary mode.\n data_type_id : dict\n Serialization format to use.\n See the azureml.DataTypeIds class for constants.\n dataframe: pandas.DataFrame\n Dataframe to serialize."}
{"query":"Tries to remove a registered event without triggering it","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : try : arg_0 . log . debug ( \"Removing event {0}({1},{2})\" . format ( arg_2 [ 'function' ] . __name__ , arg_2 [ 'args' ] , arg_2 [ 'kwargs' ] ) ) except AttributeError : arg_0 . log . debug ( \"Removing event {0}\" . format ( str ( arg_2 ) ) ) try : arg_1 . remove ( arg_2 ) except ValueError : try : arg_0 . log . warn ( \"Unable to remove event {0}({1},{2}) , not found in list: {3}\" . format ( arg_2 [ 'function' ] . __name__ , arg_2 [ 'args' ] , arg_2 [ 'kwargs' ] , arg_1 ) ) except AttributeError : arg_0 . log . debug ( \"Unable to remove event {0}\" . format ( str ( arg_2 ) ) ) raise KeyError ( 'Unable to unregister the specified event from the signals specified' )","id_":254222,"task_name":"https:\/\/github.com\/antevens\/listen\/blob\/d3ddff8e7fbfb672c5bd7f6f4febeb5e921d8c67\/listen\/signal_handler.py#L190-L204","negative":"Add the currently tested element into the database."}
{"query":"Method to update an item","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = False ) : if not arg_3 : raise BackendException ( BACKEND_ERROR , \"Header If-Match required for Funcing an object\" ) arg_5 = arg_0 . get_response ( method = 'PATCH' , arg_1 = arg_1 , json = arg_2 , arg_3 = arg_3 ) if arg_5 . status_code == 200 : return arg_0 . decode ( arg_5 = arg_5 ) if arg_5 . status_code == 412 : if arg_4 : arg_6 = arg_0 . get ( arg_1 ) arg_3 = { 'If-Match' : arg_6 [ '_etag' ] } return arg_0 . Func ( arg_1 , arg_2 = arg_2 , arg_3 = arg_3 , arg_4 = False ) raise BackendException ( arg_5 . status_code , arg_5 . content ) else : raise BackendException ( arg_5 . status_code , arg_5 . content )","id_":254223,"task_name":"https:\/\/github.com\/Alignak-monitoring-contrib\/alignak-backend-client\/blob\/1e21f6ce703e66984d1f9b20fe7866460ab50b39\/alignak_backend_client\/client.py#L550-L610","negative":"Receive TCP response, looping to get whole thing or timeout."}
{"query":"Set the room topic .","positive":"def Func ( arg_0 , arg_1 ) : if not arg_1 : arg_1 = '' arg_2 = arg_0 . _connection . put ( \"room\/%s\" % arg_0 . id , { \"room\" : { \"topic\" : arg_1 } } ) if arg_2 [ \"success\" ] : arg_0 . _load ( ) return arg_2 [ \"success\" ]","id_":254224,"task_name":"https:\/\/github.com\/mariano\/pyfire\/blob\/42e3490c138abc8e10f2e9f8f8f3b40240a80412\/pyfire\/room.py#L126-L141","negative":"Dump a certificate revocation list to a buffer.\n\n :param type: The file type (one of ``FILETYPE_PEM``, ``FILETYPE_ASN1``, or\n ``FILETYPE_TEXT``).\n :param CRL crl: The CRL to dump.\n\n :return: The buffer with the CRL.\n :rtype: bytes"}
{"query":"r Continuous gaussian square pulse .","positive":"def Func ( arg_0 : arg_1 . ndarray , arg_3 : arg_4 , arg_5 : arg_6 , arg_7 : arg_6 , arg_8 : arg_6 , arg_9 : arg_10 [ None , arg_6 ] = None ) -> arg_1 . ndarray : arg_11 = arg_5 - arg_7 \/ 2 arg_12 = arg_5 + arg_7 \/ 2 if arg_9 : arg_9 = min ( arg_7 , arg_9 ) arg_13 = arg_9 - arg_7 else : arg_13 = None arg_14 = [ functools . partial ( gaussian , arg_3 = arg_3 , arg_5 = arg_11 , arg_8 = arg_8 , arg_9 = arg_13 , rescale_amp = True ) , functools . partial ( gaussian , arg_3 = arg_3 , arg_5 = arg_12 , arg_8 = arg_8 , arg_9 = arg_13 , rescale_amp = True ) , functools . partial ( constant , arg_3 = arg_3 ) ] arg_15 = [ arg_0 <= arg_11 , arg_0 >= arg_12 ] return arg_1 . piecewise ( arg_0 . astype ( arg_1 . complex_ ) , arg_15 , arg_14 )","id_":254225,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/pulse\/pulse_lib\/continuous.py#L186-L213","negative":"Creates the email"}
{"query":"Consumes protocol message field identifier .","positive":"def Func ( arg_0 ) : arg_1 = arg_0 . token if not arg_0 . _IDENTIFIER . match ( arg_1 ) : raise arg_0 . _ParseError ( 'Expected identifier.' ) arg_0 . NextToken ( ) return arg_1","id_":254226,"task_name":"https:\/\/github.com\/ibelie\/typy\/blob\/3616845fb91459aacd8df6bf82c5d91f4542bee7\/typy\/google\/protobuf\/text_format.py#L860-L873","negative":"Reimplemented to add an action for raw copy."}
{"query":"Find and return the S3 data location given a catalog_id .","positive":"def Func ( arg_0 , arg_1 ) : try : arg_2 = arg_0 . get ( arg_1 ) except : return None if 'Landsat8' in arg_2 [ 'type' ] and 'LandsatAcquisition' in arg_2 [ 'type' ] : arg_3 = arg_2 [ 'properties' ] [ 'bucketName' ] arg_4 = arg_2 [ 'properties' ] [ 'bucketPrefix' ] return 's3:\/\/' + arg_3 + '\/' + arg_4 if 'DigitalGlobeAcquisition' in arg_2 [ 'type' ] : arg_5 = Ordering ( ) arg_6 = arg_5 . location ( [ arg_1 ] ) return arg_6 [ 'acquisitions' ] [ 0 ] [ 'location' ] return None","id_":254227,"task_name":"https:\/\/github.com\/DigitalGlobe\/gbdxtools\/blob\/def62f8f2d77b168aa2bd115290aaa0f9a08a4bb\/gbdxtools\/catalog.py#L161-L190","negative":"Called by the environment to make some initial configurations before performing the\n individual runs.\n\n Checks if all parameters marked for presetting were preset. If not raises a\n DefaultReplacementError.\n\n Locks all parameters.\n\n Removal of potential results of previous runs in case the trajectory was expanded to avoid\n mixing up undesired shortcuts in natural naming."}
{"query":"Return a single backend matching the specified filtering .","positive":"def Func ( arg_0 , arg_1 = None , ** arg_2 ) : arg_3 = arg_0 . backends ( arg_1 , ** arg_2 ) if len ( arg_3 ) > 1 : raise QiskitBackendNotFoundError ( 'More than one backend matches the criteria' ) elif not arg_3 : raise QiskitBackendNotFoundError ( 'No backend matches the criteria' ) return arg_3 [ 0 ]","id_":254228,"task_name":"https:\/\/github.com\/Qiskit\/qiskit-terra\/blob\/d4f58d903bc96341b816f7c35df936d6421267d1\/qiskit\/providers\/baseprovider.py#L28-L48","negative":"Send a chat message to a conversation."}
{"query":"Send a message to a particular user .","positive":"def Func ( arg_0 , arg_1 , arg_2 = arg_3 . INFO ) : arg_5 = _user_key ( arg_0 ) arg_6 = cache . get ( arg_5 ) or [ ] arg_6 . append ( ( arg_1 , arg_2 ) ) cache . set ( arg_5 , arg_6 )","id_":254229,"task_name":"https:\/\/github.com\/codeinthehole\/django-async-messages\/blob\/292cb2fc517521dabc67b90e7ca5b1617f59e214\/async_messages\/__init__.py#L5-L18","negative":"This is a decorator that retries a function.\n\n Tries `n` times and catches a given tuple of `errors`.\n\n If the `n` retries are not enough, the error is reraised.\n\n If desired `waits` some seconds.\n\n Optionally takes a 'logger_name' of a given logger to print the caught error."}
{"query":"Replaces many documents in a mongo collection .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = None , arg_4 = None , arg_5 = False , arg_6 = None , ** arg_7 ) : arg_8 = arg_0 . get_collection ( arg_1 , arg_4 = arg_4 ) if not arg_3 : arg_3 = [ { '_id' : doc [ '_id' ] } for doc in arg_2 ] arg_9 = [ ReplaceOne ( arg_3 [ i ] , arg_2 [ i ] , arg_5 = arg_5 , arg_6 = arg_6 ) for i in range ( len ( arg_2 ) ) ] return arg_8 . bulk_write ( arg_9 , ** arg_7 )","id_":254230,"task_name":"https:\/\/github.com\/apache\/airflow\/blob\/b69c686ad8a0c89b9136bb4b31767257eb7b2597\/airflow\/contrib\/hooks\/mongo_hook.py#L214-L261","negative":"wrapper for ensuring image_tag returns utf8-encoded str on Python 2"}
{"query":"Return the Tanimoto distance between two strings .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = 2 ) : arg_4 = arg_0 . sim ( arg_1 , arg_2 , arg_3 ) if arg_4 != 0 : return log ( arg_4 , 2 ) return float ( '-inf' )","id_":254231,"task_name":"https:\/\/github.com\/chrislit\/abydos\/blob\/165466b3ff6afd8024a4c8660421b0c4e7773db9\/abydos\/distance\/_jaccard.py#L83-L120","negative":"This is a parallel worker for running period-recovery.\n\n Parameters\n ----------\n\n task : tuple\n This is used to pass args to the `periodicvar_recovery` function::\n\n task[0] = period-finding result pickle to work on\n task[1] = simbasedir\n task[2] = period_tolerance\n\n Returns\n -------\n\n dict\n This is the dict produced by the `periodicvar_recovery` function for the\n input period-finding result pickle."}
{"query":"Execute the raw phase for a given backend section optionally using Arthur","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_2 : arg_3 = TaskRawDataArthurCollection ( arg_0 , arg_1 = arg_1 ) else : arg_3 = TaskRawDataCollection ( arg_0 , arg_1 = arg_1 ) TaskProjects ( arg_0 ) . execute ( ) try : arg_3 . execute ( ) logging . info ( \"Loading raw data finished!\" ) except Exception as e : logging . error ( str ( e ) ) sys . exit ( - 1 )","id_":254232,"task_name":"https:\/\/github.com\/chaoss\/grimoirelab-sirmordred\/blob\/d6ac94d28d707fae23170064d078f1edf937d13e\/utils\/micro.py#L70-L89","negative":"Asynchronous connection listener. Starts a handler for each connection."}
{"query":"Find consecutive li tags that have content that have the same list id .","positive":"def Func ( arg_0 , arg_1 ) : yield arg_0 arg_2 = get_namespace ( arg_0 , 'w' ) arg_3 = get_numId ( arg_0 , arg_2 ) arg_4 = get_ilvl ( arg_0 , arg_2 ) arg_5 = arg_0 while True : arg_5 = arg_5 . getnext ( ) if arg_5 is None : break if not has_text ( arg_5 ) : continue if _is_top_level_upper_roman ( arg_5 , arg_1 ) : break if ( is_li ( arg_5 , arg_1 ) and ( arg_4 > get_ilvl ( arg_5 , arg_2 ) ) ) : break arg_6 = get_numId ( arg_5 , arg_2 ) if arg_6 is None or arg_6 == - 1 : yield arg_5 continue if arg_3 != arg_6 : break if is_last_li ( arg_5 , arg_1 , arg_3 ) : yield arg_5 break yield arg_5","id_":254233,"task_name":"https:\/\/github.com\/PolicyStat\/docx2html\/blob\/2dc4afd1e3a3f2f0b357d0bff903eb58bcc94429\/docx2html\/core.py#L293-L333","negative":"Parses arguments."}
{"query":"Orient a graph using the method defined by the arguments .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None , ** arg_3 ) : if arg_2 is None : return arg_0 . create_graph_from_data ( arg_1 , ** arg_3 ) elif isinstance ( arg_2 , nx . DiGraph ) : return arg_0 . orient_directed_graph ( arg_1 , arg_2 , ** arg_3 ) elif isinstance ( arg_2 , nx . Graph ) : return arg_0 . orient_undirected_graph ( arg_1 , arg_2 , ** arg_3 ) else : print ( 'Unknown Graph type' ) raise ValueError","id_":254234,"task_name":"https:\/\/github.com\/Diviyan-Kalainathan\/CausalDiscoveryToolbox\/blob\/be228b078ba9eb76c01b3ccba9a1c0ad9e9e5ed1\/cdt\/causality\/graph\/model.py#L44-L70","negative":"Gets back all response headers."}
{"query":"Show available operators for a given saved search condition","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = arg_0 . savedsearch . conditions_operators . get ( arg_1 ) arg_3 = set ( [ arg_0 . savedsearch . operators . get ( op ) for op in arg_2 ] ) return arg_3","id_":254235,"task_name":"https:\/\/github.com\/urschrei\/pyzotero\/blob\/b378966b30146a952f7953c23202fb5a1ddf81d9\/pyzotero\/zotero.py#L941-L949","negative":"Sets general options used by plugins and streams originating\n from this session object.\n\n :param key: key of the option\n :param value: value to set the option to\n\n\n **Available options**:\n\n ======================== =========================================\n hds-live-edge ( float) Specify the time live HDS\n streams will start from the edge of\n stream, default: ``10.0``\n\n hds-segment-attempts (int) How many attempts should be done\n to download each HDS segment, default: ``3``\n\n hds-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hds-segment-timeout (float) HDS segment connect and read\n timeout, default: ``10.0``\n\n hds-timeout (float) Timeout for reading data from\n HDS streams, default: ``60.0``\n\n hls-live-edge (int) How many segments from the end\n to start live streams on, default: ``3``\n\n hls-segment-attempts (int) How many attempts should be done\n to download each HLS segment, default: ``3``\n\n hls-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``\n\n hls-segment-timeout (float) HLS segment connect and read\n timeout, default: ``10.0``\n\n hls-timeout (float) Timeout for reading data from\n HLS streams, default: ``60.0``\n\n http-proxy (str) Specify a HTTP proxy to use for\n all HTTP requests\n\n https-proxy (str) Specify a HTTPS proxy to use for\n all HTTPS requests\n\n http-cookies (dict or str) A dict or a semi-colon (;)\n delimited str of cookies to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-headers (dict or str) A dict or semi-colon (;)\n delimited str of headers to add to each\n HTTP request, e.g. ``foo=bar;baz=qux``\n\n http-query-params (dict or str) A dict or a ampersand (&)\n delimited string of query parameters to\n add to each HTTP request,\n e.g. ``foo=bar&baz=qux``\n\n http-trust-env (bool) Trust HTTP settings set in the\n environment, such as environment\n variables (HTTP_PROXY, etc) and\n ~\/.netrc authentication\n\n http-ssl-verify (bool) Verify SSL certificates,\n default: ``True``\n\n http-ssl-cert (str or tuple) SSL certificate to use,\n can be either a .pem file (str) or a\n .crt\/.key pair (tuple)\n\n http-timeout (float) General timeout used by all HTTP\n requests except the ones covered by\n other options, default: ``20.0``\n\n http-stream-timeout (float) Timeout for reading data from\n HTTP streams, default: ``60.0``\n\n subprocess-errorlog (bool) Log errors from subprocesses to\n a file located in the temp directory\n\n subprocess-errorlog-path (str) Log errors from subprocesses to\n a specific file\n\n ringbuffer-size (int) The size of the internal ring\n buffer used by most stream types,\n default: ``16777216`` (16MB)\n\n rtmp-proxy (str) Specify a proxy (SOCKS) that RTMP\n streams will use\n\n rtmp-rtmpdump (str) Specify the location of the\n rtmpdump executable used by RTMP streams,\n e.g. ``\/usr\/local\/bin\/rtmpdump``\n\n rtmp-timeout (float) Timeout for reading data from\n RTMP streams, default: ``60.0``\n\n ffmpeg-ffmpeg (str) Specify the location of the\n ffmpeg executable use by Muxing streams\n e.g. ``\/usr\/local\/bin\/ffmpeg``\n\n ffmpeg-verbose (bool) Log stderr from ffmpeg to the\n console\n\n ffmpeg-verbose-path (str) Specify the location of the\n ffmpeg stderr log file\n\n ffmpeg-video-transcode (str) The codec to use if transcoding\n video when muxing with ffmpeg\n e.g. ``h264``\n\n ffmpeg-audio-transcode (str) The codec to use if transcoding\n audio when muxing with ffmpeg\n e.g. ``aac``\n\n stream-segment-attempts (int) How many attempts should be done\n to download each segment, default: ``3``.\n General option used by streams not\n covered by other options.\n\n stream-segment-threads (int) The size of the thread pool used\n to download segments, default: ``1``.\n General option used by streams not\n covered by other options.\n\n stream-segment-timeout (float) Segment connect and read\n timeout, default: ``10.0``.\n General option used by streams not\n covered by other options.\n\n stream-timeout (float) Timeout for reading data from\n stream, default: ``60.0``.\n General option used by streams not\n covered by other options.\n\n locale (str) Locale setting, in the RFC 1766 format\n eg. en_US or es_ES\n default: ``system locale``.\n\n user-input-requester (UserInputRequester) instance of UserInputRequester\n to collect input from the user at runtime. Must be\n set before the plugins are loaded.\n default: ``UserInputRequester``.\n ======================== ========================================="}
{"query":"Unpack a byte string to the given format . If the byte string contains more bytes than required for the given format the function returns a tuple of values .","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : if arg_1 == 's' : arg_3 = struct . Func ( '' . join ( [ arg_0 , str ( len ( arg_2 ) ) , 's' ] ) , arg_2 ) [ 0 ] else : arg_4 = len ( arg_2 ) \/\/ struct . calcsize ( arg_1 ) arg_3 = struct . Func ( '' . join ( [ arg_0 , str ( arg_4 ) , arg_1 ] ) , arg_2 ) if len ( arg_3 ) == 1 : arg_3 = arg_3 [ 0 ] return arg_3","id_":254236,"task_name":"https:\/\/github.com\/nephics\/mat4py\/blob\/6c1a2ad903937437cc5f24f3c3f5aa2c5a77a1c1\/mat4py\/loadmat.py#L107-L122","negative":"Convert any non-DAVError exception to HTTP_INTERNAL_ERROR."}
{"query":"Calls input to allow user to input an arbitrary string . User can go back by entering the go_back string . Works in both Python 2 and 3 .","positive":"def Func ( arg_0 , * arg_1 , ** arg_2 ) : arg_3 = arg_2 . get ( 'go_back' , '<' ) arg_4 = arg_2 . get ( 'type' , str ) arg_5 = arg_2 . get ( 'default' , '' ) with stdout_redirected ( sys . stderr ) : while True : try : if arg_2 . get ( 'secret' , False ) : arg_6 = getpass . getpass ( arg_0 ) elif sys . version_info < ( 3 , 0 ) : arg_6 = Func_input ( arg_0 ) else : arg_6 = input ( arg_0 ) if not arg_6 : arg_6 = arg_5 if arg_6 == arg_3 : raise QuestionnaireGoBack return arg_4 ( arg_6 ) except ValueError : eprint ( '\\n`{}` is not a valid `{}`\\n' . format ( arg_6 , arg_4 ) )","id_":254237,"task_name":"https:\/\/github.com\/kylebebak\/questionnaire\/blob\/ed92642e8a2a0198da198acbcde2707f1d528585\/questionnaire\/prompters.py#L128-L152","negative":"Read the file and perform any transforms to get a loaded image"}
{"query":"Attach marker bodies to the corresponding skeleton bodies .","positive":"def Func ( arg_0 , arg_1 ) : assert not arg_0 . joints for arg_2 , arg_3 in arg_0 . channels . items ( ) : arg_4 = arg_0 . targets . get ( arg_2 ) if arg_4 is None : continue if arg_0 . visibility [ arg_1 , arg_3 ] < 0 : continue if np . linalg . norm ( arg_0 . velocities [ arg_1 , arg_3 ] ) > 10 : continue arg_5 = ode . BallJoint ( arg_0 . world . ode_world , arg_0 . jointgroup ) arg_5 . Func ( arg_0 . bodies [ arg_2 ] . ode_body , arg_4 . ode_body ) arg_5 . setAnchor1Rel ( [ 0 , 0 , 0 ] ) arg_5 . setAnchor2Rel ( arg_0 . offsets [ arg_2 ] ) arg_5 . setParam ( ode . ParamCFM , arg_0 . cfms [ arg_1 , arg_3 ] ) arg_5 . setParam ( ode . ParamERP , arg_0 . erp ) arg_5 . name = arg_2 arg_0 . joints [ arg_2 ] = arg_5 arg_0 . _frame_no = arg_1","id_":254238,"task_name":"https:\/\/github.com\/EmbodiedCognition\/pagoda\/blob\/8892f847026d98aba8646ecbc4589397e6dec7bd\/pagoda\/cooper.py#L253-L281","negative":"Update Marketing urls in course metadata and return updated course.\n\n Arguments:\n course_runs (list): List of course runs.\n enterprise_customer (EnterpriseCustomer): enterprise customer instance.\n enterprise_context (dict): The context to inject into URLs.\n\n Returns:\n (dict): Dictionary containing updated course metadata."}
{"query":"Backtrack detected onset events to the nearest preceding local minimum of an energy function .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = np . flatnonzero ( ( arg_1 [ 1 : - 1 ] <= arg_1 [ : - 2 ] ) & ( arg_1 [ 1 : - 1 ] < arg_1 [ 2 : ] ) ) arg_2 = util . fix_frames ( 1 + arg_2 , x_min = 0 ) return arg_2 [ util . match_events ( arg_0 , arg_2 , right = False ) ]","id_":254239,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/onset.py#L336-L403","negative":"Generate a list of 2 month ranges for the range requested with an\n intersection between months. This is necessary because we can't search\n for ranges longer than 3 months and the period searched has to encompass\n the whole period of the mission."}
{"query":"Get short form of commit hash given directory pkg_path","positive":"def Func ( arg_0 ) : if _sysinfo . commit : return \"installation\" , _sysinfo . commit arg_1 = subprocess . Popen ( 'git rev-parse --short HEAD' , stdout = subprocess . PIPE , stderr = subprocess . PIPE , cwd = arg_0 , shell = True ) arg_2 , arg_3 = arg_1 . communicate ( ) if arg_2 : return 'repository' , arg_2 . strip ( ) return '(none found)' , ''","id_":254240,"task_name":"https:\/\/github.com\/cloud9ers\/gurumate\/blob\/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e\/environment\/lib\/python2.7\/site-packages\/IPython\/utils\/sysinfo.py#L32-L67","negative":"This method is called before first step of simulation."}
{"query":"Returns rolling - window gradient of a .","positive":"def Func ( arg_0 , arg_1 = 11 ) : if arg_1 % 2 == 0 : arg_1 += 1 arg_2 = rolling_window ( arg_0 , arg_1 , 'ends' ) arg_0 = map ( lambda x : np . polyfit ( np . arange ( arg_1 ) , x , 1 ) [ 0 ] , arg_2 ) return np . array ( list ( arg_0 ) )","id_":254241,"task_name":"https:\/\/github.com\/oscarbranson\/latools\/blob\/cd25a650cfee318152f234d992708511f7047fbe\/latools\/helpers\/helpers.py#L408-L439","negative":"write triples into a translation file."}
{"query":"Return an a license identifier from an ExtractedLicense or None .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = list ( arg_0 . graph . triples ( ( arg_1 , arg_0 . spdx_namespace [ 'licenseId' ] , None ) ) ) if not arg_2 : arg_0 . error = True arg_4 = 'Extracted license must have licenseId property.' arg_0 . logger . log ( arg_4 ) return if len ( arg_2 ) > 1 : arg_0 . more_than_one_error ( 'extracted license identifier_tripples' ) return arg_5 = arg_2 [ 0 ] arg_6 , arg_7 , arg_8 = arg_5 return arg_8","id_":254242,"task_name":"https:\/\/github.com\/spdx\/tools-python\/blob\/301d72f6ae57c832c1da7f6402fa49b192de6810\/spdx\/parsers\/rdf.py#L144-L162","negative":"Input validation for standard estimators.\n\n Checks X and y for consistent length, enforces X 2d and y 1d.\n Standard input checks are only applied to y. For multi-label y,\n set multi_ouput=True to allow 2d and sparse y.\n\n Parameters\n ----------\n X : nd-array, list or sparse matrix\n Input data.\n\n y : nd-array, list or sparse matrix\n Labels.\n\n accept_sparse : string, list of string or None (default=None)\n String[s] representing allowed sparse matrix formats, such as 'csc',\n 'csr', etc. None means that sparse matrix input will raise an error.\n If the input is sparse but not in the allowed format, it will be\n converted to the first listed format.\n\n order : 'F', 'C' or None (default=None)\n Whether an array will be forced to be fortran or c-style.\n\n copy : boolean (default=False)\n Whether a forced copy will be triggered. If copy=False, a copy might\n be triggered by a conversion.\n\n force_all_finite : boolean (default=True)\n Whether to raise an error on np.inf and np.nan in X.\n\n ensure_2d : boolean (default=True)\n Whether to make X at least 2d.\n\n allow_nd : boolean (default=False)\n Whether to allow X.ndim > 2.\n\n Returns\n -------\n X_converted : object\n The converted and validated X."}
{"query":"Wrap a reader function in a decorator to supply line and column information along with relevant forms .","positive":"def Func ( arg_0 : arg_1 ) -> arg_1 : @ functools . wraps ( arg_0 ) def with_lineno_and_col ( arg_2 ) : arg_3 = lmap . map ( { READER_LINE_KW : arg_2 . reader . line , READER_COL_KW : arg_2 . reader . col } ) arg_4 = arg_0 ( arg_2 ) try : return arg_4 . with_meta ( arg_3 ) except AttributeError : return arg_4 return cast ( arg_1 , with_lineno_and_col )","id_":254243,"task_name":"https:\/\/github.com\/chrisrink10\/basilisp\/blob\/3d82670ee218ec64eb066289c82766d14d18cc92\/src\/basilisp\/lang\/reader.py#L306-L321","negative":"_rem_id_from_keys - Remove primary key from table\n\t\t\tinternal"}
{"query":"Receive NAK in REQUESTING state .","positive":"def Func ( arg_0 , arg_1 ) : logger . debug ( \"C3.1. Received NAK?, in REQUESTING state.\" ) if arg_0 . process_received_nak ( arg_1 ) : logger . debug ( \"C3.1: T. Received NAK, in REQUESTING state, \" \"raise INIT.\" ) raise arg_0 . INIT ( )","id_":254244,"task_name":"https:\/\/github.com\/juga0\/dhcpcanon\/blob\/9f51a29e57fe93dc93fb22bb0ed12fcfe9557e59\/dhcpcanon\/dhcpcapfsm.py#L590-L596","negative":"Replace all the tracks in a playlist, overwriting its existing tracks. \n This powerful request can be useful for replacing tracks, re-ordering existing tracks, or clearing the playlist.\n\n Parameters\n ----------\n playlist : Union[str, PLaylist]\n The playlist to modify\n tracks : Sequence[Union[str, Track]]\n Tracks to place in the playlist"}
{"query":"Bin up an array to coarser resolution by binning up groups of pixels and using their sum value to determine \\ the value of the new pixel .","positive":"def Func ( arg_0 , arg_1 ) : arg_2 = array_util . pad_2d_array_for_binning_up_with_bin_up_factor ( array_2d = arg_0 , arg_1 = arg_1 , pad_value = True ) arg_3 = np . zeros ( shape = ( arg_2 . shape [ 0 ] \/\/ arg_1 , arg_2 . shape [ 1 ] \/\/ arg_1 ) ) for arg_4 in range ( arg_3 . shape [ 0 ] ) : for arg_5 in range ( arg_3 . shape [ 1 ] ) : arg_6 = True for arg_7 in range ( arg_1 ) : for arg_8 in range ( arg_1 ) : arg_9 = arg_4 * arg_1 + arg_7 arg_10 = arg_5 * arg_1 + arg_8 if arg_2 [ arg_9 , arg_10 ] == False : arg_6 = False arg_3 [ arg_4 , arg_5 ] = arg_6 return arg_3","id_":254245,"task_name":"https:\/\/github.com\/Jammy2211\/PyAutoLens\/blob\/91e50369c7a9c048c83d217625578b72423cd5a7\/autolens\/data\/array\/util\/mask_util.py#L368-L417","negative":"Truncate string values."}
{"query":"Return the most common item in the list .","positive":"def Func ( arg_0 : arg_1 [ arg_2 ] ) -> arg_2 : return max ( set ( arg_0 ) , key = arg_0 . count )","id_":254246,"task_name":"https:\/\/github.com\/5j9\/wikitextparser\/blob\/1347425814361d7955342c53212edbb27f0ff4b5\/wikitextparser\/_template.py#L323-L338","negative":"Calculate the minimum for given expressions, possibly on a grid defined by binby.\n\n\n Example:\n\n >>> df.min(\"x\")\n array(-128.293991)\n >>> df.min([\"x\", \"y\"])\n array([-128.293991 , -71.5523682])\n >>> df.min(\"x\", binby=\"x\", shape=5, limits=[-10, 10])\n array([-9.99919128, -5.99972439, -1.99991322, 2.0000093 , 6.0004878 ])\n\n :param expression: {expression}\n :param binby: {binby}\n :param limits: {limits}\n :param shape: {shape}\n :param selection: {selection}\n :param delay: {delay}\n :param progress: {progress}\n :return: {return_stat_scalar}, the last dimension is of shape (2)"}
{"query":"Initiate TLS connection .","positive":"def Func ( arg_0 ) : logger . debug ( \"Preparing TLS connection\" ) if arg_0 . settings [ \"tls_verify_peer\" ] : arg_1 = ssl . CERT_REQUIRED else : arg_1 = ssl . CERT_NONE arg_0 . stream . transport . starttls ( keyfile = arg_0 . settings [ \"tls_key_file\" ] , certfile = arg_0 . settings [ \"tls_cert_file\" ] , server_side = not arg_0 . stream . initiator , arg_1 = arg_1 , ssl_version = ssl . PROTOCOL_TLSv1 , ca_certs = arg_0 . settings [ \"tls_cacert_file\" ] , do_handshake_on_connect = False , )","id_":254247,"task_name":"https:\/\/github.com\/Jajcus\/pyxmpp2\/blob\/14a40a3950910a9cd008b55f0d8905aa0186ce18\/pyxmpp2\/streamtls.py#L166-L184","negative":"Deletes the video\n\n Authentication is required\n\n Params:\n entry: video entry fetch via 'fetch_video()'\n\n Return:\n True if successful\n\n Raise:\n OperationError: on unsuccessful deletion"}
{"query":"Split the storage to run of char types at the same level .","positive":"def Func ( arg_0 ) : arg_0 [ 'runs' ] . clear ( ) arg_1 = arg_0 [ 'chars' ] if not arg_1 : return def calc_level_run ( arg_2 , arg_3 ) : return [ 'L' , 'R' ] [ max ( arg_2 , arg_3 ) % 2 ] arg_4 = arg_1 [ 0 ] arg_5 = calc_level_run ( arg_0 [ 'base_level' ] , arg_4 [ 'level' ] ) arg_6 = None arg_7 = arg_13 = 0 arg_8 , arg_9 = arg_4 [ 'level' ] , arg_4 [ 'type' ] for arg_10 in arg_1 : arg_11 , arg_12 = arg_10 [ 'level' ] , arg_10 [ 'type' ] if arg_11 == arg_8 : arg_13 += 1 else : arg_6 = calc_level_run ( arg_8 , arg_11 ) arg_0 [ 'runs' ] . append ( { 'sor' : arg_5 , 'eor' : arg_6 , 'start' : arg_7 , 'type' : arg_9 , 'length' : arg_13 } ) arg_5 = arg_6 arg_7 += arg_13 arg_13 = 1 arg_8 , arg_9 = arg_11 , arg_12 arg_6 = calc_level_run ( arg_11 , arg_0 [ 'base_level' ] ) arg_0 [ 'runs' ] . append ( { 'sor' : arg_5 , 'eor' : arg_6 , 'start' : arg_7 , 'type' : arg_12 , 'length' : arg_13 } )","id_":254248,"task_name":"https:\/\/github.com\/MeirKriheli\/python-bidi\/blob\/a0e265bb465c1b7ad628487991e33b5ebe364641\/bidi\/algorithm.py#L261-L307","negative":"Apply all filters to issues and pull requests.\n\n :param dict older_tag: All issues before this tag's date will be\n excluded. May be special value, if new tag is\n the first tag. (Means **older_tag** is when\n the repo was created.)\n :param dict newer_tag: All issues after this tag's date will be\n excluded. May be title of unreleased section.\n :rtype: list(dict), list(dict)\n :return: Filtered issues and pull requests."}
{"query":"Deserializes Java objects and primitive data serialized by ObjectOutputStream from a string .","positive":"def Func ( arg_0 ) : arg_1 = StringIO . StringIO ( arg_0 ) arg_2 = JavaObjectUnmarshaller ( arg_1 ) arg_2 . add_transformer ( DefaultObjectTransformer ( ) ) return arg_2 . readObject ( )","id_":254249,"task_name":"https:\/\/github.com\/apache\/incubator-heron\/blob\/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac\/heron\/tools\/tracker\/src\/python\/javaobj.py#L56-L64","negative":"Scale in the number of active blocks by the specified number."}
{"query":"Return the opposite mapping by searching the imported KB .","positive":"def Func ( arg_0 , arg_1 , arg_2 , arg_3 = True ) : arg_4 = arg_0 . kbs . get ( arg_2 , None ) if arg_4 : if arg_1 in arg_4 : return arg_4 [ arg_1 ] elif arg_3 : arg_5 = [ v for k , v in arg_4 . items ( ) if arg_1 in k ] if arg_5 : return arg_5 [ 0 ] return arg_1","id_":254250,"task_name":"https:\/\/github.com\/inspirehep\/harvesting-kit\/blob\/33a7f8aa9dade1d863110c6d8b27dfd955cb471f\/harvestingkit\/inspire_cds_package\/base.py#L87-L97","negative":"Generate RENEWING time.\n\n [:rfc:`2131#section-4.4.5`]::\n\n T1\n defaults to (0.5 * duration_of_lease). T2 defaults to (0.875 *\n duration_of_lease). Times T1 and T2 SHOULD be chosen with some\n random \"fuzz\" around a fixed value, to avoid synchronization of\n client reacquisition."}
{"query":"Given an email address check the email_remapping table to see if the email should be sent to a different address . This function also handles overriding the email domain if ignore_vcs_email_domain is set or the domain was missing","positive":"def Func ( arg_0 , arg_1 ) : if not arg_1 or \"@\" not in arg_1 : return None if arg_1 in arg_0 . email_remapping . remap : return arg_0 . email_remapping . remap [ arg_1 ] arg_2 , arg_3 = arg_1 . split ( \"@\" , 2 ) if arg_2 in arg_0 . email_remapping . remap : return arg_0 . email_remapping . remap [ arg_2 ] if \".\" not in arg_3 or config . ignore_vcs_email_domain : return \"%s@%s\" % ( arg_2 , config . email_domain_name ) return arg_1","id_":254251,"task_name":"https:\/\/github.com\/shopkick\/flawless\/blob\/c54b63ca1991c153e6f75080536f6df445aacc64\/flawless\/server\/service.py#L403-L417","negative":"Process current member with 'op' operation."}
{"query":"Launches a new menu . Wraps curses nicely so exceptions won t screw with the terminal too much .","positive":"def Func ( arg_0 , arg_1 , arg_2 = None ) : arg_3 = { \"code\" : - 1 , \"done\" : False } curses . wrapper ( Menu , arg_0 , arg_1 , arg_2 , arg_3 ) return arg_3","id_":254252,"task_name":"https:\/\/github.com\/minttu\/tmc.py\/blob\/212cfe1791a4aab4783f99b665cc32da6437f419\/tmc\/ui\/menu.py#L122-L129","negative":"scan through the java output text and extract the bad java messages that may or may not happened when\n unit tests are run. It will not record any bad java messages that are stored in g_ok_java_messages.\n\n :return: none"}
{"query":"Helper function to get files in a single directory","positive":"def Func ( arg_0 , arg_1 ) : arg_0 = os . path . abspath ( os . path . expanduser ( arg_0 ) ) arg_2 = set ( ) for arg_3 in arg_1 : arg_4 = os . path . join ( arg_0 , '*' + os . path . extsep + arg_3 ) arg_2 |= set ( glob . glob ( arg_4 ) ) return arg_2","id_":254253,"task_name":"https:\/\/github.com\/librosa\/librosa\/blob\/180e8e6eb8f958fa6b20b8cba389f7945d508247\/librosa\/util\/files.py#L139-L151","negative":"Set renewal, rebinding times."}
{"query":"Default handler a generic callback method for signal processing","positive":"def Func ( arg_0 , arg_1 , arg_2 ) : arg_0 . log . debug ( \"Signal handler called with signal: {0}\" . format ( arg_1 ) ) if arg_1 in arg_0 . restart_signals : arg_0 . set_handler ( arg_0 . handled_signals , arg_0 . pseudo_handler ) arg_0 . _cleanup ( ) os . execl ( 'python' , 'python' , * sys . argv ) elif arg_1 in arg_0 . abort_signals : arg_0 . abort ( arg_1 ) elif arg_1 in arg_0 . pause_signals : arg_0 . pause ( arg_1 ) elif arg_1 in arg_0 . resume_signals : arg_0 . resume ( arg_1 ) elif arg_1 in arg_0 . status_signals : arg_0 . status ( arg_1 ) elif arg_1 in arg_0 . error_signals : arg_0 . log . error ( 'Signal handler received error signal from an external process, aborting' ) arg_0 . abort ( arg_1 ) else : arg_0 . log . error ( \"Unhandled signal received: {0}\" . format ( arg_1 ) ) raise","id_":254254,"task_name":"https:\/\/github.com\/antevens\/listen\/blob\/d3ddff8e7fbfb672c5bd7f6f4febeb5e921d8c67\/listen\/signal_handler.py#L65-L92","negative":"Return an open file-object to the index file"}
{"query":"Get the template used in a TemplateResponse . This returns a tuple of active choice all choices","positive":"def Func ( arg_0 ) : if not hasattr ( arg_0 , 'template_name' ) : return None , None arg_1 = arg_0 . template_name if arg_1 is None : return None , None if isinstance ( arg_1 , ( list , tuple ) ) : if len ( arg_1 ) == 1 : return arg_1 [ 0 ] , None else : arg_2 = _Func_name ( arg_1 ) return arg_2 , arg_1 elif isinstance ( arg_1 , six . string_types ) : return arg_1 , None else : arg_3 = _get_template_filename ( arg_1 ) arg_4 = '