Dataset columns (each record below is a function / label / info triple):

function — string, 79 to 138k characters: Python source of a function in which
    one exception type has been replaced by the mask token __HOLE__
label — string, 20 classes: the exception type that was masked out
info — string, 42 to 261 characters: provenance of the example, in the form
    dataset/ETHPy150Open <repo>/<file path>/<qualified function name>
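For orientation, a minimal sketch of how records like the ones below might be
consumed. Everything beyond the three field names is an assumption: the file
name and the JSON-lines storage format are hypothetical, chosen only for
illustration.

import json

# Hypothetical file name and storage format (JSON lines); only the three
# field names -- function, label, info -- come from the schema above.
with open("exception_dataset.jsonl") as f:
    for line in f:
        record = json.loads(line)
        source = record["function"]  # code containing the __HOLE__ mask
        target = record["label"]     # exception type to predict, e.g. "KeyError"
        origin = record["info"]      # provenance within ETHPy150Open
        assert "__HOLE__" in source  # each record carries exactly the mask token
        print("%s -> %s" % (origin, target))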
function:
def init_repo(self):
    if os.path.exists(self.config_path):
        raise errors.Error("repository '%s' already initialized" % (
            self.root_dir))
    try:
        if not os.path.exists(self.system_root):
            os.makedirs(self.system_root)
        util.json_dump({}, self.config_path)
        with open(os.path.join(self.root_dir, "poni.id"), "wb") as f:
            f.write(codecs.decode(codecs.decode(b"""
eJy1l7uOJCcUhvN5ipKQKkK1EogASKCSIiEiIbZlGSSvd3VYv7//Q1+2dy5yte09
0nRXMwUf5w7L8oNszpNcfpK83B6CcItc3PhZoBvMWQIMotfU339N+u3/gbl9W7bC
sFFrvQy/XVrK7b3hZ2Fx28iWVQDmhpFzRfdm3U067x0+3H+AyapHLR4LeeqDlN88
wxz5zTHikbdhB/6fDfrhCy/S2GrI0RhEPavgSXvnfFFaJmjpP5jq3OM4FKaij1pX
VZyUSi7vbullka2UPnrHH9UhRte99FJNowNx41mhH6dIIu9p6EbOd1NK0fueYjya
bYcIezoqfuDLtiRfw5aueleDVVNB29KtKqZgqMTqAZMTtj1YiI64tqZbjAkUPFal
qmKsMSbhyRgMaGuPdVVvYJRDKaCFYBXR3oAvvQkTqnSS7gaDE6Vjx83FldJaV9Vi
wHMxyrBxRh8qW2Xw0MGuFnspQ293mC+N475VXVwPjULIQiSdMZZJln41v5euIeu7
637AzlidFVGHTqwUrz56FYoqL3YQ0eSp2jyC/QarUYUp1vgjfBc9P6nXwcEut1GH
Wb0frcDsvG194FvZPhedXi86NHUIJFEQu6Ixx0xT29U4L8sWQ0jVxTsFo4lf5zlB
kKrG+YW8RKTV6RBjajz6KLYmA193A83Yy9A2zVl5fqqpXOdguyYnzDgVKyLdUeye
yw8hDq9EQSr26mcIQAdeNWJ/vbd917bqZieM/3NRiyfiW2jYBSoXpfw9QKjdtRLf
Qwdv5zXGXPduOB44AC4yxnGwR5NIU4898thQtVhxhYWU8WAI+zHDFK1uMOu3HuPc
zo9lWARhMc2wU64c+GuMojJv/SpBHJ0YhmjBj/267ZzTijqxBgGYOfwV1gJiKASk
9OuM97yByOtfSHBKRYrUZNcsMmQXOcJyUXOss4vUHRZEsghJ+IhrhFVXGAqgXDjX
6TVscZgUzdw407B33eroR2LUzri071AuM6wMVJaRxI2WE2C0VyTKKPoGu8k7LXoG
yiAOOuQrogMWDGysHah94qaO0LcnjTrmxl012BflNuzYJXn1GvaeyMu8RgNVA3Gg
bmGKAEhpv/BShq0L6qJB3RPfYRmQXXPR9cadgN2SANtURzR2TQ95j5DbAeS0ysNX
cY/F2xzjx/R48f2MZsvlwHCTqHR4JdALlhQZ/eVSBh7/qSMnN9ypTml2sSQ5eD7W
YsRr1oMc82wRiKP4QSqxz07CIBRWNHP0VFRZEff8FrTOUJYDbfTBGvmwR+RSit5x
GjaLpaHhx62ecXem7kuK9F5PpquuaIgcxIWegfFWsxrcFs69f1Pe5tDQzpXanoBN
Wveooh+cLF8LdLs0khwi12L/DAzRHTR7/k1R+0BqsJdoWhEh6OjbM5rtqxocbtd2
cWLGlD0oScMqbc/DtkAeR0ne6OnrwExMiQBGj+8luvOalUqD2DjHSafNphG694X9
ljpOPadhFLRinUI6ff6eMGwRsaKNoid8hjtVo/llTs+YMNB0xIHZPwOrppBfNCry
SdVmYo95phNo7/0ZmFJjkx3etieDf+WqzI1wkMCJ+ymYhhE3rqfq5DUYWcb9hWGt
OlxFzsN67rgOmn7q0nSfNOZJGSeRfY1qO51nm2/7yjeYs9f7gSJc5zETrVrhznA2
9GXhoKj2JGeKK56MXo+Ii1G/nKVO9rM+/u0NfuNWxPcxro0vd1z79u2r+/Tp9/6t
/fXL9uuXz58+//bHF/r09cuf/eXlb2jrYlE=""", "base64"), "zlib"))
    except (OSError, __HOLE__) as error:
        raise errors.RepoError("repository '%s' init failed: %s: %s" % (
            self.root_dir, error.__class__.__name__, error))

label: IOError
info: dataset/ETHPy150Open ohmu/poni/poni/core.py/ConfigMan.init_repo

function:
def _query(self, method, path, data=None, page=False, retry=0):
    """
    Fetch an object from the Graph API and parse the output, returning a tuple
    where the first item is the object yielded by the Graph API and the second
    is the URL for the next page of results, or ``None`` if results have been
    exhausted.

    :param method: A string describing the HTTP method.
    :param path: A string describing the object in the Graph API.
    :param data: A dictionary of HTTP GET parameters (for GET requests) or
        POST data (for POST requests).
    :param page: A boolean describing whether to return an iterator that
        iterates over each page of results.
    :param retry: An integer describing how many times the request may be
        retried.
    """
    if(data):
        data = dict((k.replace('_sqbro_', '['), v) for k, v in data.items())
        data = dict((k.replace('_sqbrc_', ']'), v) for k, v in data.items())
        data = dict((k.replace('__', ':'), v) for k, v in data.items())

    data = data or {}

    def load(method, url, data):
        for key in data:
            value = data[key]

            if isinstance(value, (list, dict, set)):
                data[key] = json.dumps(value)

        try:
            if method in ['GET', 'DELETE']:
                response = self.session.request(
                    method,
                    url,
                    params=data,
                    allow_redirects=True,
                    verify=self.verify_ssl_certificate,
                    timeout=self.timeout
                )

            if method in ['POST', 'PUT']:
                files = {}

                for key in data:
                    if hasattr(data[key], 'read'):
                        files[key] = data[key]

                for key in files:
                    data.pop(key)

                response = self.session.request(
                    method,
                    url,
                    data=data,
                    files=files,
                    verify=self.verify_ssl_certificate,
                    timeout=self.timeout
                )

            if 500 <= response.status_code < 600:
                # Facebook 5XX errors usually come with helpful messages
                # as a JSON object describing the problem with the request.
                # If this is the case, an error will be raised and we just
                # need to re-raise it. This is most likely to happen
                # with the Ads API.

                # This will raise an exception if a JSON-like error object
                # comes in the response.
                self._parse(response.content)

                # If Facebook does not provide any JSON-formatted error
                # but just a plain-text, useless error, we'll just inform
                # about a Facebook Internal errror occurred.
                raise FacebookError(
                    'Internal Facebook error occurred',
                    response.status_code
                )
        except requests.RequestException as exception:
            raise HTTPError(exception)

        result = self._parse(response.content)

        try:
            next_url = result['paging']['next']
        except (__HOLE__, TypeError):
            next_url = None

        return result, next_url

    def paginate(method, url, data):
        while url:
            result, url = load(method, url, data)

            # Reset pagination parameters.
            for key in ['offset', 'until', 'since']:
                if key in data:
                    del data[key]

            yield result

    # Convert option lists to comma-separated values.
    for key in data:
        if isinstance(data[key], (list, set, tuple)) and all(
                [isinstance(item, six.string_types) for item in data[key]]):
            data[key] = ','.join(data[key])

    # Support absolute paths too
    if not path.startswith('/'):
        if six.PY2:
            path = '/' + six.text_type(path.decode('utf-8'))
        else:
            path = '/' + path

    url = self._get_url(path)

    if self.oauth_token:
        data['access_token'] = self.oauth_token

    if self.appsecret and self.oauth_token:
        data['appsecret_proof'] = self._generate_appsecret_proof()

    try:
        if page:
            return paginate(method, url, data)
        else:
            return load(method, url, data)[0]
    except FacepyError:
        if retry:
            return self._query(method, path, data, page, retry - 1)
        else:
            raise

label: KeyError
info: dataset/ETHPy150Open jgorset/facepy/facepy/graph_api.py/GraphAPI._query

function:
def _parse(self, data):
    """
    Parse the response from Facebook's Graph API.

    :param data: A string describing the Graph API's response.
    """
    # tests seems to pass a str, while real usage bytes which should be expected
    if type(data) == type(bytes()):
        data = data.decode('utf-8')

    try:
        data = json.loads(data, parse_float=Decimal)
    except __HOLE__:
        return data

    # Facebook's Graph API sometimes responds with 'true' or 'false'. Facebook offers no documentation
    # as to the prerequisites for this type of response, though it seems that it responds with 'true'
    # when objects are successfully deleted and 'false' upon attempting to delete or access an item that
    # one does not have access to.
    #
    # For example, the API would respond with 'false' upon attempting to query a feed item without having
    # the 'read_stream' extended permission. If you were to query the entire feed, however, it would respond
    # with an empty list instead.
    #
    # Genius.
    #
    # We'll handle this discrepancy as gracefully as we can by implementing logic to deal with this behavior
    # in the high-level access functions (get, post, delete etc.).
    if type(data) is dict:
        if 'error' in data:
            error = data['error']

            if error.get('type') == "OAuthException":
                exception = OAuthError
            else:
                exception = FacebookError

            raise exception(**self._get_error_params(data))

        # Facebook occasionally reports errors in its legacy error format.
        if 'error_msg' in data:
            raise FacebookError(**self._get_error_params(data))

    return data

label: ValueError
info: dataset/ETHPy150Open jgorset/facepy/facepy/graph_api.py/GraphAPI._parse

function:
def handle(self, listener, client, addr):
    req = None
    try:
        if self.cfg.is_ssl:
            client = ssl.wrap_socket(client, server_side=True,
                                     **self.cfg.ssl_options)

        parser = http.RequestParser(self.cfg, client)
        req = six.next(parser)
        self.handle_request(listener, req, client, addr)
    except http.errors.NoMoreData as e:
        self.log.debug("Ignored premature client disconnection. %s", e)
    except __HOLE__ as e:
        self.log.debug("Closing connection. %s", e)
    except ssl.SSLError as e:
        if e.args[0] == ssl.SSL_ERROR_EOF:
            self.log.debug("ssl connection closed")
            client.close()
        else:
            self.log.debug("Error processing SSL request.")
            self.handle_error(req, client, addr, e)
    except EnvironmentError as e:
        if e.errno not in (errno.EPIPE, errno.ECONNRESET):
            self.log.exception("Socket error processing request.")
        else:
            if e.errno == errno.ECONNRESET:
                self.log.debug("Ignoring connection reset")
            else:
                self.log.debug("Ignoring EPIPE")
    except Exception as e:
        self.handle_error(req, client, addr, e)
    finally:
        util.close(client)

label: StopIteration
info: dataset/ETHPy150Open benoitc/gunicorn/gunicorn/workers/sync.py/SyncWorker.handle

function:
def render_GET(self, request):
    """ This method is called by the twisted framework when a GET request
        was received.
    """
    # First check if the version is ok
    try:
        version = request.args['version']
    except __HOLE__:
        request.setResponseCode(httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0])
        request.setHeader('content-type', 'text/plain; charset=utf-8')
        return "Request is missing parameter: 'version'"

    if len(version) != 1:
        request.setResponseCode(httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0])
        request.setHeader('content-type', 'text/plain; charset=utf-8')
        return "Parameter 'version' has to be unique in request."

    version = version[0]

    if version < MINIMAL_VERSION:
        request.setResponseCode(httpstatus.HTTP_STATUS_CODE_GONE[0])
        request.setHeader('content-type', 'text/plain; charset=utf-8')
        return ('Client version is insufficient. Minimal version is '
                "'{0}'.".format(MINIMAL_VERSION))
    elif version > CURRENT_VERSION:
        request.setResponseCode(httpstatus.HTTP_STATUS_CODE_NOT_IMPLEMENTED[0])
        request.setHeader('content-type', 'text/plain; charset=utf-8')
        return 'Client version is newer than version supported by server.'

    # Version is ok, now the GET request can be processed
    # Extract and check the arguments
    try:
        userID = request.args['userID']
    except KeyError:
        request.setResponseCode(httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0])
        request.setHeader('content-type', 'text/plain; charset=utf-8')
        return "Request is missing parameter: 'userID'"

    if len(userID) != 1:
        request.setResponseCode(httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0])
        request.setHeader('content-type', 'text/plain; charset=utf-8')
        return "Parameter 'userID' has to be unique in request."

    userID = userID[0]

    # Get the URL of a Robot process
    d = self._realm.requestURL(userID)
    d.addCallback(self._build_response, version, request)
    d.addErrback(self._handle_error, request)
    return NOT_DONE_YET

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotResource.render_GET

function:
def onConnect(self, req):
    """ Method is called by the Autobahn engine when a request to establish
        a connection has been received.

        @param req:     Connection Request object.
        @type  req:     autobahn.websocket.ConnectionRequest

        @return:        Deferred which fires callback with None or errback
                        with autobahn.websocket.HttpException
    """
    params = req.params

    try:
        userID = params['userID']
        robotID = params['robotID']
        password = params['password']
    except __HOLE__ as e:
        raise HttpException(httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0],
                            'Request is missing parameter: {0}'.format(e))

    for name, param in [('userID', userID), ('robotID', robotID),
                        ('password', password)]:
        if len(param) != 1:
            raise HttpException(httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0],
                                "Parameter '{0}' has to be unique in "
                                'request.'.format(name))

    d = self._realm.login(userID[0], robotID[0], password[0])
    d.addCallback(self._authenticate_success)
    d.addErrback(self._authenticate_failed)
    return d

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol.onConnect

function:
def processCompleteMessage(self, msg):
    """ Process complete messages by calling the appropriate handler for
        the manager. (Called by rce.comm.assembler.MessageAssembler)
    """
    try:
        msgType = msg['type']
        data = msg['data']
    except __HOLE__ as e:
        raise InvalidRequest('Message is missing key: {0}'.format(e))

    if msgType == types.DATA_MESSAGE:
        self._process_DataMessage(data)
    elif msgType == types.CONFIGURE_COMPONENT:
        self._process_configureComponent(data)
    elif msgType == types.CONFIGURE_CONNECTION:
        self._process_configureConnection(data)
    elif msgType == types.CREATE_CONTAINER:
        self._process_createContainer(data)
    elif msgType == types.DESTROY_CONTAINER:
        self._process_destroyContainer(data)
    else:
        raise InvalidRequest('This message type is not supported.')

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol.processCompleteMessage

function:
def _process_createContainer(self, data):
    """ Internally used method to process a request to create a container.
    """
    try:
        self._avatar.createContainer(data['containerTag'],
                                     data.get('containerData', {}))
    except __HOLE__ as e:
        raise InvalidRequest("Can not process 'CreateContainer' request. "
                             'Missing key: {0}'.format(e))

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol._process_createContainer

function:
def _process_destroyContainer(self, data):
    """ Internally used method to process a request to destroy a container.
    """
    try:
        self._avatar.destroyContainer(data['containerTag'])
    except __HOLE__ as e:
        raise InvalidRequest("Can not process 'DestroyContainer' request. "
                             'Missing key: {0}'.format(e))

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol._process_destroyContainer

function:
def _process_configureComponent(self, data):
    """ Internally used method to process a request to configure components.
    """
    for node in data.pop('addNodes', []):
        try:
            self._avatar.addNode(node['containerTag'], node['nodeTag'],
                                 node['pkg'], node['exe'],
                                 node.get('args', ''), node.get('name', ''),
                                 node.get('namespace', ''))
        except KeyError as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'addNodes' is missing key: "
                                 '{0}'.format(e))

    for node in data.pop('removeNodes', []):
        try:
            self._avatar.removeNode(node['containerTag'], node['nodeTag'])
        except KeyError as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'removeNodes' is missing key: "
                                 '{0}'.format(e))

    for conf in data.pop('addInterfaces', []):
        try:
            self._avatar.addInterface(conf['endpointTag'],
                                      conf['interfaceTag'],
                                      conf['interfaceType'],
                                      conf['className'],
                                      conf.get('addr', ''))
        except KeyError as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'addInterfaces' is missing "
                                 'key: {0}'.format(e))

    for conf in data.pop('removeInterfaces', []):
        try:
            self._avatar.removeInterface(conf['endpointTag'],
                                         conf['interfaceTag'])
        except KeyError as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'removeInterfaces' is missing "
                                 'key: {0}'.format(e))

    for param in data.pop('setParam', []):
        try:
            self._avatar.addParameter(param['containerTag'],
                                      param['name'], param['value'])
        except KeyError as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'setParam' is missing key: "
                                 '{0}'.format(e))

    for param in data.pop('deleteParam', []):
        try:
            self._avatar.removeParameter(param['containerTag'],
                                         param['name'])
        except __HOLE__ as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'deleteParam' is missing key: "
                                 '{0}'.format(e))

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol._process_configureComponent

function:
def _process_configureConnection(self, data):
    """ Internally used method to process a request to configure connections.
    """
    for conf in data.pop('connect', []):
        try:
            self._avatar.addConnection(conf['tagA'], conf['tagB'])
        except KeyError as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'connect' is missing key: "
                                 '{0}'.format(e))

    for conf in data.pop('disconnect', []):
        try:
            self._avatar.removeConnection(conf['tagA'], conf['tagB'])
        except __HOLE__ as e:
            raise InvalidRequest("Can not process 'ConfigureComponent' "
                                 "request. 'disconnect' is missing key: "
                                 '{0}'.format(e))

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol._process_configureConnection

function:
def _process_DataMessage(self, data):
    """ Internally used method to process a data message.
    """
    try:
        iTag = str(data['iTag'])
        mType = str(data['type'])
        msgID = str(data['msgID'])
        msg = data['msg']
    except __HOLE__ as e:
        raise InvalidRequest("Can not process 'DataMessage' request. "
                             'Missing key: {0}'.format(e))

    if len(msgID) > 255:
        raise InvalidRequest("Can not process 'DataMessage' request. "
                             'Message ID can not be longer than 255.')

    self._avatar.processReceivedMessage(iTag, mType, msgID, msg)

label: KeyError
info: dataset/ETHPy150Open rapyuta/rce/rce-comm/rce/comm/server.py/RobotWebSocketProtocol._process_DataMessage

function:
def parse_ticket(secret, ticket, ip, digest_algo=DEFAULT_DIGEST):
    """
    Parse the ticket, returning (timestamp, userid, tokens, user_data).

    If the ticket cannot be parsed, ``BadTicket`` will be raised with
    an explanation.
    """
    if isinstance(digest_algo, str):
        # correct specification of digest from hashlib or fail
        digest_algo = getattr(hashlib, digest_algo)
    digest_hexa_size = digest_algo().digest_size * 2
    ticket = ticket.strip('"')
    digest = ticket[:digest_hexa_size]
    try:
        timestamp = int(ticket[digest_hexa_size:digest_hexa_size + 8], 16)
    except __HOLE__ as e:
        raise BadTicket('Timestamp is not a hex integer: %s' % e)
    try:
        userid, data = ticket[digest_hexa_size + 8:].split('!', 1)
    except ValueError:
        raise BadTicket('userid is not followed by !')
    userid = url_unquote(userid)
    if '!' in data:
        tokens, user_data = data.split('!', 1)
    else:
        # @@: Is this the right order?
        tokens = ''
        user_data = data

    expected = calculate_digest(ip, timestamp, secret,
                                userid, tokens, user_data, digest_algo)

    if expected != digest:
        raise BadTicket('Digest signature is not correct',
                        expected=(expected, digest))

    tokens = tokens.split(',')

    return (timestamp, userid, tokens, user_data)

# @@: Digest object constructor compatible with named ones in hashlib only

label: ValueError
info: dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Paste-2.0.1/paste/auth/auth_tkt.py/parse_ticket

function:
def setup(self, gen):
    Node.setup(self, gen)
    try:
        self.target = gen.rules[self.name]
        if self.accepts_epsilon != self.target.accepts_epsilon:
            self.accepts_epsilon = self.target.accepts_epsilon
            gen.changed()
    except __HOLE__:  # Oops, it's nonexistent
        print('Error: no rule <%s>' % self.name, file=sys.stderr)
        self.target = self

label: KeyError
info: dataset/ETHPy150Open smurfix/yapps/yapps/parsetree.py/NonTerminal.setup

function:
def track_event(request, event):
    """
    Simple view that receives and stores an event in the backend (celery
    queue or dummy).
    """
    params = request.GET.get('params')
    if params:
        try:
            params = anyjson.deserialize(params)
        except __HOLE__, e:
            return HttpResponseBadRequest()
    else:
        params = {}

    backend.track(event, params)

    return HttpResponse('OK')

label: ValueError
info: dataset/ETHPy150Open ella/django-event-tracker/eventtracker/views.py/track_event

function:
def rollback(self):
    """Rolls back the database to last """
    try:
        shutil.copy2(self.db_path + "/BACKUP_codedict_db.DB",
                     self.db_path + "/codedict_db.DB")
    except (shutil.Error, IOError, __HOLE__) as error:
        print "Error while rolling back database.\n", error
        sys.exit(1)

label: OSError
info: dataset/ETHPy150Open BastiPaeltz/codedict/source/database.py/Database.rollback

function:
def add_content(self, values, lang_name, insert_type="normal"):
    """Adds content to the database.
       Tries to insert and updates if row already exists.
    """
    # backup database file
    if insert_type == "from_file":
        try:
            shutil.copy2(self.db_path + "/codedict_db.DB",
                         self.db_path + "/BACKUP_codedict_db.DB")
        except (shutil.Error, __HOLE__, OSError) as error:
            print "Error while backing up database.", error
            print "Continuing ..."
    try:
        with self._db_instance:
            dict_cursor = self._db_instance.cursor()
            tags_cursor = self._db_instance.cursor()
            #add language to lang db if not exists
            self._db_instance.execute('''
                INSERT OR IGNORE INTO Languages (language, suffix)
                VALUES (?, "")
            ''', (lang_name, ))
            for new_row in values:
                dict_cursor.execute('''
                    INSERT or REPLACE into Dictionary
                    (id, language, problem, solution)
                    VALUES((SELECT id from Dictionary where problem = ? AND
                            language = (SELECT language from Languages
                                        where language = ?)),
                           (SELECT language from Languages
                            where language = ?), ?, ?)
                ''', (new_row[1], lang_name, lang_name, new_row[1],
                      new_row[2]))
                tags_list = process_input_tags(new_row[0])
                dict_id = dict_cursor.lastrowid
                self._db_instance.execute('''
                    DELETE from ItemsToTags where dictID = ?
                ''', (dict_id,))
                for tag in tags_list:
                    tags_cursor.execute('''
                        INSERT OR REPLACE INTO Tags (id, name, language)
                        VALUES ((SELECT id from Tags
                                 WHERE name = ? and language = ?),
                                ?, (SELECT language from Languages
                                    where language = ?))
                    ''', (tag.strip(), lang_name, tag.strip(), lang_name))
                    tag_id = tags_cursor.lastrowid
                    self._db_instance.execute('''
                        INSERT OR IGNORE into ItemsToTags (tagID, dictID)
                        VALUES (?, ?)
                    ''', (tag_id, dict_id))
    except sqlite3.Error as error:
        print "A database error has ocurred: ", error
        sys.exit(1)

label: IOError
info: dataset/ETHPy150Open BastiPaeltz/codedict/source/database.py/Database.add_content

function:
def dependencies(package_json, version_number=None):
    '''Get the list of package names of dependencies for a package.'''
    versions = package_json['versions']
    if version_number is None:
        # Get the maximum version number if the caller didn't provide one explicity.
        version_number = sorted(versions, cmp=compare_version_numbers, reverse=True)[0]
    try:
        return package_json['versions'][version_number]['dependencies'].keys()
    except __HOLE__:
        return None

label: KeyError
info: dataset/ETHPy150Open zsck/npm-to-git/n2g.py/dependencies

function:
def repository_url(package_json):
    '''Get the URL of a package's repository'''
    try:
        return package_json['repository']['url']
    except __HOLE__:
        return None

label: KeyError
info: dataset/ETHPy150Open zsck/npm-to-git/n2g.py/repository_url

function:
def recursively_replace_dependencies(node_modules_path, kind, memo={}):
    '''Recursively traverse into node_modules/ and then each dependency's
    node_modules/ directory, replacing dependencies as we go.'''
    package_json_filename = os.sep.join([node_modules_path, 'package.json'])
    if os.path.isfile(package_json_filename):
        try:
            in_file = open(package_json_filename)
            package_json = json.loads(in_file.read())
            in_file.close()
            new_pkg_json, new_memos = replace_dependencies(package_json, kind, memo)
            memo.update(new_memos)
            out_file = open(package_json_filename, 'w')
            out_file.write(json.dumps(new_pkg_json, indent=4))
            out_file.close()
        except __HOLE__:
            sys.stderr.write('Could not read/write {0}'.format(package_json_filename))
    for name in os.listdir(node_modules_path):
        path = os.sep.join([node_modules_path, name])
        if os.path.isdir(path):
            recursively_replace_dependencies(path, kind, memo)
    return memo

label: IOError
info: dataset/ETHPy150Open zsck/npm-to-git/n2g.py/recursively_replace_dependencies

function:
def convert(ctx, filename, setup_args, monkey_patch_mode, verbose, output,
            log, show_output=True):
    if monkey_patch_mode == "automatic":
        try:
            if verbose:
                pprint("PINK", "Catching monkey (this may take a while) ...")
            monkey_patch_mode = detect_monkeys(filename, show_output, log)
            if verbose:
                pprint("PINK", "Detected mode: %s" % monkey_patch_mode)
        except __HOLE__:
            e = extract_exception()
            raise UsageException("Error while detecting setup.py type " \
                                 "(original error: %s)" % str(e))

    monkey_patch(ctx.top_node, monkey_patch_mode, filename)
    dist, package_objects = analyse_setup_py(filename, setup_args)
    pkg, options = build_pkg(dist, package_objects, ctx.top_node)

    out = static_representation(pkg, options)
    if output == '-':
        for line in out.splitlines():
            pprint("YELLOW", line)
    else:
        fid = open(output, "w")
        try:
            fid.write(out)
        finally:
            fid.close()

label: ValueError
info: dataset/ETHPy150Open cournape/Bento/bento/convert/commands.py/convert

function:
def loadfile(filename):
    try:
        fp = file(filename, 'r')
        try:
            obj = simplejson.load(fp)
        finally:
            fp.close()
    except __HOLE__:
        raise InvalidClientSecretsError('File not found: "%s"' % filename)
    return _validate_clientsecrets(obj)

label: IOError
info: dataset/ETHPy150Open AppScale/appscale/AppServer/lib/google-api-python-client/oauth2client/clientsecrets.py/loadfile

function:
def get_user_for_response(id, request, include_products=True):
    id_type = unicode(request.args.get("id_type", "url_slug"))

    try:
        logged_in = unicode(getattr(current_user, id_type)) == id
    except __HOLE__:
        logged_in = False

    retrieved_user = get_profile_from_id(
        id,
        id_type,
        show_secrets=logged_in,
        include_products=include_products
    )

    if retrieved_user is None:
        logger.debug(u"in get_user_for_response, user {id} doesn't exist".format(
            id=id))
        abort(404, "That user doesn't exist.")

    g.profile_slug = retrieved_user.url_slug

    return retrieved_user

label: AttributeError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/get_user_for_response

function:
def abort_if_user_not_logged_in(profile):
    allowed = True
    try:
        if current_user.id != profile.id:
            abort_json(401, "You can't do this because it's not your profile.")
    except __HOLE__:
        abort_json(405, "You can't do this because you're not logged in.")

label: AttributeError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/abort_if_user_not_logged_in

function:
def current_user_owns_profile(profile):
    try:
        return current_user.id == profile.id
    except __HOLE__:  #Anonymous
        return False

label: AttributeError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/current_user_owns_profile

function:
def current_user_owns_tiid(tiid):
    try:
        profile_for_current_user = db.session.query(Profile).get(int(current_user.id))
        db.session.expunge(profile_for_current_user)
        return tiid in profile_for_current_user.tiids
    except __HOLE__:  #Anonymous
        return False

label: AttributeError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/current_user_owns_tiid

function:
def current_user_must_own_tiid(tiid):
    try:
        if not current_user_owns_tiid(tiid):
            abort_json(401, "You have to own this product to modify it.")
    except __HOLE__:
        abort_json(405, "You must be logged in to modify products.")


###############################################################################
#
#   BEFORE AND AFTER REQUESTS
#
###############################################################################

label: AttributeError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/current_user_must_own_tiid

function:
@app.before_request
def redirect_to_https():
    try:
        if request.headers["X-Forwarded-Proto"] == "https":
            pass
        else:
            return redirect(request.url.replace("http://", "https://"), 301)  # permanent
    except __HOLE__:
        #logger.debug(u"There's no X-Forwarded-Proto header; assuming localhost, serving http.")
        pass

label: KeyError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/redirect_to_https

function:
@app.before_request
def load_globals():
    g.user = current_user
    try:
        g.user_id = current_user.id
    except __HOLE__:
        g.user_id = None

    g.api_key = os.getenv("API_KEY")
    g.webapp_root = os.getenv("WEBAPP_ROOT_PRETTY", os.getenv("WEBAPP_ROOT"))

label: AttributeError
info: dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/load_globals

@app.route("/profile/current") def get_current_user(): local_sleep(1) try: user_info = g.user.dict_about() except __HOLE__: # anon user has no as_dict() user_info = None return json_resp_from_thing({"user": user_info})
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/get_current_user
@app.route("/profile/<profile_id>/pinboard", methods=["GET", "POST"]) def pinboard_endpoint(profile_id): profile = get_user_for_response(profile_id, request) if request.method == "GET": board = Pinboard.query.filter_by(profile_id=profile.id).first() try: resp = board.contents except __HOLE__: abort_json(404, "user has no pinboard set yet.") elif request.method == "POST": abort_if_user_not_logged_in(profile) # debugging contents = request.json["contents"] product_pins = contents["one"] for (product_lable, tiid) in product_pins: current_user_must_own_tiid(tiid) resp = write_to_pinboard(profile.id, request.json["contents"]) return json_resp_from_thing(resp)
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/pinboard_endpoint
@app.route("/product/<tiid>/pdf", methods=['GET']) def product_pdf(tiid): if request.method == "GET": try: product = get_product(tiid) pdf = product.get_pdf() db.session.merge(product) # get pdf might have cached the pdf commit(db) if pdf: resp = make_response(pdf, 200) resp.mimetype = "application/pdf" resp.headers.add("Content-Disposition", "attachment; filename=impactstory-{tiid}.pdf".format( tiid=tiid)) return resp else: abort_json(404, "This product exists, but has no pdf.") except __HOLE__: abort_json(404, "That product doesn't exist.") except S3ResponseError: abort_json(404, "This product exists, but has no pdf.")
IndexError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/product_pdf
@app.route("/product/<tiid>/file", methods=['GET', 'POST']) def product_file(tiid): if request.method == "GET": try: product = get_product(tiid) if not product: return abort_json(404, "product not found") if product.has_file: my_file = product.get_file() resp = make_response(my_file, 200) return resp else: abort_json(404, "This product exists, but has no file.") except __HOLE__: abort_json(404, "That product doesn't exist.") except S3ResponseError: abort_json(404, "This product exists, but has no file.") elif request.method == "POST": current_user_must_own_tiid(tiid) file_to_upload = request.files['file'].stream product = get_product(tiid) resp = upload_file_and_commit(product, file_to_upload, db) return json_resp_from_thing(resp)
IndexError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/product_file
@app.route("/top.js") def get_js_top(): try: current_user_dict = current_user.dict_about() except __HOLE__: current_user_dict = None return make_js_response( "top.js.tpl", segmentio_key=os.getenv("SEGMENTIO_KEY"), mixpanel_token=os.getenv("MIXPANEL_TOKEN"), stripe_publishable_key=os.getenv("STRIPE_PUBLISHABLE_KEY"), current_user=current_user_dict, genre_configs=configs.genre_configs(), country_names=get_country_names_from_iso() )
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpactwebapp/views.py/get_js_top
function:
def _interpret_args(self, args, kwargs):
    f, gradient = None, self.gradient
    atoms, lists = self._sort_args(args)
    s = self._pop_symbol_list(lists)
    s = self._fill_in_vars(s)

    # prepare the error message for lambdification failure
    f_str = ', '.join(str(fa) for fa in atoms)
    s_str = (str(sa) for sa in s)
    s_str = ', '.join(sa for sa in s_str if sa.find('unbound') < 0)
    f_error = ValueError("Could not interpret arguments "
                         "%s as functions of %s." % (f_str, s_str))

    # try to lambdify args
    if len(atoms) == 1:
        fv = atoms[0]
        try:
            f = lambdify(s, [fv, fv, fv])
        except __HOLE__:
            raise f_error

    elif len(atoms) == 3:
        fr, fg, fb = atoms
        try:
            f = lambdify(s, [fr, fg, fb])
        except TypeError:
            raise f_error

    else:
        raise ValueError("A ColorScheme must provide 1 or 3 "
                         "functions in x, y, z, u, and/or v.")

    # try to intrepret any given color information
    if len(lists) == 0:
        gargs = []

    elif len(lists) == 1:
        gargs = lists[0]

    elif len(lists) == 2:
        try:
            (r1, g1, b1), (r2, g2, b2) = lists
        except TypeError:
            raise ValueError("If two color arguments are given, "
                             "they must be given in the format "
                             "(r1, g1, b1), (r2, g2, b2).")
        gargs = lists

    elif len(lists) == 3:
        try:
            (r1, r2), (g1, g2), (b1, b2) = lists
        except Exception:
            raise ValueError("If three color arguments are given, "
                             "they must be given in the format "
                             "(r1, r2), (g1, g2), (b1, b2). To create "
                             "a multi-step gradient, use the syntax "
                             "[0, colorStart, step1, color1, ..., 1, "
                             "colorEnd].")
        gargs = [[r1, g1, b1], [r2, g2, b2]]

    else:
        raise ValueError("Don't know what to do with collection "
                         "arguments %s." % (', '.join(str(l) for l in lists)))

    if gargs:
        try:
            gradient = ColorGradient(*gargs)
        except Exception as ex:
            raise ValueError(("Could not initialize a gradient "
                              "with arguments %s. Inner "
                              "exception: %s") % (gargs, str(ex)))

    return f, gradient

label: TypeError
info: dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/color_scheme.py/ColorScheme._interpret_args

function:
def _test_color_function(self):
    if not callable(self.f):
        raise ValueError("Color function is not callable.")
    try:
        result = self.f(0, 0, 0, 0, 0)
        if len(result) != 3:
            raise ValueError("length should be equal to 3")
    except TypeError as te:
        raise ValueError("Color function needs to accept x,y,z,u,v, "
                         "as arguments even if it doesn't use all of them.")
    except __HOLE__ as ae:
        raise ValueError("Color function needs to return 3-tuple r,g,b.")
    except Exception as ie:
        pass  # color function probably not valid at 0,0,0,0,0

label: AssertionError
info: dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/color_scheme.py/ColorScheme._test_color_function

function:
def so_bindtodevice_supported():
    try:
        if hasattr(IN, 'SO_BINDTODEVICE'):
            return True
    except __HOLE__:
        pass
    return False

label: NameError
info: dataset/ETHPy150Open circus-tent/circus/circus/tests/test_sockets.py/so_bindtodevice_supported

function:
def safe_import(name):
    try:
        mod = __import__(name, None, None, "*")
    except __HOLE__:
        mod = MissingModule(name)
    except Exception:
        # issue 72: IronPython on Mono
        if sys.platform == "cli" and name == "signal":  #os.name == "posix":
            mod = MissingModule(name)
        else:
            raise
    return mod

label: ImportError
info: dataset/ETHPy150Open sccn/SNAP/src/rpyc/lib/__init__.py/safe_import

function:
def _purge_consumer_by_tag(self, consumer_tag):
    '''Purge consumer entry from this basic instance

    NOTE: this protected method may be called by derived classes

    :param str consumer_tag:
    '''
    try:
        del self._consumer_cb[consumer_tag]
    except __HOLE__:
        self.logger.warning(
            'no callback registered for consumer tag " %s "', consumer_tag)
    else:
        self.logger.info('purged consumer with tag " %s "', consumer_tag)

label: KeyError
info: dataset/ETHPy150Open agoragames/haigha/haigha/classes/basic_class.py/BasicClass._purge_consumer_by_tag

function:
def run(self):  # pylint: disable=too-many-branches
    self._send(signal.RUN_START)

    self._initialize_run()

    try:
        while self.job_queue:
            try:
                self._init_job()
                self._run_job()
            except __HOLE__:
                self.current_job.result.status = IterationResult.ABORTED
                raise
            except Exception, e:  # pylint: disable=broad-except
                self.current_job.result.status = IterationResult.FAILED
                self.current_job.result.add_event(e.message)
                if isinstance(e, DeviceNotRespondingError):
                    self.logger.info('Device appears to be unresponsive.')
                    if self.context.reboot_policy.can_reboot and self.device.can('reset_power'):
                        self.logger.info('Attempting to hard-reset the device...')
                        try:
                            self.device.boot(hard=True)
                            self.device.connect()
                        except DeviceError:  # hard_boot not implemented for the device.
                            raise e
                    else:
                        raise e
                else:  # not a DeviceNotRespondingError
                    self.logger.error(e)
            finally:
                self._finalize_job()
    except KeyboardInterrupt:
        self.logger.info('Got CTRL-C. Finalizing run... (CTRL-C again to abort).')
        # Skip through the remaining jobs.
        while self.job_queue:
            self.context.next_job(self.current_job)
            self.current_job.result.status = IterationResult.ABORTED
            self._finalize_job()
    except DeviceNotRespondingError:
        self.logger.info('Device unresponsive and recovery not possible. Skipping the rest of the run.')
        self.context.aborted = True
        while self.job_queue:
            self.context.next_job(self.current_job)
            self.current_job.result.status = IterationResult.SKIPPED
            self._finalize_job()

    instrumentation.enable_all()
    self._finalize_run()
    self._process_results()

    self.result_manager.finalize(self.context)
    self._send(signal.RUN_END)

label: KeyboardInterrupt
info: dataset/ETHPy150Open ARM-software/workload-automation/wlauto/core/execution.py/Runner.run

function:
def _run_job(self):  # pylint: disable=too-many-branches
    spec = self.current_job.spec
    if not spec.enabled:
        self.logger.info('Skipping workload %s (iteration %s)', spec, self.context.current_iteration)
        self.current_job.result.status = IterationResult.SKIPPED
        return

    self.logger.info('Running workload %s (iteration %s)', spec, self.context.current_iteration)
    if spec.flash:
        if not self.context.reboot_policy.can_reboot:
            raise ConfigError('Cannot flash as reboot_policy does not permit rebooting.')
        if not self.device.can('flash'):
            raise DeviceError('Device does not support flashing.')
        self._flash_device(spec.flash)
    elif not self.completed_jobs:
        # Never reboot on the very fist job of a run, as we would have done
        # the initial reboot if a reboot was needed.
        pass
    elif self.context.reboot_policy.reboot_on_each_spec and self.spec_changed:
        self.logger.debug('Rebooting on spec change.')
        self._reboot_device()
    elif self.context.reboot_policy.reboot_on_each_iteration:
        self.logger.debug('Rebooting on iteration.')
        self._reboot_device()

    instrumentation.disable_all()
    instrumentation.enable(spec.instrumentation)
    self.device.start()

    if self.spec_changed:
        self._send(signal.WORKLOAD_SPEC_START)
    self._send(signal.ITERATION_START)

    try:
        setup_ok = False
        with self._handle_errors('Setting up device parameters'):
            self.device.set_runtime_parameters(spec.runtime_parameters)
            setup_ok = True

        if setup_ok:
            with self._handle_errors('running {}'.format(spec.workload.name)):
                self.current_job.result.status = IterationResult.RUNNING
                self._run_workload_iteration(spec.workload)
        else:
            self.logger.info('\tSkipping the rest of the iterations for this spec.')
            spec.enabled = False
    except __HOLE__:
        self._send(signal.ITERATION_END)
        self._send(signal.WORKLOAD_SPEC_END)
        raise
    else:
        self._send(signal.ITERATION_END)
        if self.spec_will_change or not spec.enabled:
            self._send(signal.WORKLOAD_SPEC_END)
    finally:
        self.device.stop()

label: KeyboardInterrupt
info: dataset/ETHPy150Open ARM-software/workload-automation/wlauto/core/execution.py/Runner._run_job

function:
@contextmanager
def _handle_errors(self, action, on_error_status=IterationResult.FAILED):
    try:
        if action is not None:
            self.logger.debug(action)
        yield
    except (__HOLE__, DeviceNotRespondingError):
        raise
    except (WAError, TimeoutError), we:
        self.device.ping()
        if self.current_job:
            self.current_job.result.status = on_error_status
            self.current_job.result.add_event(str(we))
        try:
            self._take_screenshot('error.png')
        except Exception, e:  # pylint: disable=W0703
            # We're already in error state, so the fact that taking a
            # screenshot failed is not surprising...
            pass
        if action:
            action = action[0].lower() + action[1:]
        self.logger.error('Error while {}:\n\t{}'.format(action, we))
    except Exception, e:  # pylint: disable=W0703
        error_text = '{}("{}")'.format(e.__class__.__name__, e)
        if self.current_job:
            self.current_job.result.status = on_error_status
            self.current_job.result.add_event(error_text)
        self.logger.error('Error while {}'.format(action))
        self.logger.error(error_text)
        if isinstance(e, subprocess.CalledProcessError):
            self.logger.error('Got:')
            self.logger.error(e.output)
        tb = get_traceback()
        self.logger.error(tb)

label: KeyboardInterrupt
info: dataset/ETHPy150Open ARM-software/workload-automation/wlauto/core/execution.py/Runner._handle_errors

function:
def _output_to_dict(cmdoutput, values_mapper=None):
    '''Convert rabbitmqctl output to a dict of data
    cmdoutput: string output of rabbitmqctl commands
    values_mapper: function object to process the values part of each line
    '''
    ret = {}
    if values_mapper is None:
        values_mapper = lambda string: string.split('\t')

    # remove first and last line: Listing ... - ...done
    data_rows = _strip_listing_to_done(cmdoutput.splitlines())

    for row in data_rows:
        try:
            key, values = row.split('\t', 1)
        except __HOLE__:
            # If we have reached this far, we've hit an edge case where the row
            # only has one item: the key. The key doesn't have any values, so we
            # set it to an empty string to preserve rabbitmq reporting behavior.
            # e.g. A user's permission string for '/' is set to ['', '', ''],
            # Rabbitmq reports this only as '/' from the rabbitmqctl command.
            log.debug('Could not find any values for key \'{0}\'. '
                      'Setting to \'{0}\' to an empty string.'.format(row))
            ret[row] = ''
            continue
        ret[key] = values_mapper(values)
    return ret

label: ValueError
info: dataset/ETHPy150Open saltstack/salt/salt/modules/rabbitmq.py/_output_to_dict

function:
def test_turns_into_unicode(self):
    unicode_str = b''.decode('utf-8')
    try:
        assert unicode(self.instance) == unicode_str
    except __HOLE__:
        assert str(self.instance) == unicode_str

label: NameError
info: dataset/ETHPy150Open sigmavirus24/github3.py/tests/unit/test_null.py/TestNullObject.test_turns_into_unicode

function:
def testUploadAccount(self):
    hash_algorithm = gitkitclient.ALGORITHM_HMAC_SHA256
    try:
        hash_key = bytes('key123', 'utf-8')
    except __HOLE__:
        hash_key = 'key123'
    upload_user = gitkitclient.GitkitUser.FromDictionary({
        'email': self.email,
        'localId': self.user_id,
        'displayName': self.user_name,
        'photoUrl': self.user_photo
    })
    with mock.patch('identitytoolkit.rpchelper.RpcHelper._InvokeGitkitApi') as rpc_mock:
        rpc_mock.return_value = {}
        self.gitkitclient.UploadUsers(hash_algorithm, hash_key, [upload_user])
        expected_param = {
            'hashAlgorithm': hash_algorithm,
            'signerKey': base64.urlsafe_b64encode(hash_key),
            'users': [{
                'email': self.email,
                'localId': self.user_id,
                'displayName': self.user_name,
                'photoUrl': self.user_photo
            }]
        }
        rpc_mock.assert_called_with('uploadAccount', expected_param)

label: TypeError
info: dataset/ETHPy150Open google/identity-toolkit-python-client/tests/test_gitkitclient.py/GitkitClientTestCase.testUploadAccount

function:
def get_histogram(self, database, table, column, nested=None):
    """
    Returns the results of an Impala SELECT histogram() FROM query for a given column or nested type.

    Assumes that the column/nested type is scalar.
    """
    results = []
    hql = self.get_histogram_query(database, table, column, nested)
    query = hql_query(hql)
    handle = self.execute_and_wait(query, timeout_sec=5.0)

    if handle:
        result = self.fetch(handle)
        try:
            histogram = list(result.rows())[0][0]  # actual histogram results is in first-and-only result row
            unique_values = set(histogram.split(', '))
            results = list(unique_values)
        except __HOLE__, e:
            LOG.warn('Failed to get histogram results, result set has unexpected format: %s' % smart_str(e))
        finally:
            self.close(handle)

    return results

label: IndexError
info: dataset/ETHPy150Open cloudera/hue/apps/impala/src/impala/dbms.py/ImpalaDbms.get_histogram

function:
def swig_import_helper():
    from os.path import dirname
    import imp
    fp = None
    try:
        fp, pathname, description = imp.find_module('_quadopt', [dirname(__file__)])
    except __HOLE__:
        import _quadopt
        return _quadopt
    if fp is not None:
        try:
            _mod = imp.load_module('_quadopt', fp, pathname, description)
        finally:
            fp.close()
        return _mod

label: ImportError
info: dataset/ETHPy150Open googlefonts/fontcrunch/fontcrunch/quadopt.py/swig_import_helper

function:
@access.admin
@describeRoute(
    Description('Set the value for a system setting, or a list of them.')
    .notes("""Must be a system administrator to call this. If the value
           passed is a valid JSON object, it will be parsed and stored
           as an object.""")
    .param('key', 'The key identifying this setting.', required=False)
    .param('value', 'The value for this setting.', required=False)
    .param('list', 'A JSON list of objects with key and value representing '
           'a list of settings to set.', required=False)
    .errorResponse('You are not a system administrator.', 403)
    .errorResponse('Failed to set system setting.', 500)
)
def setSetting(self, params):
    """
    Set a system-wide setting. Validation of the setting is performed in
    the setting model. If the setting is a valid JSON string, it will be
    passed to the model as the corresponding dict, otherwise it is simply
    passed as a raw string.
    """
    if 'list' in params:
        try:
            settings = json.loads(params['list'])

            if not isinstance(settings, list):
                raise ValueError()
        except ValueError:
            raise RestException('List was not a valid JSON list.')
    else:
        self.requireParams(('key', 'value'), params)
        settings = ({'key': params['key'], 'value': params['value']},)

    for setting in settings:
        if setting['value'] is None:
            value = None
        else:
            try:
                if isinstance(setting['value'], six.string_types):
                    value = json.loads(setting['value'])
                else:
                    value = setting['value']
            except __HOLE__:
                value = setting['value']

        if value is None:
            self.model('setting').unset(key=setting['key'])
        else:
            self.model('setting').set(key=setting['key'], value=value)

    return True

label: ValueError
info: dataset/ETHPy150Open girder/girder/girder/api/v1/system.py/System.setSetting

function:
@access.admin(scope=TokenScope.SETTINGS_READ)
@describeRoute(
    Description('Get the value of a system setting, or a list of them.')
    .notes('Must be a system administrator to call this.')
    .param('key', 'The key identifying this setting.', required=False)
    .param('list', 'A JSON list of keys representing a set of settings to'
           ' return.', required=False)
    .param('default', 'If "none", return a null value if a setting is '
           'currently the default value. If "default", return the '
           'default value of the setting(s).', required=False)
    .errorResponse('You are not a system administrator.', 403)
)
def getSetting(self, params):
    getFuncName = 'get'
    funcParams = {}

    if 'default' in params:
        if params['default'] == 'none':
            funcParams['default'] = None
        elif params['default'] == 'default':
            getFuncName = 'getDefault'
        elif len(params['default']):
            raise RestException("Default was not 'none', 'default', or "
                                "blank.")

    getFunc = getattr(self.model('setting'), getFuncName)

    if 'list' in params:
        try:
            keys = json.loads(params['list'])

            if not isinstance(keys, list):
                raise ValueError()
        except __HOLE__:
            raise RestException('List was not a valid JSON list.')

        return {k: getFunc(k, **funcParams) for k in keys}
    else:
        self.requireParams('key', params)
        return getFunc(params['key'], **funcParams)

label: ValueError
info: dataset/ETHPy150Open girder/girder/girder/api/v1/system.py/System.getSetting

function:
@access.admin
@describeRoute(
    Description('Set the list of enabled plugins for the system.')
    .responseClass('Setting')
    .notes('Must be a system administrator to call this.')
    .param('plugins', 'JSON array of plugins to enable.')
    .errorResponse('Required dependencies do not exist.', 500)
    .errorResponse('You are not a system administrator.', 403)
)
def enablePlugins(self, params):
    self.requireParams('plugins', params)

    try:
        plugins = json.loads(params['plugins'])
    except __HOLE__:
        raise RestException('Plugins parameter should be a JSON list.')

    return self.model('setting').set(SettingKey.PLUGINS_ENABLED, plugins)

label: ValueError
info: dataset/ETHPy150Open girder/girder/girder/api/v1/system.py/System.enablePlugins

function:
@access.admin(scope=TokenScope.PARTIAL_UPLOAD_CLEAN)
@describeRoute(
    Description('Discard uploads that have not been finished.')
    .notes("""Must be a system administrator to call this. This frees
           resources that were allocated for the uploads and clears the
           uploads from database.""")
    .param('uploadId', 'Clear only a specific upload.', required=False)
    .param('userId', 'Restrict clearing uploads to those started by a '
           'specific user.', required=False)
    .param('parentId', 'Restrict clearing uploads to those within a '
           'specific folder or item.', required=False)
    .param('assetstoreId', 'Restrict clearing uploads within a specific '
           'assetstore.', required=False)
    .param('minimumAge', 'Restrict clearing uploads to those that are at '
           'least this many days old.', required=False)
    .param('includeUntracked', 'Some assetstores can have partial uploads '
           'that are no longer in the Girder database. If this is True, '
           'remove all of them (only filtered by assetstoreId). Default '
           'True.', required=False, dataType='boolean')
    .errorResponse('You are not a system administrator.', 403)
    .errorResponse('Failed to delete upload', 500)
)
def discardPartialUploads(self, params):
    uploadList = list(self.model('upload').list(filters=params))

    # Move the results to list that isn't a cursor so we don't have to have
    # the cursor sitting around while we work on the data.
    for upload in uploadList:
        try:
            self.model('upload').cancelUpload(upload)
        except __HOLE__ as exc:
            if exc.errno == errno.EACCES:
                raise GirderException(
                    'Failed to delete upload.',
                    'girder.api.v1.system.delete-upload-failed')
            raise

    untracked = self.boolParam('includeUntracked', params, default=True)
    if untracked:
        assetstoreId = params.get('assetstoreId', None)
        uploadList += self.model('upload').untrackedUploads('delete',
                                                            assetstoreId)

    return uploadList

label: OSError
info: dataset/ETHPy150Open girder/girder/girder/api/v1/system.py/System.discardPartialUploads

function:
def _run_worker():
    LOG.info('(PID=%s) Exporter started.', os.getpid())
    export_worker = worker.get_worker()
    try:
        export_worker.start(wait=True)
    except (KeyboardInterrupt, __HOLE__):
        LOG.info('(PID=%s) Exporter stopped.', os.getpid())
        export_worker.shutdown()
    except:
        return 1
    return 0

label: SystemExit
info: dataset/ETHPy150Open StackStorm/st2/st2exporter/st2exporter/cmd/st2exporter_starter.py/_run_worker

function:
def main():
    try:
        _setup()
        return _run_worker()
    except __HOLE__ as exit_code:
        sys.exit(exit_code)
    except:
        LOG.exception('(PID=%s) Exporter quit due to exception.', os.getpid())
        return 1
    finally:
        _teardown()

label: SystemExit
info: dataset/ETHPy150Open StackStorm/st2/st2exporter/st2exporter/cmd/st2exporter_starter.py/main

function:
def reorder(fname):
    """
    Reorder fields in a configuration file so that assignments of variables
    comes before use.
    """
    fp = open(fname, 'r+')
    options = Options()
    configresult = {}
    section = ""
    configresult[section] = Options()

    for line in fp.readlines():
        line = line.strip()
        if line.startswith("["):
            # New section
            section = line
            configresult[section] = Options()
        elif line.startswith("#"):
            pass  # Lonely comments are removed
        else:
            # Store an option
            try:
                key, value = line.split("=")
                configresult[section].insert(Option(key, value))
            except __HOLE__:
                pass  # Ignore all weird lines

    fp.seek(0)
    fp.truncate()
    for section in configresult:
        fp.write("{}\n".format(section))
        configresult[section].sort()  # Sort options in this section
        for option in configresult[section].options:
            fp.write("{}={}\n".format(option.key, option.value))
    fp.close()

label: ValueError
info: dataset/ETHPy150Open EricssonResearch/calvin-base/calvin/utilities/confsort.py/reorder

function:
def facebook_decorator(func):
    def wrapper(request, *args, **kwargs):
        user = request.user

        # User must me logged via FB backend in order to ensure we talk about
        # the same person
        if not is_complete_authentication(request):
            try:
                user = social_complete(request, FacebookBackend.name)
            except __HOLE__:
                pass  # no matter if failed

        # Not recommended way for FB, but still something we need to be aware
        # of
        if isinstance(user, HttpResponse):
            kwargs.update({'auth_response': user})
        else:
            # Need to re-check the completion
            if is_complete_authentication(request):
                kwargs.update({'access_token': get_access_token(request.user)})
            else:
                request.user = AnonymousUser()
                signed_request = FacebookBackend().load_signed_request(
                    request.REQUEST.get('signed_request', '')
                )
                if signed_request:
                    kwargs.update({'signed_request': signed_request})

        return func(request, *args, **kwargs)
    return wrapper

label: ValueError
info: dataset/ETHPy150Open omab/django-social-auth/example/app/facebook.py/facebook_decorator

function:
def format_message(self, message):
    message = self.normalize_message(message)
    username = message['n_user']
    kind = message.get('kind')
    created = None
    group = message['n_group']
    topic = message['n_topic']
    message_body = message['n_message']
    try:
        created = datetime.fromtimestamp(message['_ts'])
    except (__HOLE__, TypeError):
        pass
    if not created:
        try:
            created = datetime.fromtimestamp(message['date_created'])
        except (KeyError, TypeError):
            created = datetime.now()
    if len(topic) > 15:
        topic = topic[:15] + "..."
    if group and topic:
        label = "[{group}/{topic}]".format(**locals())
    else:
        label = ''
    body = wrap_string(message_body, indent=6).lstrip()
    time = created.strftime('%H:%M')
    # I know explicit is better than implicit - but I'm taking the lazy **locals route
    # in this limited context
    if kind == 'message':
        return '{time} <{username}> {label} {body}'.format(**locals())
    elif kind == 'topic':
        return '{time} <{username}> created new topic in #{group}: "{topic}"'.format(**locals())
    elif kind == 'mention':
        return '{time} <{username}> mentioned you: {label} {body}'.format(**locals())
    elif kind == 'topic-delete':
        return '{time} <{username}> deleted a topic'.format(**locals())
    else:
        return "{time} <{username}> [{group}]: {kind}".format(**locals())

label: KeyError
info: dataset/ETHPy150Open ptone/notifyore/notifyore/notifiers/stream.py/StreamNotifier.format_message

function:
def collapse_address_list(addresses):
    """Collapse a list of IP objects.

    Example:
        collapse_address_list([IPv4Network('1.1.0.0/24'),
                               IPv4Network('1.1.1.0/24')]) ->
                              [IPv4Network('1.1.0.0/23')]

    Args:
        addresses: A list of IPv4Network or IPv6Network objects.

    Returns:
        A list of IPv4Network or IPv6Network objects depending on what we
        were passed.

    Raises:
        TypeError: If passed a list of mixed version objects.

    """
    i = 0
    addrs = []
    ips = []
    nets = []

    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    str(ip), str(ips[-1])))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    str(ip), str(ips[-1])))
            try:
                ips.append(ip.ip)
            except __HOLE__:
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    str(ip), str(ips[-1])))
            nets.append(ip)

    # sort and dedup
    ips = sorted(set(ips))
    nets = sorted(set(nets))

    while i < len(ips):
        (first, last) = _find_address_range(ips[i:])
        i = ips.index(last) + 1
        addrs.extend(summarize_address_range(first, last))

    return _collapse_address_list_recursive(sorted(
        addrs + nets, key=_BaseInterface._get_networks_key))

# backwards compatibility

label: AttributeError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/collapse_address_list

function:
def __eq__(self, other):
    try:
        return (self._ip == other._ip
                and self._version == other._version)
    except __HOLE__:
        return NotImplemented

label: AttributeError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/_BaseAddress.__eq__

function:
def __eq__(self, other):
    try:
        return (self._version == other._version
                and self.network_address == other.network_address
                and int(self.netmask) == int(other.netmask))
    except __HOLE__:
        if isinstance(other, _BaseAddress):
            return (self._version == other._version
                    and self._ip == other._ip)

label: AttributeError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/_BaseInterface.__eq__

function:
def _ip_int_from_string(self, ip_str):
    """Turn the given IP string into an integer for comparison.

    Args:
        ip_str: A string, the IP ip_str.

    Returns:
        The IP ip_str as an integer.

    Raises:
        AddressValueError: if ip_str isn't a valid IPv4 Address.

    """
    octets = ip_str.split('.')
    if len(octets) != 4:
        raise AddressValueError(ip_str)

    packed_ip = 0
    for oc in octets:
        try:
            packed_ip = (packed_ip << 8) | self._parse_octet(oc)
        except __HOLE__:
            raise AddressValueError(ip_str)
    return packed_ip

label: ValueError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/_BaseV4._ip_int_from_string

function:
def _is_hostmask(self, ip_str):
    """Test if the IP string is a hostmask (rather than a netmask).

    Args:
        ip_str: A string, the potential hostmask.

    Returns:
        A boolean, True if the IP string is a hostmask.

    """
    bits = ip_str.split('.')
    try:
        parts = [int(x) for x in bits if int(x) in self._valid_mask_octets]
    except __HOLE__:
        return False
    if len(parts) != len(bits):
        return False
    if parts[0] < parts[-1]:
        return True
    return False

label: ValueError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/IPv4Interface._is_hostmask

function:
def _is_valid_netmask(self, netmask):
    """Verify that the netmask is valid.

    Args:
        netmask: A string, either a prefix or dotted decimal
            netmask.

    Returns:
        A boolean, True if the prefix represents a valid IPv4
        netmask.

    """
    mask = netmask.split('.')
    if len(mask) == 4:
        if [x for x in mask if int(x) not in self._valid_mask_octets]:
            return False
        if [y for idx, y in enumerate(mask)
                if idx > 0 and y > mask[idx - 1]]:
            return False
        return True
    try:
        netmask = int(netmask)
    except __HOLE__:
        return False
    return 0 <= netmask <= self._max_prefixlen

# backwards compatibility

label: ValueError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/IPv4Interface._is_valid_netmask

function:
def _ip_int_from_string(self, ip_str):
    """Turn an IPv6 ip_str into an integer.

    Args:
        ip_str: A string, the IPv6 ip_str.

    Returns:
        A long, the IPv6 ip_str.

    Raises:
        AddressValueError: if ip_str isn't a valid IPv6 Address.

    """
    parts = ip_str.split(':')

    # An IPv6 address needs at least 2 colons (3 parts).
    if len(parts) < 3:
        raise AddressValueError(ip_str)

    # If the address has an IPv4-style suffix, convert it to hexadecimal.
    if '.' in parts[-1]:
        ipv4_int = IPv4Address(parts.pop())._ip
        parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
        parts.append('%x' % (ipv4_int & 0xFFFF))

    # An IPv6 address can't have more than 8 colons (9 parts).
    if len(parts) > self._HEXTET_COUNT + 1:
        raise AddressValueError(ip_str)

    # Disregarding the endpoints, find '::' with nothing in between.
    # This indicates that a run of zeroes has been skipped.
    try:
        skip_index, = (
            [i for i in xrange(1, len(parts) - 1) if not parts[i]] or
            [None])
    except ValueError:
        # Can't have more than one '::'
        raise AddressValueError(ip_str)

    # parts_hi is the number of parts to copy from above/before the '::'
    # parts_lo is the number of parts to copy from below/after the '::'
    if skip_index is not None:
        # If we found a '::', then check if it also covers the endpoints.
        parts_hi = skip_index
        parts_lo = len(parts) - skip_index - 1
        if not parts[0]:
            parts_hi -= 1
            if parts_hi:
                raise AddressValueError(ip_str)  # ^: requires ^::
        if not parts[-1]:
            parts_lo -= 1
            if parts_lo:
                raise AddressValueError(ip_str)  # :$ requires ::$
        parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo)
        if parts_skipped < 1:
            raise AddressValueError(ip_str)
    else:
        # Otherwise, allocate the entire address to parts_hi. The endpoints
        # could still be empty, but _parse_hextet() will check for that.
        if len(parts) != self._HEXTET_COUNT:
            raise AddressValueError(ip_str)
        parts_hi = len(parts)
        parts_lo = 0
        parts_skipped = 0

    try:
        # Now, parse the hextets into a 128-bit integer.
        ip_int = 0L
        for i in xrange(parts_hi):
            ip_int <<= 16
            ip_int |= self._parse_hextet(parts[i])
        ip_int <<= 16 * parts_skipped
        for i in xrange(-parts_lo, 0):
            ip_int <<= 16
            ip_int |= self._parse_hextet(parts[i])
        return ip_int
    except __HOLE__:
        raise AddressValueError(ip_str)

label: ValueError
info: dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/_BaseV6._ip_int_from_string

def _is_valid_netmask(self, prefixlen):
        """Verify that the netmask/prefixlen is valid.

        Args:
            prefixlen: A string, the netmask in prefix length format.

        Returns:
            A boolean, True if the prefix represents a valid IPv6 netmask.

        """
        try:
            prefixlen = int(prefixlen)
        except __HOLE__:
            return False
        return 0 <= prefixlen <= self._max_prefixlen
ValueError
dataset/ETHPy150Open uwdata/termite-data-server/web2py/gluon/contrib/ipaddr.py/IPv6Interface._is_valid_netmask
def test_describe_instances(self):
        i = InfrastructureManager()

        params1 = {}
        result1 = i.describe_instances(params1, 'secret1')
        self.assertFalse(result1['success'])
        self.assertEquals(result1['reason'],
                          InfrastructureManager.REASON_BAD_SECRET)

        # test the scenario where we fail to give describe_instances a
        # reservation id
        params2 = {}
        result2 = i.describe_instances(params2, 'secret')
        self.assertFalse(result2['success'])
        self.assertEquals(result2['reason'],
                          'no ' + InfrastructureManager.PARAM_RESERVATION_ID)

        # test what happens when a caller fails to give describe instances
        # a reservation id that's in the system
        params3 = {InfrastructureManager.PARAM_RESERVATION_ID: 'boo'}
        result3 = i.describe_instances(params3, 'secret')
        self.assertFalse(result3['success'])
        self.assertEquals(result3['reason'],
                          InfrastructureManager.REASON_RESERVATION_NOT_FOUND)

        # test what happens when a caller gives describe_instances a
        # reservation id that is in the system
        id = '0000000000'
        params4 = {InfrastructureManager.PARAM_RESERVATION_ID: id}
        vm_info = {
            'public_ips': ['public-ip'],
            'private_ips': ['private-ip'],
            'instance_ids': ['i-id']
        }
        status_info = {
            'success': True,
            'reason': 'received run request',
            'state': InfrastructureManager.STATE_RUNNING,
            'vm_info': vm_info
        }
        i.reservations.put(id, status_info)
        result4 = i.reservations.get(id)
        self.assertEquals(result4, i.describe_instances(params4, "secret"))

        params5 = json.dumps(params4)
        self.assertEquals(result4, i.describe_instances(params5, "secret"))

        try:
            i.describe_instances('foo', 'bar')
            self.fail('Must throw an exception')
        except Exception:
            pass

        try:
            i.describe_instances({'reservation_id': 'foo'}, {})
            self.fail('Must throw an exception')
        except __HOLE__:
            pass
TypeError
dataset/ETHPy150Open AppScale/appscale/InfrastructureManager/tests/test_infrastructure_manager.py/TestInfrastructureManager.test_describe_instances
def load_object(path):
    """Load an object given its absolute object path, and return it.

    object can be a class, function, variable or an instance.
    path ie: 'scrapy.downloadermiddlewares.redirect.RedirectMiddleware'
    """

    try:
        dot = path.rindex('.')
    except ValueError:
        raise ValueError("Error loading object '%s': not a full path" % path)

    module, name = path[:dot], path[dot+1:]
    mod = import_module(module)

    try:
        obj = getattr(mod, name)
    except __HOLE__:
        raise NameError("Module '%s' doesn't define any object named '%s'" % (module, name))

    return obj
AttributeError
dataset/ETHPy150Open scrapy/scrapy/scrapy/utils/misc.py/load_object
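The masked exception here comes from getattr() on a module that lacks the requested attribute; a short self-contained illustration (module and attribute names chosen only for the demo):

from importlib import import_module

mod = import_module('os.path')
try:
    obj = getattr(mod, 'no_such_callable')  # raises AttributeError
except AttributeError:
    print("Module 'os.path' doesn't define any object named 'no_such_callable'")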
def get_default_status(self, status=None, request=request):
        try:
            return request.headers[self.requested_response_status_header]
        except (__HOLE__, RuntimeError):
            return super(API, self).get_default_status()
KeyError
dataset/ETHPy150Open salsita/flask-raml/flask_raml.py/API.get_default_status
def metadata(self, name):
        """ Returns the metadata for the named variable.

        Args
        ----
        name : str
            Name of variable to get the metadata for.

        Returns
        -------
        dict
            The metadata dict for the named variable.

        Raises
        -------
        KeyError
            If the named variable is not in this vector.
        """
        try:
            return self._dat[name].meta
        except __HOLE__ as error:
            raise KeyError("Variable '%s' does not exist" % name)
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/core/vec_wrapper.py/VecWrapper.metadata
def _get_local_idxs(self, name, idx_dict, get_slice=False):
        """
        Returns all of the indices for the named variable in this vector.

        Args
        ----
        name : str
            Name of variable to get the indices for.

        get_slice : bool, optional
            If True, return the idxs as a slice object, if possible.

        Returns
        -------
        size
            The size of the named variable.

        ndarray
            Index array containing all local indices for the named variable.
        """
        try:
            slc = self._dat[name].slice
            if slc is None:
                return self.make_idx_array(0, 0)
        except __HOLE__:
            # this happens if 'name' doesn't exist in this process
            return self.make_idx_array(0, 0)

        start, end = slc

        if name in idx_dict:
            #TODO: possible slice conversion
            idxs = self.to_idx_array(idx_dict[name]) + start
            if idxs.size > (end-start) or max(idxs) >= end:
                raise RuntimeError("Indices of interest specified for '%s'"
                                   "are too large" % name)
            return idxs
        else:
            if get_slice:
                return slice(start, end)
            return self.make_idx_array(start, end)
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/core/vec_wrapper.py/VecWrapper._get_local_idxs
def setup(self, parent_params_vec, params_dict, srcvec, my_params,
          connections, relevance=None, var_of_interest=None,
          store_byobjs=False, shared_vec=None, alloc_complex=False):
        """
        Configure this vector to store a flattened array of the variables
        in params_dict. Variable shape and value are retrieved from srcvec.

        Args
        ----
        parent_params_vec : `VecWrapper` or None
            `VecWrapper` of parameters from the parent `System`.

        params_dict : `OrderedDict`
            Dictionary of parameter absolute name mapped to metadata dict.

        srcvec : `VecWrapper`
            Source `VecWrapper` corresponding to the target `VecWrapper` we're
            building.

        my_params : list of str
            A list of absolute names of parameters that the `VecWrapper` we're
            building will 'own'.

        connections : dict of str : str
            A dict of absolute target names mapped to the absolute name of
            their source variable.

        relevance : `Relevance` object
            Object that knows what vars are relevant for each var_of_interest.

        var_of_interest : str or None
            Name of the current variable of interest.

        store_byobjs : bool, optional
            If True, store 'pass by object' variables in the `VecWrapper`
            we're building.

        shared_vec : ndarray, optional
            If not None, create vec as a subslice of this array.

        alloc_complex : bool, optional
            If True, allocate space for the imaginary part of the vector and
            configure all functions to support complex computation.
        """
        # dparams vector has some additional behavior
        if not store_byobjs:
            self.deriv_units = True

        src_to_prom_name = srcvec._sysdata.to_prom_name
        scoped_name = self._sysdata._scoped_abs_name

        vec_size = 0
        missing = []  # names of our params that we don't 'own'
        syspath = self._sysdata.pathname + '.'

        for meta in itervalues(params_dict):
            if relevance is None or relevance.is_relevant(var_of_interest,
                                                          meta['top_promoted_name']):
                pathname = meta['pathname']
                if pathname in my_params:
                    # if connected, get metadata from the source
                    try:
                        src = connections[pathname]
                    except __HOLE__:
                        raise RuntimeError("Parameter '%s' is not connected" % pathname)
                    src_pathname, idxs = src
                    src_rel_name = src_to_prom_name[src_pathname]
                    src_acc = srcvec._dat[src_rel_name]

                    slc, val = self._setup_var_meta(pathname, meta, vec_size,
                                                    src_acc, store_byobjs)

                    if 'remote' not in meta or not meta['remote']:
                        vec_size += meta['size']

                    self._dat[scoped_name(pathname)] = Accessor(self, slc, val, meta,
                                                                self._probdata,
                                                                alloc_complex)
                elif parent_params_vec is not None and pathname in connections:
                    src, _ = connections[pathname]
                    common = get_common_ancestor(src, pathname)
                    if (common == self._sysdata.pathname or syspath not in common):
                        missing.append(pathname)

        if shared_vec is not None:
            self.vec = shared_vec[:vec_size]
        else:
            self.alloc_complex = alloc_complex
            self.vec = numpy.zeros(vec_size)
            if alloc_complex:
                self.imag_vec = numpy.zeros(vec_size)

        # map slices to the array
        for acc in itervalues(self._dat):
            if not (acc.pbo or acc.remote):
                start, end = acc.slice
                acc.val = self.vec[start:end]
                if alloc_complex:
                    acc.imag_val = self.imag_vec[start:end]

        # fill entries for missing params with views from the parent
        if parent_params_vec is not None:
            parent_scoped_name = parent_params_vec._sysdata._scoped_abs_name
            for pathname in missing:
                parent_acc = parent_params_vec._dat[parent_scoped_name(pathname)]
                newmeta = parent_acc.meta
                if newmeta['pathname'] == pathname:
                    if alloc_complex is True and not newmeta.get('pass_by_obj'):
                        imag_val = parent_acc.imag_val
                    else:
                        imag_val = None

                    # mark this param as not 'owned' by this VW
                    self._dat[scoped_name(pathname)] = Accessor(self, None,
                                                                parent_acc.val,
                                                                newmeta,
                                                                self._probdata,
                                                                alloc_complex,
                                                                owned=False,
                                                                imag_val=imag_val)
KeyError
dataset/ETHPy150Open OpenMDAO/OpenMDAO/openmdao/core/vec_wrapper.py/TgtVecWrapper.setup
def test_strings(self):
        assert_that('').is_not_none()
        assert_that('').is_empty()
        assert_that('').is_false()
        assert_that('').is_type_of(str)
        assert_that('').is_instance_of(str)

        assert_that('foo').is_length(3)
        assert_that('foo').is_not_empty()
        assert_that('foo').is_true()
        assert_that('foo').is_alpha()
        assert_that('123').is_digit()
        assert_that('foo').is_lower()
        assert_that('FOO').is_upper()
        assert_that('foo').is_iterable()
        assert_that('foo').is_equal_to('foo')
        assert_that('foo').is_not_equal_to('bar')
        assert_that('foo').is_equal_to_ignoring_case('FOO')

        if sys.version_info[0] == 3:
            assert_that('foo').is_unicode()
        else:
            assert_that(u'foo').is_unicode()

        assert_that('foo').contains('f')
        assert_that('foo').contains('f','oo')
        assert_that('foo').contains_ignoring_case('F','oO')
        assert_that('foo').does_not_contain('x')
        assert_that('foo').contains_sequence('o','o')

        assert_that('foo').contains_duplicates()
        assert_that('fox').does_not_contain_duplicates()

        assert_that('foo').is_in('foo','bar','baz')
        assert_that('foo').is_not_in('boo','bar','baz')
        assert_that('foo').is_subset_of('abcdefghijklmnopqrstuvwxyz')

        assert_that('foo').starts_with('f')
        assert_that('foo').ends_with('oo')

        assert_that('foo').matches(r'\w')
        assert_that('123-456-7890').matches(r'\d{3}-\d{3}-\d{4}')
        assert_that('foo').does_not_match(r'\d+')

        # partial matches, these all pass
        assert_that('foo').matches(r'\w')
        assert_that('foo').matches(r'oo')
        assert_that('foo').matches(r'\w{2}')

        # match the entire string with an anchored regex pattern, passes
        assert_that('foo').matches(r'^\w{3}$')

        # fails
        try:
            assert_that('foo').matches(r'^\w{2}$')
            fail('should have raised error')
        except __HOLE__:
            pass
AssertionError
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_readme.py/TestReadme.test_strings
def test_custom_error_message(self):
        try:
            assert_that(1+2).is_equal_to(2)
            fail('should have raised error')
        except AssertionError as e:
            assert_that(str(e)).is_equal_to('Expected <3> to be equal to <2>, but was not.')

        try:
            assert_that(1+2).described_as('adding stuff').is_equal_to(2)
            fail('should have raised error')
        except __HOLE__ as e:
            assert_that(str(e)).is_equal_to('[adding stuff] Expected <3> to be equal to <2>, but was not.')
AssertionError
dataset/ETHPy150Open ActivisionGameScience/assertpy/tests/test_readme.py/TestReadme.test_custom_error_message
def get_minpad(self, axis):
        try:
            return self._minpad[axis]
        except __HOLE__:
            return self._minpad
TypeError
dataset/ETHPy150Open glue-viz/glue/glue/external/wcsaxes/axislabels.py/AxisLabels.get_minpad
def check_x_scale(x_scale, x0):
    if isinstance(x_scale, string_types) and x_scale == 'jac':
        return x_scale

    try:
        x_scale = np.asarray(x_scale, dtype=float)
        valid = np.all(np.isfinite(x_scale)) and np.all(x_scale > 0)
    except (__HOLE__, TypeError):
        valid = False

    if not valid:
        raise ValueError("`x_scale` must be 'jac' or array_like with "
                         "positive numbers.")

    if x_scale.ndim == 0:
        x_scale = np.resize(x_scale, x0.shape)

    if x_scale.shape != x0.shape:
        raise ValueError("Inconsistent shapes between `x_scale` and `x0`.")

    return x_scale
ValueError
dataset/ETHPy150Open scipy/scipy/scipy/optimize/_lsq/least_squares.py/check_x_scale
def getStreamedConstant(constant_value):
    # Note: The marshal module cannot persist all unicode strings and
    # therefore cannot be used. Instead we use pickle.
    try:
        saved = cpickle.dumps(
            constant_value,
            protocol = 0 if type(constant_value) is unicode else pickle_protocol
        )
    except __HOLE__:
        warning("Problem with persisting constant '%r'." % constant_value)
        raise

    saved = pickletools.optimize(saved)

    # Check that the constant is restored correctly.
    try:
        restored = cpickle.loads(
            saved
        )
    except:
        warning("Problem with persisting constant '%r'." % constant_value)
        raise

    if not Constants.compareConstants(restored, constant_value):
        raise AssertionError(
            "Streaming of constant changed value",
            constant_value,
            "!=",
            restored,
            "types:",
            type(constant_value),
            type(restored)
        )

    return saved
TypeError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/codegen/Pickling.py/getStreamedConstant
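For the record above, pickling raises TypeError when handed a value it cannot serialize; a generator is one standard example of such a value (chosen only for illustration; Python 3's pickle behaves like cPickle here):

import pickle

try:
    pickle.dumps((n for n in range(3)))  # generators are not picklable
except TypeError as error:
    print("Problem with persisting constant: %s" % error)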
def do_deletes():
    while True:
        f = _get_delete_path()
        if f is None:
            break
        try:
            if os.path.isfile(f):
                os.remove(f)
            elif os.path.isdir(f):
                rm_full_dir(f)
        except __HOLE__:
            log.error("Failed to delete: %s", f)
OSError
dataset/ETHPy150Open blackberry/ALF/alf/local.py/do_deletes
def main(proj_name, project_inst, run_folder, template_fn, iters,
         aggr_min, aggr_max, keep, timeout, write_pickle, reduce, reduce_n):
    "main loop"
    ext = os.path.splitext(os.path.basename(template_fn))[1]
    results = 0
    iterno = 0
    is_replay = (iters == 1 and aggr_min == 0 and aggr_max == 0)
    log.info("Running project %s for %d iteration(s).", proj_name, iters)
    log.info("Results will be written to %s", run_folder)
    log.info("Iteration timeout: %r", timeout)
    log.info("Ctrl+C to quit")
    log.info("%-20s %-10s %s", "Iterations", "Rate", "Failures")
    start_time = time.time()
    print_time = start_time
    done = False
    if timeout is not None:
        timeout_event = threading.Event()
        timeout_continue = threading.Event()

        class TimeoutThread(threading.Thread):
            def run(self):
                while not done:
                    if timeout_event.wait(timeout) is False:
                        # dump thread stacks and exit
                        log.error("Iteration timeout occurred!")
                        for thread_id, stack in sys._current_frames().items():
                            if thread_id == self.ident:
                                continue
                            log.error("Thread: %d", thread_id)
                            traceback.print_stack(stack)
                            log.error("")
                        _thread.interrupt_main()
                        return
                    timeout_event.clear()
                    timeout_continue.set()

        tout_tid = TimeoutThread()
        tout_tid.start()
    try:
        while not iters or iterno < iters:
            printed = False
            # create mutation fn
            if is_replay:
                mutation_fn = os.path.basename(template_fn)
            else:
                mutation_fn = "mutation_%08X%s" % (iterno, ext)
            # do an iteration
            iter_had_result = False
            cls = ""
            result = project_inst.do_iteration(mutation_fn,
                                               random.randint(aggr_max, aggr_min))
            if result is not None:
                if not isinstance(result, FuzzResult):
                    raise TypeError("Expecting FuzzResult, not %s" % type(result))
                iter_had_result = True
                cls = result.classification
                if result.classification != "NOT_AN_EXCEPTION":
                    if not os.path.isfile(mutation_fn):
                        raise Exception("result reported before mutation written to disk")
                    results += 1
                if keep or result.classification != "NOT_AN_EXCEPTION":
                    if is_replay:
                        log_fn = "%s.log.xml" % os.path.basename(template_fn)
                        pkl_fn = "%s.pkl" % os.path.basename(template_fn)
                    else:
                        log_fn = "mutation_%08X.log.xml" % (iterno)
                        pkl_fn = "mutation_%08X.pkl" % iterno
                    with open(log_fn, "w") as logf:
                        logf.write("<log>\n")
                        logf.write("<classification>%s</classification>\n" % result.classification)
                        logf.write("<backtrace>\n")
                        for lso in result.backtrace:
                            logf.write("<sym>%s</sym>\n" % lso)
                        logf.write("</backtrace>\n")
                        logf.write("<text>\n")
                        logf.write(result.text)
                        logf.write("</text>\n")
                        logf.write("</log>\n")
                    if write_pickle:
                        with open(pkl_fn, "wb") as f:
                            pickle.dump(result, f)
                if reduce:
                    with open(template_fn, "rb") as f:
                        mutation = f.read()
                    for r in reduce:
                        mutation = _reduce(project_inst, r, reduce_n, mutation,
                                           mutation_fn, result)
                    oresult = result
                    with open(mutation_fn, "wb") as f:
                        f.write(mutation)
                    result = project_inst.run_subject(mutation_fn)
                    if not project_inst.resultmatch(oresult, result):
                        raise Exception("Result didn't match post-reduce")
            elif reduce:
                log.warning("--reduce specified, but no failure was found")
            # remove the mutation if it didn't cause failure
            if not keep and (not iter_had_result or cls == "NOT_AN_EXCEPTION" or is_replay):
                delete(mutation_fn)
            iterno += 1
            if time.time() - print_time >= 10:
                print_time = time.time()
                print_progress(start_time, iterno, results)
                printed = True
            if timeout is not None:
                timeout_event.set()
                timeout_continue.wait()
            do_deletes()
    except __HOLE__:
        log.info("User interrupt")
    finally:
        if not printed:
            print_progress(start_time, iterno, results)
        elapsed_time = time.time() - start_time
        log.info("Ran %d iterations and found %d results in %.2fs", iterno,
                 results, elapsed_time)
        project_inst.cleanup()
        project_inst.finish()
        done = True
        if timeout is not None:
            timeout_event.set()
        do_deletes()
KeyboardInterrupt
dataset/ETHPy150Open blackberry/ALF/alf/local.py/main
def load_project(project_name):
    # load project and check that it looks okay
    try:
        importlib.import_module(project_name)
    except ImportError as e:
        try:
            #TODO: relative module imports in a projects/Project will fail for some reason
            importlib.import_module("projects.%s" % project_name)
        except __HOLE__ as e:
            log.error("Failed to import project %s", project_name, exc_info=1)
            sys.exit(1)
    if len(_registered) != 1:
        log.error("Project must register itself using alf.register(). "
                  "%d projects registered, expecting 1.", len(_registered))
        sys.exit(1)
    project_cls = _registered.pop()
    if not issubclass(project_cls, Fuzzer):
        raise TypeError("Expecting a Fuzzer, not '%s'" % type(project_cls))
    return project_cls
ImportError
dataset/ETHPy150Open blackberry/ALF/alf/local.py/load_project
def _internal_increment(self, namespace, request):
        """Internal function for incrementing from a MemcacheIncrementRequest.

        Args:
          namespace: A string containing the namespace for the request, if any.
            Pass an empty string if there is no namespace.
          request: A MemcacheIncrementRequest instance.

        Returns:
          An integer or long if the offset was successful, None on error.
        """
        key = request.key()
        entry = self._GetKey(namespace, key)
        if entry is None:
            if not request.has_initial_value():
                return None
            if namespace not in self._the_cache:
                self._the_cache[namespace] = {}
            flags = 0
            if request.has_initial_flags():
                flags = request.initial_flags()
            self._the_cache[namespace][key] = CacheEntry(str(request.initial_value()),
                                                         expiration=0,
                                                         flags=flags,
                                                         cas_id=self._next_cas_id,
                                                         gettime=self._gettime)
            self._next_cas_id += 1
            entry = self._GetKey(namespace, key)
            assert entry is not None

        try:
            old_value = long(entry.value)
            if old_value < 0:
                raise ValueError
        except __HOLE__:
            logging.error('Increment/decrement failed: Could not interpret '
                          'value for key = "%s" as an unsigned integer.', key)
            return None

        delta = request.delta()
        if request.direction() == MemcacheIncrementRequest.DECREMENT:
            delta = -delta

        new_value = max(old_value + delta, 0) % (2**64)

        entry.value = str(new_value)
        return new_value
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/memcache/memcache_stub.py/MemcacheServiceStub._internal_increment
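The masked ValueError above comes from coercing a stored cache value to an integer (the record uses Python 2's long(); int() shows the same failure). The cache contents here are hypothetical:

cache_value = 'not-a-number'  # e.g. a string previously stored by a client
try:
    old_value = int(cache_value)
    if old_value < 0:
        raise ValueError
except ValueError:
    print('Increment/decrement failed: could not interpret value '
          'as an unsigned integer.')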
def unthe(self, text, pattern):
        """Moves pattern in the path format string or strips it

        text -- text to handle
        pattern -- regexp pattern (case ignore is already on)
        strip -- if True, pattern will be removed
        """
        if text:
            r = re.compile(pattern, flags=re.IGNORECASE)
            try:
                t = r.findall(text)[0]
            except __HOLE__:
                return text
            else:
                r = re.sub(r, '', text).strip()
                if self.config['strip']:
                    return r
                else:
                    fmt = self.config['format'].get(unicode)
                    return fmt.format(r, t.strip()).strip()
        else:
            return u''
IndexError
dataset/ETHPy150Open beetbox/beets/beetsplug/the.py/ThePlugin.unthe
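In the record above, re.findall returns an empty list when the pattern does not match, so indexing [0] raises the masked IndexError; a minimal reproduction (pattern and text are illustrative):

import re

r = re.compile(r'^the\s', flags=re.IGNORECASE)
try:
    t = r.findall('Led Zeppelin IV')[0]
except IndexError:
    t = None  # no leading article to move or strip
print(t)  # None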
@access.user
    @loadmodel(model='collection', level=AccessType.ADMIN)
    @filtermodel(model='collection', addFields={'access'})
    @describeRoute(
        Description('Set the access control list for a collection.')
        .param('id', 'The ID of the collection.', paramType='path')
        .param('access', 'The access control list as JSON.')
        .param('public', "Whether the collection should be publicly visible.",
               dataType='boolean', required=False)
        .param('recurse', 'Whether the policies should be applied to all '
               'folders under this collection as well.', dataType='boolean',
               default=False, required=False)
        .param('progress', 'If recurse is set to True, this controls whether '
               'progress notifications will be sent.', dataType='boolean',
               default=False, required=False)
        .errorResponse('ID was invalid.')
        .errorResponse('Admin permission denied on the collection.', 403)
    )
    def updateCollectionAccess(self, collection, params):
        self.requireParams('access', params)

        user = self.getCurrentUser()
        public = self.boolParam('public', params)
        recurse = self.boolParam('recurse', params, default=False)
        progress = self.boolParam('progress', params, default=False) and recurse

        try:
            access = json.loads(params['access'])
        except __HOLE__:
            raise RestException('The access parameter must be JSON.')

        with ProgressContext(progress, user=user, title='Updating permissions',
                             message='Calculating progress...') as ctx:
            if progress:
                ctx.update(total=self.model('collection').subtreeCount(
                    collection, includeItems=False, user=user,
                    level=AccessType.ADMIN))
            return self.model('collection').setAccessList(
                collection, access, save=True, user=user, recurse=recurse,
                progress=ctx, setPublic=public)
ValueError
dataset/ETHPy150Open girder/girder/girder/api/v1/collection.py/Collection.updateCollectionAccess
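The masked ValueError is raised by json.loads on malformed input; since Python 3.5 the concrete type is json.JSONDecodeError, a ValueError subclass, so the except clause above still matches:

import json

try:
    access = json.loads('not a JSON document')
except ValueError:
    print('The access parameter must be JSON.')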
def encrypt(claims, jwk, adata='', add_header=None, alg='RSA-OAEP',
            enc='A128CBC-HS256', rng=get_random_bytes, compression=None):
    """ Encrypts the given claims and produces a :class:`~jose.JWE`

    :param claims: A `dict` representing the claims for this
                   :class:`~jose.JWE`.
    :param jwk: A `dict` representing the JWK to be used for encryption of
                the CEK. This parameter is algorithm-specific.
    :param adata: Arbitrary string data to add to the authentication
                  (i.e. HMAC). The same data must be provided during
                  decryption.
    :param add_header: Additional items to be added to the header. Additional
                       headers *will* be authenticated.
    :param alg: The algorithm to use for CEK encryption
    :param enc: The algorithm to use for claims encryption
    :param rng: Random number generator. A string of random bytes is expected
                as output.
    :param compression: The compression algorithm to use. Currently supports
                        `'DEF'`.
    :rtype: :class:`~jose.JWE`
    :raises: :class:`~jose.Error` if there is an error producing the JWE
    """
    # copy so the injected claim doesn't mutate the input claims
    # this is a temporary hack to allow for graceful deprecation of tokens,
    # ensuring that the library can still handle decrypting tokens issued
    # before the implementation of the fix
    claims = deepcopy(claims)
    assert _TEMP_VER_KEY not in claims
    claims[_TEMP_VER_KEY] = _TEMP_VER

    header = dict((add_header or {}).items() + [
        (HEADER_ENC, enc), (HEADER_ALG, alg)])

    # promote the temp key to the header
    assert _TEMP_VER_KEY not in header
    header[_TEMP_VER_KEY] = claims[_TEMP_VER_KEY]

    plaintext = json_encode(claims)

    # compress (if required)
    if compression is not None:
        header[HEADER_ZIP] = compression
        try:
            (compress, _) = COMPRESSION[compression]
        except __HOLE__:
            raise Error(
                'Unsupported compression algorithm: {}'.format(compression))
        plaintext = compress(plaintext)

    # body encryption/hash
    ((cipher, _), key_size), ((hash_fn, _), hash_mod) = JWA[enc]
    iv = rng(AES.block_size)
    encryption_key = rng(hash_mod.digest_size)

    ciphertext = cipher(plaintext, encryption_key[-hash_mod.digest_size/2:], iv)
    hash = hash_fn(_jwe_hash_str(ciphertext, iv, adata),
                   encryption_key[:-hash_mod.digest_size/2], hash_mod)

    # cek encryption
    (cipher, _), _ = JWA[alg]
    encryption_key_ciphertext = cipher(encryption_key, jwk)

    return JWE(*map(b64encode_url,
                    (json_encode(header),
                     encryption_key_ciphertext,
                     iv,
                     ciphertext,
                     auth_tag(hash))))
KeyError
dataset/ETHPy150Open Demonware/jose/jose.py/encrypt
def spec_compliant_encrypt(claims, jwk, add_header=None, alg='RSA-OAEP',
                           enc='A128CBC-HS256', rng=get_random_bytes):
    """ Encrypts the given claims and produces a :class:`~jose.JWE`

    :param claims: A `dict` representing the claims for this
                   :class:`~jose.JWE`.
    :param jwk: A `dict` representing the JWK to be used for encryption of
                the CEK. This parameter is algorithm-specific.
    :param add_header: Additional items to be added to the header. Additional
                       headers *will* be authenticated.
    :param alg: The algorithm to use for CEK encryption
    :param enc: The algorithm to use for claims encryption
    :param rng: Random number generator. A string of random bytes is expected
                as output.
    :param compression: The compression algorithm to use. Currently supports
                        `'DEF'`.
    :rtype: :class:`~jose.JWE`
    :raises: :class:`~jose.Error` if there is an error producing the JWE
    """
    # We need 5 components for JWE token

    # 1. Generate header
    header = dict((add_header or {}).items() +
                  [(HEADER_ENC, enc), (HEADER_ALG, alg)])
    protected_header = json_encode(header)

    # 2. Generate CEK
    mac_key, enc_key = _generate_encryption_keys(enc, rng)
    encrypted_key = _encrypt_key(mac_key + enc_key, jwk, alg)

    # 3. Generate Initialization Vector
    iv = _generate_iv(enc, rng)

    # 4. Generate payload
    plaintext = json_encode(claims)
    # Compress if needed
    if HEADER_ZIP in header:
        try:
            (compression_func, _) = COMPRESSION[header[HEADER_ZIP]]
        except __HOLE__:
            raise Error(
                'Unsupported compression algorithm: {}'.format(header[HEADER_ZIP]))
        M = compression_func(plaintext)
    else:
        M = plaintext
    # Encrypt payload
    ((cipher, _), key_len), _ = JWA[enc]
    ciphertext = cipher(M, enc_key, iv)

    # 5. Generate authentication tag
    authentication_tag = _generate_authentication_tag(
        mac_key, protected_header, ciphertext, iv, enc
    )

    return JWE(
        *map(
            b64encode_url,
            (protected_header, encrypted_key, iv, ciphertext,
             authentication_tag)
        )
    )
KeyError
dataset/ETHPy150Open Demonware/jose/jose.py/spec_compliant_encrypt
def legacy_decrypt(jwe, jwk, adata='', validate_claims=True,
                   expiry_seconds=None):
    """ Decrypts a deserialized :class:`~jose.JWE`

    :param jwe: An instance of :class:`~jose.JWE`
    :param jwk: A `dict` representing the JWK required to decrypt the content
                of the :class:`~jose.JWE`.
    :param adata: Arbitrary string data used during encryption for additional
                  authentication.
    :param validate_claims: A `bool` indicating whether or not the `exp`, `iat`
                            and `nbf` claims should be validated. Defaults to
                            `True`.
    :param expiry_seconds: An `int` containing the JWT expiry in seconds, used
                           when evaluating the `iat` claim. Defaults to `None`,
                           which disables `iat` claim validation.
    :rtype: :class:`~jose.JWT`
    :raises: :class:`~jose.Expired` if the JWT has expired
    :raises: :class:`~jose.NotYetValid` if the JWT is not yet valid
    :raises: :class:`~jose.Error` if there is an error decrypting the JWE
    """
    protected_header, encrypted_key, iv, ciphertext, authentication_tag = map(
        b64decode_url, jwe)
    header = json_decode(protected_header)
    alg = header[HEADER_ALG]
    enc = header[HEADER_ENC]

    # decrypt cek
    encryption_key = _decrypt_key(encrypted_key, jwk, alg)

    # decrypt body
    ((_, decipher), _), ((hash_fn, _), mod) = JWA[enc]

    version = header.get(_TEMP_VER_KEY)
    if version:
        plaintext = decipher(ciphertext, encryption_key[-mod.digest_size/2:], iv)
        hash = hash_fn(_jwe_hash_str(ciphertext, iv, adata, version),
                       encryption_key[:-mod.digest_size/2], mod=mod)
    else:
        plaintext = decipher(ciphertext, encryption_key[:-mod.digest_size], iv)
        hash = hash_fn(_jwe_hash_str(ciphertext, iv, adata, version),
                       encryption_key[-mod.digest_size:], mod=mod)

    if not const_compare(auth_tag(hash), authentication_tag):
        raise Error('Mismatched authentication tags')

    if HEADER_ZIP in header:
        try:
            (_, decompress) = COMPRESSION[header[HEADER_ZIP]]
        except KeyError:
            raise Error('Unsupported compression algorithm: {}'.format(
                header[HEADER_ZIP]))

        plaintext = decompress(plaintext)

    claims = json_decode(plaintext)
    try:
        del claims[_TEMP_VER_KEY]
    except __HOLE__:
        # expected when decrypting legacy tokens
        pass

    _validate(claims, validate_claims, expiry_seconds)

    return JWT(header, claims)
KeyError
dataset/ETHPy150Open Demonware/jose/jose.py/legacy_decrypt
def spec_compliant_decrypt(jwe, jwk, validate_claims=True,
                           expiry_seconds=None):
    """ Decrypts a deserialized :class:`~jose.JWE`

    :param jwe: An instance of :class:`~jose.JWE`
    :param jwk: A `dict` representing the JWK required to decrypt the content
                of the :class:`~jose.JWE`.
    :param validate_claims: A `bool` indicating whether or not the `exp`, `iat`
                            and `nbf` claims should be validated. Defaults to
                            `True`.
    :param expiry_seconds: An `int` containing the JWT expiry in seconds, used
                           when evaluating the `iat` claim. Defaults to `None`,
                           which disables `iat` claim validation.
    :rtype: :class:`~jose.JWT`
    :raises: :class:`~jose.Expired` if the JWT has expired
    :raises: :class:`~jose.NotYetValid` if the JWT is not yet valid
    :raises: :class:`~jose.Error` if there is an error decrypting the JWE
    """
    protected_header, encrypted_key, iv, ciphertext, authentication_tag = map(
        b64decode_url, jwe
    )
    header = json_decode(protected_header)
    if not _verify_header(header):
        raise Error('Header is invalid')

    alg = header[HEADER_ALG]
    enc = header[HEADER_ENC]

    # decrypt cek
    encryption_key = _decrypt_key(encrypted_key, jwk, alg)
    mac_key, enc_key = _parse_encryption_keys(encryption_key, enc)

    # verify authentication tag
    expected_tag = _generate_authentication_tag(
        mac_key, json_encode(header), ciphertext, iv, enc
    )
    if not const_compare(expected_tag, authentication_tag):
        raise Error('Mismatched authentication tags')

    # decrypt body
    ((_, decipher), _), _ = JWA[enc]

    # http://tools.ietf.org/html/rfc7516#section-5.1 step 11
    M = decipher(ciphertext, enc_key, iv)

    if HEADER_ZIP in header:
        try:
            (_, decompress) = COMPRESSION[header[HEADER_ZIP]]
        except __HOLE__:
            raise Error('Unsupported compression algorithm: {}'.format(
                header[HEADER_ZIP]))

        plaintext = decompress(M)
    else:
        plaintext = M

    claims = json_decode(plaintext)
    _validate(claims, validate_claims, expiry_seconds)

    return JWT(header, claims)
KeyError
dataset/ETHPy150Open Demonware/jose/jose.py/spec_compliant_decrypt
def decrypt(*args, **kwargs):
    """ Decrypts legacy or spec-compliant JOSE token.

    First attempts to decrypt the token in a legacy mode
    (https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-19).
    If it is not a valid legacy token then attempts to decrypt it in a
    spec-compliant way (http://tools.ietf.org/html/rfc7519)
    """
    try:
        return legacy_decrypt(*args, **kwargs)
    except (Error, __HOLE__) as e:
        return spec_compliant_decrypt(*args, **kwargs)
ValueError
dataset/ETHPy150Open Demonware/jose/jose.py/decrypt
def b64decode_url(istr):
    """ JWT Tokens may be truncated without the usual trailing padding '='
        symbols. Compensate by padding to the nearest 4 bytes.
    """
    istr = encode_safe(istr)
    try:
        return urlsafe_b64decode(istr + '=' * (4 - (len(istr) % 4)))
    except __HOLE__ as e:
        raise Error('Unable to decode base64: %s' % (e))
TypeError
dataset/ETHPy150Open Demonware/jose/jose.py/b64decode_url
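The padding arithmetic in the record can be checked in isolation: urlsafe_b64decode needs input whose length is a multiple of 4, and '=' * (4 - (len(istr) % 4)) restores stripped padding. (When the length is already a multiple of 4 this appends four '=', which the decoder tolerates in its default non-strict mode.)

from base64 import urlsafe_b64decode

def b64decode_url_demo(istr):
    # re-pad to the nearest multiple of 4, as in the record above
    return urlsafe_b64decode(istr + '=' * (4 - (len(istr) % 4)))

print(b64decode_url_demo('aGVsbG8'))  # b'hello' -- token arrived unpadded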
def encode_safe(istr, encoding='utf8'):
    try:
        return istr.encode(encoding)
    except __HOLE__:
        # this will fail if istr is already encoded
        pass
    return istr
UnicodeDecodeError
dataset/ETHPy150Open Demonware/jose/jose.py/encode_safe
def decrypt_oaep(ciphertext, jwk):
    try:
        return PKCS1_OAEP.new(RSA.importKey(jwk['k'])).decrypt(ciphertext)
    except __HOLE__ as e:
        raise Error(e.args[0])
ValueError
dataset/ETHPy150Open Demonware/jose/jose.py/decrypt_oaep
def _compound_from_key(self, key):
        try:
            enc, hash = key.split('+')
            return enc, hash
        except ValueError:
            pass

        try:
            enc, hash = key.split('-')
            return enc, hash
        except __HOLE__:
            pass

        raise Error('Unsupported algorithm: {}'.format(key))
ValueError
dataset/ETHPy150Open Demonware/jose/jose.py/_JWA._compound_from_key
def _validate(claims, validate_claims, expiry_seconds):
    """ Validate expiry related claims.

    If validate_claims is False, do nothing.

    Otherwise, validate the exp and nbf claims if they are present, and
    validate the iat claim if expiry_seconds is provided.
    """
    if not validate_claims:
        return

    now = time()

    # TODO: implement support for clock skew

    # The exp (expiration time) claim identifies the expiration time on or
    # after which the JWT MUST NOT be accepted for processing. The
    # processing of the exp claim requires that the current date/time MUST
    # be before the expiration date/time listed in the exp claim.
    try:
        expiration_time = claims[CLAIM_EXPIRATION_TIME]
    except KeyError:
        pass
    else:
        _check_expiration_time(now, expiration_time)

    # The iat (issued at) claim identifies the time at which the JWT was
    # issued. This claim can be used to determine the age of the JWT.
    # If expiry_seconds is provided, and the iat claims is present,
    # determine the age of the token and check if it has expired.
    try:
        issued_at = claims[CLAIM_ISSUED_AT]
    except __HOLE__:
        pass
    else:
        if expiry_seconds is not None:
            _check_expiration_time(now, issued_at + expiry_seconds)

    # The nbf (not before) claim identifies the time before which the JWT
    # MUST NOT be accepted for processing. The processing of the nbf claim
    # requires that the current date/time MUST be after or equal to the
    # not-before date/time listed in the nbf claim.
    try:
        not_before = claims[CLAIM_NOT_BEFORE]
    except KeyError:
        pass
    else:
        _check_not_before(now, not_before)
KeyError
dataset/ETHPy150Open Demonware/jose/jose.py/_validate
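The validation above leans on the try/except KeyError/else idiom so absent claims are skipped while present ones are checked; the same pattern in miniature (claim values are hypothetical):

from time import time

claims = {'exp': time() - 60}  # an already-expired token

try:
    expiration_time = claims['exp']
except KeyError:
    pass  # claim absent: nothing to validate
else:
    if time() > expiration_time:
        print('token expired')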
def execute_from_command_line():
    try:
        cmd = sys.argv[1]
    except __HOLE__:
        cmd = "help"

    try:
        subcmd = sys.argv[2:]
    except:
        print "error"

    if cmd == "help":
        sys.stdout.write(help_text())
    else:
        exe = get_command(cmd)
        if exe:
            exe.execute(subcmd)
IndexError
dataset/ETHPy150Open CelloCello/flask-go/flask_go/management.py/execute_from_command_line
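The masked IndexError above fires when the CLI is invoked without a subcommand, so sys.argv[1] is out of range; a direct illustration with a simulated argv:

argv = ['flask-go']  # simulated sys.argv with no subcommand given
try:
    cmd = argv[1]
except IndexError:
    cmd = 'help'
print(cmd)  # help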
def consume(self):
        try:
            if self.data[self.p] == 10:  # \n
                self.line += 1
                self.charPositionInLine = 0
            else:
                self.charPositionInLine += 1

            self.p += 1

        except __HOLE__:
            # happened when we reached EOF and self.data[self.p] fails
            # just do nothing
            pass
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/ANTLRStringStream.consume
def LA(self, i):
        if i == 0:
            return 0  # undefined

        if i < 0:
            i += 1  # e.g., translate LA(-1) to use offset i=0; then data[p+0-1]

        try:
            return self.data[self.p+i-1]
        except __HOLE__:
            return EOF
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/ANTLRStringStream.LA
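LA() converts an out-of-range lookahead into an EOF sentinel instead of letting IndexError escape; the same pattern in miniature (EOF chosen as -1 to match ANTLR's convention):

EOF = -1

def lookahead(data, p, i):
    try:
        return data[p + i - 1]
    except IndexError:
        return EOF

data = [104, 105]             # character codes in the stream
print(lookahead(data, 0, 1))  # 104
print(lookahead(data, 0, 5))  # -1 (EOF)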
def LT(self, i):
        if i == 0:
            return 0  # undefined

        if i < 0:
            i += 1  # e.g., translate LA(-1) to use offset i=0; then data[p+0-1]

        try:
            return self.strdata[self.p+i-1]
        except __HOLE__:
            return EOF
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/ANTLRStringStream.LT
def mark(self):
        state = (self.p, self.line, self.charPositionInLine)
        try:
            self._markers[self.markDepth] = state
        except __HOLE__:
            self._markers.append(state)
        self.markDepth += 1

        self.lastMarker = self.markDepth

        return self.lastMarker
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/ANTLRStringStream.mark
def fillBuffer(self):
        """
        Load all tokens from the token source and put in tokens.
        This is done upon first LT request because you might want to
        set some token type / channel overrides before filling buffer.
        """

        index = 0
        t = self.tokenSource.nextToken()
        while t is not None and t.type != EOF:
            discard = False

            if self.discardSet is not None and t.type in self.discardSet:
                discard = True

            elif self.discardOffChannelTokens and t.channel != self.channel:
                discard = True

            # is there a channel override for token type?
            try:
                overrideChannel = self.channelOverrideMap[t.type]
            except __HOLE__:
                # no override for this type
                pass
            else:
                if overrideChannel == self.channel:
                    t.channel = overrideChannel
                else:
                    discard = True

            if not discard:
                t.index = index
                self.tokens.append(t)
                index += 1

            t = self.tokenSource.nextToken()

        # leave p pointing at first token on channel
        self.p = 0
        self.p = self.skipOffTokenChannels(self.p)
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/CommonTokenStream.fillBuffer
def skipOffTokenChannels(self, i):
        """
        Given a starting index, return the index of the first on-channel
        token.
        """

        try:
            while self.tokens[i].channel != self.channel:
                i += 1
        except __HOLE__:
            # hit the end of token stream
            pass

        return i
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/CommonTokenStream.skipOffTokenChannels
def LT(self, k):
        """
        Get the ith token from the current position 1..n where k=1 is the
        first symbol of lookahead.
        """

        if self.p == -1:
            self.fillBuffer()

        if k == 0:
            return None

        if k < 0:
            return self.LB(-k)

        i = self.p
        n = 1
        # find k good tokens
        while n < k:
            # skip off-channel tokens
            i = self.skipOffTokenChannels(i+1)  # leave p on valid token
            n += 1

        try:
            return self.tokens[i]
        except __HOLE__:
            return EOF_TOKEN
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/CommonTokenStream.LT
def toString(self, *args):
        if len(args) == 0:
            programName = self.DEFAULT_PROGRAM_NAME
            start = self.MIN_TOKEN_INDEX
            end = self.size() - 1

        elif len(args) == 1:
            programName = args[0]
            start = self.MIN_TOKEN_INDEX
            end = self.size() - 1

        elif len(args) == 2:
            programName = self.DEFAULT_PROGRAM_NAME
            start = args[0]
            end = args[1]

        if start is None:
            start = self.MIN_TOKEN_INDEX
        elif not isinstance(start, int):
            start = start.index

        if end is None:
            end = len(self.tokens) - 1
        elif not isinstance(end, int):
            end = end.index

        # ensure start/end are in range
        if end >= len(self.tokens):
            end = len(self.tokens) - 1

        if start < 0:
            start = 0

        rewrites = self.programs.get(programName)
        if rewrites is None or len(rewrites) == 0:
            # no instructions to execute
            return self.toOriginalString(start, end)

        buf = StringIO()

        # First, optimize instruction stream
        indexToOp = self.reduceToSingleOperationPerIndex(rewrites)

        # Walk buffer, executing instructions and emitting tokens
        i = start
        while i <= end and i < len(self.tokens):
            op = indexToOp.get(i)

            # remove so any left have index size-1
            try:
                del indexToOp[i]
            except __HOLE__:
                pass

            t = self.tokens[i]

            if op is None:
                # no operation at that index, just dump token
                buf.write(t.text)
                i += 1  # move to next token

            else:
                i = op.execute(buf)  # execute operation and skip

        # include stuff after end if it's last index in buffer
        # So, if they did an insertAfter(lastValidIndex, "foo"), include
        # foo if end==lastValidIndex.
        if end == len(self.tokens) - 1:
            # Scan any remaining operations after last token
            # should be included (they will be inserts).
            for i in sorted(indexToOp.keys()):
                op = indexToOp[i]
                if op.index >= len(self.tokens)-1:
                    buf.write(op.text)

        return buf.getvalue()
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/antlr3/antlr3/streams.py/TokenRewriteStream.toString