Columns (each record below repeats these three fields, one per line):
function: string, 79 to 138k characters. Python source in which one exception type is masked as __HOLE__.
label: string, 20 classes. The exception type that fills the __HOLE__ mask.
info: string, 42 to 261 characters. A dataset/ETHPy150Open path naming the source repository, file, and function in the ETH Py150 Open corpus.
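Because each record is serialized as three consecutive lines in the column order above, a dump in this shape can be parsed with a short script. The following is a minimal sketch under that assumption; the file name is a hypothetical placeholder, not part of the dataset:

# Minimal parsing sketch: group non-empty lines into
# (function, label, info) triples, one field per line.
# "holes_dump.txt" is a hypothetical placeholder file name.

def iter_records(path="holes_dump.txt"):
    with open(path, encoding="utf-8") as fh:
        lines = [ln.rstrip("\n") for ln in fh if ln.strip()]
    # A trailing incomplete record (fewer than 3 lines) is skipped.
    for i in range(0, len(lines) - len(lines) % 3, 3):
        yield {"function": lines[i], "label": lines[i + 1], "info": lines[i + 2]}

if __name__ == "__main__":
    for rec in iter_records():
        # Every function field contains the __HOLE__ mask.
        assert "__HOLE__" in rec["function"]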
def _run_validators(self, value): """ Execute all associated validators. """ errors = [] for v in self.validators: try: v(value) except __HOLE__, e: errors.extend(e.messages) if errors: raise ValidationError(errors)
ValidationError
dataset/ETHPy150Open wuher/devil/devil/fields/fields.py/NestedField._run_validators
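The label line is the literal token that fills the __HOLE__ mask in the function line above it, so substituting it back reconstructs the original except clause. A short sketch using the first record; the helper is illustrative, not dataset tooling:

# Hypothetical helper: rebuild the original source from a record.
def fill_hole(function_src, label):
    return function_src.replace("__HOLE__", label)

masked = "for v in self.validators: try: v(value) except __HOLE__, e: ..."
print(fill_hole(masked, "ValidationError"))
# -> "... except ValidationError, e: ..." (Python 2 except syntax, as in the corpus)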
@classmethod def setupClass(cls): global np try: import numpy as np import scipy except __HOLE__: raise SkipTest('SciPy not available.')
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py/TestEigenvectorCentrality.setupClass
@classmethod def setupClass(cls): global np try: import numpy as np import scipy except __HOLE__: raise SkipTest('SciPy not available.')
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py/TestEigenvectorCentralityDirected.setupClass
@classmethod def setupClass(cls): global np try: import numpy as np import scipy except __HOLE__: raise SkipTest('SciPy not available.')
ImportError
dataset/ETHPy150Open networkx/networkx/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py/TestEigenvectorCentralityExceptions.setupClass
def _run_tests(self): secrets = pjoin(self._dir, 'test', 'secrets.py') if not os.path.isfile(secrets): print("Missing " + secrets) print("Maybe you forgot to copy it from -dist:") print(" cp test/secrets.py-dist test/secrets.py") sys.exit(1) pre_python26 = (sys.version_info[0] == 2 and sys.version_info[1] < 6) if pre_python26: missing = [] # test for dependencies try: import simplejson simplejson # silence pyflakes except __HOLE__: missing.append("simplejson") try: import ssl ssl # silence pyflakes except ImportError: missing.append("ssl") if missing: print("Missing dependencies: " + ", ".join(missing)) sys.exit(1) testfiles = [] for test_path in TEST_PATHS: for t in glob(pjoin(self._dir, test_path, 'test_*.py')): testfiles.append('.'.join( [test_path.replace('/', '.'), splitext(basename(t))[0]])) tests = TestLoader().loadTestsFromNames(testfiles) t = TextTestRunner(verbosity = 2) res = t.run(tests) return not res.wasSuccessful()
ImportError
dataset/ETHPy150Open racker/rackspace-monitoring/setup.py/TestCommand._run_tests
def run(self): try: import pep8 pep8 except __HOLE__: print ('Missing "pep8" library. You can install it using pip: ' 'pip install pep8') sys.exit(1) cwd = os.getcwd() retcode = call(('pep8 %s/rackspace_monitoring/ %s/test/' % (cwd, cwd)).split(' ')) sys.exit(retcode)
ImportError
dataset/ETHPy150Open racker/rackspace-monitoring/setup.py/Pep8Command.run
def get_auth_from_conf(here): transifex_conf = os.path.join(here, '.transifex.ini') config = ConfigParser() try: with open(transifex_conf, 'r') as conf: config.readfp(conf) except __HOLE__ as ex: sys.exit('Failed to load authentication configuration file.\n' '{0}'.format(ex)) try: username = config.get('auth', 'username') password = config.get('auth', 'password') except (NoOptionError, NoSectionError) as ex: sys.exit('Oops. Incomplete configuration file: {0}'.format(ex)) return username, password
IOError
dataset/ETHPy150Open mblayman/tappy/transifex.py/get_auth_from_conf
def GetGeneratorByLanguage(language_or_generator): """Return the appropriate generator for this language. Args: language_or_generator: (str) the language for which to return a generator, or the name of a specific generator. Raises: ValueError: If provided language isn't supported. Returns: The appropriate code generator object (which may be None). """ try: return _ALL_GENERATORS[language_or_generator] except __HOLE__: raise ValueError('Unsupported language: %s' % language_or_generator)
KeyError
dataset/ETHPy150Open google/apis-client-generator/src/googleapis/codegen/generator_lookup.py/GetGeneratorByLanguage
def parse(self, fileData=None, context=None): self.reset() if context is not None: self.context = context if fileData is not None: self.filedata = fileData for index, line in enumerate(self.filedata): line = line.strip() # Use 1-indexed line numbers that users will recognize index = index + 1 if line.startswith('#') or not line: # empty lines are handled just like comments, and empty # lines and comments are always looked up in the # unmodified filedata, so we store only the index self.appendNoOpByText(line, modified=False, index=index, context=self.context) continue # non-empty, non-comment lines must be parsed try: verb, nouns = line.split(None, 1) except: raise CMError('%s: Invalid statement "%s"' %(CMLocation(index, self.context), line)) if verb == VersionOperation.key: nouns = nouns.split('#')[0].strip() self.setVersion(VersionOperation(text=nouns, modified=False, index=index, context=self.context)) elif verb == SearchOperation.key: # Handle it if quoted, but it doesn't need to be try: nouns = ' '.join(shlex.split(nouns, comments=True)) except ValueError, e: raise CMError('%s: %s' %( CMLocation(index, self.context), str(e))) try: searchOp = SearchLabel(text=nouns, modified=False, index=index, context=self.context) except errors.ParseError: try: searchOp = SearchTrove(text=nouns, modified=False, index=index, context=self.context) except cmdline.TroveSpecError, e: raise CMError('%s: %s' %( CMLocation(index, self.context), str(e))) self.appendOp(searchOp) elif verb == IncludeOperation.key: # Handle it if quoted, but it doesn't need to be try: nouns = ' '.join(shlex.split(nouns, comments=True)) except ValueError, e: raise CMError('%s: %s' %( CMLocation(index, self.context), str(e))) try: includeOp = IncludeOperation(text=nouns, modified=False, index=index, context=self.context) except cmdline.TroveSpecError, e: raise CMError('%s: %s' %( CMLocation(index, self.context), str(e))) self.appendOp(includeOp) elif verb in opMap: try: self.appendOpByName(verb, text=shlex.split(nouns, comments=True), modified=False, index=index, context=self.context) except __HOLE__, e: raise CMError('%s: %s' %( CMLocation(index, self.context), str(e))) else: raise CMError( '%s: Unrecognized command "%s"' %( CMLocation(index, self.context), verb))
ValueError
dataset/ETHPy150Open sassoftware/conary/conary/conaryclient/cml.py/CML.parse
def run(self): # verify that given states are subset of allowed states unknown_states = set(self.send_on) - self.allowed_states if len(unknown_states) > 0: raise PluginFailedException('Unknown state(s) "%s" for sendmail plugin' % '", "'.join(sorted(unknown_states))) rebuild = is_rebuild(self.workflow) success = not self.workflow.build_failed canceled = self.workflow.autorebuild_canceled self.log.info('checking conditions for sending notification ...') if self._should_send(rebuild, success, canceled): self.log.info('notification about build result will be sent') subject, body = self._render_mail(rebuild, success, canceled) try: self.log.debug('getting list of receivers for this component ...') receivers = self._get_receivers_list() except __HOLE__ as e: self.log.error('couldn\'t get list of receivers, sending error message ...') # TODO: maybe improve the error message/subject body = '\n'.join([ 'Failed to get contact for %s, error: %s' % (str(self.workflow.image), str(e)), 'Since your address is in "error_addresses", this email was sent to you to ' 'take action on this.', 'Wanted to send following mail:', '', body ]) receivers = self.error_addresses self.log.info('sending notification to %s ...', receivers) self._send_mail(receivers, subject, body) else: self.log.info('conditions for sending notification not met, doing nothing')
RuntimeError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/plugins/exit_sendmail.py/SendMailPlugin.run
def __init__(self, parent, menu, item, controller): """ Creates a new menu item for an action item. """ self.item = item action = item.action # FIXME v3: This is a wx'ism and should be hidden in the toolkit code. self.control_id = None if action.image is None: self.control = menu.addAction(action.name, self._qt4_on_triggered, action.accelerator) else: self.control = menu.addAction(action.image.create_icon(), action.name, self._qt4_on_triggered, action.accelerator) menu.menu_items.append(self) self.control.setToolTip(action.tooltip) self.control.setWhatsThis(action.description) self.control.setEnabled(action.enabled) self.control.setVisible(action.visible) if getattr(action, 'menu_role', False): if action.menu_role == "About": self.control.setMenuRole(QtGui.QAction.AboutRole) elif action.menu_role == "Preferences": self.control.setMenuRole(QtGui.QAction.PreferencesRole) if action.style == 'toggle': self.control.setCheckable(True) self.control.setChecked(action.checked) elif action.style == 'radio': # Create an action group if it hasn't already been done. try: ag = item.parent._qt4_ag except __HOLE__: ag = item.parent._qt4_ag = QtGui.QActionGroup(parent) self.control.setActionGroup(ag) self.control.setCheckable(True) self.control.setChecked(action.checked) # Listen for trait changes on the action (so that we can update its # enabled/disabled/checked state etc). action.on_trait_change(self._on_action_enabled_changed, 'enabled') action.on_trait_change(self._on_action_visible_changed, 'visible') action.on_trait_change(self._on_action_checked_changed, 'checked') action.on_trait_change(self._on_action_name_changed, 'name') action.on_trait_change(self._on_action_accelerator_changed, 'accelerator') # Detect if the control is destroyed. self.control.destroyed.connect(self._qt4_on_destroyed) if controller is not None: self.controller = controller controller.add_to_menu(self)
AttributeError
dataset/ETHPy150Open enthought/pyface/pyface/ui/qt4/action/action_item.py/_MenuItem.__init__
def __init__(self, parent, tool_bar, image_cache, item, controller, show_labels): """ Creates a new tool bar tool for an action item. """ self.item = item self.tool_bar = tool_bar action = item.action # FIXME v3: This is a wx'ism and should be hidden in the toolkit code. self.control_id = None if action.image is None: self.control = tool_bar.addAction(action.name) else: size = tool_bar.iconSize() image = action.image.create_icon((size.width(), size.height())) self.control = tool_bar.addAction(image, action.name) QtCore.QObject.connect(self.control, QtCore.SIGNAL('triggered()'), self._qt4_on_triggered) self.control.setToolTip(action.tooltip) self.control.setWhatsThis(action.description) self.control.setEnabled(action.enabled) self.control.setVisible(action.visible) if action.style == 'toggle': self.control.setCheckable(True) self.control.setChecked(action.checked) elif action.style == 'radio': # Create an action group if it hasn't already been done. try: ag = item.parent._qt4_ag except __HOLE__: ag = item.parent._qt4_ag = QtGui.QActionGroup(parent) self.control.setActionGroup(ag) self.control.setCheckable(True) self.control.setChecked(action.checked) # Keep a reference in the action. This is done to make sure we live as # long as the action (and still respond to its signals) and don't die # if the manager that created us is garbage collected. self.control._tool_instance = self # Listen for trait changes on the action (so that we can update its # enabled/disabled/checked state etc). action.on_trait_change(self._on_action_enabled_changed, 'enabled') action.on_trait_change(self._on_action_visible_changed, 'visible') action.on_trait_change(self._on_action_checked_changed, 'checked') action.on_trait_change(self._on_action_name_changed, 'name') action.on_trait_change(self._on_action_accelerator_changed, 'accelerator') # Detect if the control is destroyed. self.control.destroyed.connect(self._qt4_on_destroyed) if controller is not None: self.controller = controller controller.add_to_toolbar(self) ########################################################################### # Private interface. ###########################################################################
AttributeError
dataset/ETHPy150Open enthought/pyface/pyface/ui/qt4/action/action_item.py/_Tool.__init__
@testhelp.context('trove-filter') def testBadTroveFilters(self): recipe = self.getRecipe() filt = trovefilter.AbstractFilter() self.assertRaises(NotImplementedError, filt.match) try: filt = trovefilter.TroveFilter(recipe, 'foo(') except __HOLE__, e: self.assertEquals(str(e), "Bad Regexp: 'foo(' for name") else: self.fail("Expected RuntimeError") nvf = cmdline.parseTroveSpec('foo=/test.rpath.local@rpl:devel') filt = trovefilter.TroveFilter(recipe, 'foo') self.assertEquals(filt.match((nvf,)), True) filt.compile() filt.versionType = True filt.version = 'foo' self.assertEquals(filt.match((nvf,)), False)
RuntimeError
dataset/ETHPy150Open sassoftware/conary/conary_test/cvctest/buildtest/expansiontest.py/TroveFilterTest.testBadTroveFilters
def take_action(self, parsed_args): compute_client = self.app.client_manager.compute volume_client = self.app.client_manager.volume # Lookup parsed_args.image image = None if parsed_args.image: image = utils.find_resource( compute_client.images, parsed_args.image, ) # Lookup parsed_args.volume volume = None if parsed_args.volume: volume = utils.find_resource( volume_client.volumes, parsed_args.volume, ).id # Lookup parsed_args.flavor flavor = utils.find_resource(compute_client.flavors, parsed_args.flavor) boot_args = [parsed_args.server_name, image, flavor] files = {} for f in parsed_args.file: dst, src = f.split('=', 1) try: files[dst] = io.open(src, 'rb') except __HOLE__ as e: raise exceptions.CommandError("Can't open '%s': %s" % (src, e)) if parsed_args.min > parsed_args.max: msg = _("min instances should be <= max instances") raise exceptions.CommandError(msg) if parsed_args.min < 1: msg = _("min instances should be > 0") raise exceptions.CommandError(msg) if parsed_args.max < 1: msg = _("max instances should be > 0") raise exceptions.CommandError(msg) userdata = None if parsed_args.user_data: try: userdata = io.open(parsed_args.user_data) except IOError as e: msg = "Can't open '%s': %s" raise exceptions.CommandError(msg % (parsed_args.user_data, e)) block_device_mapping = {} if volume: # When booting from volume, for now assume no other mappings # This device value is likely KVM-specific block_device_mapping = {'vda': volume} else: for dev_map in parsed_args.block_device_mapping: dev_key, dev_vol = dev_map.split('=', 1) block_volume = None if dev_vol: vol = dev_vol.split(':', 1)[0] if vol: vol_id = utils.find_resource( volume_client.volumes, vol, ).id block_volume = dev_vol.replace(vol, vol_id) else: msg = _("Volume name or ID must be specified if " "--block-device-mapping is specified") raise exceptions.CommandError(msg) block_device_mapping.update({dev_key: block_volume}) nics = [] for nic_str in parsed_args.nic: nic_info = {"net-id": "", "v4-fixed-ip": "", "v6-fixed-ip": "", "port-id": ""} nic_info.update(dict(kv_str.split("=", 1) for kv_str in nic_str.split(","))) if bool(nic_info["net-id"]) == bool(nic_info["port-id"]): msg = _("either net-id or port-id should be specified " "but not both") raise exceptions.CommandError(msg) if self.app.client_manager.is_network_endpoint_enabled(): network_client = self.app.client_manager.network if nic_info["net-id"]: net = network_client.find_network( nic_info["net-id"], ignore_missing=False) nic_info["net-id"] = net.id if nic_info["port-id"]: port = network_client.find_port( nic_info["port-id"], ignore_missing=False) nic_info["port-id"] = port.id else: if nic_info["net-id"]: nic_info["net-id"] = utils.find_resource( compute_client.networks, nic_info["net-id"] ).id if nic_info["port-id"]: msg = _("can't create server with port specified " "since network endpoint not enabled") raise exceptions.CommandError(msg) nics.append(nic_info) hints = {} for hint in parsed_args.hint: key, _sep, value = hint.partition('=') # NOTE(vish): multiple copies of the same hint will # result in a list of values if key in hints: if isinstance(hints[key], six.string_types): hints[key] = [hints[key]] hints[key] += [value] else: hints[key] = value # What does a non-boolean value for config-drive do? # --config-drive argument is either a volume id or # 'True' (or '1') to use an ephemeral volume if str(parsed_args.config_drive).lower() in ("true", "1"): config_drive = True elif str(parsed_args.config_drive).lower() in ("false", "0", "", "none"): config_drive = None else: config_drive = parsed_args.config_drive boot_kwargs = dict( meta=parsed_args.property, files=files, reservation_id=None, min_count=parsed_args.min, max_count=parsed_args.max, security_groups=parsed_args.security_group, userdata=userdata, key_name=parsed_args.key_name, availability_zone=parsed_args.availability_zone, block_device_mapping=block_device_mapping, nics=nics, scheduler_hints=hints, config_drive=config_drive) self.log.debug('boot_args: %s', boot_args) self.log.debug('boot_kwargs: %s', boot_kwargs) # Wrap the call to catch exceptions in order to close files try: server = compute_client.servers.create(*boot_args, **boot_kwargs) finally: # Clean up open files - make sure they are not strings for f in files: if hasattr(f, 'close'): f.close() if hasattr(userdata, 'close'): userdata.close() if parsed_args.wait: if utils.wait_for_status( compute_client.servers.get, server.id, callback=_show_progress, ): sys.stdout.write('\n') else: self.log.error(_('Error creating server: %s'), parsed_args.server_name) sys.stdout.write(_('\nError creating server')) raise SystemExit details = _prep_server_detail(compute_client, server) return zip(*sorted(six.iteritems(details)))
IOError
dataset/ETHPy150Open dtroyer/python-openstackclient/openstackclient/compute/v2/server.py/CreateServer.take_action
def load_state(name): global users, pending_users users = set() pending_users = set() try: loaded = yaml.load(open(name)) users = set(loaded['users']) pending_users = set(loaded['pending_users']) except __HOLE__: logger.warning('Could not load state, continuing.')
IOError
dataset/ETHPy150Open ebroder/anygit/anygit/client/spider.py/load_state
def github_com_spider(): state_file ='state.yml' load_state(state_file) if not pending_users: user = raw_input('Please enter in a GitHub user to bootstrap from: ').strip() pending_users.add(user) setup_proxies() while True: try: user = pending_users.pop() except __HOLE__: break users.add(user) repos = get_repos(user) logger.info('Beginning spider for %s with %d pending users (%s). Found %d repos' % (user, len(pending_users), pending_users, len(repos))) for repo in repos: url = 'git://%s.git' % repo[':url'].strip('http://') create(url=url) for new_user in get_collaborators(user, repo[':name']): # Don't repeat people record_user(new_user) dump_state(state_file) logger.info('All done.') # git.kernel.org spider
KeyError
dataset/ETHPy150Open ebroder/anygit/anygit/client/spider.py/github_com_spider
def get_svn_revision(path=None): """ Returns the SVN revision in the form SVN-XXXX, where XXXX is the revision number. Returns SVN-unknown if anything goes wrong, such as an unexpected format of internal SVN files. If path is provided, it should be a directory whose SVN info you want to inspect. If it's not provided, this will use the root django/ package directory. """ rev = None if path is None: path = django.__path__[0] entries_path = '%s/.svn/entries' % path try: entries = open(entries_path, 'r').read() except __HOLE__: pass else: # Versions >= 7 of the entries file are flat text. The first line is # the version number. The next set of digits after 'dir' is the revision. if re.match('(\d+)', entries): rev_match = re.search('\d+\s+dir\s+(\d+)', entries) if rev_match: rev = rev_match.groups()[0] # Older XML versions of the file specify revision as an attribute of # the first entries node. else: from xml.dom import minidom dom = minidom.parse(entries_path) rev = dom.getElementsByTagName('entry')[0].getAttribute('revision') if rev: return u'SVN-%s' % rev return u'SVN-unknown'
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.4/django/utils/version.py/get_svn_revision
def _from_json(self, datastring): try: return jsonutils.loads(datastring) except __HOLE__: msg = _("Cannot understand JSON") raise exception.MalformedResponseBody(reason=msg)
ValueError
dataset/ETHPy150Open openstack/python-neutronclient/neutronclient/common/serializer.py/JSONDeserializer._from_json
def prep_arg(arg): try: return float(arg) except __HOLE__: try: return int(arg) except ValueError: return arg
ValueError
dataset/ETHPy150Open ejeschke/ginga/ginga/util/grc.py/prep_arg
def isfloat(x): """ Check if argument is float """ try: a = float(x) except __HOLE__: return False else: return True
ValueError
dataset/ETHPy150Open tyiannak/pyAudioAnalysis/utilities.py/isfloat
def isint(x): """ Check if argument is int """ try: a = float(x) b = int(a) except __HOLE__: return False else: return a == b
ValueError
dataset/ETHPy150Open tyiannak/pyAudioAnalysis/utilities.py/isint
@classmethod def http(cls, status_code, msg="", err_list=None, headers=None): """Raise an HTTP status code. Useful for returning status codes like 401 Unauthorized or 403 Forbidden. :param status_code: the HTTP status code as an integer :param msg: the message to send along, as a string :param err_list: list of fields with errors :param headers: the headers to send along, as a dictionary """ class _nocontent(web.HTTPError): message = 'No Content' def __init__(self): super(_nocontent, self).__init__( status='204 No Content', data=self.message ) exc_status_map = { 200: web.ok, 201: web.created, 202: web.accepted, 204: _nocontent, 301: web.redirect, 302: web.found, 400: web.badrequest, 401: web.unauthorized, 403: web.forbidden, 404: web.notfound, 405: web.nomethod, 406: web.notacceptable, 409: web.conflict, 410: web.gone, 415: web.unsupportedmediatype, 500: web.internalerror, } # web.py has a poor exception design: some of them receive # the `message` argument and some of them not. the only # solution to set custom message is to assign message directly # to the `data` attribute. though, that won't work for # the `internalerror` because it tries to do magic with # application context without explicit `message` argument. try: exc = exc_status_map[status_code](message=msg) except __HOLE__: exc = exc_status_map[status_code]() exc.data = msg exc.err_list = err_list or [] exc.status_code = status_code headers = headers or {} for key, value in headers.items(): web.header(key, value) return exc
TypeError
dataset/ETHPy150Open openstack/fuel-web/nailgun/nailgun/api/v1/handlers/base.py/BaseHandler.http
@content def PUT(self, cluster_id): """:returns: JSONized Task object. :http: * 202 (task successfully executed) * 400 (invalid object data specified) * 404 (environment is not found) * 409 (task with such parameters already exists) """ cluster = self.get_object_or_404( objects.Cluster, cluster_id, log_404=( u"warning", u"Error: there is no cluster " u"with id '{0}' in DB.".format(cluster_id) ) ) logger.info(self.log_message.format(env_id=cluster_id)) try: options = self.get_options() except __HOLE__ as e: raise self.http(400, six.text_type(e)) try: self.validator.validate(cluster) task_manager = self.task_manager(cluster_id=cluster.id) task = task_manager.execute(**options) except ( errors.AlreadyExists, errors.StopAlreadyRunning ) as exc: raise self.http(409, exc.message) except ( errors.DeploymentNotRunning, errors.NoDeploymentTasks, errors.WrongNodeStatus, errors.UnavailableRelease, errors.CannotBeStopped, ) as exc: raise self.http(400, exc.message) except Exception as exc: logger.error( self.log_error.format( env_id=cluster_id, error=str(exc) ) ) # let it be 500 raise self.raise_task(task)
ValueError
dataset/ETHPy150Open openstack/fuel-web/nailgun/nailgun/api/v1/handlers/base.py/DeferredTaskHandler.PUT
def _build_custom_contracts(): """ Define some custom contracts if PyContracts is found """ from contracts import new_contract @new_contract def cid_like(value): """ Value is a ComponentID or a string """ from glue.core import ComponentID return isinstance(value, (ComponentID, string_types)) @new_contract def component_like(value): from glue.core import Component, ComponentLink return isinstance(value, (Component, ComponentLink, ndarray, list, Series)) @new_contract def array_like(value): return isinstance(value, (ndarray, list)) @new_contract def color(value): """ A valid matplotlib color """ from matplotlib.colors import colorConverter try: colorConverter.to_rgba(value) except __HOLE__: return False @new_contract def inst(value, *types): return isinstance(value, types) @new_contract def data_view(value): from glue.core import ComponentID if value is None: return if isinstance(value, ComponentID): return try: if not isinstance(value[0], ComponentID): return False s_[value[1:]] except: return False @new_contract def array_view(value): try: s_[value] except: return False @new_contract def callable(value): return hasattr(value, '__call__')
ValueError
dataset/ETHPy150Open glue-viz/glue/glue/core/contracts.py/_build_custom_contracts
def evaluate(ex, out=None, local_dict=None, global_dict=None, **kwargs): """Evaluate expression and return an array.""" # First, get the signature for the arrays in expression context = getContext(kwargs) names, _ = getExprNames(ex, context) # Get the arguments based on the names. call_frame = sys._getframe(1) if local_dict is None: local_dict = call_frame.f_locals if global_dict is None: global_dict = call_frame.f_globals arguments = [] types = [] for name in names: try: a = local_dict[name] except __HOLE__: a = global_dict[name] arguments.append(a) if hasattr(a, 'atom'): types.append(a.atom) else: types.append(a) # Create a signature signature = [(name, getType(type_)) for (name, type_) in zip(names, types)] print("signature-->", signature) # Compile the expression compiled_ex = NumExpr(ex, signature, [], **kwargs) print("fullsig-->", compiled_ex.fullsig) _compute(out, compiled_ex, arguments) return
KeyError
dataset/ETHPy150Open PyTables/PyTables/bench/evaluate.py/evaluate
def __contains__(self, vertex): try: vals = [f['val'] for f in self.vertices.values()] except __HOLE__: vals = [] return vertex in self.vertices or vertex in vals
KeyError
dataset/ETHPy150Open christabor/MoAL/MOAL/data_structures/graphs/graphs.py/Graph.__contains__
def degree(self, vertex): """Return the number of edges for a given vertex. Only allows string/integer/tuple vertices.""" try: return len(self.vertices[vertex]['edges']) except __HOLE__: return 0
KeyError
dataset/ETHPy150Open christabor/MoAL/MOAL/data_structures/graphs/graphs.py/Graph.degree
def testInvalidSeek(self): """Tests that seeking fails for unsupported seek arguments.""" daisy_chain_wrapper = DaisyChainWrapper( self._dummy_url, self.test_data_file_len, self.MockDownloadCloudApi([])) try: # SEEK_CUR is invalid. daisy_chain_wrapper.seek(0, whence=os.SEEK_CUR) self.fail('Expected exception') except IOError, e: self.assertIn('does not support seek mode', str(e)) try: # Seeking from the end with an offset is invalid. daisy_chain_wrapper.seek(1, whence=os.SEEK_END) self.fail('Expected exception') except __HOLE__, e: self.assertIn('Invalid seek during daisy chain', str(e))
IOError
dataset/ETHPy150Open GoogleCloudPlatform/gsutil/gslib/tests/test_daisy_chain_wrapper.py/TestDaisyChainWrapper.testInvalidSeek
def _test_transport_connectivity(self, direction, protocol, src_port, dst_port): nc_tester = self._create_nc_tester(direction, protocol, src_port, dst_port) try: nc_tester.test_connectivity() except __HOLE__ as exc: nc_tester.stop_processes() raise ConnectionTesterException( "%s connection over %s protocol with %s source port and " "%s destination port can't be established: %s" % ( direction, protocol, src_port, dst_port, exc))
RuntimeError
dataset/ETHPy150Open openstack/neutron/neutron/tests/common/conn_testers.py/ConnectionTester._test_transport_connectivity
def _test_icmp_connectivity(self, direction, protocol, src_port, dst_port): src_namespace, ip_address = self._get_namespace_and_address(direction) ip_version = ip_lib.get_ip_version(ip_address) icmp_timeout = ICMP_VERSION_TIMEOUTS[ip_version] try: net_helpers.assert_ping(src_namespace, ip_address, timeout=icmp_timeout) except __HOLE__: raise ConnectionTesterException( "ICMP packets can't get from %s namespace to %s address" % ( src_namespace, ip_address))
RuntimeError
dataset/ETHPy150Open openstack/neutron/neutron/tests/common/conn_testers.py/ConnectionTester._test_icmp_connectivity
def _test_arp_connectivity(self, direction, protocol, src_port, dst_port): src_namespace, ip_address = self._get_namespace_and_address(direction) try: net_helpers.assert_arping(src_namespace, ip_address) except __HOLE__: raise ConnectionTesterException( "ARP queries to %s address have no response from %s namespace" % (ip_address, src_namespace))
RuntimeError
dataset/ETHPy150Open openstack/neutron/neutron/tests/common/conn_testers.py/ConnectionTester._test_arp_connectivity
@_validate_direction def assert_established_connection(self, direction, protocol, src_port=None, dst_port=None): nc_params = (direction, protocol, src_port, dst_port) nc_tester = self._nc_testers.get(nc_params) if nc_tester: if nc_tester.is_established: try: nc_tester.test_connectivity() except __HOLE__: raise ConnectionTesterException( "Established %s connection with protocol %s, source " "port %s and destination port %s can no longer " "communicate") else: nc_tester.stop_processes() raise ConnectionTesterException( '%s connection with protocol %s, source port %s and ' 'destination port %s is not established' % nc_params) else: raise ConnectionTesterException( "Attempting to test established %s connection with protocol %s" ", source port %s and destination port %s that hasn't been " "established yet by calling establish_connection()" % nc_params)
RuntimeError
dataset/ETHPy150Open openstack/neutron/neutron/tests/common/conn_testers.py/ConnectionTester.assert_established_connection
def _get_pinger(self, direction): try: pinger = self._pingers[direction] except __HOLE__: src_namespace, dst_address = self._get_namespace_and_address( direction) pinger = net_helpers.Pinger(src_namespace, dst_address) self._pingers[direction] = pinger return pinger
KeyError
dataset/ETHPy150Open openstack/neutron/neutron/tests/common/conn_testers.py/ConnectionTester._get_pinger
def _pre_render(self): # split markup, words, and lines # result: list of word with position and width/height # during the first pass, we don't care about h/valign self._cached_lines = lines = [] self._refs = {} self._anchors = {} clipped = False w = h = 0 uw, uh = self.text_size spush = self._push_style spop = self._pop_style opts = options = self.options options['_ref'] = None options['_anchor'] = None options['script'] = 'normal' shorten = options['shorten'] # if shorten, then don't split lines to fit uw, because it will be # flattened later when shortening and broken up lines if broken # mid-word will have space mid-word when lines are joined uw_temp = None if shorten else uw xpad = options['padding_x'] uhh = (None if uh is not None and options['valign'] != 'top' or options['shorten'] else uh) options['strip'] = options['strip'] or options['halign'] == 'justify' for item in self.markup: if item == '[b]': spush('bold') options['bold'] = True self.resolve_font_name() elif item == '[/b]': spop('bold') self.resolve_font_name() elif item == '[i]': spush('italic') options['italic'] = True self.resolve_font_name() elif item == '[/i]': spop('italic') self.resolve_font_name() elif item == '[u]': spush('underline') options['underline'] = True self.resolve_font_name() elif item == '[/u]': spop('underline') self.resolve_font_name() elif item == '[s]': spush('strikethrough') options['strikethrough'] = True self.resolve_font_name() elif item == '[/s]': spop('strikethrough') self.resolve_font_name() elif item[:6] == '[size=': item = item[6:-1] try: if item[-2:] in ('px', 'pt', 'in', 'cm', 'mm', 'dp', 'sp'): size = dpi2px(item[:-2], item[-2:]) else: size = int(item) except __HOLE__: raise size = options['font_size'] spush('font_size') options['font_size'] = size elif item == '[/size]': spop('font_size') elif item[:7] == '[color=': color = parse_color(item[7:-1]) spush('color') options['color'] = color elif item == '[/color]': spop('color') elif item[:6] == '[font=': fontname = item[6:-1] spush('font_name') options['font_name'] = fontname self.resolve_font_name() elif item == '[/font]': spop('font_name') self.resolve_font_name() elif item[:5] == '[sub]': spush('font_size') spush('script') options['font_size'] = options['font_size'] * .5 options['script'] = 'subscript' elif item == '[/sub]': spop('font_size') spop('script') elif item[:5] == '[sup]': spush('font_size') spush('script') options['font_size'] = options['font_size'] * .5 options['script'] = 'superscript' elif item == '[/sup]': spop('font_size') spop('script') elif item[:5] == '[ref=': ref = item[5:-1] spush('_ref') options['_ref'] = ref elif item == '[/ref]': spop('_ref') elif not clipped and item[:8] == '[anchor=': options['_anchor'] = item[8:-1] elif not clipped: item = item.replace('&bl;', '[').replace( '&br;', ']').replace('&amp;', '&') opts = copy(options) extents = self.get_cached_extents() opts['space_width'] = extents(' ')[0] w, h, clipped = layout_text(item, lines, (w, h), (uw_temp, uhh), opts, extents, True, False) if len(lines): # remove any trailing spaces from the last line old_opts = self.options self.options = copy(opts) w, h, clipped = layout_text('', lines, (w, h), (uw_temp, uhh), self.options, self.get_cached_extents(), True, True) self.options = old_opts if shorten: options['_ref'] = None # no refs for you! options['_anchor'] = None w, h, lines = self.shorten_post(lines, w, h) self._cached_lines = lines # when valign is not top, for markup we layout everything (text_size[1] # is temporarily set to None) and after layout cut to size if too tall elif uh != uhh and h > uh and len(lines) > 1: if options['valign'] == 'bottom': i = 0 while i < len(lines) - 1 and h > uh: h -= lines[i].h i += 1 del lines[:i] else: # middle i = 0 top = int(h / 2. + uh / 2.) # remove extra top portion while i < len(lines) - 1 and h > top: h -= lines[i].h i += 1 del lines[:i] i = len(lines) - 1 # remove remaining bottom portion while i and h > uh: h -= lines[i].h i -= 1 del lines[i + 1:] # now justify the text if options['halign'] == 'justify' and uw is not None: # XXX: update refs to justified pos # when justify, each line should've been stripped already split = partial(re.split, re.compile('( +)')) uww = uw - 2 * xpad chr = type(self.text) space = chr(' ') empty = chr('') for i in range(len(lines)): line = lines[i] words = line.words # if there's nothing to justify, we're done if (not line.w or int(uww - line.w) <= 0 or not len(words) or line.is_last_line): continue done = False parts = [None, ] * len(words) # contains words split by space idxs = [None, ] * len(words) # indices of the space in parts # break each word into spaces and add spaces until it's full # do first round of split in case we don't need to split all for w in range(len(words)): word = words[w] sw = word.options['space_width'] p = parts[w] = split(word.text) idxs[w] = [v for v in range(len(p)) if p[v].startswith(' ')] # now we have the indices of the spaces in split list for k in idxs[w]: # try to add single space at each space if line.w + sw > uww: done = True break line.w += sw word.lw += sw p[k] += space if done: break # there's not a single space in the line? if not any(idxs): continue # now keep adding spaces to already split words until done while not done: for w in range(len(words)): if not idxs[w]: continue word = words[w] sw = word.options['space_width'] p = parts[w] for k in idxs[w]: # try to add single space at each space if line.w + sw > uww: done = True break line.w += sw word.lw += sw p[k] += space if done: break # if not completely full, push last words to right edge diff = int(uww - line.w) if diff > 0: # find the last word that had a space for w in range(len(words) - 1, -1, -1): if not idxs[w]: continue break old_opts = self.options self.options = word.options word = words[w] # split that word into left/right and push right till uww l_text = empty.join(parts[w][:idxs[w][-1]]) r_text = empty.join(parts[w][idxs[w][-1]:]) left = LayoutWord(word.options, self.get_extents(l_text)[0], word.lh, l_text) right = LayoutWord(word.options, self.get_extents(r_text)[0], word.lh, r_text) left.lw = max(left.lw, word.lw + diff - right.lw) self.options = old_opts # now put words back together with right/left inserted for k in range(len(words)): if idxs[k]: words[k].text = empty.join(parts[k]) words[w] = right words.insert(w, left) else: for k in range(len(words)): if idxs[k]: words[k].text = empty.join(parts[k]) line.w = uww w = max(w, uww) self._internal_size = w, h if uw: w = uw if uh: h = uh if h > 1 and w < 2: w = 2 if w < 1: w = 1 if h < 1: h = 1 return int(w), int(h)
ValueError
dataset/ETHPy150Open kivy/kivy/kivy/core/text/markup.py/MarkupLabel._pre_render
@phylip.sniffer() def _phylip_sniffer(fh): # Strategy: # Read the header and a single sequence; verify that the sequence length # matches the header information. Do not verify that the total number of # lines matches the header information, since that would require reading # the whole file. try: header = next(_line_generator(fh)) _, seq_len = _validate_header(header) line = next(_line_generator(fh)) _validate_line(line, seq_len) except (__HOLE__, PhylipFormatError): return False, {} return True, {}
StopIteration
dataset/ETHPy150Open biocore/scikit-bio/skbio/io/format/phylip.py/_phylip_sniffer
def _validate_header(header): header_vals = header.split() try: n_seqs, seq_len = [int(x) for x in header_vals] if n_seqs < 1 or seq_len < 1: raise PhylipFormatError( 'The number of sequences and the length must be positive.') except __HOLE__: raise PhylipFormatError( 'Found non-header line when attempting to read the 1st record ' '(header line should have two space-separated integers): ' '"%s"' % header) return n_seqs, seq_len
ValueError
dataset/ETHPy150Open biocore/scikit-bio/skbio/io/format/phylip.py/_validate_header
def _parse_phylip_raw(fh): """Raw parser for PHYLIP files. Returns a list of raw (seq, id) values. It is the responsibility of the caller to construct the correct in-memory object to hold the data. """ # Note: this returns the full data instead of yielding each sequence, # because the header specifies the number of sequences, so the file cannot # be validated until it's read completely. # File should have a single header on the first line. try: header = next(_line_generator(fh)) except __HOLE__: raise PhylipFormatError("This file is empty.") n_seqs, seq_len = _validate_header(header) # All following lines should be ID+sequence. No blank lines are allowed. data = [] for line in _line_generator(fh): data.append(_validate_line(line, seq_len)) if len(data) != n_seqs: raise PhylipFormatError( "The number of sequences is not %s " % n_seqs + "as specified in the header.") return data
StopIteration
dataset/ETHPy150Open biocore/scikit-bio/skbio/io/format/phylip.py/_parse_phylip_raw
def index(self, field): """Return index of a field""" try: index = self._field_names.index(unicode(field)) except __HOLE__: raise KeyError("Field list has no field with name '%s'" % unicode(field)) return index
ValueError
dataset/ETHPy150Open Stiivi/brewery/brewery/metadata.py/FieldList.index
def coalesce_value(value, storage_type, empty_values=None, strip=False): """Coalesces `value` to given storage `type`. `empty_values` is a dictionary where keys are storage type names and values are values to be used as empty value replacements.""" if empty_values is None: empty_values={} if storage_type in ["string", "text"]: if strip: value = value.strip() elif value: value = unicode(value) if value == "" or value is None: value = empty_values.get("string") elif storage_type == "integer": # FIXME: use configurable thousands separator (now uses space) if strip: value = re.sub(r"\s", "", value.strip()) try: value = int(value) except __HOLE__: value = empty_values.get("integer") elif storage_type == "float": # FIXME: use configurable thousands separator (now uses space) if strip: value = re.sub(r"\s", "", value.strip()) try: value = float(value) except ValueError: value = empty_values.get("float") elif storage_type == "list": # FIXME: undocumented type value = value.split(",") return value
ValueError
dataset/ETHPy150Open Stiivi/brewery/brewery/metadata.py/coalesce_value
def get_lock_pid(self): try: return int(open(self.lock_filename).read()) except __HOLE__: # If we can't read symbolic link, there are two possibilities: # 1. The symbolic link is dead (point to non existing file) # 2. Symbolic link is not there # In either case, we can safely release the lock self.release()
IOError
dataset/ETHPy150Open ui/django-post_office/post_office/lockfile.py/FileLock.get_lock_pid
def valid_lock(self): """ See if the lock exists and is left over from an old process. """ lock_pid = self.get_lock_pid() # If we're unable to get lock_pid if lock_pid is None: return False # this is our process if self._pid == lock_pid: return True # it is/was another process # see if it is running try: os.kill(lock_pid, 0) except __HOLE__: self.release() return False # it is running return True
OSError
dataset/ETHPy150Open ui/django-post_office/post_office/lockfile.py/FileLock.valid_lock
def release(self): """Try to delete the lock files. Doesn't matter if we fail""" if self.lock_filename != self.pid_filename: try: os.unlink(self.lock_filename) except OSError: pass try: os.remove(self.pid_filename) except __HOLE__: pass
OSError
dataset/ETHPy150Open ui/django-post_office/post_office/lockfile.py/FileLock.release
def deserialize(self, source): """ Returns a model instance """ attrs = {} for k, v in source.iteritems(): try: attrs[k] = self.deserialize_field(source, k) except (__HOLE__, FieldDoesNotExist): # m2m, abstract pass return self.instanciate(attrs) # TODO: we can assign m2ms now
AttributeError
dataset/ETHPy150Open liberation/django-elasticsearch/django_elasticsearch/serializers.py/EsJsonToModelMixin.deserialize
def serialize_field(self, instance, field_name): method_name = 'serialize_{0}'.format(field_name) if hasattr(self, method_name): return getattr(self, method_name)(instance, field_name) try: field = self.model._meta.get_field(field_name) except FieldDoesNotExist: # Abstract field pass else: field_type_method_name = 'serialize_type_{0}'.format( field.__class__.__name__.lower()) if hasattr(self, field_type_method_name): return getattr(self, field_type_method_name)(instance, field_name) if field.rel: # M2M if isinstance(field, ManyToManyField): return [self.nested_serialize(r) for r in getattr(instance, field.name).all()] rel = getattr(instance, field.name) # FK, OtO if rel: # should be a model instance if self.cur_depth >= self.max_depth: return return self.nested_serialize(rel) try: return getattr(instance, field_name) except __HOLE__: raise AttributeError("The serializer doesn't know how to serialize {0}, " "please provide it a {1} method." "".format(field_name, method_name))
AttributeError
dataset/ETHPy150Open liberation/django-elasticsearch/django_elasticsearch/serializers.py/EsModelToJsonMixin.serialize_field
@classmethod def setupClass(cls): global np try: import numpy as np except __HOLE__: raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py/TestEigenvectorCentrality.setupClass
@classmethod def setupClass(cls): global np try: import numpy as np except __HOLE__: raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py/TestEigenvectorCentralityDirected.setupClass
@classmethod def setupClass(cls): global np try: import numpy as np except __HOLE__: raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py/TestEigenvectorCentralityExceptions.setupClass
def default_configure(host=None): """Configure BlueOx based on defaults Accepts a connection string override in the form `localhost:3514`. Respects environment variable BLUEOX_HOST """ host = ports.default_collect_host(host) hostname, port = host.split(':') try: int_port = int(port) except __HOLE__: raise Error("Invalid value for port") configure(hostname, int_port)
ValueError
dataset/ETHPy150Open rhettg/BlueOx/blueox/__init__.py/default_configure
def validate(output, resource): ''' Validate Pepa templates ''' try: import cerberus # pylint: disable=import-error except __HOLE__: log.critical('You need module cerberus in order to use validation') return roots = __opts__['pepa_roots'] valdir = join(roots['base'], resource, 'validate') all_schemas = {} pepa_schemas = [] for fn in glob.glob(valdir + '/*.yaml'): log.info("Loading schema: {0}".format(fn)) with salt.utils.fopen(fn) as fhr: template = jinja2.Template(fhr.read()) data = output data['grains'] = __grains__.copy() data['pillar'] = __pillar__.copy() schema = yaml.load(template.render(data)) all_schemas.update(schema) pepa_schemas.append(fn) val = cerberus.Validator() if not val.validate(output['pepa_keys'], all_schemas): for ekey, error in six.iteritems(val.errors): log.warning('Validation failed for key {0}: {1}'.format(ekey, error)) output['pepa_schema_keys'] = all_schemas output['pepa_schemas'] = pepa_schemas # Only used when called from a terminal
ImportError
dataset/ETHPy150Open saltstack/salt/salt/pillar/pepa.py/validate
def test_errors(self): size = 8 fmt = linuxaudiodev.AFMT_U8 rate = 8000 nchannels = 1 try: self.dev.setparameters(-1, size, nchannels, fmt) except ValueError, err: self.assertEqual(err.args[0], "expected rate >= 0, not -1") try: self.dev.setparameters(rate, -2, nchannels, fmt) except ValueError, err: self.assertEqual(err.args[0], "expected sample size >= 0, not -2") try: self.dev.setparameters(rate, size, 3, fmt) except __HOLE__, err: self.assertEqual(err.args[0], "nchannels must be 1 or 2, not 3") try: self.dev.setparameters(rate, size, nchannels, 177) except ValueError, err: self.assertEqual(err.args[0], "unknown audio encoding: 177") try: self.dev.setparameters(rate, size, nchannels, linuxaudiodev.AFMT_U16_LE) except ValueError, err: self.assertEqual(err.args[0], "for linear unsigned 16-bit little-endian " "audio, expected sample size 16, not 8") try: self.dev.setparameters(rate, 16, nchannels, fmt) except ValueError, err: self.assertEqual(err.args[0], "for linear unsigned 8-bit audio, expected " "sample size 8, not 16")
ValueError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_linuxaudiodev.py/LinuxAudioDevTests.test_errors
def generate_stats(self, request, response): colors = contrasting_color_generator() trace_colors = defaultdict(lambda: next(colors)) query_duplicates = defaultdict(lambda: defaultdict(int)) if self._queries: width_ratio_tally = 0 factor = int(256.0 / (len(self._databases) * 2.5)) for n, db in enumerate(self._databases.values()): rgb = [0, 0, 0] color = n % 3 rgb[color] = 256 - n / 3 * factor nn = color # XXX: pretty sure this is horrible after so many aliases while rgb[color] < factor: nc = min(256 - rgb[color], 256) rgb[color] += nc nn += 1 if nn > 2: nn = 0 rgb[nn] = nc db['rgb_color'] = rgb trans_ids = {} trans_id = None i = 0 for alias, query in self._queries: query_duplicates[alias][query["raw_sql"]] += 1 trans_id = query.get('trans_id') last_trans_id = trans_ids.get(alias) if trans_id != last_trans_id: if last_trans_id: self._queries[(i - 1)][1]['ends_trans'] = True trans_ids[alias] = trans_id if trans_id: query['starts_trans'] = True if trans_id: query['in_trans'] = True query['alias'] = alias if 'iso_level' in query: query['iso_level'] = get_isolation_level_display(query['vendor'], query['iso_level']) if 'trans_status' in query: query['trans_status'] = get_transaction_status_display(query['vendor'], query['trans_status']) query['form'] = SQLSelectForm(auto_id=None, initial=copy(query)) if query['sql']: query['sql'] = reformat_sql(query['sql']) query['rgb_color'] = self._databases[alias]['rgb_color'] try: query['width_ratio'] = (query['duration'] / self._sql_time) * 100 query['width_ratio_relative'] = ( 100.0 * query['width_ratio'] / (100.0 - width_ratio_tally)) except ZeroDivisionError: query['width_ratio'] = 0 query['width_ratio_relative'] = 0 query['start_offset'] = width_ratio_tally query['end_offset'] = query['width_ratio'] + query['start_offset'] width_ratio_tally += query['width_ratio'] query['stacktrace'] = render_stacktrace(query['stacktrace']) i += 1 query['trace_color'] = trace_colors[query['stacktrace']] if trans_id: self._queries[(i - 1)][1]['ends_trans'] = True # Queries are duplicates only if there's as least 2 of them. # Also, to hide queries, we need to give all the duplicate groups an id query_colors = contrasting_color_generator() query_duplicates = dict( (alias, dict( (query, (duplicate_count, next(query_colors))) for query, duplicate_count in queries.items() if duplicate_count >= 2 )) for alias, queries in query_duplicates.items() ) for alias, query in self._queries: try: duplicates_count, color = query_duplicates[alias][query["raw_sql"]] query["duplicate_count"] = duplicates_count query["duplicate_color"] = color except __HOLE__: pass for alias, alias_info in self._databases.items(): try: alias_info["duplicate_count"] = sum(e[0] for e in query_duplicates[alias].values()) except KeyError: pass self.record_stats({ 'databases': sorted(self._databases.items(), key=lambda x: -x[1]['time_spent']), 'queries': [q for a, q in self._queries], 'sql_time': self._sql_time, })
KeyError
dataset/ETHPy150Open django-debug-toolbar/django-debug-toolbar/debug_toolbar/panels/sql/panel.py/SQLPanel.generate_stats
def main_cli(self, stdscr): # Block each getch() for 10 tenths of a second curses.halfdelay(10) # Visibility 0 is invisible curses.curs_set(0) try: while True: ps_str = self.get_ps_str() lines = ps_str.split('\n') max_y, max_x = stdscr.getmaxyx() stdscr.erase() for i, line in enumerate(lines): # We don't want to draw on the last line because the # Press q to exit message goes there if i >= max_y - 1: break stdscr.addstr(i, 0, line[:max_x]) # Assumes that terminal size is greater than 15 character # Will crash otherwise...but who uses terminals 15 characters wide? stdscr.addstr(max_y - 1, 0, 'Press q to exit', curses.A_REVERSE) stdscr.refresh() if stdscr.getch() == ord('q'): break except __HOLE__: pass
KeyboardInterrupt
dataset/ETHPy150Open memsql/memsql-loader/memsql_loader/cli/ps.py/Processes.main_cli
def _make_progress(self, row, width): # formatted percent has a max length of 4 (100%) # _format_filesize can return at most a string of length 10 (1,024.0 KB) # _format_time can return at most a string of length 8 (23 hours) NO_PROGRESS_FORMAT_STR = "{:<4} {:>10}/{:<10}" PROGRESS_FORMAT_STR = "{:<4} {} {:>10}/{:<10}" RATE_FORMAT_STR = "{:>10}/s" TIME_LEFT_FORMAT_STR = "{:>13}" try: current = row.bytes_downloaded or 0 total = row.bytes_total or 0 rate = row.download_rate or 0 time_left = row.data.get('time_left', -1) percent = 0 if total == 0 else current * 1.0 / total formatted = [ "%d%%" % int(percent * 100), self._format_filesize(current), self._format_filesize(total) ] formatted_rate = self._format_filesize(rate) formatted_time_left = self._format_time(time_left) except __HOLE__: percent = 0 formatted = [ '0%', '--', '--' ] formatted_rate = '--' formatted_time_left = '--' string_without_progress = NO_PROGRESS_FORMAT_STR.format(*formatted) progress_width = width - len(string_without_progress) # Bar is surrounded by brackets [] and an extra space bar_width = progress_width - 3 if bar_width <= 0: return string_without_progress filled_bar_width = int(bar_width * percent) bar = '=' * filled_bar_width if not filled_bar_width == bar_width and not percent == 0: bar += '>' formatted_bar = "[{0:{1}}]".format(bar, bar_width) formatted.insert(1, formatted_bar) return { 'progress': PROGRESS_FORMAT_STR.format(*formatted), 'rate': RATE_FORMAT_STR.format(formatted_rate), 'time_left': TIME_LEFT_FORMAT_STR.format(formatted_time_left) }
KeyError
dataset/ETHPy150Open memsql/memsql-loader/memsql_loader/cli/ps.py/Processes._make_progress
def _normalize_data(data, index): """normalize the data to a dict with tuples of strings as keys right now it works with: 0 - dictionary (or equivalent mappable) 1 - pandas.Series with simple or hierarchical indexes 2 - numpy.ndarrays 3 - everything that can be converted to a numpy array 4 - pandas.DataFrame (via the _normalize_dataframe function) """ # if data is a dataframe we need to take a completely new road # before coming back here. Use the hasattr to avoid importing # pandas explicitly if hasattr(data, 'pivot') and hasattr(data, 'groupby'): data = _normalize_dataframe(data, index) index = None # can it be used as a dictionary? try: items = list(iteritems(data)) except __HOLE__: # ok, I cannot use the data as a dictionary # Try to convert it to a numpy array, or die trying data = np.asarray(data) temp = OrderedDict() for idx in np.ndindex(data.shape): name = tuple(i for i in idx) temp[name] = data[idx] data = temp items = list(iteritems(data)) # make all the keys a tuple, even if simple numbers data = OrderedDict([_tuplify(k), v] for k, v in items) categories_levels = _categories_level(list(iterkeys(data))) # fill the void in the counting dictionary indexes = product(*categories_levels) contingency = OrderedDict([(k, data.get(k, 0)) for k in indexes]) data = contingency # reorder the keys order according to the one specified by the user # or if the index is None convert it into a simple list # right now it doesn't do any check, but can be modified in the future index = lrange(len(categories_levels)) if index is None else index contingency = OrderedDict() for key, value in iteritems(data): new_key = tuple(key[i] for i in index) contingency[new_key] = value data = contingency return data
AttributeError
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/graphics/mosaicplot.py/_normalize_data
def require_dataset(handle, symbol): gid = symbol[:3] group = handle.require_group(gid) try: ds = group[symbol] except __HOLE__: ds = group.create_dataset(symbol, (240, ), DTYPE) return ds
KeyError
dataset/ETHPy150Open yinhm/datafeed/example/bench_dataset.py/require_dataset
def enter_filing(data_hash): filing_created=False related_committee = None try: thisobj = new_filing.objects.get(filing_number=data_hash['filing_number']) try: thisobj.filed_date except AttributeError: try: thisobj.filed_date = get_local_time(data_hash['filed_date']) thisobj.process_time = get_local_time(data_hash['filed_date']) thisobj.save() except pytz.exceptions.AmbiguousTimeError: thisobj.filed_date = data_hash['filed_date'] thisobj.process_time = data_hash['filed_date'] thisobj.save() except new_filing.DoesNotExist: print "entering %s %s" % (data_hash['filing_number'], data_hash['committee_id']) is_superpac=False try: thisobj = new_filing.objects.create( is_superpac = is_superpac, #related_committee = related_committee, fec_id = data_hash['committee_id'], committee_name = data_hash['committee_name'], filing_number = data_hash['filing_number'], form_type = data_hash['form_type'], filed_date = get_local_time(data_hash['filed_date']), process_time = get_local_time(data_hash['filed_date']), ) except pytz.exceptions.AmbiguousTimeError: thisobj = new_filing.objects.create( is_superpac = is_superpac, #related_committee = related_committee, fec_id = data_hash['committee_id'], committee_name = data_hash['committee_name'], filing_number = data_hash['filing_number'], form_type = data_hash['form_type'], filed_date = data_hash['filed_date'], process_time = data_hash['filed_date'], ) filing_created=True needs_saving=False try: thisobj.coverage_from_date = data_hash['coverage_from_date'] thisobj.cycle = get_cycle_from_date(data_hash['coverage_from_date']) needs_saving=True except __HOLE__: pass try: thisobj.coverage_to_date = data_hash['coverage_to_date'] needs_saving=True except KeyError: pass if needs_saving: thisobj.save() # return true if a new filing was created return filing_created
KeyError
dataset/ETHPy150Open sunlightlabs/read_FEC/fecreader/fec_alerts/management/commands/scrape_rss_filings.py/enter_filing
def GenerateQueryUsingSession(self, criteria, target_id, for_stats=False): query = self.db.session.query(models.Transaction).filter_by( target_id=target_id) # If transaction search is being done if criteria.get('search', None): if criteria.get('url', None): if isinstance(criteria.get('url'), list): criteria['url'] = criteria['url'][0] query = query.filter(models.Transaction.url.like( '%' + criteria['url'] + '%')) if criteria.get('method', None): if isinstance(criteria.get('method'), list): criteria['method'] = criteria['method'][0] query = query.filter(models.Transaction.method.like( '%' + criteria.get('method') + '%')) if criteria.get('data', None): if isinstance(criteria.get('data'), list): criteria['data'] = criteria['data'][0] query = query.filter(models.Transaction.data.like( '%' + criteria.get('data') + '%')) if criteria.get('raw_request', None): if isinstance(criteria.get('raw_request'), list): criteria['raw_request'] = criteria['raw_request'][0] query = query.filter(models.Transaction.raw_request.like( '%' + criteria.get('raw_request') + '%')) if criteria.get('response_status', None): if isinstance(criteria.get('response_status'), list): criteria['response_status'] = criteria['response_status'][0] query = query.filter(models.Transaction.response_status.like( '%' + criteria.get('response_status') + '%')) if criteria.get('response_headers', None): if isinstance(criteria.get('response_headers'), list): criteria['response_headers'] = criteria['response_headers'][0] query = query.filter(models.Transaction.response_headers.like( '%' + criteria.get('response_headers') + '%')) if criteria.get('response_body', None): if isinstance(criteria.get('response_body'), list): criteria['response_body'] = criteria['response_body'][0] query = query.filter( models.Transaction.binary_response == False, models.Transaction.response_body.like( '%' + criteria.get('response_body') + '%')) else: # If transaction filter is being done if criteria.get('url', None): if isinstance(criteria.get('url'), (str, unicode)): query = query.filter_by(url=criteria['url']) if isinstance(criteria.get('url'), list): query = query.filter( models.Transaction.url.in_(criteria.get('url'))) if criteria.get('method', None): if isinstance(criteria.get('method'), (str, unicode)): query = query.filter_by(method=criteria['method']) if isinstance(criteria.get('method'), list): query = query.filter( models.Transaction.method.in_(criteria.get('method'))) if criteria.get('data', None): if isinstance(criteria.get('data'), (str, unicode)): query = query.filter_by(data=criteria['data']) if isinstance(criteria.get('data'), list): query = query.filter(models.Transaction.data.in_(criteria.get('data'))) # For the following section doesn't matter if filter/search because # it doesn't make sense to search in a boolean column :P if criteria.get('scope', None): if isinstance(criteria.get('scope'), list): criteria['scope'] = criteria['scope'][0] query = query.filter_by(scope=self.config.ConvertStrToBool(criteria['scope'])) if criteria.get('binary_response', None): if isinstance(criteria.get('binary_response'), list): criteria['binary_response'] = criteria['binary_response'][0] query = query.filter_by(binary_response=self.config.ConvertStrToBool(criteria['binary_response'])) if not for_stats: # query for stats shouldn't have limit and offset try: query.order_by(models.Transaction.local_timestamp) if criteria.get('offset', None): if isinstance(criteria.get('offset'), list): criteria['offset'] = int(criteria['offset'][0]) if criteria['offset'] >= 0: query = query.offset(criteria['offset']) if criteria.get('limit', None): if isinstance(criteria.get('limit'), list): criteria['limit'] = int(criteria['limit'][0]) if criteria['limit'] >= 0: query = query.limit(criteria['limit']) else: # It is too dangerous without a limit argument query.limit(10) # Default limit value is 10 except __HOLE__: raise InvalidParameterType( "Invalid parameter type for transaction db") return(query)
ValueError
dataset/ETHPy150Open owtf/owtf/framework/db/transaction_manager.py/TransactionManager.GenerateQueryUsingSession
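SQLAlchemy Query methods are non-mutating: each filter/order_by/offset/limit call returns a new Query, which is why every call above assigns back to query. A minimal self-contained sketch of that builder style (a hypothetical Query class, not SQLAlchemy itself):

class Query(object):
    """Hypothetical immutable builder, mimicking SQLAlchemy's Query."""
    def __init__(self, ops=()):
        self.ops = tuple(ops)
    def filter(self, cond):
        return Query(self.ops + (('filter', cond),))
    def limit(self, n):
        return Query(self.ops + (('limit', n),))

q = Query()
q.limit(10)                                   # no-op: result discarded
q = q.filter("url LIKE '%login%'").limit(10)  # correct: reassigned
assert q.ops == (('filter', "url LIKE '%login%'"), ('limit', 10))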
def GetTransactionModel(self, transaction):
    try:
        response_body = transaction.GetRawResponseBody().encode("utf-8")
        binary_response = False
    except __HOLE__:
        response_body = base64.b64encode(transaction.GetRawResponseBody())
        binary_response = True
    finally:
        transaction_model = models.Transaction(
            url=transaction.URL,
            scope=transaction.InScope(),
            method=transaction.Method,
            data=transaction.Data,
            time=float(transaction.Time),
            time_human=transaction.TimeHuman,
            local_timestamp=transaction.LocalTimestamp,
            raw_request=transaction.GetRawRequest(),
            response_status=transaction.GetStatus(),
            response_headers=transaction.GetResponseHeaders(),
            response_body=response_body,
            response_size=len(response_body),
            binary_response=binary_response,
            session_tokens=transaction.GetSessionTokens(),
            login=None,
            logout=None)
        return transaction_model
UnicodeDecodeError
dataset/ETHPy150Open owtf/owtf/framework/db/transaction_manager.py/TransactionManager.GetTransactionModel
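The UnicodeDecodeError above comes from a Python 2 quirk: calling .encode('utf-8') on a byte string first decodes it with the ascii codec, so any non-ASCII binary body fails and falls through to the base64 branch. A small Python 2 illustration of that detection trick:

# -*- coding: utf-8 -*-
# Python 2 only: str.encode('utf-8') implicitly decodes with the
# ascii codec first, so raw binary bytes raise UnicodeDecodeError.
import base64

for body in ['plain text', '\x89PNG\r\n\x1a\n']:
    try:
        encoded = body.encode('utf-8')
        binary = False
    except UnicodeDecodeError:
        encoded = base64.b64encode(body)
        binary = True
    print('binary=%s %r' % (binary, encoded))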
def GetByID(self, ID):
    model_obj = None
    try:
        ID = int(ID)
        model_obj = self.db.session.query(models.Transaction).get(ID)
    except __HOLE__:
        pass
    finally:
        return model_obj  # None returned if no such transaction.
ValueError
dataset/ETHPy150Open owtf/owtf/framework/db/transaction_manager.py/TransactionManager.GetByID
def find(self, only_visible=True, **kwargs):
    try:
        return self.__findcacheiter(only_visible, **kwargs).next()
    except __HOLE__:
        try:
            return self._finditer(only_visible, **kwargs).next()
        except StopIteration:
            attrs = ['%s=%s' % (k, v) for k, v in kwargs.iteritems()]
            raise TooSaltyUISoupException(
                'Can\'t find object with attributes "%s".' %
                '; '.join(attrs))
StopIteration
dataset/ETHPy150Open F1ashhimself/UISoup/uisoup/win_soup/element.py/WinElement.find
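The lookup above tries a cached iterator first, falls back to a fresh search, and converts the final StopIteration into a domain-specific error (.next() is the Python 2 spelling of the next() builtin). The same fallback pattern reduced to a sketch with hypothetical names:

class NotFound(Exception):
    pass

def find_first(cached_iter, fresh_iter):
    try:
        return next(cached_iter)       # cheap path: cached results
    except StopIteration:
        try:
            return next(fresh_iter)    # slow path: fresh search
        except StopIteration:
            raise NotFound('no matching element')

print(find_first(iter([]), iter(['fresh-hit'])))  # 'fresh-hit'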
def update_watchers(issue, created, comment=None):
    site = Site.objects.get(id=settings.SITE_ID)
    context = Context({'issue': issue, 'project': issue.project,
                       'site': site})
    if comment:
        # issue commented
        context['comment'] = comment
        context['user_name'] = comment.user_name
        template = utils.MailTemplate('djtracker/mail/issue_commented.mail')
    elif created:
        template = utils.MailTemplate('djtracker/mail/issue_created.mail')
        try:
            issue.watched_by.add(issue.created)
        except:
            ## Anon user
            pass
    else:
        template = utils.MailTemplate('djtracker/mail/issue_updated.mail')
    # we send email to: all watchers, the creator and the current
    # processor...
    email_addresses = list(
        issue.watched_by.all().values_list('user__email', flat=True))
    for addy in issue.project.watched_by.all().values_list('user__email',
                                                           flat=True):
        if addy not in email_addresses:
            email_addresses.append(addy)
    if issue.created_by:
        try:
            email_addresses.append(issue.created_by.user.email)
        except __HOLE__:
            pass
    if issue.assigned_to:
        try:
            email_addresses.append(issue.assigned_to.user.email)
        except ObjectDoesNotExist:
            pass
    # remove commenter from the list. Issue 13
    # list.remove() also covers a commenter sitting at index 0, which
    # an index()-based truthiness check would silently skip.
    try:
        email_addresses.remove(comment.user_email)
    except:
        # user email doesn't exist, so we're not removing it.
        pass
    # make list unique
    email_addresses = {}.fromkeys(email_addresses).keys()
    # send mails separately to protect privacy
    for recipient in email_addresses:
        msg = template.render_to_mail(context)
        msg.from_address = settings.ISSUE_ADDRESS
        msg.to = [recipient, ]
        msg.send(fail_silently=True)
ObjectDoesNotExist
dataset/ETHPy150Open f4nt/djtracker/djtracker/__init__.py/update_watchers
def scale_from_matrix(matrix):
    """Return scaling factor, origin and direction from scaling matrix.

    >>> factor = random.random() * 10 - 5
    >>> origin = numpy.random.random(3) - 0.5
    >>> direct = numpy.random.random(3) - 0.5
    >>> S0 = scale_matrix(factor, origin)
    >>> factor, origin, direction = scale_from_matrix(S0)
    >>> S1 = scale_matrix(factor, origin, direction)
    >>> is_same_transform(S0, S1)
    True
    >>> S0 = scale_matrix(factor, origin, direct)
    >>> factor, origin, direction = scale_from_matrix(S0)
    >>> S1 = scale_matrix(factor, origin, direction)
    >>> is_same_transform(S0, S1)
    True

    """
    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
    M33 = M[:3, :3]
    factor = numpy.trace(M33) - 2.0
    try:
        # direction: unit eigenvector corresponding to eigenvalue factor
        l, V = numpy.linalg.eig(M33)
        i = numpy.where(abs(numpy.real(l) - factor) < 1e-8)[0][0]
        direction = numpy.real(V[:, i]).squeeze()
        direction /= vector_norm(direction)
    except __HOLE__:
        # uniform scaling
        factor = (factor + 2.0) / 3.0
        direction = None
    # origin: any eigenvector corresponding to eigenvalue 1
    l, V = numpy.linalg.eig(M)
    i = numpy.where(abs(numpy.real(l) - 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no eigenvector corresponding to eigenvalue 1")
    origin = numpy.real(V[:, i[-1]]).squeeze()
    origin /= origin[3]
    return factor, origin, direction
IndexError
dataset/ETHPy150Open omangin/multimodal/multimodal/lib/transformations.py/scale_from_matrix
def euler_matrix(ai, aj, ak, axes='sxyz'):
    """Return homogeneous rotation matrix from Euler angles and axis sequence.

    ai, aj, ak : Euler's roll, pitch and yaw angles
    axes : One of 24 axis sequences as string or encoded tuple

    >>> R = euler_matrix(1, 2, 3, 'syxz')
    >>> numpy.allclose(numpy.sum(R[0]), -1.34786452)
    True
    >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
    >>> numpy.allclose(numpy.sum(R[0]), -0.383436184)
    True
    >>> ai, aj, ak = (4.0*math.pi) * (numpy.random.random(3) - 0.5)
    >>> for axes in _AXES2TUPLE.keys():
    ...     R = euler_matrix(ai, aj, ak, axes)
    >>> for axes in _TUPLE2AXES.keys():
    ...     R = euler_matrix(ai, aj, ak, axes)

    """
    try:
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes]
    except (AttributeError, __HOLE__):
        _ = _TUPLE2AXES[axes]
        firstaxis, parity, repetition, frame = axes

    i = firstaxis
    j = _NEXT_AXIS[i+parity]
    k = _NEXT_AXIS[i-parity+1]

    if frame:
        ai, ak = ak, ai
    if parity:
        ai, aj, ak = -ai, -aj, -ak

    si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak)
    ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak)
    cc, cs = ci*ck, ci*sk
    sc, ss = si*ck, si*sk

    M = numpy.identity(4)
    if repetition:
        M[i, i] = cj
        M[i, j] = sj*si
        M[i, k] = sj*ci
        M[j, i] = sj*sk
        M[j, j] = -cj*ss+cc
        M[j, k] = -cj*cs-sc
        M[k, i] = -sj*ck
        M[k, j] = cj*sc+cs
        M[k, k] = cj*cc-ss
    else:
        M[i, i] = cj*ck
        M[i, j] = sj*sc-cs
        M[i, k] = sj*cc+ss
        M[j, i] = cj*sk
        M[j, j] = sj*ss+cc
        M[j, k] = sj*cs-sc
        M[k, i] = -sj
        M[k, j] = cj*si
        M[k, k] = cj*ci
    return M
KeyError
dataset/ETHPy150Open omangin/multimodal/multimodal/lib/transformations.py/euler_matrix
def euler_from_matrix(matrix, axes='sxyz'):
    """Return Euler angles from rotation matrix for specified axis sequence.

    axes : One of 24 axis sequences as string or encoded tuple

    Note that many Euler angle triplets can describe one matrix.

    >>> R0 = euler_matrix(1, 2, 3, 'syxz')
    >>> al, be, ga = euler_from_matrix(R0, 'syxz')
    >>> R1 = euler_matrix(al, be, ga, 'syxz')
    >>> numpy.allclose(R0, R1)
    True
    >>> angles = (4.0*math.pi) * (numpy.random.random(3) - 0.5)
    >>> for axes in _AXES2TUPLE.keys():
    ...     R0 = euler_matrix(axes=axes, *angles)
    ...     R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes))
    ...     if not numpy.allclose(R0, R1): print(axes, "failed")

    """
    try:
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
    except (AttributeError, __HOLE__):
        _ = _TUPLE2AXES[axes]
        firstaxis, parity, repetition, frame = axes

    i = firstaxis
    j = _NEXT_AXIS[i+parity]
    k = _NEXT_AXIS[i-parity+1]

    M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3]
    if repetition:
        sy = math.sqrt(M[i, j]*M[i, j] + M[i, k]*M[i, k])
        if sy > _EPS:
            ax = math.atan2( M[i, j],  M[i, k])
            ay = math.atan2( sy,       M[i, i])
            az = math.atan2( M[j, i], -M[k, i])
        else:
            ax = math.atan2(-M[j, k],  M[j, j])
            ay = math.atan2( sy,       M[i, i])
            az = 0.0
    else:
        cy = math.sqrt(M[i, i]*M[i, i] + M[j, i]*M[j, i])
        if cy > _EPS:
            ax = math.atan2( M[k, j],  M[k, k])
            ay = math.atan2(-M[k, i],  cy)
            az = math.atan2( M[j, i],  M[i, i])
        else:
            ax = math.atan2(-M[j, k],  M[j, j])
            ay = math.atan2(-M[k, i],  cy)
            az = 0.0

    if parity:
        ax, ay, az = -ax, -ay, -az
    if frame:
        ax, az = az, ax
    return ax, ay, az
KeyError
dataset/ETHPy150Open omangin/multimodal/multimodal/lib/transformations.py/euler_from_matrix
def quaternion_from_euler(ai, aj, ak, axes='sxyz'):
    """Return quaternion from Euler angles and axis sequence.

    ai, aj, ak : Euler's roll, pitch and yaw angles
    axes : One of 24 axis sequences as string or encoded tuple

    >>> q = quaternion_from_euler(1, 2, 3, 'ryxz')
    >>> numpy.allclose(q, [0.310622, -0.718287, 0.444435, 0.435953])
    True

    """
    try:
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
    except (AttributeError, __HOLE__):
        _ = _TUPLE2AXES[axes]
        firstaxis, parity, repetition, frame = axes

    i = firstaxis
    j = _NEXT_AXIS[i+parity]
    k = _NEXT_AXIS[i-parity+1]

    if frame:
        ai, ak = ak, ai
    if parity:
        aj = -aj

    ai /= 2.0
    aj /= 2.0
    ak /= 2.0
    ci = math.cos(ai)
    si = math.sin(ai)
    cj = math.cos(aj)
    sj = math.sin(aj)
    ck = math.cos(ak)
    sk = math.sin(ak)
    cc = ci*ck
    cs = ci*sk
    sc = si*ck
    ss = si*sk

    quaternion = numpy.empty((4, ), dtype=numpy.float64)
    if repetition:
        quaternion[i] = cj*(cs + sc)
        quaternion[j] = sj*(cc + ss)
        quaternion[k] = sj*(cs - sc)
        quaternion[3] = cj*(cc - ss)
    else:
        quaternion[i] = cj*sc - sj*cs
        quaternion[j] = cj*ss + sj*cc
        quaternion[k] = cj*cs - sj*sc
        quaternion[3] = cj*cc + sj*ss
    if parity:
        quaternion[j] *= -1

    return quaternion
KeyError
dataset/ETHPy150Open omangin/multimodal/multimodal/lib/transformations.py/quaternion_from_euler
def _import_module(module_name, warn=True, prefix='_py_', ignore='_'):
    """Try import all public attributes from module into global namespace.

    Existing attributes with name clashes are renamed with prefix.
    Attributes starting with underscore are ignored by default.

    Return True on successful import.

    """
    try:
        module = __import__(module_name)
    except __HOLE__:
        if warn:
            warnings.warn("Failed to import module " + module_name)
    else:
        for attr in dir(module):
            if ignore and attr.startswith(ignore):
                continue
            if prefix:
                if attr in globals():
                    globals()[prefix + attr] = globals()[attr]
                elif warn:
                    warnings.warn("No Python implementation of " + attr)
            globals()[attr] = getattr(module, attr)
        return True
ImportError
dataset/ETHPy150Open omangin/multimodal/multimodal/lib/transformations.py/_import_module
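This loader is the usual "optional C accelerator" pattern: pure-Python definitions stay bound unless a compiled module can shadow them. The same idea in its simplest form, using a hypothetical module name:

import warnings

def vector_norm(v):  # pure-Python fallback
    return sum(x * x for x in v) ** 0.5

try:
    # '_fast_transformations' is a hypothetical compiled module; when
    # it is missing, the pure-Python definition above stays bound.
    from _fast_transformations import vector_norm
except ImportError:
    warnings.warn("no compiled accelerator, using pure Python")

print(vector_norm([3.0, 4.0]))  # 5.0 either way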
def alive(self):
    try:
        os.kill(self.pid, 0)
    except __HOLE__:
        return False
    return True
OSError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/SimpleLxcNode.alive
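os.kill(pid, 0) delivers no signal; it only runs the kernel's existence and permission checks, so OSError doubles as a "process is gone" probe. A standalone version, with an optional EPERM refinement that is an assumption here, not part of the class above:

import errno
import os

def pid_alive(pid):
    # Signal 0 performs existence/permission checks without delivering
    # a signal; ESRCH means gone, EPERM means alive but not ours.
    try:
        os.kill(pid, 0)
    except OSError as e:
        return e.errno == errno.EPERM
    return True

print(pid_alive(os.getpid()))  # True: our own process exists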
def startup(self):
    ''' Start a new namespace node by invoking the vnoded process that
    allocates a new namespace. Bring up the loopback device and set
    the hostname.
    '''
    if self.up:
        raise Exception, "already up"
    vnoded = ["%s/vnoded" % CORE_SBIN_DIR, "-v", "-c", self.ctrlchnlname,
              "-l", self.ctrlchnlname + ".log",
              "-p", self.ctrlchnlname + ".pid"]
    if self.nodedir:
        vnoded += ["-C", self.nodedir]
    env = self.session.getenviron(state=False)
    env['NODE_NUMBER'] = str(self.objid)
    env['NODE_NAME'] = str(self.name)
    try:
        tmp = subprocess.Popen(vnoded, stdout=subprocess.PIPE, env=env)
    except __HOLE__, e:
        msg = "error running vnoded command: %s (%s)" % (vnoded, e)
        self.exception(coreapi.CORE_EXCP_LEVEL_FATAL,
                       "SimpleLxcNode.startup()", msg)
        raise Exception, msg
    try:
        self.pid = int(tmp.stdout.read())
        tmp.stdout.close()
    except Exception:
        msg = "vnoded failed to create a namespace; "
        msg += "check kernel support and user privileges"
        self.exception(coreapi.CORE_EXCP_LEVEL_FATAL,
                       "SimpleLxcNode.startup()", msg)
    if tmp.wait():
        raise Exception, ("command failed: %s" % vnoded)
    self.vnodeclient = vnodeclient.VnodeClient(self.name,
                                               self.ctrlchnlname)
    self.info("bringing up loopback interface")
    self.cmd([IP_BIN, "link", "set", "lo", "up"])
    self.info("setting hostname: %s" % self.name)
    self.cmd(["hostname", self.name])
    self.up = True
OSError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/SimpleLxcNode.startup
def shutdown(self):
    if not self.up:
        return
    while self._mounts:
        source, target = self._mounts.pop(-1)
        self.umount(target)
    for netif in self.netifs():
        netif.shutdown()
    try:
        os.kill(self.pid, signal.SIGTERM)
        os.waitpid(self.pid, 0)
    except OSError:
        pass
    try:
        os.unlink(self.ctrlchnlname)
    except __HOLE__:
        pass
    self._netif.clear()
    self.vnodeclient.close()
    self.up = False
OSError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/SimpleLxcNode.shutdown
def newveth(self, ifindex=None, ifname=None, net=None):
    self.lock.acquire()
    try:
        if ifindex is None:
            ifindex = self.newifindex()
        if ifname is None:
            ifname = "eth%d" % ifindex
        sessionid = self.session.shortsessionid()
        try:
            suffix = '%x.%s.%s' % (self.objid, ifindex, sessionid)
        except __HOLE__:
            suffix = '%s.%s.%s' % (self.objid, ifindex, sessionid)
        localname = 'veth' + suffix
        if len(localname) >= 16:
            raise ValueError, "interface local name '%s' too long" % \
                localname
        name = localname + 'p'
        if len(name) >= 16:
            raise ValueError, "interface name '%s' too long" % name
        ifclass = VEth
        veth = ifclass(node=self, name=name, localname=localname,
                       mtu=1500, net=net, start=self.up)
        if self.up:
            check_call([IP_BIN, "link", "set", veth.name,
                        "netns", str(self.pid)])
            self.cmd([IP_BIN, "link", "set", veth.name,
                      "name", ifname])
        veth.name = ifname
        try:
            self.addnetif(veth, ifindex)
        except:
            veth.shutdown()
            del veth
            raise
        return ifindex
    finally:
        self.lock.release()
TypeError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/SimpleLxcNode.newveth
def deladdr(self, ifindex, addr):
    try:
        self._netif[ifindex].deladdr(addr)
    except __HOLE__:
        self.warn("trying to delete unknown address: %s" % addr)
    if self.up:
        self.cmd([IP_BIN, "addr", "del", str(addr),
                  "dev", self.ifname(ifindex)])
ValueError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/SimpleLxcNode.deladdr
def startup(self):
    self.lock.acquire()
    try:
        self.makenodedir()
        super(LxcNode, self).startup()
        self.privatedir("/var/run")
        self.privatedir("/var/log")
    except __HOLE__, e:
        self.warn("Error with LxcNode.startup(): %s" % e)
        self.exception(coreapi.CORE_EXCP_LEVEL_ERROR,
                       "LxcNode.startup()", "%s" % e)
    finally:
        self.lock.release()
OSError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/LxcNode.startup
def privatedir(self, path):
    if path[0] != "/":
        raise ValueError, "path not fully qualified: " + path
    hostpath = os.path.join(
        self.nodedir,
        os.path.normpath(path).strip('/').replace('/', '.'))
    try:
        os.mkdir(hostpath)
    except __HOLE__:
        pass
    except Exception, e:
        raise Exception, e
    self.mount(hostpath, path)
OSError
dataset/ETHPy150Open coreemu/core/daemon/core/netns/vnode.py/LxcNode.privatedir
def emit(self, record):
    try:
        msg = record.getMessage()
        log_data = "PLAINTEXT=" + urllib2.quote(simplejson.dumps({
            'msg': msg,
            'localip': self.localip,
            'publicip': self.publicip,
            'tenant': 'TODO :)'
        }))
        urllib2.urlopen(self.base_url, log_data)
    except (KeyboardInterrupt, __HOLE__):
        raise
    except:
        self.handleError(record)
SystemExit
dataset/ETHPy150Open gosquadron/squadron/squadron/fileio/loghandlers/LogglyHandler.py/LogglyHandler.emit
def add_field(self, field):
    # Insert the given field in the order in which it was created, using
    # the "creation_counter" attribute of the field.
    # Move many-to-many related fields from self.fields into
    # self.many_to_many.
    if field.rel and isinstance(field.rel, ManyToManyRel):
        self.local_many_to_many.insert(
            bisect(self.local_many_to_many, field), field)
        if hasattr(self, '_m2m_cache'):
            del self._m2m_cache
    else:
        self.local_fields.insert(bisect(self.local_fields, field), field)
        self.setup_pk(field)
        if hasattr(self, '_field_cache'):
            del self._field_cache
            del self._field_name_cache
        # The fields, concrete_fields and local_concrete_fields are
        # implemented as cached properties for performance reasons.
        # The attrs will not exist if the cached property isn't
        # accessed yet, hence the try-excepts.
        try:
            del self.fields
        except __HOLE__:
            pass
        try:
            del self.concrete_fields
        except AttributeError:
            pass
        try:
            del self.local_concrete_fields
        except AttributeError:
            pass

    if hasattr(self, '_name_map'):
        del self._name_map
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.add_field
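The try/except AttributeError around each del exists because a cached property only materialises in the instance __dict__ on first access; deleting it before then raises AttributeError. A minimal sketch of the mechanism (not Django's actual cached_property implementation):

# Minimal cached-property sketch: the computed value shadows the
# descriptor via instance.__dict__, so `del` is the invalidation hook.
class cached_property(object):
    def __init__(self, func):
        self.func = func
    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        value = self.func(instance)
        instance.__dict__[self.func.__name__] = value
        return value

class Options(object):
    @cached_property
    def fields(self):
        return ['a', 'b']

opts = Options()
try:
    del opts.fields          # never accessed: nothing cached yet
except AttributeError:
    pass                     # exactly the case add_field() guards against
opts.fields                  # computed and cached
del opts.fields              # now succeeds; next access recomputes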
def _swapped(self):
    """
    Has this model been swapped out for another? If so, return the model
    name of the replacement; otherwise, return None.

    For historical reasons, model name lookups using get_model() are
    case insensitive, so we make sure we are case insensitive here.
    """
    if self.swappable:
        model_label = '%s.%s' % (self.app_label, self.model_name)
        swapped_for = getattr(settings, self.swappable, None)
        if swapped_for:
            try:
                swapped_label, swapped_object = swapped_for.split('.')
            except __HOLE__:
                # setting not in the format app_label.model_name
                # raising ImproperlyConfigured here causes problems with
                # test cleanup code - instead it is raised in get_user_model
                # or as part of validation.
                return swapped_for

            if ('%s.%s' % (swapped_label, swapped_object.lower())
                    not in (None, model_label)):
                return swapped_for
    return None
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options._swapped
@cached_property
def fields(self):
    """
    The getter for self.fields. This returns the list of field objects
    available to this model (including through parent models).

    Callers are not permitted to modify this list, since it's a reference
    to this instance (not a copy).
    """
    try:
        self._field_name_cache
    except __HOLE__:
        self._fill_fields_cache()
    return self._field_name_cache
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.fields
def get_fields_with_model(self):
    """
    Returns a sequence of (field, model) pairs for all fields. The
    "model" element is None for fields on the current model. Mostly of
    use when constructing queries so that we know which model a field
    belongs to.
    """
    try:
        self._field_cache
    except __HOLE__:
        self._fill_fields_cache()
    return self._field_cache
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_fields_with_model
def _many_to_many(self):
    try:
        self._m2m_cache
    except __HOLE__:
        self._fill_m2m_cache()
    return list(self._m2m_cache)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options._many_to_many
def get_m2m_with_model(self):
    """
    The many-to-many version of get_fields_with_model().
    """
    try:
        self._m2m_cache
    except __HOLE__:
        self._fill_m2m_cache()
    return list(six.iteritems(self._m2m_cache))
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_m2m_with_model
def get_field_by_name(self, name):
    """
    Returns the (field_object, model, direct, m2m), where field_object is
    the Field instance for the given name, model is the model containing
    this field (None for local fields), direct is True if the field
    exists on this model, and m2m is True for many-to-many relations.
    When 'direct' is False, 'field_object' is the corresponding
    RelatedObject for this field (since the field doesn't have an
    instance associated with it).

    Uses a cache internally, so after the first access, this is very
    fast.
    """
    try:
        try:
            return self._name_map[name]
        except AttributeError:
            cache = self.init_name_map()
            return cache[name]
    except __HOLE__:
        raise FieldDoesNotExist('%s has no field named %r'
                                % (self.object_name, name))
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_field_by_name
def get_all_field_names(self):
    """
    Returns a list of all field names that are possible for this model
    (including reverse relation names). This is used for pretty printing
    debugging output (a list of choices), so any internal-only field
    names are not included.
    """
    try:
        cache = self._name_map
    except __HOLE__:
        cache = self.init_name_map()
    names = sorted(cache.keys())
    # Internal-only names end with "+" (symmetrical m2m related names
    # being the main example). Trim them.
    return [val for val in names if not val.endswith('+')]
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_all_field_names
def get_all_related_objects_with_model(self, local_only=False,
                                       include_hidden=False,
                                       include_proxy_eq=False):
    """
    Returns a list of (related-object, model) pairs. Similar to
    get_fields_with_model().
    """
    try:
        self._related_objects_cache
    except __HOLE__:
        self._fill_related_objects_cache()
    predicates = []
    if local_only:
        predicates.append(lambda k, v: not v)
    if not include_hidden:
        predicates.append(lambda k, v: not k.field.rel.is_hidden())
    cache = (self._related_objects_proxy_cache if include_proxy_eq
             else self._related_objects_cache)
    return [t for t in cache.items() if all(p(*t) for p in predicates)]
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_all_related_objects_with_model
def get_all_related_many_to_many_objects(self, local_only=False):
    try:
        cache = self._related_many_to_many_cache
    except __HOLE__:
        cache = self._fill_related_many_to_many_cache()
    if local_only:
        return [k for k, v in cache.items() if not v]
    return list(cache)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_all_related_many_to_many_objects
def get_all_related_m2m_objects_with_model(self):
    """
    Returns a list of (related-m2m-object, model) pairs. Similar to
    get_fields_with_model().
    """
    try:
        cache = self._related_many_to_many_cache
    except __HOLE__:
        cache = self._fill_related_many_to_many_cache()
    return list(six.iteritems(cache))
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/options.py/Options.get_all_related_m2m_objects_with_model
def allowed(request, file):
    try:
        addon = file.version.addon
    except __HOLE__:
        raise http.Http404

    # General case: addon is listed.
    if addon.is_listed:
        if ((addon.view_source and addon.status in amo.REVIEWED_STATUSES) or
                acl.check_addons_reviewer(request) or
                acl.check_addon_ownership(request, addon, viewer=True,
                                          dev=True)):
            return True  # Public and sources are visible, or reviewer.
        raise PermissionDenied  # Listed but not allowed.
    # Not listed? Needs an owner or an "unlisted" admin.
    else:
        if owner_or_unlisted_reviewer(request, addon):
            return True
    raise http.Http404  # Not listed, not owner or admin.
ObjectDoesNotExist
dataset/ETHPy150Open mozilla/addons-server/src/olympia/files/decorators.py/allowed
def file_view(func, **kwargs):
    @functools.wraps(func)
    def wrapper(request, file_id, *args, **kw):
        file_ = get_object_or_404(File, pk=file_id)
        result = allowed(request, file_)
        if result is not True:
            return result
        try:
            obj = FileViewer(file_)
        except __HOLE__:
            raise http.Http404

        response = func(request, obj, *args, **kw)
        if obj.selected:
            response['ETag'] = '"%s"' % obj.selected.get('md5')
            response['Last-Modified'] = http_date(
                obj.selected.get('modified'))
        return response
    return wrapper
ObjectDoesNotExist
dataset/ETHPy150Open mozilla/addons-server/src/olympia/files/decorators.py/file_view
def compare_file_view(func, **kwargs):
    @functools.wraps(func)
    def wrapper(request, one_id, two_id, *args, **kw):
        one = get_object_or_404(File, pk=one_id)
        two = get_object_or_404(File, pk=two_id)
        for obj in [one, two]:
            result = allowed(request, obj)
            if result is not True:
                return result
        try:
            obj = DiffHelper(one, two)
        except __HOLE__:
            raise http.Http404

        response = func(request, obj, *args, **kw)
        if obj.left.selected:
            response['ETag'] = '"%s"' % obj.left.selected.get('md5')
            response['Last-Modified'] = http_date(
                obj.left.selected.get('modified'))
        return response
    return wrapper
ObjectDoesNotExist
dataset/ETHPy150Open mozilla/addons-server/src/olympia/files/decorators.py/compare_file_view
def main():
    client = Client(['localhost'])
    plugins = None

    if args.plugin:
        if args.category or args.component or args.health:
            lg.warn("Plugins specified by name, ignoring --category, "
                    "--component and --health")
        plugins = client.get_plugins([args.plugin])
    elif args.category or args.component or args.health:
        filter = []
        if args.category:
            filter.append({'key': 'Category', 'value': args.category})
        if args.component:
            filter.append({'key': 'Component', 'value': args.component})
        if args.health:
            filter.append({'key': 'Type', 'value': 'healthCheck'})
        plugins = client.get_plugins(filter)
    else:
        nagios.exit_unknown("invalid startup configuration - neither "
                            "plugin nor --category nor --component "
                            "nor --health specified")

    # No plugin found
    if not plugins:
        if args.plugin:
            message = "plugin %s not found" % args.plugin
        else:
            message = ("no plugin found by category %s and component %s, "
                       "health: %s" %
                       (args.category, args.component, args.health))
        nagios.exit_unknown(message)

    if args.force:
        plugins = client.force_run(plugins, progress=False)

    status_methods_pairs = [(ERROR, nagios.exit_critical),
                            (UNKNOWN, nagios.exit_unknown),
                            (WARN, nagios.exit_warning),
                            (OK, nagios.exit_ok)]

    # Manage plugin result. We can't return much data to Nagios, so just
    # say if it's alright or not
    results = dict((s, []) for s, _ in status_methods_pairs)
    for plugin in plugins.values()[0]['plugins'].itervalues():
        plugin_name = plugin['name']
        if not plugin['lastResult']:
            results[UNKNOWN].append(
                {'name': plugin_name,
                 'message': "plugin has no last result"})
        else:
            last_status = plugin['lastResult']['status']
            if last_status in [ERROR, WARN, OK]:
                try:
                    results[last_status].append(
                        {'name': plugin_name,
                         'message': plugin['lastResult']['messages'][
                             last_status.lower()][0]})
                except (KeyError, __HOLE__, TypeError):
                    if last_status == OK:
                        results[last_status].append(
                            {'name': plugin_name,
                             'message': "smoke test %s succeeded at %s" %
                                        (plugin['name'],
                                         plugin['lastResult']['lastRun'])})
                    else:
                        results[last_status].append(
                            {'name': plugin_name,
                             'message': parse_issue(plugin)})
            else:
                results[UNKNOWN].append(
                    {'name': plugin_name,
                     'message': "unknown status %s at %s" %
                                (plugin['lastResult']['status'],
                                 plugin['lastResult']['lastRun'])})

    for status, exit_method in status_methods_pairs:
        if results[status]:
            if len(plugins.values()[0]['plugins']) == 1:
                # if only one plugin has been executed, do not print
                # summary
                exit_method(results[status][0]['message'])
            else:
                summary = ', '.join(
                    ["%s: %s" % (s, len(results[s]))
                     for s, _ in status_methods_pairs if results[s]])
                messages = [
                    '\n'.join(["%s - %s - %s" %
                               (s, item['name'], item['message'])
                               for item in list])
                    for s, list in results.iteritems() if list]
                exit_method("%s\n%s" % (summary, '\n'.join(messages)))
IndexError
dataset/ETHPy150Open gooddata/smoker/bin/check_smoker_plugin.py/main
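The final loop is a severity ladder: buckets are scanned in ERROR, UNKNOWN, WARN, OK order and the first non-empty one selects the Nagios exit method. The core of that scheme, with the statuses assumed to be plain strings:

SEVERITY = ['ERROR', 'UNKNOWN', 'WARN', 'OK']

def pick_status(results):
    # First non-empty bucket in severity order decides the exit path.
    for status in SEVERITY:
        if results.get(status):
            return status
    return 'OK'

assert pick_status({'OK': ['disk'], 'WARN': ['slow check']}) == 'WARN'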
def testInvalidColumnAlignmentStrings(self):
    t = TableGenerator.createTableWithDefaultContainer(3, 7)
    defaultAlignments = [Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                         Table.ALIGN_LEFT]
    try:
        t.setColumnAlignments(['a', 'b', 'c'])
        self.fail('No exception thrown for invalid alignment strings')
    except __HOLE__:
        pass  # Ok, expected
    self.assertEquals(defaultAlignments, t.getColumnAlignments(),
                      'Invalid change affected alignments')
ValueError
dataset/ETHPy150Open rwl/muntjac/muntjac/test/server/component/table/table_column_alignments.py/TableColumnAlignments.testInvalidColumnAlignmentStrings
def testInvalidColumnAlignmentString(self):
    t = TableGenerator.createTableWithDefaultContainer(3, 7)
    defaultAlignments = [Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                         Table.ALIGN_LEFT]
    try:
        t.setColumnAlignment('Property 1', 'a')
        self.fail('No exception thrown for invalid alignment string')
    except __HOLE__:
        pass  # Ok, expected
    self.assertEquals(defaultAlignments, t.getColumnAlignments(),
                      'Invalid change affected alignments')
ValueError
dataset/ETHPy150Open rwl/muntjac/muntjac/test/server/component/table/table_column_alignments.py/TableColumnAlignments.testInvalidColumnAlignmentString
def testColumnAlignmentForPropertyNotInContainer(self):
    t = TableGenerator.createTableWithDefaultContainer(3, 7)
    defaultAlignments = [Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                         Table.ALIGN_LEFT]
    try:
        t.setColumnAlignment('Property 1200', Table.ALIGN_LEFT)
        # FIXME: Uncomment as there should be an exception (#6475)
        #self.fail("No exception thrown for property not in container")
    except __HOLE__:
        pass  # Ok, expected
    self.assertEquals(defaultAlignments, t.getColumnAlignments(),
                      'Invalid change affected alignments')
    # FIXME: Uncomment as null should be returned (#6474)
    # self.assertEquals(
    #         None, t.getColumnAlignment("Property 1200"),
    #         "Column alignment for property not in container returned")
ValueError
dataset/ETHPy150Open rwl/muntjac/muntjac/test/server/component/table/table_column_alignments.py/TableColumnAlignments.testColumnAlignmentForPropertyNotInContainer
def testInvalidColumnAlignmentsLength(self):
    t = TableGenerator.createTableWithDefaultContainer(7, 7)
    defaultAlignments = [Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                         Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                         Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                         Table.ALIGN_LEFT]
    try:
        t.setColumnAlignments([Table.ALIGN_LEFT])
        self.fail('No exception thrown for invalid array length')
    except ValueError:
        pass  # Ok, expected
    self.assertEquals(defaultAlignments, t.getColumnAlignments(),
                      'Invalid change affected alignments')
    try:
        t.setColumnAlignments([])
        self.fail('No exception thrown for invalid array length')
    except ValueError:
        pass  # Ok, expected
    self.assertEquals(defaultAlignments, t.getColumnAlignments(),
                      'Invalid change affected alignments')
    try:
        t.setColumnAlignments([Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                               Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                               Table.ALIGN_LEFT, Table.ALIGN_LEFT,
                               Table.ALIGN_LEFT, Table.ALIGN_LEFT])
        self.fail('No exception thrown for invalid array length')
    except __HOLE__:
        pass  # Ok, expected
    self.assertEquals(defaultAlignments, t.getColumnAlignments(),
                      'Invalid change affected alignments')
ValueError
dataset/ETHPy150Open rwl/muntjac/muntjac/test/server/component/table/table_column_alignments.py/TableColumnAlignments.testInvalidColumnAlignmentsLength
def readme():
    try:
        root = os.path.abspath(os.path.dirname(__file__))
        with open(os.path.join(root, 'README.rst')) as f:
            return f.read()
    except __HOLE__:
        warnings.warn("Couldn't find README.rst", RuntimeWarning)
        return ''
IOError
dataset/ETHPy150Open Kroisse/flask-factory/setup.py/readme
def __init__(self, params, offset=0):
    agents.Agent.__init__(self, params, offset)
    try:
        self.avgprice = self.args[0]
    except (AttributeError, IndexError):
        raise MissingParameter, 'avgprice'
    try:
        self.maxfluct = self.args[1]
    except __HOLE__:
        raise MissingParameter, 'maxfluct'
    try:
        self.maxbuy = self.args[2]
    except IndexError:
        raise MissingParameter, 'maxbuy'
    del self.args
IndexError
dataset/ETHPy150Open jcbagneris/fms/fms/agents/randomtrader.py/RandomTrader.__init__
def act(self, world=None, market=None):
    """
    Return random order as a dict with keys in
    (direction, price, quantity).
    """
    if self.stocks > 0:
        direction = random.choice((BUY, SELL))
    else:
        direction = BUY
    if self.avgprice == 0:
        try:
            self.avgprice = market.lastprice
        except __HOLE__:
            self.avgprice = 100
            logger.warning("No market, no avgprice, avgprice set to 100")
    price = random.randint(self.avgprice*(100-self.maxfluct),
                           self.avgprice*(100+self.maxfluct))/100.
    if direction:
        maxq = self.stocks
    else:
        maxq = min(self.maxbuy, int(self.money/price))
    try:
        quantity = random.randint(1, maxq)
    except ValueError:
        quantity = 1
    return {'direction': direction, 'price': price, 'quantity': quantity}
AttributeError
dataset/ETHPy150Open jcbagneris/fms/fms/agents/randomtrader.py/RandomTrader.act
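The price draw works in integer hundredths: with avgprice 100 and maxfluct 5, randint samples the inclusive range [9500, 10500] and the division by 100. yields a float in [95.0, 105.0]. A quick standalone check:

import random

avgprice, maxfluct = 100, 5
price = random.randint(avgprice * (100 - maxfluct),
                       avgprice * (100 + maxfluct)) / 100.
assert 95.0 <= price <= 105.0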
@utils.memoize
def get_font_files():
    """Returns a list of all font files we could find

    Returned as a list of dir/files tuples::

        get_font_files() -> [('/some/dir', ['font1.ttf', ...]), ...]

    For example::

        >>> fabfonts = os.path.join(os.path.dirname(__file__), 'fonts')
        >>> 'IndUni-H-Bold.ttf' in get_font_files()[fabfontdir]
        True
        >>> 'DejaVuSansMono.ttf' in get_font_files()[fabfontdir]
        True
        >>> 'cmr10.ttf' in get_font_files()[fabfontdir]
        True

        >>> assert len(get_font_files()) > 0
        >>> for dirname, filename in get_font_files():
        ...     assert os.path.exists(os.path.join(dirname, filename))
        ...

    """
    dirs = [os.path.join(os.path.dirname(__file__), 'fonts'),
            os.path.expanduser('~/.fonts')]
    sys_dirs = [
        # this is where ubuntu puts fonts
        '/usr/share/fonts/truetype',
        # this is where fedora puts fonts
        '/usr/share/fonts',
    ]
    for dirname in sys_dirs:
        try:
            dirs += [os.path.join(dirname, subdir)
                     for subdir in os.listdir(dirname)]
        except __HOLE__:
            pass
    return [(p, os.listdir(p)) for p in dirs if os.path.isdir(p)]
OSError
dataset/ETHPy150Open jart/fabulous/fabulous/text.py/get_font_files
def _unpack_complex_cli_arg(argument_model, value, cli_name):
    type_name = argument_model.type_name
    if type_name == 'structure' or type_name == 'map':
        if value.lstrip()[0] == '{':
            try:
                return json.loads(value, object_pairs_hook=OrderedDict)
            except ValueError as e:
                raise ParamError(
                    cli_name,
                    "Invalid JSON: %s\nJSON received: %s" % (e, value))
        raise ParamError(cli_name, "Invalid JSON:\n%s" % value)
    elif type_name == 'list':
        if isinstance(value, six.string_types):
            if value.lstrip()[0] == '[':
                return json.loads(value, object_pairs_hook=OrderedDict)
        elif isinstance(value, list) and len(value) == 1:
            single_value = value[0].strip()
            if single_value and single_value[0] == '[':
                return json.loads(value[0], object_pairs_hook=OrderedDict)
        try:
            # There's a couple of cases remaining here.
            # 1. It's possible that this is just a list of strings, i.e
            # --security-group-ids sg-1 sg-2 sg-3 => ['sg-1', 'sg-2', 'sg-3']
            # 2. It's possible this is a list of json objects:
            # --filters '{"Name": ..}' '{"Name": ...}'
            member_shape_model = argument_model.member
            return [_unpack_cli_arg(member_shape_model, v, cli_name)
                    for v in value]
        except (__HOLE__, TypeError) as e:
            # The list params don't have a name/cli_name attached to them
            # so they will have bad error messages. We're going to
            # attach the parent parameter to this error message to provide
            # a more helpful error message.
            raise ParamError(cli_name, value[0])
ValueError
dataset/ETHPy150Open aws/aws-cli/awscli/argprocess.py/_unpack_complex_cli_arg
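The list branch accepts two CLI shapes: repeated plain values, or a single argument holding a JSON array. A cut-down helper (hypothetical, not awscli's actual code path) makes the dispatch explicit:

import json

def unpack_list_arg(values):
    # One element that looks like a JSON array is parsed as JSON;
    # anything else is taken as an already-split list of scalars.
    if len(values) == 1 and values[0].lstrip().startswith('['):
        return json.loads(values[0])
    return list(values)

assert unpack_list_arg(['sg-1', 'sg-2']) == ['sg-1', 'sg-2']
assert unpack_list_arg(['["sg-1", "sg-2"]']) == ['sg-1', 'sg-2']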
def main(argv):
    """
    Build the docs and serve them with an HTTP server.
    """
    parser = argparse.ArgumentParser(
        description='Build and serve HTML Sphinx docs')

    parser.add_argument(
        '--port',
        help='Serve on this port, default 8000',
        type=int,
        default=8000)
    parser.add_argument(
        '--source',
        help='Directory of source Sphinx (reStructuredText) docs',
        type=os.path.realpath,
        default='docs/source')
    parser.add_argument(
        '--destination',
        help='Where to build the HTML output',
        type=os.path.realpath,
        default='docs/build/html')
    parser.add_argument(
        '--doctrees',
        help='Where the doctrees are built',
        type=os.path.realpath,
        default='docs/build/doctrees')

    options = parser.parse_args(argv)

    bound_build_docs = partial(build_docs, options.source,
                               options.destination, options.doctrees)

    # Do the initial build
    bound_build_docs()

    # Watch the source directory for changes, build docs again if detected
    observer = Observer()
    observer.schedule(
        BuildDocsHandler(bound_build_docs),
        path=options.source, recursive=True)
    observer.start()

    # Set the root for the request handler, overriding Python stdlib
    # current working directory.
    DocsHTTPRequestHandler._root = options.destination

    server = SocketServer.TCPServer(
        ('', options.port),
        DocsHTTPRequestHandler)

    try:
        logger.info('Serving on localhost:{}'.format(options.port))
        server.serve_forever()
    except __HOLE__:
        sys.stdout.write('\n')
        logger.info('(stopping server)')
        observer.stop()
    finally:
        observer.join()
        logging.info('Server stopped, exiting')
        sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open robmadole/jig/salt/roots/salt/docs/httpdocs.py/main
def _get_error_message(self, resp):
    try:
        response_data = resp.json()
        message = response_data['title']
        description = response_data.get('description')
        if description:
            message = '{0}: {1}'.format(message, description)
    except __HOLE__:
        message = resp.content
    return message
ValueError
dataset/ETHPy150Open openstack/python-barbicanclient/barbicanclient/client.py/_HTTPClient._get_error_message