Dataset schema (three columns per record):

function: string, lengths 79 to 138k. Python source with one exception type masked as __HOLE__.
label: string, 20 classes. The exception type that fills the hole.
info: string, lengths 42 to 261. Corpus path identifying the source file and function.
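Each record below is one dataset row: the function column (Python source with the masked exception written as __HOLE__), then the label that fills the hole, then the info path of the originating file. A minimal sketch of how a row might be consumed, using a made-up toy row rather than one of the real records below:

    # Hypothetical helper: splice a row's label back into its masked function.
    def fill_hole(row):
        # 'function' and 'label' are the column names from the schema above.
        return row['function'].replace('__HOLE__', row['label'])

    toy_row = {
        'function': "try:\n    risky()\nexcept __HOLE__:\n    pass",
        'label': 'ValueError',
    }
    print(fill_hole(toy_row))  # the except clause now names ValueError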
def test_patchobject_wont_create_by_default(self):
    try:
        @patch.object(SomeClass, 'ord', sentinel.Frooble)
        def test():
            self.fail('Patching non existent attributes should fail')

        test()
    except __HOLE__:
        pass
    else:
        self.fail('Patching non existent attributes should fail')
    self.assertFalse(hasattr(SomeClass, 'ord'))
AttributeError
dataset/ETHPy150Open testing-cabal/mock/mock/tests/testpatch.py/PatchTest.test_patchobject_wont_create_by_default
def test_patch_with_exception(self):
    foo = {}

    @patch.dict(foo, {'a': 'b'})
    def test():
        raise NameError('Konrad')
    try:
        test()
    except __HOLE__:
        pass
    else:
        self.fail('NameError not raised by test')
    self.assertEqual(foo, {})
NameError
dataset/ETHPy150Open testing-cabal/mock/mock/tests/testpatch.py/PatchTest.test_patch_with_exception
def get_contents(self):
    if not settings.AWS_ENABLED:
        try:
            return open(self.local_filename).read()
        except __HOLE__:
            return ''
    else:
        return s3.read_file('source', self.s3_path)
IOError
dataset/ETHPy150Open pebble/cloudpebble/ide/models/files.py/SourceFile.get_contents
def copy_to_path(self, path):
    if not settings.AWS_ENABLED:
        try:
            shutil.copy(self.local_filename, path)
        except __HOLE__ as err:
            if err.errno == 2:
                open(path, 'w').close()  # create the file if it's missing.
            else:
                raise
    else:
        s3.read_file_to_filesystem('source', self.s3_path, path)
IOError
dataset/ETHPy150Open pebble/cloudpebble/ide/models/files.py/SourceFile.copy_to_path
@receiver(post_delete)
def delete_file(sender, instance, **kwargs):
    if sender == SourceFile or sender == ResourceVariant:
        if settings.AWS_ENABLED:
            try:
                s3.delete_file('source', instance.s3_path)
            except:
                traceback.print_exc()
        else:
            try:
                os.unlink(instance.local_filename)
            except __HOLE__:
                pass
OSError
dataset/ETHPy150Open pebble/cloudpebble/ide/models/files.py/delete_file
@classmethod
def setupClass(cls):
    global np
    try:
        import numpy as np
    except __HOLE__:
        raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/centrality/tests/test_katz_centrality.py/TestKatzCentralityNumpy.setupClass
@classmethod
def setupClass(cls):
    global np
    try:
        import numpy as np
    except __HOLE__:
        raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/centrality/tests/test_katz_centrality.py/TestKatzCentralityDirectedNumpy.setupClass
@classmethod
def setupClass(cls):
    global np
    global eigvals
    try:
        import numpy as np
        from numpy.linalg import eigvals
    except __HOLE__:
        raise SkipTest('NumPy not available.')
ImportError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/algorithms/centrality/tests/test_katz_centrality.py/TestKatzEigenvectorVKatz.setupClass
def save(self, **kwargs):
    title = self.metadata.get('title')
    slug = self.metadata.get('slug')
    content = self.cleaned_data.get('content')
    is_published = self.metadata.get('published')
    image_id = self.metadata.get('image', '')
    if self.instance.pk is not None:
        article = self.instance
    else:
        article = Article(author=self.user)
    article.title = title
    article.slug = slug
    article.content = content
    article.content_body = self.content_body
    article.is_published = is_published
    article.markup = self.data.get('markup')

    # update cover image if it specified
    try:
        image = EntryImage.objects.get(id=int(image_id))
    except (EntryImage.DoesNotExist, ValueError, __HOLE__):
        image = None
    article.cover_image = image

    # save article
    article.save()

    # reset tags
    article.articletag_set.all().delete()
    for tag_name in self.metadata.get('tags', []):
        tag = Tag.objects.get_or_create(name=tag_name)[0]
        article.articletag_set.create(tag=tag)
    return article
TypeError
dataset/ETHPy150Open gkmngrgn/radpress/radpress/forms.py/ZenModeForm.save
def setUp(self):
    super(BaseTestCase, self).setUp()
    self.app = create_app(test_settings)
    self.client = self.app.test_client()

    for app in self.app.installed_apps:
        try:
            __import__('%s.models' % app)
        except __HOLE__:
            pass

    db = get_session(self.app)
    Model.metadata.create_all(db.bind)
ImportError
dataset/ETHPy150Open mozilla/standup/standup/tests/__init__.py/BaseTestCase.setUp
def testRmtreeNonExistingDir(self):
    directory = 'nonexisting'
    self.assertRaises(IOError, self.shutil.rmtree, directory)
    try:
        self.shutil.rmtree(directory, ignore_errors=True)
    except __HOLE__:
        self.fail('rmtree raised despite ignore_errors True')
IOError
dataset/ETHPy150Open jmcgeheeiv/pyfakefs/fake_filesystem_shutil_test.py/FakeShutilModuleTest.testRmtreeNonExistingDir
def testRmtreeNonExistingDirWithHandler(self):
    class NonLocal:
        pass

    def error_handler(_, path, error_info):
        NonLocal.errorHandled = True
        NonLocal.errorPath = path

    directory = 'nonexisting'
    NonLocal.errorHandled = False
    NonLocal.errorPath = ''
    try:
        self.shutil.rmtree(directory, onerror=error_handler)
    except IOError:
        self.fail('rmtree raised exception despite onerror defined')
    self.assertTrue(NonLocal.errorHandled)
    self.assertEqual(NonLocal.errorPath, directory)
    NonLocal.errorHandled = False
    NonLocal.errorPath = ''
    try:
        self.shutil.rmtree(directory, ignore_errors=True, onerror=error_handler)
    except __HOLE__:
        self.fail('rmtree raised exception despite ignore_errors True')
    # ignore_errors is True, so the onerror() error handler was not executed
    self.assertFalse(NonLocal.errorHandled)
    self.assertEqual(NonLocal.errorPath, '')
IOError
dataset/ETHPy150Open jmcgeheeiv/pyfakefs/fake_filesystem_shutil_test.py/FakeShutilModuleTest.testRmtreeNonExistingDirWithHandler
def main():
    """Parse arguments and start the program"""
    # Iterate over all lines in all files
    # listed in sys.argv[1:]
    # or stdin if no args given.
    try:
        for line in fileinput.input():
            # Look for an INSERT statement and parse it.
            if is_insert(line):
                values = get_values(line)
                if values_sanity_check(values):
                    parse_values(values, sys.stdout)
    except __HOLE__:
        sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open jamesmishra/mysqldump-to-csv/mysqldump_to_csv.py/main
def load_backend(self, name, dumps='dumps', loads='loads', loads_exc=ValueError):
    """Load a JSON backend by name.

    This method loads a backend and sets up references to that
    backend's loads/dumps functions and exception classes.

    :param dumps: is the name of the backend's encode method.
        The method should take an object and return a string.
        Defaults to 'dumps'.
    :param loads: names the backend's method for the reverse
        operation -- returning a Python object from a string.
    :param loads_exc: can be either the name of the exception class
        used to denote decoding errors, or it can be a direct reference
        to the appropriate exception class itself.  If it is a name,
        then the assumption is that an exception class of that name
        can be found in the backend module's namespace.
    :param load: names the backend's 'load' method.
    :param dump: names the backend's 'dump' method.
    :rtype bool: True on success, False if the backend could not be loaded.
    """
    try:
        # Load the JSON backend
        mod = __import__(name)
    except ImportError:
        return False

    # Handle submodules, e.g. django.utils.simplejson
    try:
        for attr in name.split('.')[1:]:
            mod = getattr(mod, attr)
    except __HOLE__:
        return False

    if (not self._store(self._encoders, name, mod, dumps) or
            not self._store(self._decoders, name, mod, loads)):
        return False

    if isinstance(loads_exc, (str, unicode)):
        # This backend's decoder exception is part of the backend
        if not self._store(self._decoder_exceptions, name, mod, loads_exc):
            return False
    else:
        # simplejson uses ValueError
        self._decoder_exceptions[name] = loads_exc

    # Setup the default args and kwargs for this encoder/decoder
    self._encoder_options[name] = ([], {})
    self._decoder_options[name] = ([], {})

    # Add this backend to the list of candidate backends
    self._backend_names.append(name)

    # Indicate that we successfully loaded a JSON backend
    self._verified = True
    return True
AttributeError
dataset/ETHPy150Open jsonpickle/jsonpickle/jsonpickle/backend.py/JSONBackend.load_backend
def _store(self, dct, backend, obj, name):
    try:
        dct[backend] = getattr(obj, name)
    except __HOLE__:
        self.remove_backend(backend)
        return False
    return True
AttributeError
dataset/ETHPy150Open jsonpickle/jsonpickle/jsonpickle/backend.py/JSONBackend._store
def create_structure_from_variable(self, dir_structure):
    '''
    create directory structure via given list of tuples (filename, content,)
    content being None means it is directory
    '''
    for filename, content in dir_structure:
        if content is None:
            try:
                os.makedirs(filename)
            except __HOLE__:
                pass
        else:
            f = open(filename, 'w')
            f.write(content)
            f.close()
OSError
dataset/ETHPy150Open ella/citools/tests/test_debian/__init__.py/TestVersionedStatic.create_structure_from_variable
@slow
@network
def test_wdi_download_w_retired_indicator(self):
    cntry_codes = ['CA', 'MX', 'US']
    # Despite showing up in the search feature, and being listed online,
    # the api calls to GDPPCKD don't work in their own query builder, nor
    # pandas module. GDPPCKD used to be a common symbol.
    # This test is written to ensure that error messages to pandas users
    # continue to make sense, rather than a user getting some missing
    # key error, cause their JSON message format changed.  If
    # World bank ever finishes the deprecation of this symbol,
    # this nose test should still pass.
    inds = ['GDPPCKD']

    try:
        result = download(country=cntry_codes, indicator=inds,
                          start=2003, end=2004, errors='ignore')
        # If for some reason result actually ever has data, it's cause WB
        # fixed the issue with this ticker. Find another bad one.
    except __HOLE__ as e:
        raise nose.SkipTest("No indicators returned data: {0}".format(e))

    # if it ever gets here, it means WB unretired the indicator.
    # even if they dropped it completely, it would still get caught above
    # or the WB API changed somehow in a really unexpected way.
    if len(result) > 0:
        raise nose.SkipTest("Invalid results")
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/io/tests/test_wb.py/TestWB.test_wdi_download_w_retired_indicator
@slow
@network
def test_wdi_download_w_crash_inducing_countrycode(self):
    cntry_codes = ['CA', 'MX', 'US', 'XXX']
    inds = ['NY.GDP.PCAP.CD']

    try:
        result = download(country=cntry_codes, indicator=inds,
                          start=2003, end=2004, errors='ignore')
    except __HOLE__ as e:
        raise nose.SkipTest("No indicators returned data: {0}".format(e))

    # if it ever gets here, it means the country code XXX got used by WB
    # or the WB API changed somehow in a really unexpected way.
    if len(result) > 0:
        raise nose.SkipTest("Invalid results")
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/io/tests/test_wb.py/TestWB.test_wdi_download_w_crash_inducing_countrycode
def inner_run(self, *args, **options):
    # If an exception was silenced in ManagementUtility.execute in order
    # to be raised in the child process, raise it now.
    autoreload.raise_last_exception()

    threading = options['use_threading']
    # 'shutdown_message' is a stealth option.
    shutdown_message = options.get('shutdown_message', '')
    quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'

    self.stdout.write("Performing system checks...\n\n")
    self.check(display_num_errors=True)
    # Need to check migrations here, so can't use the
    # requires_migrations_check attribute.
    self.check_migrations()
    now = datetime.now().strftime('%B %d, %Y - %X')
    if six.PY2:
        now = now.decode(get_system_encoding())
    self.stdout.write(now)
    self.stdout.write((
        "Django version %(version)s, using settings %(settings)r\n"
        "Starting development server at http://%(addr)s:%(port)s/\n"
        "Quit the server with %(quit_command)s.\n"
    ) % {
        "version": self.get_version(),
        "settings": settings.SETTINGS_MODULE,
        "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
        "port": self.port,
        "quit_command": quit_command,
    })

    try:
        handler = self.get_handler(*args, **options)
        run(self.addr, int(self.port), handler,
            ipv6=self.use_ipv6, threading=threading)
    except socket.error as e:
        # Use helpful error messages instead of ugly tracebacks.
        ERRORS = {
            errno.EACCES: "You don't have permission to access that port.",
            errno.EADDRINUSE: "That port is already in use.",
            errno.EADDRNOTAVAIL: "That IP address can't be assigned to.",
        }
        try:
            error_text = ERRORS[e.errno]
        except __HOLE__:
            error_text = force_text(e)
        self.stderr.write("Error: %s" % error_text)
        # Need to use an OS exit because sys.exit doesn't work in a thread
        os._exit(1)
    except KeyboardInterrupt:
        if shutdown_message:
            self.stdout.write(shutdown_message)
        sys.exit(0)

# Kept for backward compatibility
KeyError
dataset/ETHPy150Open django/django/django/core/management/commands/runserver.py/Command.inner_run
@staff_member_required
def view_detail(request, view):
    if not utils.docutils_is_available:
        return missing_docutils_page(request)

    mod, func = urlresolvers.get_mod_func(view)
    try:
        view_func = getattr(import_module(mod), func)
    except (ImportError, __HOLE__):
        raise Http404
    title, body, metadata = utils.parse_docstring(view_func.__doc__)
    if title:
        title = utils.parse_rst(title, 'view', _('view:') + view)
    if body:
        body = utils.parse_rst(body, 'view', _('view:') + view)
    for key in metadata:
        metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
    return render_to_response('admin_doc/view_detail.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'name': view,
        'summary': title,
        'body': body,
        'meta': metadata,
    }, context_instance=RequestContext(request))
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/admindocs/views.py/view_detail
@staff_member_required
def model_detail(request, app_label, model_name):
    if not utils.docutils_is_available:
        return missing_docutils_page(request)

    # Get the model class.
    try:
        app_mod = models.get_app(app_label)
    except ImproperlyConfigured:
        raise Http404(_("App %r not found") % app_label)
    model = None
    for m in models.get_models(app_mod):
        if m._meta.object_name.lower() == model_name:
            model = m
            break
    if model is None:
        raise Http404(_("Model %(model_name)r not found in app %(app_label)r") %
                      {'model_name': model_name, 'app_label': app_label})

    opts = model._meta

    # Gather fields/field descriptions.
    fields = []
    for field in opts.fields:
        # ForeignKey is a special case since the field will actually be a
        # descriptor that returns the other object
        if isinstance(field, models.ForeignKey):
            data_type = field.rel.to.__name__
            app_label = field.rel.to._meta.app_label
            verbose = utils.parse_rst(
                (_("the related `%(app_label)s.%(data_type)s` object") %
                 {'app_label': app_label, 'data_type': data_type}),
                'model', _('model:') + data_type)
        else:
            data_type = get_readable_field_data_type(field)
            verbose = field.verbose_name
        fields.append({
            'name': field.name,
            'data_type': data_type,
            'verbose': verbose,
            'help_text': field.help_text,
        })

    # Gather many-to-many fields.
    for field in opts.many_to_many:
        data_type = field.rel.to.__name__
        app_label = field.rel.to._meta.app_label
        verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
            'app_label': app_label, 'object_name': data_type}
        fields.append({
            'name': "%s.all" % field.name,
            "data_type": 'List',
            'verbose': utils.parse_rst(_("all %s") % verbose, 'model',
                                       _('model:') + opts.module_name),
        })
        fields.append({
            'name': "%s.count" % field.name,
            'data_type': 'Integer',
            'verbose': utils.parse_rst(_("number of %s") % verbose, 'model',
                                       _('model:') + opts.module_name),
        })

    # Gather model methods.
    for func_name, func in model.__dict__.items():
        if (inspect.isfunction(func) and len(inspect.getargspec(func)[0]) == 1):
            try:
                for exclude in MODEL_METHODS_EXCLUDE:
                    if func_name.startswith(exclude):
                        raise StopIteration
            except __HOLE__:
                continue
            verbose = func.__doc__
            if verbose:
                verbose = utils.parse_rst(utils.trim_docstring(verbose),
                                          'model', _('model:') + opts.module_name)
            fields.append({
                'name': func_name,
                'data_type': get_return_data_type(func_name),
                'verbose': verbose,
            })

    # Gather related objects
    for rel in opts.get_all_related_objects() + opts.get_all_related_many_to_many_objects():
        verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
            'app_label': rel.opts.app_label, 'object_name': rel.opts.object_name}
        accessor = rel.get_accessor_name()
        fields.append({
            'name': "%s.all" % accessor,
            'data_type': 'List',
            'verbose': utils.parse_rst(_("all %s") % verbose, 'model',
                                       _('model:') + opts.module_name),
        })
        fields.append({
            'name': "%s.count" % accessor,
            'data_type': 'Integer',
            'verbose': utils.parse_rst(_("number of %s") % verbose, 'model',
                                       _('model:') + opts.module_name),
        })
    return render_to_response('admin_doc/model_detail.html', {
        'root_path': urlresolvers.reverse('admin:index'),
        'name': '%s.%s' % (opts.app_label, opts.object_name),
        'summary': _("Fields on %s objects") % opts.object_name,
        'description': model.__doc__,
        'fields': fields,
    }, context_instance=RequestContext(request))
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/admindocs/views.py/model_detail
def load_all_installed_template_libraries():
    # Load/register all template tag libraries from installed apps.
    for module_name in template.get_templatetags_modules():
        mod = import_module(module_name)
        try:
            libraries = [
                os.path.splitext(p)[0]
                for p in os.listdir(os.path.dirname(upath(mod.__file__)))
                if p.endswith('.py') and p[0].isalpha()
            ]
        except __HOLE__:
            libraries = []
        for library_name in libraries:
            try:
                lib = template.get_library(library_name)
            except template.InvalidTemplateLibrary:
                pass
OSError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/admindocs/views.py/load_all_installed_template_libraries
def extract_views_from_urlpatterns(urlpatterns, base=''):
    """
    Return a list of views from a list of urlpatterns.

    Each object in the returned list is a two-tuple: (view_func, regex)
    """
    views = []
    for p in urlpatterns:
        if hasattr(p, 'url_patterns'):
            try:
                patterns = p.url_patterns
            except __HOLE__:
                continue
            views.extend(extract_views_from_urlpatterns(patterns,
                                                        base + p.regex.pattern))
        elif hasattr(p, 'callback'):
            try:
                views.append((p.callback, base + p.regex.pattern))
            except ViewDoesNotExist:
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % p)
    return views
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/contrib/admindocs/views.py/extract_views_from_urlpatterns
def __call__(self, environ, start_response):
    """Resolves the URL in PATH_INFO, and uses wsgi.routing_args
    to pass on URL resolver results."""
    old_method = None
    if self.use_method_override:
        req = None

        # In some odd cases, there's no query string
        try:
            qs = environ['QUERY_STRING']
        except __HOLE__:
            qs = ''
        if '_method' in qs:
            req = Request(environ)
            req.errors = 'ignore'
            if '_method' in req.GET:
                old_method = environ['REQUEST_METHOD']
                environ['REQUEST_METHOD'] = req.GET['_method'].upper()
                if self.log_debug:
                    log.debug("_method found in QUERY_STRING, altering "
                              "request method to %s", environ['REQUEST_METHOD'])
        elif environ['REQUEST_METHOD'] == 'POST' and is_form_post(environ):
            if req is None:
                req = Request(environ)
                req.errors = 'ignore'
            if '_method' in req.POST:
                old_method = environ['REQUEST_METHOD']
                environ['REQUEST_METHOD'] = req.POST['_method'].upper()
                if self.log_debug:
                    log.debug("_method found in POST data, altering "
                              "request method to %s", environ['REQUEST_METHOD'])

    # Run the actual route matching
    # -- Assignment of environ to config triggers route matching
    if self.singleton:
        config = request_config()
        config.mapper = self.mapper
        config.environ = environ
        match = config.mapper_dict
        route = config.route
    else:
        results = self.mapper.routematch(environ=environ)
        if results:
            match, route = results[0], results[1]
        else:
            match = route = None

    if old_method:
        environ['REQUEST_METHOD'] = old_method

    if not match:
        match = {}
        if self.log_debug:
            urlinfo = "%s %s" % (environ['REQUEST_METHOD'], environ['PATH_INFO'])
            log.debug("No route matched for %s", urlinfo)
    elif self.log_debug:
        urlinfo = "%s %s" % (environ['REQUEST_METHOD'], environ['PATH_INFO'])
        log.debug("Matched %s", urlinfo)
        log.debug("Route path: '%s', defaults: %s", route.routepath,
                  route.defaults)
        log.debug("Match dict: %s", match)

    url = URLGenerator(self.mapper, environ)
    environ['wsgiorg.routing_args'] = ((url), match)
    environ['routes.route'] = route
    environ['routes.url'] = url

    if route and route.redirect:
        route_name = '_redirect_%s' % id(route)
        location = url(route_name, **match)
        log.debug("Using redirect route, redirect to '%s' with status"
                  "code: %s", location, route.redirect_status)
        start_response(route.redirect_status,
                       [('Content-Type', 'text/plain; charset=utf8'),
                        ('Location', location)])
        return []

    # If the route included a path_info attribute and it should be used to
    # alter the environ, we'll pull it out
    if self.path_info and 'path_info' in match:
        oldpath = environ['PATH_INFO']
        newpath = match.get('path_info') or ''
        environ['PATH_INFO'] = newpath
        if not environ['PATH_INFO'].startswith('/'):
            environ['PATH_INFO'] = '/' + environ['PATH_INFO']
        environ['SCRIPT_NAME'] += re.sub(
            r'^(.*?)/' + re.escape(newpath) + '$', r'\1', oldpath)

    response = self.app(environ, start_response)

    # Wrapped in try as in rare cases the attribute will be gone already
    try:
        del self.mapper.environ
    except AttributeError:
        pass
    return response
KeyError
dataset/ETHPy150Open bbangert/routes/routes/middleware.py/RoutesMiddleware.__call__
def __getattr__(self, name):
    try:
        return object.__getattribute__(self, name)
    except __HOLE__:
        log.debug("No attribute called %s found on c object, returning "
                  "empty string", name)
        return ''
AttributeError
dataset/ETHPy150Open Pylons/pylons/pylons/util.py/AttribSafeContextObj.__getattr__
def path_hook(nvim):
    def _get_paths():
        return discover_runtime_directories(nvim)

    def _find_module(fullname, oldtail, path):
        idx = oldtail.find('.')
        if idx > 0:
            name = oldtail[:idx]
            tail = oldtail[idx + 1:]
            fmr = imp.find_module(name, path)
            # load_module, not find_module: fmr is the (file, pathname,
            # description) tuple that load_module expects.
            module = imp.load_module(fullname[:-len(oldtail)] + name, *fmr)
            return _find_module(fullname, tail, module.__path__)
        else:
            return imp.find_module(fullname, path)

    class VimModuleLoader(object):
        def __init__(self, module):
            self.module = module

        def load_module(self, fullname, path=None):
            # Check sys.modules, required for reload (see PEP302).
            if fullname in sys.modules:
                return sys.modules[fullname]
            return imp.load_module(fullname, *self.module)

    class VimPathFinder(object):
        @staticmethod
        def find_module(fullname, path=None):
            """Method for Python 2.7 and 3.3."""
            try:
                return VimModuleLoader(
                    _find_module(fullname, fullname, path or _get_paths()))
            except __HOLE__:
                return None

        @staticmethod
        def find_spec(fullname, path=None, target=None):
            """Method for Python 3.4+."""
            return PathFinder.find_spec(fullname, path or _get_paths(), target)

    def hook(path):
        if path == nvim.VIM_SPECIAL_PATH:
            return VimPathFinder
        else:
            raise ImportError

    return hook
ImportError
dataset/ETHPy150Open neovim/python-client/neovim/plugin/script_host.py/path_hook
def check_fun_data(self, testfunc, targfunc, testarval, targarval,
                   targarnanval, **kwargs):
    for axis in list(range(targarval.ndim)) + [None]:
        for skipna in [False, True]:
            targartempval = targarval if skipna else targarnanval
            try:
                targ = targfunc(targartempval, axis=axis, **kwargs)
                res = testfunc(testarval, axis=axis, skipna=skipna, **kwargs)
                self.check_results(targ, res, axis)
                if skipna:
                    res = testfunc(testarval, axis=axis, **kwargs)
                    self.check_results(targ, res, axis)
                if axis is None:
                    res = testfunc(testarval, skipna=skipna, **kwargs)
                    self.check_results(targ, res, axis)
                if skipna and axis is None:
                    res = testfunc(testarval, **kwargs)
                    self.check_results(targ, res, axis)
            except BaseException as exc:
                exc.args += ('axis: %s of %s' % (axis, testarval.ndim - 1),
                             'skipna: %s' % skipna, 'kwargs: %s' % kwargs)
                raise

    if testarval.ndim <= 1:
        return

    try:
        testarval2 = np.take(testarval, 0, axis=-1)
        targarval2 = np.take(targarval, 0, axis=-1)
        targarnanval2 = np.take(targarnanval, 0, axis=-1)
    except __HOLE__:
        return

    self.check_fun_data(testfunc, targfunc, testarval2, targarval2,
                        targarnanval2, **kwargs)
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/tests/test_nanops.py/TestnanopsDataFrame.check_fun_data
def check_funs(self, testfunc, targfunc, allow_complex=True,
               allow_all_nan=True, allow_str=True, allow_date=True,
               allow_tdelta=True, allow_obj=True, **kwargs):
    self.check_fun(testfunc, targfunc, 'arr_float', **kwargs)
    self.check_fun(testfunc, targfunc, 'arr_float_nan', 'arr_float', **kwargs)
    self.check_fun(testfunc, targfunc, 'arr_int', **kwargs)
    self.check_fun(testfunc, targfunc, 'arr_bool', **kwargs)
    objs = [self.arr_float.astype('O'), self.arr_int.astype('O'),
            self.arr_bool.astype('O')]

    if allow_all_nan:
        self.check_fun(testfunc, targfunc, 'arr_nan', **kwargs)

    if allow_complex:
        self.check_fun(testfunc, targfunc, 'arr_complex', **kwargs)
        self.check_fun(testfunc, targfunc, 'arr_complex_nan', 'arr_complex',
                       **kwargs)
        if allow_all_nan:
            self.check_fun(testfunc, targfunc, 'arr_nan_nanj', **kwargs)
        objs += [self.arr_complex.astype('O')]

    if allow_str:
        self.check_fun(testfunc, targfunc, 'arr_str', **kwargs)
        self.check_fun(testfunc, targfunc, 'arr_utf', **kwargs)
        objs += [self.arr_str.astype('O'), self.arr_utf.astype('O')]

    if allow_date:
        try:
            targfunc(self.arr_date)
        except __HOLE__:
            pass
        else:
            self.check_fun(testfunc, targfunc, 'arr_date', **kwargs)
            objs += [self.arr_date.astype('O')]

    if allow_tdelta:
        try:
            targfunc(self.arr_tdelta)
        except TypeError:
            pass
        else:
            self.check_fun(testfunc, targfunc, 'arr_tdelta', **kwargs)
            objs += [self.arr_tdelta.astype('O')]

    if allow_obj:
        self.arr_obj = np.vstack(objs)
        # some nanops handle object dtypes better than their numpy
        # counterparts, so the numpy functions need to be given something
        # else
        if allow_obj == 'convert':
            targfunc = partial(self._badobj_wrap, func=targfunc,
                               allow_complex=allow_complex)
        self.check_fun(testfunc, targfunc, 'arr_obj', **kwargs)
TypeError
dataset/ETHPy150Open pydata/pandas/pandas/tests/test_nanops.py/TestnanopsDataFrame.check_funs
def check_nancomp(self, checkfun, targ0):
    arr_float = self.arr_float
    arr_float1 = self.arr_float1
    arr_nan = self.arr_nan
    arr_nan_nan = self.arr_nan_nan
    arr_float_nan = self.arr_float_nan
    arr_float1_nan = self.arr_float1_nan
    arr_nan_float1 = self.arr_nan_float1

    while targ0.ndim:
        try:
            res0 = checkfun(arr_float, arr_float1)
            tm.assert_almost_equal(targ0, res0)

            if targ0.ndim > 1:
                targ1 = np.vstack([targ0, arr_nan])
            else:
                targ1 = np.hstack([targ0, arr_nan])
            res1 = checkfun(arr_float_nan, arr_float1_nan)
            tm.assert_almost_equal(targ1, res1)

            targ2 = arr_nan_nan
            res2 = checkfun(arr_float_nan, arr_nan_float1)
            tm.assert_almost_equal(targ2, res2)
        except Exception as exc:
            exc.args += ('ndim: %s' % arr_float.ndim, )
            raise

        try:
            arr_float = np.take(arr_float, 0, axis=-1)
            arr_float1 = np.take(arr_float1, 0, axis=-1)
            arr_nan = np.take(arr_nan, 0, axis=-1)
            arr_nan_nan = np.take(arr_nan_nan, 0, axis=-1)
            arr_float_nan = np.take(arr_float_nan, 0, axis=-1)
            arr_float1_nan = np.take(arr_float1_nan, 0, axis=-1)
            arr_nan_float1 = np.take(arr_nan_float1, 0, axis=-1)
            targ0 = np.take(targ0, 0, axis=-1)
        except __HOLE__:
            break
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/tests/test_nanops.py/TestnanopsDataFrame.check_nancomp
def check_bool(self, func, value, correct, *args, **kwargs):
    while getattr(value, 'ndim', True):
        try:
            res0 = func(value, *args, **kwargs)
            if correct:
                self.assertTrue(res0)
            else:
                self.assertFalse(res0)
        except BaseException as exc:
            exc.args += ('dim: %s' % getattr(value, 'ndim', value), )
            raise
        if not hasattr(value, 'ndim'):
            break
        try:
            value = np.take(value, 0, axis=-1)
        except __HOLE__:
            break
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/tests/test_nanops.py/TestnanopsDataFrame.check_bool
def get_config(self, connection_info, disk_info):
    """Returns xml for libvirt."""
    conf = super(LibvirtNetVolumeDriver, self).get_config(connection_info,
                                                          disk_info)
    netdisk_properties = connection_info['data']
    conf.source_type = "network"
    conf.source_protocol = connection_info['driver_volume_type']
    conf.source_name = netdisk_properties.get('name')
    conf.source_hosts = netdisk_properties.get('hosts', [])
    conf.source_ports = netdisk_properties.get('ports', [])
    auth_enabled = netdisk_properties.get('auth_enabled')
    if (conf.source_protocol == 'rbd' and CONF.libvirt.rbd_secret_uuid):
        conf.auth_secret_uuid = CONF.libvirt.rbd_secret_uuid
        auth_enabled = True  # Force authentication locally
        if CONF.libvirt.rbd_user:
            conf.auth_username = CONF.libvirt.rbd_user
    if conf.source_protocol == 'iscsi':
        try:
            conf.source_name = ("%(target_iqn)s/%(target_lun)s" %
                                netdisk_properties)
            target_portal = netdisk_properties['target_portal']
        except __HOLE__:
            raise exception.NovaException(_("Invalid volume source data"))
        ip, port = utils.parse_server_string(target_portal)
        if ip == '' or port == '':
            raise exception.NovaException(_("Invalid target_lun"))
        conf.source_hosts = [ip]
        conf.source_ports = [port]
        if netdisk_properties.get('auth_method') == 'CHAP':
            auth_enabled = True
            conf.auth_secret_type = 'iscsi'
            password = netdisk_properties.get('auth_password')
            conf.auth_secret_uuid = self._get_secret_uuid(conf, password)
    if auth_enabled:
        conf.auth_username = (conf.auth_username or
                              netdisk_properties['auth_username'])
        conf.auth_secret_type = (conf.auth_secret_type or
                                 netdisk_properties['secret_type'])
        conf.auth_secret_uuid = (conf.auth_secret_uuid or
                                 netdisk_properties['secret_uuid'])
    return conf
KeyError
dataset/ETHPy150Open openstack/nova/nova/virt/libvirt/volume/net.py/LibvirtNetVolumeDriver.get_config
def install(self, plugin):
    archive = self._download(plugin)
    prefix = archive.getnames()[0]
    dirname = ''.join((self._path, plugin))
    directories = conf.supybot.directories.plugins()
    directory = self._getWritableDirectoryFromList(directories)
    assert directory is not None, \
        'No valid directory in supybot.directories.plugins.'

    try:
        assert archive.getmember(prefix + dirname).isdir(), \
            'This is not a valid plugin (it is a file, not a directory).'

        run_2to3 = minisix.PY3
        for file in archive.getmembers():
            if file.name.startswith(prefix + dirname):
                extractedFile = archive.extractfile(file)
                newFileName = os.path.join(*file.name.split('/')[1:])
                newFileName = newFileName[len(self._path)-1:]
                newFileName = os.path.join(directory, newFileName)
                if os.path.exists(newFileName):
                    assert os.path.isdir(newFileName), newFileName + \
                        'should not be a file.'
                    shutil.rmtree(newFileName)
                if extractedFile is None:
                    os.mkdir(newFileName)
                else:
                    with open(newFileName, 'ab') as fd:
                        reload_imported = False
                        for line in extractedFile.readlines():
                            if minisix.PY3:
                                if 'import reload' in line.decode():
                                    reload_imported = True
                                elif not reload_imported and \
                                        'reload(' in line.decode():
                                    fd.write('from imp import reload\n'
                                             .encode())
                                    reload_imported = True
                            fd.write(line)
                if newFileName.endswith('__init__.py'):
                    with open(newFileName) as fd:
                        lines = list(filter(lambda x: 'import plugin' in x,
                                            fd.readlines()))
                        if lines and lines[0].startswith('from . import'):
                            # This should be already Python 3-compatible
                            run_2to3 = False
    finally:
        archive.close()
        del archive

    if run_2to3:
        try:
            import lib2to3
        except __HOLE__:
            return _('Plugin is probably not compatible with your '
                     'Python version (3.x) and could not be converted '
                     'because 2to3 is not installed.')
        import subprocess
        fixers = []
        subprocess.Popen(['2to3', '-wn', os.path.join(directory, plugin)]) \
            .wait()
        return _('Plugin was designed for Python 2, but an attempt to '
                 'convert it to Python 3 has been made. There is no '
                 'guarantee it will work, though.')
    else:
        return _('Plugin successfully installed.')
ImportError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/PluginDownloader/plugin.py/GithubRepository.install
def _write_options(name, configuration):
    '''
    Writes a new OPTIONS file
    '''
    _check_portname(name)

    pkg = next(iter(configuration))
    conf_ptr = configuration[pkg]

    dirname = _options_dir(name)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except __HOLE__ as exc:
            raise CommandExecutionError(
                'Unable to make {0}: {1}'.format(dirname, exc)
            )

    with salt.utils.fopen(os.path.join(dirname, 'options'), 'w') as fp_:
        sorted_options = list(conf_ptr.keys())
        sorted_options.sort()
        fp_.write(
            '# This file was auto-generated by Salt (http://saltstack.com)\n'
            '# Options for {0}\n'
            '_OPTIONS_READ={0}\n'
            '_FILE_COMPLETE_OPTIONS_LIST={1}\n'
            .format(pkg, ' '.join(sorted_options))
        )
        opt_tmpl = 'OPTIONS_FILE_{0}SET+={1}\n'
        for opt in sorted_options:
            fp_.write(
                opt_tmpl.format(
                    '' if conf_ptr[opt] == 'on' else 'UN',
                    opt
                )
            )
OSError
dataset/ETHPy150Open saltstack/salt/salt/modules/freebsdports.py/_write_options
def showconfig(name, default=False, dict_return=False):
    '''
    Show the configuration options for a given port.

    default : False
        Show the default options for a port (not necessarily the same as the
        current configuration)

    dict_return : False
        Instead of returning the output of ``make showconfig``, return the
        data in an dictionary

    CLI Example:

    .. code-block:: bash

        salt '*' ports.showconfig security/nmap
        salt '*' ports.showconfig security/nmap default=True
    '''
    portpath = _check_portname(name)

    if default and _options_file_exists(name):
        saved_config = showconfig(name, default=False, dict_return=True)
        rmconfig(name)
        if _options_file_exists(name):
            raise CommandExecutionError('Unable to get default configuration')
        default_config = showconfig(name, default=False,
                                    dict_return=dict_return)
        _write_options(name, saved_config)
        return default_config

    try:
        result = __salt__['cmd.run_all'](
            ['make', 'showconfig'],
            cwd=portpath,
            python_shell=False
        )
        output = result['stdout'].splitlines()
        if result['retcode'] != 0:
            error = result['stderr']
        else:
            error = ''
    except TypeError:
        error = result

    if error:
        msg = ('Error running \'make showconfig\' for {0}: {1}'
               .format(name, error))
        log.error(msg)
        raise SaltInvocationError(msg)

    if not dict_return:
        return '\n'.join(output)

    if (not output) or ('configuration options' not in output[0]):
        return {}

    try:
        pkg = output[0].split()[-1].rstrip(':')
    except (__HOLE__, AttributeError, TypeError) as exc:
        log.error(
            'Unable to get pkg-version string: {0}'.format(exc)
        )
        return {}

    ret = {pkg: {}}
    output = output[1:]
    for line in output:
        try:
            opt, val, desc = re.match(
                r'\s+([^=]+)=(off|on): (.+)',
                line
            ).groups()
        except AttributeError:
            continue
        ret[pkg][opt] = val

    if not ret[pkg]:
        return {}
    return ret
IndexError
dataset/ETHPy150Open saltstack/salt/salt/modules/freebsdports.py/showconfig
def config(name, reset=False, **kwargs):
    '''
    Modify configuration options for a given port. Multiple options can be
    specified. To see the available options for a port, use
    :mod:`ports.showconfig <salt.modules.freebsdports.showconfig>`.

    name
        The port name, in ``category/name`` format

    reset : False
        If ``True``, runs a ``make rmconfig`` for the port, clearing its
        configuration before setting the desired options

    CLI Examples:

    .. code-block:: bash

        salt '*' ports.config security/nmap IPV6=off
    '''
    portpath = _check_portname(name)

    if reset:
        rmconfig(name)

    configuration = showconfig(name, dict_return=True)
    if not configuration:
        raise CommandExecutionError(
            'Unable to get port configuration for \'{0}\''.format(name)
        )

    # Get top-level key for later reference
    pkg = next(iter(configuration))
    conf_ptr = configuration[pkg]

    opts = dict(
        (str(x), _normalize(kwargs[x]))
        for x in kwargs
        if not x.startswith('_')
    )

    bad_opts = [x for x in opts if x not in conf_ptr]
    if bad_opts:
        raise SaltInvocationError(
            'The following opts are not valid for port {0}: {1}'
            .format(name, ', '.join(bad_opts))
        )

    bad_vals = [
        '{0}={1}'.format(x, y) for x, y in six.iteritems(opts)
        if y not in ('on', 'off')
    ]
    if bad_vals:
        raise SaltInvocationError(
            'The following key/value pairs are invalid: {0}'
            .format(', '.join(bad_vals))
        )

    conf_ptr.update(opts)
    _write_options(name, configuration)

    new_config = showconfig(name, dict_return=True)
    try:
        new_config = new_config[next(iter(new_config))]
    except (StopIteration, __HOLE__):
        return False

    return all(conf_ptr[x] == new_config.get(x) for x in conf_ptr)
TypeError
dataset/ETHPy150Open saltstack/salt/salt/modules/freebsdports.py/config
def update(extract=False):
    '''
    Update the ports tree

    extract : False
        If ``True``, runs a ``portsnap extract`` after fetching, should be
        used for first-time installation of the ports tree.

    CLI Example:

    .. code-block:: bash

        salt '*' ports.update
    '''
    result = __salt__['cmd.run_all'](
        _portsnap() + ['fetch'],
        python_shell=False
    )
    if not result['retcode'] == 0:
        raise CommandExecutionError(
            'Unable to fetch ports snapshot: {0}'.format(result['stderr'])
        )

    ret = []
    try:
        patch_count = re.search(
            r'Fetching (\d+) patches',
            result['stdout']
        ).group(1)
    except __HOLE__:
        patch_count = 0

    try:
        new_port_count = re.search(
            r'Fetching (\d+) new ports or files',
            result['stdout']
        ).group(1)
    except AttributeError:
        new_port_count = 0

    ret.append('Applied {0} new patches'.format(patch_count))
    ret.append('Fetched {0} new ports or files'.format(new_port_count))

    if extract:
        result = __salt__['cmd.run_all'](
            _portsnap() + ['extract'],
            python_shell=False
        )
        if not result['retcode'] == 0:
            raise CommandExecutionError(
                'Unable to extract ports snapshot {0}'.format(result['stderr'])
            )

    result = __salt__['cmd.run_all'](
        _portsnap() + ['update'],
        python_shell=False
    )
    if not result['retcode'] == 0:
        raise CommandExecutionError(
            'Unable to apply ports snapshot: {0}'.format(result['stderr'])
        )

    __context__.pop('ports.list_all', None)
    return '\n'.join(ret)
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/modules/freebsdports.py/update
def get_object(self, request, object_id):
    """
    Returns an instance matching the primary key provided. ``None`` is
    returned if no match is found (or the object_id failed validation
    against the primary key field).
    """
    queryset = self.get_queryset(request)
    model = queryset.model
    try:
        object_id = model._meta.pk.to_python(object_id)
        return queryset.get(pk=object_id)
    except (model.DoesNotExist, __HOLE__, ValueError):
        return None
ValidationError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/contrib/admin/options.py/ModelAdmin.get_object
def get_action(self, action):
    """
    Return a given action from a parameter, which can either be a
    callable, or the name of a method on the ModelAdmin.  Return is a
    tuple of (callable, name, description).
    """
    # If the action is a callable, just use it.
    if callable(action):
        func = action
        action = action.__name__

    # Next, look for a method. Grab it off self.__class__ to get an unbound
    # method instead of a bound one; this ensures that the calling
    # conventions are the same for functions and methods.
    elif hasattr(self.__class__, action):
        func = getattr(self.__class__, action)

    # Finally, look for a named method on the admin site
    else:
        try:
            func = self.admin_site.get_action(action)
        except __HOLE__:
            return None

    if hasattr(func, 'short_description'):
        description = func.short_description
    else:
        description = capfirst(action.replace('_', ' '))
    return func, action, description
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/contrib/admin/options.py/ModelAdmin.get_action
def message_user(self, request, message, level=messages.INFO, extra_tags='',
                 fail_silently=False):
    """
    Send a message to the user. The default implementation
    posts a message using the django.contrib.messages backend.

    Exposes almost the same API as messages.add_message(), but accepts the
    positional arguments in a different order to maintain backwards
    compatibility. For convenience, it accepts the `level` argument as
    a string rather than the usual level number.
    """

    if not isinstance(level, int):
        # attempt to get the level if passed a string
        try:
            level = getattr(messages.constants, level.upper())
        except __HOLE__:
            levels = messages.constants.DEFAULT_TAGS.values()
            levels_repr = ', '.join('`%s`' % l for l in levels)
            raise ValueError('Bad message level string: `%s`. '
                             'Possible values are: %s' % (level, levels_repr))

    messages.add_message(request, level, message, extra_tags=extra_tags,
                         fail_silently=fail_silently)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/contrib/admin/options.py/ModelAdmin.message_user
def response_action(self, request, queryset):
    """
    Handle an admin action. This is called if a request is POSTed to the
    changelist; it returns an HttpResponse if the action was handled, and
    None otherwise.
    """

    # There can be multiple action forms on the page (at the top
    # and bottom of the change list, for example). Get the action
    # whose button was pushed.
    try:
        action_index = int(request.POST.get('index', 0))
    except __HOLE__:
        action_index = 0

    # Construct the action form.
    data = request.POST.copy()
    data.pop(helpers.ACTION_CHECKBOX_NAME, None)
    data.pop("index", None)

    # Use the action whose button was pushed
    try:
        data.update({'action': data.getlist('action')[action_index]})
    except IndexError:
        # If we didn't get an action from the chosen form that's invalid
        # POST data, so by deleting action it'll fail the validation check
        # below. So no need to do anything here
        pass

    action_form = self.action_form(data, auto_id=None)
    action_form.fields['action'].choices = self.get_action_choices(request)

    # If the form's valid we can handle the action.
    if action_form.is_valid():
        action = action_form.cleaned_data['action']
        select_across = action_form.cleaned_data['select_across']
        func = self.get_actions(request)[action][0]

        # Get the list of selected PKs. If nothing's selected, we can't
        # perform an action on it, so bail. Except we want to perform
        # the action explicitly on all objects.
        selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
        if not selected and not select_across:
            # Reminder that something needs to be selected or nothing will happen
            msg = _("Items must be selected in order to perform "
                    "actions on them. No items have been changed.")
            self.message_user(request, msg, messages.WARNING)
            return None

        if not select_across:
            # Perform the action only on the selected objects
            queryset = queryset.filter(pk__in=selected)

        response = func(self, request, queryset)

        # Actions may return an HttpResponse-like object, which will be
        # used as the response from the POST. If not, we'll be a good
        # little HTTP citizen and redirect back to the changelist page.
        if isinstance(response, HttpResponseBase):
            return response
        else:
            return HttpResponseRedirect(request.get_full_path())
    else:
        msg = _("No action selected.")
        self.message_user(request, msg, messages.WARNING)
        return None
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/contrib/admin/options.py/ModelAdmin.response_action
def add(path, index=0):
    '''
    Add the directory to the SYSTEM path in the index location

    Returns:
        boolean True if successful, False if unsuccessful

    CLI Example:

    .. code-block:: bash

        # Will add to the beginning of the path
        salt '*' win_path.add 'c:\\python27' 0

        # Will add to the end of the path
        salt '*' win_path.add 'c:\\python27' index='-1'
    '''
    currIndex = -1
    sysPath = get_path()
    path = _normalize_dir(path)
    index = int(index)

    # validate index boundaries
    if index < 0:
        index = len(sysPath) + index + 1
    if index > len(sysPath):
        index = len(sysPath)

    localPath = os.environ["PATH"].split(os.pathsep)
    if path not in localPath:
        localPath.append(path)
        os.environ["PATH"] = os.pathsep.join(localPath)

    # Check if we are in the system path at the right location
    try:
        currIndex = sysPath.index(path)
        if currIndex != index:
            sysPath.pop(currIndex)
        else:
            return True
    except __HOLE__:
        pass

    # Add it to the Path
    sysPath.insert(index, path)
    regedit = __salt__['reg.set_value'](
        'HKEY_LOCAL_MACHINE',
        'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment',
        'PATH',
        ';'.join(sysPath),
        'REG_EXPAND_SZ'
    )

    # Broadcast WM_SETTINGCHANGE to Windows
    if regedit:
        return rehash()
    else:
        return False
ValueError
dataset/ETHPy150Open saltstack/salt/salt/modules/win_path.py/add
def remove(path):
    r'''
    Remove the directory from the SYSTEM path

    Returns:
        boolean True if successful, False if unsuccessful

    CLI Example:

    .. code-block:: bash

        # Will remove C:\Python27 from the path
        salt '*' win_path.remove 'c:\\python27'
    '''
    path = _normalize_dir(path)

    sysPath = get_path()
    localPath = os.environ["PATH"].split(os.pathsep)
    if path in localPath:
        localPath.remove(path)
        os.environ["PATH"] = os.pathsep.join(localPath)

    try:
        sysPath.remove(path)
    except __HOLE__:
        return True

    regedit = __salt__['reg.set_value'](
        'HKEY_LOCAL_MACHINE',
        'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment',
        'PATH',
        ';'.join(sysPath),
        'REG_EXPAND_SZ'
    )
    if regedit:
        return rehash()
    else:
        return False
ValueError
dataset/ETHPy150Open saltstack/salt/salt/modules/win_path.py/remove
def drain_consumer(consumer, limit=1, timeout=None, callbacks=None):
    acc = deque()

    def on_message(body, message):
        acc.append((body, message))

    consumer.callbacks = [on_message] + (callbacks or [])

    with consumer:
        for _ in eventloop(consumer.channel.connection.client,
                           limit=limit, timeout=timeout, ignore_timeouts=True):
            try:
                yield acc.popleft()
            except __HOLE__:
                pass
IndexError
dataset/ETHPy150Open celery/kombu/kombu/common.py/drain_consumer
def test_builtin_sequence_types(self):
    # a collection of tests on builtin sequence types
    a = range(10)
    for i in a:
        self.assertIn(i, a)

    self.assertNotIn(16, a)
    self.assertNotIn(a, a)

    a = tuple(a)
    for i in a:
        self.assertIn(i, a)

    self.assertNotIn(16, a)
    self.assertNotIn(a, a)

    class Deviant1:
        """Behaves strangely when compared

        This class is designed to make sure that the contains code
        works when the list is modified during the check.
        """
        aList = range(15)

        def __cmp__(self, other):
            if other == 12:
                self.aList.remove(12)
                self.aList.remove(13)
                self.aList.remove(14)
            return 1

    self.assertNotIn(Deviant1(), Deviant1.aList)

    class Deviant2:
        """Behaves strangely when compared

        This class raises an exception during comparison.  That in
        turn causes the comparison to fail with a TypeError.
        """
        def __cmp__(self, other):
            if other == 4:
                raise RuntimeError, "gotcha"

    try:
        self.assertNotIn(Deviant2(), a)
    except __HOLE__:
        pass
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_contains.py/TestContains.test_builtin_sequence_types
def to_number(self, value):
    """
    Transform categorical value to the ordinal.
    Raises ValueError if value is not in self.value_list
    """
    try:
        return list(self.value_list).index(value)
    except __HOLE__ as e:
        if self.map_missing_to:
            return self.map_missing_to
        else:
            raise e
ValueError
dataset/ETHPy150Open alex-pirozhenko/sklearn-pmml/sklearn_pmml/convert/features.py/CategoricalFeature.to_number
def mayaInit(forversion=None):
    """ Try to init Maya standalone module, use when running pymel from an
    external Python inerpreter, it is possible to pass the desired Maya
    version number to define which Maya to initialize

    Part of the complexity of initializing maya in standalone mode is that
    maya does not populate os.environ when parsing Maya.env.  If we initialize
    normally, the env's are available via maya (via the shell), but not in
    python via os.environ.

    Note: the following example assumes that MAYA_SCRIPT_PATH is not set in
    your shell environment prior to launching python or mayapy.

    >>> import maya.standalone            #doctest: +SKIP
    >>> maya.standalone.initialize()      #doctest: +SKIP
    >>> import maya.mel as mm             #doctest: +SKIP
    >>> print mm.eval("getenv MAYA_SCRIPT_PATH")    #doctest: +SKIP
    /Network/Servers/sv-user.luma-pictures.com/luma .....
    >>> import os                         #doctest: +SKIP
    >>> 'MAYA_SCRIPT_PATH' in os.environ  #doctest: +SKIP
    False

    The solution lies in `refreshEnviron`, which copies the environment from
    the shell to os.environ after maya.standalone initializes.

    :rtype: bool
    :return: returns True if maya.cmds required initializing ( in other words,
        we are in a standalone python interpreter )
    """
    setupFormatting()

    global isInitializing

    # test that Maya actually is loaded and that commands have been
    # initialized, for the requested version
    aboutExists = False
    try:
        from maya.cmds import about
        aboutExists = True
    except ImportError:
        pass

    if aboutExists and mayaStartupHasStarted():
        # if this succeeded, we're initialized
        isInitializing = False
        return False

    _logger.debug("startup.mayaInit running")
    # for use with pymel compatible maya package
    os.environ['MAYA_SKIP_USERSETUP_PY'] = 'on'

    if not aboutExists and not sys.modules.has_key('maya.standalone'):
        try:
            import maya.standalone  # @UnresolvedImport
            maya.standalone.initialize(name="python")

            if versions.current() < versions.v2009:
                refreshEnviron()

        except __HOLE__, e:
            raise e, str(e) + ": pymel was unable to intialize maya.standalone"

    try:
        from maya.cmds import about
    except Exception:
        _logger.error("maya.standalone was successfully initialized, but "
                      "pymel failed to import maya.cmds (or it was not "
                      "populated)")
        raise

    if not mayaStartupHasRun():
        _logger.debug("running maya.app.startup")
        # If we're in 'maya -prompt' mode, and a plugin loads pymel, then we
        # can have a state where maya.standalone has been initialized, but
        # the python startup code hasn't yet been run...
        if about(batch=True):
            import maya.app.startup.batch
        else:
            import maya.app.startup.gui

    # return True, meaning we had to initialize maya standalone
    isInitializing = True
    return True
ImportError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/internal/startup.py/mayaInit
def initMEL():
    if 'PYMEL_SKIP_MEL_INIT' in os.environ or pymel_options.get('skip_mel_init', False):
        _logger.info("Skipping MEL initialization")
        return

    _logger.debug("initMEL")
    mayaVersion = versions.installName()
    prefsDir = getUserPrefsDir()
    if prefsDir is None:
        _logger.error("could not initialize user preferences: MAYA_APP_DIR not set")
    elif not os.path.isdir(prefsDir):
        _logger.error("could not initialize user preferences: %s does not exist" % prefsDir)

    # TODO : use cmds.internalVar to get paths

    # got this startup sequence from autodesk support
    startup = [
        #'defaultRunTimeCommands.mel',  # sourced automatically
        #os.path.join( prefsDir, 'userRunTimeCommands.mel'),  # sourced automatically
        'createPreferencesOptVars.mel',
        'createGlobalOptVars.mel',
        os.path.join(prefsDir, 'userPrefs.mel') if prefsDir else None,
        'initialStartup.mel',
        #$HOME/Documents/maya/projects/default/workspace.mel
        'initialPlugins.mel',
        #'initialGUI.mel', #GUI
        #'initialLayout.mel', #GUI
        #os.path.join( prefsDir, 'windowPrefs.mel'), #GUI
        #os.path.join( prefsDir, 'menuSetPrefs.mel'), #GUI
        #'hotkeySetup.mel', #GUI
        'namedCommandSetup.mel',
        os.path.join(prefsDir, 'userNamedCommands.mel') if prefsDir else None,
        #'initAfter.mel', #GUI
        os.path.join(prefsDir, 'pluginPrefs.mel') if prefsDir else None,
    ]
    try:
        for f in startup:
            _logger.debug("running: %s" % f)
            if f is not None:
                if os.path.isabs(f) and not os.path.exists(f):
                    _logger.warning("Maya startup file %s does not exist" % f)
                else:
                    # need to encode backslashes (used for windows paths)
                    if isinstance(f, unicode):
                        encoding = 'unicode_escape'
                    else:
                        encoding = 'string_escape'
                    #import pymel.core.language as lang
                    #lang.mel.source( f.encode(encoding) )
                    import maya.mel
                    maya.mel.eval('source "%s"' % f.encode(encoding))

    except Exception, e:
        _logger.error("could not perform Maya initialization sequence: "
                      "failed on %s: %s" % (f, e))

    try:
        # make sure it exists
        res = maya.mel.eval('whatIs "userSetup.mel"')
        if res != 'Unknown':
            maya.mel.eval('source "userSetup.mel"')
    except __HOLE__:
        pass

    _logger.debug("done running mel files")
RuntimeError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/internal/startup.py/initMEL
def initAE():
    try:
        pkg = __import__('AETemplates')
    except __HOLE__:
        return False
    except Exception:
        import traceback
        traceback.print_exc()
        return False
    else:
        # import subpackages
        for data in subpackages(pkg):
            pass
    return True
ImportError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/internal/startup.py/initAE
def encodeFix():
    if mayaInit():
        from maya.cmds import about

        mayaEncode = about(cs=True)
        pyEncode = sys.getdefaultencoding()  # encoding as defined by sitecustomize
        if mayaEncode != pyEncode:  # if the encoding needs to be redefined
            #reload (sys)  # careful: also resets sys.stdout and sys.stderr
            #sys.setdefaultencoding(newEncode)
            #del sys.setdefaultencoding
            #print "# Encoding changed from '"+pyEncode+'" to "'+newEncode+"' #"
            if not about(b=True):
                # not in batch mode, so in UI mode: redefine stdout and
                # stderr with the Maya encoding
                import maya.utils
                try:
                    import maya.app.baseUI
                    import codecs
                    # Replace sys.stdin with a GUI version that will request input from the user
                    sys.stdin = codecs.getreader(mayaEncode)(maya.app.baseUI.StandardInput())
                    # Replace sys.stdout and sys.stderr with versions that can output to Maya's GUI
                    sys.stdout = codecs.getwriter(mayaEncode)(maya.utils.Output())
                    sys.stderr = codecs.getwriter(mayaEncode)(maya.utils.Output(error=1))
                except __HOLE__:
                    _logger.debug("Unable to import maya.app.baseUI")

#===============================================================================
# Cache utilities
#===============================================================================
ImportError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/maya/site-packages/pymel-1.0.3/pymel/internal/startup.py/encodeFix
def DownloadDir(aff4_path, output_dir, bufsize=8192, preserve_path=True):
    """Take an aff4 path and download all files in it to output_dir.

    Args:
      aff4_path: Any aff4 path as a string
      output_dir: A local directory to write to, will be created if not there.
      bufsize: Buffer size to use.
      preserve_path: If set all paths will be created.

    Note that this works for collections as well. It will download all
    files in the collection.

    This only downloads files that are already in the datastore, it doesn't
    queue anything on the client.
    """
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    fd = aff4.FACTORY.Open(aff4_path)
    for child in fd.OpenChildren():
        if preserve_path:
            # Get a full path without the aff4:
            full_dir = utils.JoinPath(output_dir, child.urn.Path())
            full_dir = os.path.dirname(full_dir)
            if not os.path.isdir(full_dir):
                os.makedirs(full_dir)
            outfile = os.path.join(full_dir, child.urn.Basename())
        else:
            outfile = os.path.join(output_dir, child.urn.Basename())
        logging.info(u"Downloading %s to %s", child.urn, outfile)
        with open(outfile, "wb") as out_fd:
            try:
                buf = child.Read(bufsize)
                while buf:
                    out_fd.write(buf)
                    buf = child.Read(bufsize)
            except __HOLE__ as e:
                logging.error("Failed to read %s. Err: %s", child.urn, e)
IOError
dataset/ETHPy150Open google/grr/grr/lib/console_utils.py/DownloadDir
@staticmethod
def FindPathByUuid(path, uuid, arch):
    def ViewfinderPath(uuid):
        p = os.path.join(_VIEWFINDER_DSYMS_UUID_PATH, uuid)
        if not os.path.exists(p):
            return None
        p = open(p).read().strip()
        if not p:
            return None
        return os.path.join(_VIEWFINDER_DSYMS_PATH, p, _VIEWFINDER_DSYMS_SUFFIX)

    def DsymForUuid(path, uuid, arch):
        dwarfdump = subprocess.Popen(
            ['dwarfdump', '--arch=%s' % arch, '--uuid', path],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        for line in dwarfdump.stdout:
            if uuid in line:
                return ' '.join(line.strip().split(' ')[3:])
        return None

    try:
        p = uuid_cache[uuid]
    except KeyError:
        p = ViewfinderPath(uuid)
        if not p:
            mdfind = subprocess.Popen(
                ['mdfind', 'com_apple_xcode_dsym_uuids = ' + uuid],
                stdout=subprocess.PIPE)
            try:
                p = iter(mdfind.stdout).next()[:-1]  # Strip \n
                if p.endswith(".xcarchive"):
                    dsym_folder = os.path.join(p, "dSYMs")
                    dsyms = glob.glob(os.path.join(p, "dSYMs/*.dSYM"))
                    # I only know how to handle the case for one dsym. I'm sure
                    # there's a way to figure out which we want for multiple-dsym
                    # xcarchives (if such a thing exists?).
                    if len(dsyms) == 1:
                        p = os.path.join(p, "dSYMs", dsyms[0])
                    else:
                        p = None
            except __HOLE__:
                p = None
            mdfind.wait()
        if p:
            # Translate the path to the dsym directory to the correct dsym file.
            p = DsymForUuid(p, uuid, arch)
        else:
            # Unable to find a dsym file, search through the symbol dirs looking
            # for an appropriately named library containing the desired uuid.
            global symbol_dirs
            for d in symbol_dirs:
                p = '%s/%s' % (d, path)
                if os.path.exists(p):
                    p = DsymForUuid(p, uuid, arch)
                    if p:
                        break
                p = None
        uuid_cache[uuid] = p
    return p
StopIteration
dataset/ETHPy150Open viewfinderco/viewfinder/clients/ios/scripts/symbolicator.py/Symbolicator.FindPathByUuid
@staticmethod
def ParseBinaryImageLine(line):
    elements = iter(line.split())
    start_address = elements.next()
    elements.next()  # Hyphen
    end_address = elements.next()
    # The main(?) executable has plus sign before its bundle ID. Strip this off.
    bundle_id = elements.next().strip('+')
    arch = elements.next()
    uuid = Symbolicator.ReformatUuid(elements.next().strip('<>'))
    try:
        path = elements.next()
        try:
            while True:
                path += ' ' + elements.next()
        except StopIteration:
            pass
    except __HOLE__:
        return (None, None, None)
    return (start_address, end_address, arch, bundle_id, uuid, path)
StopIteration
dataset/ETHPy150Open viewfinderco/viewfinder/clients/ios/scripts/symbolicator.py/Symbolicator.ParseBinaryImageLine
def start(self):
    '''
    Start the magic!!
    '''
    if self.opts['master_too']:
        master_swarm = MasterSwarm(self.opts)
        master_swarm.start()
    minions = MinionSwarm(self.opts)
    minions.start_minions()
    print('Starting minions...')
    #self.start_minions()
    print('All {0} minions have started.'.format(self.opts['minions']))
    print('Waiting for CTRL-C to properly shutdown minions...')
    while True:
        try:
            time.sleep(5)
        except __HOLE__:
            print('\nShutting down minions')
            self.clean_configs()
            break
KeyboardInterrupt
dataset/ETHPy150Open saltstack/salt/tests/minionswarm.py/Swarm.start
def clean_configs(self): ''' Clean up the config files ''' for path in self.confs: pidfile = '{0}.pid'.format(path) try: try: pid = int(open(pidfile).read().strip()) os.kill(pid, signal.SIGTERM) except __HOLE__: pass if os.path.exists(pidfile): os.remove(pidfile) if not self.opts['no_clean']: shutil.rmtree(path) except (OSError, IOError): pass
ValueError
dataset/ETHPy150Open saltstack/salt/tests/minionswarm.py/Swarm.clean_configs
def load(self, file_obj, header=True, **kwargs): count = 0 reader = csv.reader(file_obj, **kwargs) if header: try: header_keys = next(reader) except __HOLE__: return count if self.strict: header_fields = [] for idx, key in enumerate(header_keys): if key in self.columns: header_fields.append((idx, self.columns[key])) else: header_fields = list(enumerate(header_keys)) else: header_fields = list(enumerate(self.model._meta.sorted_fields)) if not header_fields: return count for row in reader: obj = {} for idx, field in header_fields: if self.strict: obj[field.name] = field.python_value(row[idx]) else: obj[field] = row[idx] self.table.insert(**obj) count += 1 return count
StopIteration
dataset/ETHPy150Open coleifer/peewee/playhouse/dataset.py/CSVImporter.load
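The loop above pairs each CSV header name with its column index before inserting rows. A minimal standalone sketch of that header-mapping pattern, using only the stdlib csv module (load_rows and known_columns are illustrative stand-ins, not the playhouse API):

import csv

def load_rows(lines, known_columns):
    # Pair each header name with its column index, skipping unknown columns,
    # then build one dict per data row -- the same shape as the loop above.
    reader = csv.reader(lines)
    header = next(reader)
    mapping = [(i, name) for i, name in enumerate(header) if name in known_columns]
    return [dict((name, row[i]) for i, name in mapping) for row in reader]

rows = load_rows(["id,name,extra", "1,alpha,x", "2,beta,y"], {"id", "name"})
# -> [{'id': '1', 'name': 'alpha'}, {'id': '2', 'name': 'beta'}]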
def parse_request(self, request): try: request = anyjson.deserialize(request['data']) except __HOLE__, e: logging.error("Request dictionary contains no 'data' key") return self.encode_result((500, "Internal error with request")) except Exception, e: logging.error("Invalid JSON in 'data' key for request") return self.encode_result((500, "Request is malformed")) try: cmd_name = request['name'] except KeyError: logging.error("Request is missing 'name' key") return self.encode_result((500, "Request is missing 'name' key")) cmd_string = request.get('value', '') # XXX Major kludge here to not log activation keys for redhat if isinstance(cmd_string, dict) and \ 'activation_key' in cmd_string: cs_copy = cmd_string.copy() cs_copy['activation_key'] = "<removed>" logging.info("Received command '%s' with argument: '%s'" % \ (cmd_name, cs_copy)) else: logging.info("Received command '%s' with argument: '%s'" % \ (cmd_name, cmd_string)) try: result = self._command_cls.run_command(cmd_name, cmd_string) except self._command_cls.CommandNotFoundError, e: logging.warn(str(e)) return self.encode_result((404, str(e))) except Exception, e: logging.exception('Exception while trying to process ' 'command %r' % cmd_name) return self.encode_result((500, str(e))) logging.info("'%s' completed with code '%s', message '%s'" % \ (cmd_name, result[0], result[1])) return self.encode_result(result)
KeyError
dataset/ETHPy150Open rackerlabs/openstack-guest-agents-unix/plugins/jsonparser.py/JsonParser.parse_request
def _import_hook(self, fqname, globals=None, locals=None, fromlist=None, level=-1): """Python calls this hook to locate and import a module.""" parts = fqname.split('.') #print "_import_hook", parts # pyjamas-gtk hack if parts[0] in ['gtk', 'gdk', 'pygtk', 'gobject']: parts = ['pygtkweb'] + parts # determine the context of this import parent = self._determine_import_context(globals) # if there is a parent, then its importer should manage this import if parent: module = parent.__importer__._do_import(parent, parts, fromlist) if module: return module # has the top module already been imported? try: top_module = sys.modules[parts[0]] except __HOLE__: # look for the topmost module top_module = self._import_top_module(parts[0]) if not top_module: # the topmost module wasn't found at all. # try previous importer. return self.previous_importer(fqname, globals, locals, fromlist, level) #raise ImportError, 'No module named ' + fqname # fast-path simple imports if len(parts) == 1: if not fromlist: return top_module if not top_module.__dict__.get('__ispkg__'): # __ispkg__ isn't defined (the module was not imported by us), # or it is zero. # # In the former case, there is no way that we could import # sub-modules that occur in the fromlist (but we can't raise an # error because it may just be names) because we don't know how # to deal with packages that were imported by other systems. # # In the latter case (__ispkg__ == 0), there can't be any sub- # modules present, so we can just return. # # In both cases, since len(parts) == 1, the top_module is also # the "bottom" which is the defined return when a fromlist # exists. return top_module importer = top_module.__dict__.get('__importer__') if importer: return importer._finish_import(top_module, parts[1:], fromlist) # Grrr, some people "import os.path" or do "from os.path import ..." if len(parts) == 2 and hasattr(top_module, parts[1]): if fromlist: return getattr(top_module, parts[1]) else: return top_module # assume that the module has already been imported, # walk from top_module to find it. mod = top_module for k in parts[1:]: if not hasattr(mod, k): #print "no mod", mod, k, parts mod = None break mod = getattr(mod, k) if mod: return mod # ok, pass through to previous importer return self.previous_importer(fqname, globals, locals, fromlist, level) # If the importer does not exist, then we have to bail. A missing # importer means that something else imported the module, and we have # no knowledge of how to get sub-modules out of the thing. raise ImportError, 'No module named ' + fqname
KeyError
dataset/ETHPy150Open anandology/pyjamas/pyjd/imputil.py/ImportManager._import_hook
def _import_one(self, parent, modname, fqname): "Import a single module." # has the module already been imported? try: return sys.modules[fqname] except __HOLE__: pass # load the module's code, or fetch the module itself result = self.get_code(parent, modname, fqname) if result is None: return None module = self._process_result(result, fqname) # insert the module into its parent if parent: setattr(parent, modname, module) return module
KeyError
dataset/ETHPy150Open anandology/pyjamas/pyjd/imputil.py/Importer._import_one
def _compile(pathname, timestamp): """Compile (and cache) a Python source file. The file specified by <pathname> is compiled to a code object and returned. Presuming the appropriate privileges exist, the bytecodes will be saved back to the filesystem for future imports. The source file's modification timestamp must be provided as a Long value. """ codestring = open(pathname, 'rU').read() if codestring and codestring[-1] != '\n': codestring = codestring + '\n' code = __builtin__.compile(codestring, pathname, 'exec') # try to cache the compiled code try: f = open(pathname + _suffix_char, 'wb') except __HOLE__: pass else: f.write('\0\0\0\0') f.write(struct.pack('<I', timestamp)) marshal.dump(code, f) f.flush() f.seek(0, 0) f.write(imp.get_magic()) f.close() return code
IOError
dataset/ETHPy150Open anandology/pyjamas/pyjd/imputil.py/_compile
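_compile writes four magic bytes, a little-endian timestamp, and the marshalled code object. A hypothetical reader for that cache layout (Python 2 era imp/marshal APIs, matching the struct format above) might look like:

import imp
import marshal
import struct

def load_cached(pathname):
    # Returns (timestamp, code_object), or None when the magic doesn't match.
    with open(pathname, 'rb') as f:
        if f.read(4) != imp.get_magic():
            return None
        timestamp = struct.unpack('<I', f.read(4))[0]
        return timestamp, marshal.load(f)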
def _os_path_isdir(pathname): "Local replacement for os.path.isdir()." try: s = _os_stat(pathname) except __HOLE__: return None return (s.st_mode & 0170000) == 0040000
OSError
dataset/ETHPy150Open anandology/pyjamas/pyjd/imputil.py/_os_path_isdir
def _timestamp(pathname): "Return the file modification time as a Long." try: s = _os_stat(pathname) except __HOLE__: return None return long(s.st_mtime) ###################################################################### # # Emulate the import mechanism for builtin and frozen modules #
OSError
dataset/ETHPy150Open anandology/pyjamas/pyjd/imputil.py/_timestamp
def _import_pathname(self, pathname, fqname): if _os_path_isdir(pathname): result = self._import_pathname(_os_path_join(pathname, '__init__'), fqname) if result: values = result[2] values['__pkgdir__'] = pathname values['__path__'] = [ pathname ] return 1, result[1], values return None for suffix, importFunc in self.suffixes: filename = pathname + suffix try: finfo = _os_stat(filename) except __HOLE__: pass else: return importFunc(filename, finfo, fqname) return None ###################################################################### # # SUFFIX-BASED IMPORTERS #
OSError
dataset/ETHPy150Open anandology/pyjamas/pyjd/imputil.py/_FilesystemImporter._import_pathname
def get_wordfile(): for fn in WORDFILES: try: wordfile = UserFile.open(fn, "r") except __HOLE__: pass else: return wordfile raise ValueError("cannot find file of words.")
IOError
dataset/ETHPy150Open kdart/pycopia/aid/pycopia/words.py/get_wordfile
def set_logging(log, loglevelnum, logfile, verbose_console=False):
    """Configure standard logging for the application: one ERROR level handler
    to stderr and one file handler with the specified loglevelnum to logfile.
    The log argument is the main (parent) application logger.
    """
    # Prevent a common error in using this API: loglevelnum must be numeric.
    if loglevelnum not in [logging.NOTSET, logging.DEBUG, logging.INFO,
                           logging.WARNING, logging.ERROR, logging.CRITICAL]:
        log.error("Incorrect loglevel value")
        sys.exit(1)
    try:
        # Specific log levels are set on individual handlers, but we must also
        # set the most permissive log level on the logger itself to pass the
        # initial filter.
        log.setLevel(logging.DEBUG)
        fh = logging.FileHandler(logfile)
        fh.setLevel(loglevelnum)
        fh.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(filename)s:%(lineno)d.%(funcName)s() - %(message)s'))
        log.addHandler(fh)
        ch = logging.StreamHandler()
        if verbose_console:
            ch.setLevel(logging.DEBUG)
        else:
            ch.setLevel(logging.ERROR)
        ch.setFormatter(logging.Formatter('%(levelname)s %(message)s'))
        log.addHandler(ch)
        # Add the log file handler for libraries according to the logging convention.
        logging.getLogger('lib').addHandler(fh)
    except __HOLE__, e:
        msg = str(e)
        if e.errno == 13:
            msg += '\nYou need to be root'
        raise ConfigError(msg)
IOError
dataset/ETHPy150Open securitykiss-com/rfw/rfw/config.py/set_logging
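A minimal usage sketch, assuming ConfigError is the exception class defined in this module and the log path is writable:

import logging

log = logging.getLogger('rfw')
try:
    set_logging(log, logging.INFO, '/tmp/rfw.log', verbose_console=True)
    log.info('logging configured')
except ConfigError as e:  # raised above when the logfile cannot be opened
    print(e)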
def get_task_args(self): try: body = self.request.body options = json_decode(body) if body else {} except __HOLE__ as e: raise HTTPError(400, str(e)) args = options.pop('args', []) kwargs = options.pop('kwargs', {}) if not isinstance(args, (list, tuple)): raise HTTPError(400, 'args must be an array') return args, kwargs, options
ValueError
dataset/ETHPy150Open mher/flower/flower/api/tasks.py/BaseTaskHandler.get_task_args
def normalize_options(self, options): if 'eta' in options: options['eta'] = datetime.strptime(options['eta'], self.DATE_FORMAT) if 'countdown' in options: options['countdown'] = float(options['countdown']) if 'expires' in options: expires = options['expires'] try: expires = float(expires) except __HOLE__: expires = datetime.strptime(expires, self.DATE_FORMAT) options['expires'] = expires
ValueError
dataset/ETHPy150Open mher/flower/flower/api/tasks.py/BaseTaskHandler.normalize_options
def safe_result(self, result):
    """Return a JSON-encodable result, falling back to repr()."""
    try:
        json.dumps(result)
    except __HOLE__:
        return repr(result)
    else:
        return result
TypeError
dataset/ETHPy150Open mher/flower/flower/api/tasks.py/BaseTaskHandler.safe_result
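safe_result returns the value untouched when json.dumps succeeds and falls back to repr() otherwise; a standalone illustration of that behavior, with the exception type written out for the demo:

import json

def safe_result(result):
    try:
        json.dumps(result)
    except TypeError:
        return repr(result)
    else:
        return result

print(safe_result({'n': 1}))   # {'n': 1} -- already JSON-encodable
print(safe_result({1, 2, 3}))  # repr fallback, since sets are not serializable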
@web.authenticated
@web.asynchronous
def post(self, taskname):
    """
Execute a task by name and wait for results

**Example request**:

.. sourcecode:: http

  POST /api/task/apply/tasks.add HTTP/1.1
  Accept: application/json
  Accept-Encoding: gzip, deflate, compress
  Content-Length: 16
  Content-Type: application/json; charset=utf-8
  Host: localhost:5555

  {
      "args": [1, 2]
  }

**Example response**:

.. sourcecode:: http

  HTTP/1.1 200 OK
  Content-Length: 71
  Content-Type: application/json; charset=UTF-8

  {
      "state": "SUCCESS",
      "task-id": "c60be250-fe52-48df-befb-ac66174076e6",
      "result": 3
  }

:query args: a list of arguments
:query kwargs: a dictionary of arguments
:reqheader Authorization: optional OAuth token to authenticate
:statuscode 200: no error
:statuscode 401: unauthorized request
:statuscode 404: unknown task
    """
    args, kwargs, options = self.get_task_args()
    logger.debug("Invoking a task '%s' with '%s' and '%s'",
                 taskname, args, kwargs)
    try:
        task = self.capp.tasks[taskname]
    except KeyError:
        raise HTTPError(404, "Unknown task '%s'" % taskname)
    try:
        self.normalize_options(options)
    except __HOLE__:
        raise HTTPError(400, 'Invalid option')
    result = task.apply_async(args=args, kwargs=kwargs, **options)
    response = {'task-id': result.task_id}
    # In Tornado we must not block the event loop, so the results are
    # returned from another thread via self.finish().
    th = Thread(target=self.wait_results, args=(result, response, ))
    th.start()
    # Just exit; the worker thread finishes the request.
ValueError
dataset/ETHPy150Open mher/flower/flower/api/tasks.py/TaskApply.post
@web.authenticated def post(self, taskname): """ Execute a task **Example request**: .. sourcecode:: http POST /api/task/async-apply/tasks.add HTTP/1.1 Accept: application/json Accept-Encoding: gzip, deflate, compress Content-Length: 16 Content-Type: application/json; charset=utf-8 Host: localhost:5555 { "args": [1, 2] } **Example response**: .. sourcecode:: http HTTP/1.1 200 OK Content-Length: 71 Content-Type: application/json; charset=UTF-8 Date: Sun, 13 Apr 2014 15:55:00 GMT { "state": "PENDING", "task-id": "abc300c7-2922-4069-97b6-a635cc2ac47c" } :query args: a list of arguments :query kwargs: a dictionary of arguments :query options: a dictionary of `apply_async` keyword arguments :reqheader Authorization: optional OAuth token to authenticate :statuscode 200: no error :statuscode 401: unauthorized request :statuscode 404: unknown task """ args, kwargs, options = self.get_task_args() logger.debug("Invoking a task '%s' with '%s' and '%s'", taskname, args, kwargs) try: task = self.capp.tasks[taskname] except KeyError: raise HTTPError(404, "Unknown task '%s'" % taskname) try: self.normalize_options(options) except __HOLE__: raise HTTPError(400, 'Invalid option') result = task.apply_async(args=args, kwargs=kwargs, **options) response = {'task-id': result.task_id} if self.backend_configured(result): response.update(state=result.state) self.write(response)
ValueError
dataset/ETHPy150Open mher/flower/flower/api/tasks.py/TaskAsyncApply.post
def _volume_offset_changed(self, event): """ Called when the user inputs text in this panel to change the volume offset. """ new_value = self._volume_offset_text.GetValue() try: new_value = int(new_value) self._model.set_volume_offset(new_value) self._volume_offset_label.SetValue(self._volume_offset_label_ok) except __HOLE__: self._volume_offset_label.SetValue(self._volume_offset_label_fail)
ValueError
dataset/ETHPy150Open williballenthin/INDXParse/MFTView.py/DiskGeometryWarningPanel._volume_offset_changed
def _cluster_size_changed(self, event): """ Called when the user inputs text in this panel to change the cluster size. """ new_value = self._cluster_size_text.GetValue() try: new_value = int(new_value) self._model.set_cluster_size(new_value) self._cluster_size_label.SetValue(self._cluster_size_label_ok) except __HOLE__: self._cluster_size_label.SetValue(self._cluster_size_label_fail)
ValueError
dataset/ETHPy150Open williballenthin/INDXParse/MFTView.py/DiskGeometryWarningPanel._cluster_size_changed
def __init__(self, *args, **kwargs): self._model = kwargs.get("model", None) try: del kwargs["model"] except __HOLE__: pass super(RecordPane, self).__init__(*args, **kwargs) self._sizer = wx.BoxSizer(wx.VERTICAL) self.SetSizer(self._sizer) # this is only for readability, and is # specific to the wx.VERTICAL box sizer # used here self.EXPAND_VERTICALLY = 1 self.NOT_EXPAND_VERTICALLY = 0 self.SetAutoLayout(1) self.SetupScrolling() eventManager.Register(self.update, EVT_RECORD_UPDATED_EVENT, self._model)
KeyError
dataset/ETHPy150Open williballenthin/INDXParse/MFTView.py/RecordPane.__init__
def unicode_strings(buf, n=4):
    # Use the n parameter as the minimum run length instead of hardcoding 4.
    reg = b"((?:[%s]\x00){%d,})" % (ascii_byte, n)
    ascii_re = re.compile(reg)
    for match in ascii_re.finditer(buf):
        try:
            yield match.group().decode("utf-16")
        except __HOLE__:
            print "unicode find error: " + str(match.group())
UnicodeDecodeError
dataset/ETHPy150Open williballenthin/INDXParse/MFTView.py/unicode_strings
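The regex above matches runs of ASCII bytes each followed by a NUL, i.e. UTF-16LE text embedded in a binary buffer. A self-contained sketch of the same idea; ascii_byte is defined elsewhere in MFTView.py, so a printable-ASCII range is assumed here:

import re

# Assumed printable-ASCII class; the real module defines ascii_byte itself.
ascii_byte = b"\x20-\x7e"
pattern = re.compile(b"((?:[%s]\x00){4,})" % ascii_byte)

buf = b"\x01\x02H\x00e\x00l\x00l\x00o\x00\xff\xff"
for match in pattern.finditer(buf):
    print(match.group().decode("utf-16"))  # -> Hello (little-endian, no BOM)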
def update(self, event): self._sizer.Clear() self.DestroyChildren() has_runlists = False for attr in self._model.record().attributes(): if attr.type() == ATTR_TYPE.DATA: if attr.non_resident(): for (_, __) in attr.runlist().runs(): has_runlists = True if has_runlists: warning_panel = DiskGeometryWarningPanel(self, self._model) self._sizer.Add(warning_panel, self.NOT_EXPAND_VERTICALLY, wx.EXPAND) for attr in self._model.record().attributes(): if attr.type() == ATTR_TYPE.DATA: try: if attr.non_resident(): try: for (offset, length) in attr.runlist().runs(): runlist_panel = RunlistPanel(self, offset, length, self._model) self._sizer.Add(runlist_panel, 0, wx.EXPAND) except __HOLE__: sys.stderr.write("Error parsing runlist\n") continue elif len(attr.value()) > 0: value_view = wx.TextCtrl(self, style=wx.TE_MULTILINE) value_view.SetFont(wx.Font(8, wx.SWISS, wx.NORMAL, wx.NORMAL, False, u'Courier')) value_view.SetValue(unicode(_format_hex(attr.value()))) self._sizer.Add(value_view, self.EXPAND_VERTICALLY, wx.EXPAND) except ZeroDivisionError: continue self.Layout()
IndexError
dataset/ETHPy150Open williballenthin/INDXParse/MFTView.py/RecordDataPane.update
@_lazy_load def _get_sendfile(): try: from importlib import import_module except __HOLE__: from django.utils.importlib import import_module from django.conf import settings from django.core.exceptions import ImproperlyConfigured backend = getattr(settings, 'SENDFILE_BACKEND', None) if not backend: raise ImproperlyConfigured('You must specify a value for SENDFILE_BACKEND') module = import_module(backend) return module.sendfile
ImportError
dataset/ETHPy150Open johnsensible/django-sendfile/sendfile/__init__.py/_get_sendfile
def sendfile(request, filename, attachment=False, attachment_filename=None, mimetype=None, encoding=None): ''' create a response to send file using backend configured in SENDFILE_BACKEND If attachment is True the content-disposition header will be set. This will typically prompt the user to download the file, rather than view it. The content-disposition filename depends on the value of attachment_filename: None (default): Same as filename False: No content-disposition filename String: Value used as filename If no mimetype or encoding are specified, then they will be guessed via the filename (using the standard python mimetypes module) ''' _sendfile = _get_sendfile() if not os.path.exists(filename): from django.http import Http404 raise Http404('"%s" does not exist' % filename) guessed_mimetype, guessed_encoding = guess_type(filename) if mimetype is None: if guessed_mimetype: mimetype = guessed_mimetype else: mimetype = 'application/octet-stream' response = _sendfile(request, filename, mimetype=mimetype) if attachment: if attachment_filename is None: attachment_filename = os.path.basename(filename) parts = ['attachment'] if attachment_filename: try: from django.utils.encoding import force_text except __HOLE__: # Django 1.3 from django.utils.encoding import force_unicode as force_text attachment_filename = force_text(attachment_filename) ascii_filename = unicodedata.normalize('NFKD', attachment_filename).encode('ascii','ignore') parts.append('filename="%s"' % ascii_filename) if ascii_filename != attachment_filename: from django.utils.http import urlquote quoted_filename = urlquote(attachment_filename) parts.append('filename*=UTF-8\'\'%s' % quoted_filename) response['Content-Disposition'] = '; '.join(parts) response['Content-length'] = os.path.getsize(filename) response['Content-Type'] = mimetype if not encoding: encoding = guessed_encoding if encoding: response['Content-Encoding'] = encoding return response
ImportError
dataset/ETHPy150Open johnsensible/django-sendfile/sendfile/__init__.py/sendfile
def get_object_fallback(cls, title, locale, default=None, **kwargs): """Return an instance of cls matching title and locale, or fall back to the default locale. When falling back to the default locale, follow any wiki redirects internally. If the fallback fails, the return value is `default`. You may pass in additional kwargs which go straight to the query. """ try: return cls.objects.get(title=title, locale=locale, **kwargs) except (cls.DoesNotExist, __HOLE__): pass # Fallback try: default_lang_doc = cls.objects.get( title=title, locale=settings.WIKI_DEFAULT_LANGUAGE, **kwargs) # Return the translation of this English item: if hasattr(default_lang_doc, 'translated_to'): trans = default_lang_doc.translated_to(locale) if trans and trans.current_revision: return trans # Follow redirects internally in an attempt to find a # translation of the final redirect target in the requested # locale. This happens a lot when an English article is # renamed and a redirect is left in its wake: we wouldn't want # the non-English user to be linked to the English redirect, # which would happily redirect them to the English final # article. if hasattr(default_lang_doc, 'redirect_document'): target = default_lang_doc.redirect_document() if target: trans = target.translated_to(locale) if trans and trans.current_revision: return trans # Return the English item: return default_lang_doc # Okay, all else failed except (cls.DoesNotExist, IOError): return default
IOError
dataset/ETHPy150Open mozilla/kitsune/kitsune/sumo/parser.py/get_object_fallback
def feed(request, url, feed_dict=None): """Provided for backwards compatibility.""" if not feed_dict: raise Http404(_("No feeds are registered.")) slug = url.partition('/')[0] try: f = feed_dict[slug] except __HOLE__: raise Http404(_("Slug %r isn't registered.") % slug) instance = f() instance.feed_url = getattr(f, 'feed_url', None) or request.path instance.title_template = f.title_template or ('feeds/%s_title.html' % slug) instance.description_template = f.description_template or ('feeds/%s_description.html' % slug) return instance(request)
KeyError
dataset/ETHPy150Open django/django/django/contrib/gis/views.py/feed
def get_ordered_insertion_target(self, node, parent): """ Attempts to retrieve a suitable right sibling for ``node`` underneath ``parent`` (which may be ``None`` in the case of root nodes) so that ordering by the fields specified by the node's class' ``order_insertion_by`` option is maintained. Returns ``None`` if no suitable sibling can be found. """ right_sibling = None # Optimisation - if the parent doesn't have descendants, # the node will always be its last child. if parent is None or parent.get_descendant_count() > 0: opts = node._mptt_meta order_by = opts.order_insertion_by[:] filters = self.insertion_target_filters(node, order_by) if parent: filters = filters & Q(**{opts.parent_attr: parent}) # Fall back on tree ordering if multiple child nodes have # the same values. order_by.append(opts.left_attr) else: filters = filters & Q(**{opts.parent_attr: None}) # Fall back on tree id ordering if multiple root nodes have # the same values. order_by.append(opts.tree_id_attr) queryset = node.__class__._tree_manager.filter(filters).order_by(*order_by) if node.pk: queryset = queryset.exclude(pk=node.pk) try: right_sibling = queryset[:1][0] except __HOLE__: # No suitable right sibling could be found pass return right_sibling
IndexError
dataset/ETHPy150Open django-mptt/django-mptt/mptt/models.py/MPTTOptions.get_ordered_insertion_target
def __new__(meta, class_name, bases, class_dict): """ Create subclasses of MPTTModel. This: - adds the MPTT fields to the class - adds a TreeManager to the model """ if class_name == 'NewBase' and class_dict == {}: return super(MPTTModelBase, meta).__new__(meta, class_name, bases, class_dict) is_MPTTModel = False try: MPTTModel except __HOLE__: is_MPTTModel = True MPTTMeta = class_dict.pop('MPTTMeta', None) if not MPTTMeta: class MPTTMeta: pass initial_options = frozenset(dir(MPTTMeta)) # extend MPTTMeta from base classes for base in bases: if hasattr(base, '_mptt_meta'): for name, value in base._mptt_meta: if name == 'tree_manager_attr': continue if name not in initial_options: setattr(MPTTMeta, name, value) class_dict['_mptt_meta'] = MPTTOptions(MPTTMeta) super_new = super(MPTTModelBase, meta).__new__ cls = super_new(meta, class_name, bases, class_dict) cls = meta.register(cls) # see error cases in TreeManager.disable_mptt_updates for the reasoning here. cls._mptt_tracking_base = None if is_MPTTModel: bases = [cls] else: bases = [base for base in cls.mro() if issubclass(base, MPTTModel)] for base in bases: if (not (base._meta.abstract or base._meta.proxy) and base._tree_manager.tree_model is base): cls._mptt_tracking_base = base break if cls is cls._mptt_tracking_base: cls._threadlocal = threading.local() # set on first access (to make threading errors more obvious): # cls._threadlocal.mptt_delayed_tree_changes = None return cls
NameError
dataset/ETHPy150Open django-mptt/django-mptt/mptt/models.py/MPTTModelBase.__new__
@classmethod def register(meta, cls, **kwargs): """ For the weird cases when you need to add tree-ness to an *existing* class. For other cases you should subclass MPTTModel instead of calling this. """ if not issubclass(cls, models.Model): raise ValueError(_("register() expects a Django model class argument")) if not hasattr(cls, '_mptt_meta'): cls._mptt_meta = MPTTOptions(**kwargs) abstract = getattr(cls._meta, 'abstract', False) try: MPTTModel except __HOLE__: # We're defining the base class right now, so don't do anything # We only want to add this stuff to the subclasses. # (Otherwise if field names are customized, we'll end up adding two # copies) pass else: if not issubclass(cls, MPTTModel): bases = list(cls.__bases__) # strip out bases that are strict superclasses of MPTTModel. # (i.e. Model, object) # this helps linearize the type hierarchy if possible for i in range(len(bases) - 1, -1, -1): if issubclass(MPTTModel, bases[i]): del bases[i] bases.insert(0, MPTTModel) cls.__bases__ = tuple(bases) if _get_tree_model(cls) is cls: # HACK: _meta.get_field() doesn't work before AppCache.ready in Django>=1.8 # ( see https://code.djangoproject.com/ticket/24231 ) # So the only way to get existing fields is using local_fields on all superclasses. existing_field_names = set() for base in cls.mro(): if hasattr(base, '_meta'): existing_field_names.update([f.name for f in base._meta.local_fields]) for key in ('left_attr', 'right_attr', 'tree_id_attr', 'level_attr'): field_name = getattr(cls._mptt_meta, key) if field_name not in existing_field_names: field = models.PositiveIntegerField(db_index=True, editable=False) field.contribute_to_class(cls, field_name) # Add a tree manager, if there isn't one already if not abstract: manager = getattr(cls, 'objects', None) if manager is None: manager = cls._default_manager._copy_to_model(cls) manager.contribute_to_class(cls, 'objects') elif manager.model != cls: # manager was inherited manager = manager._copy_to_model(cls) manager.contribute_to_class(cls, 'objects') # make sure we have a tree manager somewhere tree_manager = None cls_managers = cls._meta.concrete_managers + cls._meta.abstract_managers for __, __, cls_manager in cls_managers: if isinstance(cls_manager, TreeManager): # prefer any locally defined manager (i.e. keep going if not local) if cls_manager.model is cls: tree_manager = cls_manager break if tree_manager and tree_manager.model is not cls: tree_manager = tree_manager._copy_to_model(cls) elif tree_manager is None: tree_manager = TreeManager() tree_manager.contribute_to_class(cls, '_tree_manager') # avoid using ManagerDescriptor, so instances can refer to self._tree_manager setattr(cls, '_tree_manager', tree_manager) return cls
NameError
dataset/ETHPy150Open django-mptt/django-mptt/mptt/models.py/MPTTModelBase.register
def save(self, *args, **kwargs): """ If this is a new node, sets tree fields up before it is inserted into the database, making room in the tree structure as necessary, defaulting to making the new node the last child of its parent. If the node's left and right edge indicators have already been set, we take this as indication that the node has already been set up for insertion, so its tree fields are left untouched. If this is an existing node and its parent has been changed, performs reparenting in the tree structure, defaulting to making the node the last child of its new parent. In either case, if the node's class has its ``order_insertion_by`` tree option set, the node will be inserted or moved to the appropriate position to maintain ordering by the specified field. """ do_updates = self.__class__._mptt_updates_enabled track_updates = self.__class__._mptt_is_tracking opts = self._mptt_meta if not (do_updates or track_updates): # inside manager.disable_mptt_updates(), don't do any updates. # unless we're also inside TreeManager.delay_mptt_updates() if self._mpttfield('left') is None: # we need to set *some* values, though don't care too much what. parent = getattr(self, '_%s_cache' % opts.parent_attr, None) # if we have a cached parent, have a stab at getting # possibly-correct values. otherwise, meh. if parent: left = parent._mpttfield('left') + 1 setattr(self, opts.left_attr, left) setattr(self, opts.right_attr, left + 1) setattr(self, opts.level_attr, parent._mpttfield('level') + 1) setattr(self, opts.tree_id_attr, parent._mpttfield('tree_id')) self._tree_manager._post_insert_update_cached_parent_right(parent, 2) else: setattr(self, opts.left_attr, 1) setattr(self, opts.right_attr, 2) setattr(self, opts.level_attr, 0) setattr(self, opts.tree_id_attr, 0) return super(MPTTModel, self).save(*args, **kwargs) parent_id = opts.get_raw_field_value(self, opts.parent_attr) # determine whether this instance is already in the db force_update = kwargs.get('force_update', False) force_insert = kwargs.get('force_insert', False) collapse_old_tree = None deferred_fields = self.get_deferred_fields() if force_update or (not force_insert and self._is_saved(using=kwargs.get('using'))): # it already exists, so do a move old_parent_id = self._mptt_cached_fields[opts.parent_attr] if old_parent_id is DeferredAttribute: same_order = True else: same_order = old_parent_id == parent_id if same_order and len(self._mptt_cached_fields) > 1: for field_name, old_value in self._mptt_cached_fields.items(): if old_value is DeferredAttribute and field_name not in deferred_fields: same_order = False break if old_value != opts.get_raw_field_value(self, field_name): same_order = False break if not do_updates and not same_order: same_order = True self.__class__._mptt_track_tree_modified(self._mpttfield('tree_id')) elif (not do_updates) and not same_order and old_parent_id is None: # the old tree no longer exists, so we need to collapse it. 
collapse_old_tree = self._mpttfield('tree_id') parent = getattr(self, opts.parent_attr) tree_id = parent._mpttfield('tree_id') left = parent._mpttfield('left') + 1 self.__class__._mptt_track_tree_modified(tree_id) setattr(self, opts.tree_id_attr, tree_id) setattr(self, opts.left_attr, left) setattr(self, opts.right_attr, left + 1) setattr(self, opts.level_attr, parent._mpttfield('level') + 1) same_order = True if not same_order: opts.set_raw_field_value(self, opts.parent_attr, old_parent_id) try: right_sibling = None if opts.order_insertion_by: right_sibling = opts.get_ordered_insertion_target( self, getattr(self, opts.parent_attr)) if parent_id is not None: parent = getattr(self, opts.parent_attr) # If we aren't already a descendant of the new parent, # we need to update the parent.rght so things like # get_children and get_descendant_count work correctly. update_cached_parent = ( getattr(self, opts.tree_id_attr) != getattr(parent, opts.tree_id_attr) or # noqa getattr(self, opts.left_attr) < getattr(parent, opts.left_attr) or getattr(self, opts.right_attr) > getattr(parent, opts.right_attr)) if right_sibling: self._tree_manager._move_node( self, right_sibling, 'left', save=False, refresh_target=False) else: # Default movement if parent_id is None: root_nodes = self._tree_manager.root_nodes() try: rightmost_sibling = root_nodes.exclude( pk=self.pk).order_by('-' + opts.tree_id_attr)[0] self._tree_manager._move_node( self, rightmost_sibling, 'right', save=False, refresh_target=False) except __HOLE__: pass else: self._tree_manager._move_node( self, parent, 'last-child', save=False) if parent_id is not None and update_cached_parent: # Update rght of cached parent right_shift = 2 * (self.get_descendant_count() + 1) self._tree_manager._post_insert_update_cached_parent_right( parent, right_shift) finally: # Make sure the new parent is always # restored on the way out in case of errors. opts.set_raw_field_value(self, opts.parent_attr, parent_id) # If there were no exceptions raised then send a moved signal node_moved.send(sender=self.__class__, instance=self, target=getattr(self, opts.parent_attr)) else: opts.set_raw_field_value(self, opts.parent_attr, parent_id) if not track_updates: # When not using delayed/disabled updates, # populate update_fields with user defined model fields. # This helps preserve tree integrity when saving model on top # of a modified tree. if len(args) > 3: if not args[3]: args = list(args) args[3] = self._get_user_field_names() args = tuple(args) else: if not kwargs.get("update_fields", None): kwargs["update_fields"] = self._get_user_field_names() else: # new node, do an insert if (getattr(self, opts.left_attr) and getattr(self, opts.right_attr)): # This node has already been set up for insertion. pass else: parent = getattr(self, opts.parent_attr) right_sibling = None # if we're inside delay_mptt_updates, don't do queries to find # sibling position. instead, do default insertion. correct # positions will be found during partial rebuild later. # *unless* this is a root node. (as update tracking doesn't # handle re-ordering of trees.) 
if do_updates or parent is None: if opts.order_insertion_by: right_sibling = opts.get_ordered_insertion_target(self, parent) if right_sibling: self.insert_at(right_sibling, 'left', allow_existing_pk=True, refresh_target=False) if parent: # since we didn't insert into parent, we have to update parent.rght # here instead of in TreeManager.insert_node() right_shift = 2 * (self.get_descendant_count() + 1) self._tree_manager._post_insert_update_cached_parent_right( parent, right_shift) else: # Default insertion self.insert_at(parent, position='last-child', allow_existing_pk=True) try: super(MPTTModel, self).save(*args, **kwargs) finally: if collapse_old_tree is not None: self._tree_manager._create_tree_space(collapse_old_tree, -1) self._mptt_saved = True opts.update_mptt_cached_fields(self)
IndexError
dataset/ETHPy150Open django-mptt/django-mptt/mptt/models.py/MPTTModel.save
@blueprint.route("/magic/<pattern>.jpg") def image(pattern): """Get the first matching image.""" # TODO: share this logic text = Text(pattern) items = [] for template in app.template_service.all(): ratio, path = template.match(str(text).lower()) if not ratio: continue data = {} data['ratio'] = ratio data['image'] = route('image.get', key=template.key, path=path) items.append(data) try: url = max(items, key=lambda item: item['ratio'])['image'] except __HOLE__: url = route('image.get', key="unknown", path="_") return redirect(url)
ValueError
dataset/ETHPy150Open jacebrowning/memegen/memegen/routes/magic.py/image
def test(self):
    self.assertTrue('/test_var/structure variable' in self.h5file)
    self.h5file.close()

    # Do a copy to a temporary to avoid modifying the original file.
    h5fname_copy = tempfile.mktemp(".h5")
    shutil.copy(self.h5fname, h5fname_copy)
    # Reopen in 'a'ppend mode.
    try:
        self.h5file = tables.open_file(h5fname_copy, 'a')
    except __HOLE__:
        # Problems opening the file (probably no permission to write to it).
        return
    tbl = self.h5file.get_node('/test_var/structure variable')

    # Try to add rows to a non-chunked table (this should raise an error).
    self.assertRaises(tables.HDF5ExtError, tbl.append,
                      [(4.0, 5.0, [2.0, 3.0], 'd')])

    # Appending using the Row interface.
    self.assertRaises(tables.HDF5ExtError, tbl.row.append)

    # Remove the file copy.
    self.h5file.close()  # Close the handler first.
    os.remove(h5fname_copy)
IOError
dataset/ETHPy150Open PyTables/PyTables/tables/tests/test_hdf5compat.py/ContiguousCompoundAppendTestCase.test
@task def watch_docs(): """Run build the docs when a file changes.""" try: import sphinx_autobuild # noqa except __HOLE__: print('ERROR: watch task requires the sphinx_autobuild package.') print('Install it with:') print(' pip install sphinx-autobuild') sys.exit(1) run('sphinx-autobuild {0} {1} --watch {2}'.format( docs_dir, build_dir, 'marshmallow'), echo=True, pty=True)
ImportError
dataset/ETHPy150Open marshmallow-code/marshmallow/tasks.py/watch_docs
def test_backend(self): b = DatabaseBackend(app=app) tid = gen_unique_id() self.assertEqual(b.get_status(tid), states.PENDING) self.assertIsNone(b.get_result(tid)) b.mark_as_done(tid, 42) self.assertEqual(b.get_status(tid), states.SUCCESS) self.assertEqual(b.get_result(tid), 42) tid2 = gen_unique_id() result = {'foo': 'baz', 'bar': SomeClass(12345)} b.mark_as_done(tid2, result) # is serialized properly. rindb = b.get_result(tid2) self.assertEqual(rindb.get('foo'), 'baz') self.assertEqual(rindb.get('bar').data, 12345) tid3 = gen_unique_id() try: raise KeyError('foo') except __HOLE__ as exception: b.mark_as_failure(tid3, exception) self.assertEqual(b.get_status(tid3), states.FAILURE) self.assertIsInstance(b.get_result(tid3), KeyError)
KeyError
dataset/ETHPy150Open celery/django-celery/djcelery/tests/test_backends/test_database.py/TestDatabaseBackend.test_backend
def tearDown(self): if self.timing: elapsed = time.time() - self.begun with open(TIMING_FILE, "r") as jj: try: times = json.load(jj) except __HOLE__: times = [] times.append((elapsed, self._testMethodName)) with open(TIMING_FILE, "w") as jj: json.dump(times, jj) super(BaseTest, self).tearDown()
ValueError
dataset/ETHPy150Open rackspace/pyrax/tests/unit/base_test.py/BaseTest.tearDown
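tearDown above reads the JSON timing file, appends an (elapsed, test_name) pair, and rewrites it, treating invalid JSON as an empty list. The same read-append-write pattern in isolation (this sketch also tolerates a missing file, which the original does not):

import json

def append_timing(path, elapsed, name):
    try:
        with open(path) as fh:
            times = json.load(fh)
    except (IOError, ValueError):  # missing file or invalid JSON
        times = []
    times.append((elapsed, name))
    with open(path, "w") as fh:
        json.dump(times, fh)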
def __init__(self, request): super(CMSToolbar, self).__init__() self.right_items = [] self.left_items = [] self.populated = False self.post_template_populated = False self.menus = {} self.obj = None self.redirect_url = None self.request = None self.is_staff = None self.edit_mode = None self.edit_mode_url_on = get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON') self.edit_mode_url_off = get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF') self.disable_url = get_cms_setting('CMS_TOOLBAR_URL__DISABLE') self.build_mode = None self.use_draft = None self.show_toolbar = None self.login_form = None self.clipboard = None self.language = None self.toolbar_language = None self.simple_structure_mode = get_cms_setting('TOOLBAR_SIMPLE_STRUCTURE_MODE') self.show_toolbar = True self.init_toolbar(request) with force_language(self.language): try: decorator = resolve(self.request.path_info).func try: # If the original view is decorated we try to extract the real function # module instead of the decorator's one if decorator and getattr(decorator, 'func_closure', False): # python 2 self.app_name = decorator.func_closure[0].cell_contents.__module__ elif decorator and getattr(decorator, '__closure__', False): # python 3 self.app_name = decorator.__closure__[0].cell_contents.__module__ else: raise AttributeError() except (__HOLE__, AttributeError): # no decorator self.app_name = decorator.__module__ except Resolver404: self.app_name = "" toolbars = toolbar_pool.get_toolbars() parts = self.app_name.split('.') while parts: path = '.'.join(parts) if path in installed_apps(): self.app_name = path break parts.pop() self.toolbars = OrderedDict() for key in toolbars: toolbar = toolbars[key](self.request, self, toolbars[key].check_current_app(key, self.app_name), self.app_name) self.toolbars[key] = toolbar
TypeError
dataset/ETHPy150Open divio/django-cms/cms/toolbar/toolbar.py/CMSToolbar.__init__
def handle(self, *args, **options): if not options["tos"]: raise CommandError("""You must confirm that this user has accepted the Terms of Service by passing --this-user-has-accepted-the-tos.""") if not options["domain"]: raise CommandError("""Please specify a realm by passing --domain.""") try: realm = get_realm(options["domain"]) except Realm.DoesNotExist: raise CommandError("Realm does not exist.") try: email = options['email'] full_name = options['full_name'] try: validators.validate_email(email) except ValidationError: raise CommandError("Invalid email address.") except __HOLE__: if 'email' in options or 'full_name' in options: raise CommandError("""Either specify an email and full name as two parameters, or specify no parameters for interactive user creation.""") else: while True: email = input("Email: ") try: validators.validate_email(email) break except ValidationError: print("Invalid email address.", file=sys.stderr) full_name = input("Full name: ") try: notify_new_user(do_create_user(email, initial_password(email), realm, full_name, email_to_username(email)), internal=True) except IntegrityError: raise CommandError("User already exists.")
KeyError
dataset/ETHPy150Open zulip/zulip/zerver/management/commands/create_user.py/Command.handle
def _check_vpc(vpc_id=None, vpc_name=None, region=None, key=None, keyid=None, profile=None): data = __salt__['boto_vpc.get_id'](name=vpc_name, region=region, key=key, keyid=keyid, profile=profile) try: return data.get('id') except __HOLE__: return None except KeyError: return None
TypeError
dataset/ETHPy150Open saltstack/salt/salt/modules/boto_secgroup.py/_check_vpc
def get_python_exec(ver): """Return the executable of python for the given version.""" # XXX Check that the file actually exists try: return PYEXECS[ver] except __HOLE__: raise ValueError("Version %s not supported/recognized" % ver)
KeyError
dataset/ETHPy150Open scipy/scipy/tools/win32/build_scripts/pavement.py/get_python_exec
def raw_build_sdist(cwd): cmd = ["python", "setup.py", "sdist", "--format=zip"] build_log = "sdist.log" f = open(build_log, 'w') try: try: st = subprocess.call(cmd, #shell = True, stderr = subprocess.STDOUT, stdout = f, cwd=cwd) if st: raise RuntimeError("The cmd failed with status %d" % st) finally: f.close() except (subprocess.CalledProcessError, __HOLE__), e: print e msg = """ There was an error while executing the following command: %s Error was : %s Look at the log (%s).""" % (cmd, str(e), build_log) raise Exception(msg)
RuntimeError
dataset/ETHPy150Open scipy/scipy/tools/win32/build_scripts/pavement.py/raw_build_sdist
def raw_build_arch(pyver, arch, src_root): scipy_verstr = get_scipy_version(src_root) bdir = bootstrap_dir(pyver) print "Building scipy (version %s) binary for python %s, arch is %s" % \ (scipy_verstr, get_python_exec(pyver), arch) if BUILD_MSI: cmd = [get_python_exec(pyver), "setup.py", "build", "-c", "mingw32", "bdist_msi"] else: cmd = [get_python_exec(pyver), "setup.py", "build", "-c", "mingw32", "bdist_wininst"] build_log = "build-%s-%s.log" % (arch, pyver) f = open(build_log, 'w') try: try: st = subprocess.call(cmd, #shell = True, stderr = subprocess.STDOUT, stdout = f, cwd=bdir) if st: raise RuntimeError("The cmd failed with status %d" % st) finally: f.close() except (subprocess.CalledProcessError, __HOLE__), e: print e msg = """ There was an error while executing the following command: %s Error was : %s Look at the build log (%s).""" % (cmd, str(e), build_log) raise Exception(msg) move_binary(arch, pyver, bdir, scipy_verstr)
RuntimeError
dataset/ETHPy150Open scipy/scipy/tools/win32/build_scripts/pavement.py/raw_build_arch
def pick(self): # <5> try: return self._items.pop() except __HOLE__: raise LookupError('pick from empty BingoCage')
IndexError
dataset/ETHPy150Open fluentpython/example-code/11-iface-abc/bingo.py/BingoCage.pick
def seed(self, a=None): # """Initialize internal state from hashable object. # None or no argument seeds from current time or from an operating # system specific randomness source if available. # If a is not None or an int or long, hash(a) is used instead. # """ if a is None: try: a = long(_hexlify(_urandom(16)), 16) except __HOLE__: import time a = long(time.time() * 256) # use fractional seconds super(Random, self).seed(a) self.gauss_next = None
NotImplementedError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/random.py/Random.seed
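seed() derives a 128-bit integer from os.urandom(16) via hexlify; a standalone check that this equals a big-endian byte read (int.from_bytes is the Python 3 spelling of the same trick):

import os
from binascii import hexlify

raw = os.urandom(16)
a1 = int(hexlify(raw), 16)       # what seed() computes above
a2 = int.from_bytes(raw, 'big')  # equivalent modern form
assert a1 == a2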
def setstate(self, state): # """Restore internal state from object returned by getstate().""" version = state[0] if version == 3: version, internalstate, self.gauss_next = state super(Random, self).setstate(internalstate) elif version == 2: version, internalstate, self.gauss_next = state # In version 2, the state was saved as signed ints, which causes # inconsistencies between 32/64-bit systems. The state is # really unsigned 32-bit ints, so we convert negative ints from # version 2 to positive longs for version 3. try: internalstate = tuple( long(x) % (2**32) for x in internalstate ) except __HOLE__, e: raise TypeError, e super(Random, self).setstate(internalstate) else: raise ValueError("state with version %s passed to " "Random.setstate() of version %s" % (version, self.VERSION)) ## ---- Methods below this point do not need to be overridden when ## ---- subclassing for the purpose of using a different core generator. ## -------------------- pickle support -------------------
ValueError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/random.py/Random.setstate
def _randbelow(self, n, _log=_log, fint=int, _maxwidth=1L<<BPF): #def _randbelow(self, n, _log=_log, int=int, _maxwidth=1L<<BPF, # _Method=_MethodType, _BuiltinMethod=_BuiltinMethodType): # """Return a random int in the range [0,n) # Handles the case where n has more bits than returned # by a single call to the underlying generator. # """ try: getrandbits = self.getrandbits except __HOLE__: pass else: # Only call self.getrandbits if the original random() builtin method # has not been overridden or if a new getrandbits() was supplied. # This assures that the two methods correspond. #if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method: if True: k = fint(1.00001 + _log(n-1, 2.0)) # 2**k > n-1 > 2**(k-2) r = getrandbits(k) while r >= n: r = getrandbits(k) return long(r) #if n >= _maxwidth: # _warn("Underlying random() generator does not supply \n" # "enough bits to choose from a population range this large") return fint(self.random() * n) ## -------------------- sequence methods ------------------- def choice(self, seq): # """Choose a random element from a non-empty sequence.""" return seq[int(self.random() * len(seq))] # raises IndexError if seq is empty def shuffle(self, x, random=None, fint=int): # """x, random=random.random -> shuffle list x in place; return None. # Optional arg random is a 0-argument function returning a random # float in [0.0, 1.0); by default, the standard random.random. # """ if random is None: random = self.random for i in reversed(xrange(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] j = fint(random() * (i+1)) x[i], x[j] = x[j], x[i] def sample(self, population, k): # """Chooses k unique random elements from a population sequence. # Returns a new list containing elements from the population while # leaving the original population unchanged. The resulting list is # in selection order so that all sub-slices will also be valid random # samples. This allows raffle winners (the sample) to be partitioned # into grand prize and second place winners (the subslices). # # Members of the population need not be hashable or unique. If the # population contains repeats, then each occurrence is a possible # selection in the sample. # # To choose a sample in a range of integers, use xrange as an argument. # This is especially fast and space efficient for sampling from a # large population: sample(xrange(10000000), 60) # """ # XXX Although the documentation says `population` is "a sequence", # XXX attempts are made to cater to any iterable with a __len__ # XXX method. This has had mixed success. Examples from both # XXX sides: sets work fine, and should become officially supported; # XXX dicts are much harder, and have failed in various subtle # XXX ways across attempts. Support for mapping types should probably # XXX be dropped (and users should pass mapping.keys() or .values() # XXX explicitly). # Sampling without replacement entails tracking either potential # selections (the pool) in a list or previous selections in a set. # When the number of selections is small compared to the # population, then tracking selections is efficient, requiring # only a small set and an occasional reselection. For # a larger number of selections, the pool tracking method is # preferred since the list takes less space than the # set and it doesn't suffer from frequent reselections. 
n = len(population) if not 0 <= k <= n: raise ValueError, "sample larger than population" __random = self.random _int = int result = [None] * k setsize = 21 # size of a small set minus size of an empty list if k > 5: setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets if n <= setsize or hasattr(population, "keys"): # An n-length list is smaller than a k-length set, or this is a # mapping type so the other algorithm wouldn't work. pool = list(population) for i in xrange(k): # invariant: non-selected at [0,n-i) j = _int(__random() * (n-i)) result[i] = pool[j] pool[j] = pool[n-i-1] # move non-selected item into vacancy else: try: selected = set() selected_add = selected.add for i in xrange(k): j = _int(__random() * n) while j in selected: j = _int(__random() * n) selected_add(j) result[i] = population[j] except (TypeError, KeyError): # handle (at least) sets if isinstance(population, list): raise return self.sample(tuple(population), k) return result ## -------------------- real-valued distributions ------------------- ## -------------------- uniform distribution ------------------- def uniform(self, a, b): # """Get a random number in the range [a, b).""" return a + (b-a) * self.random() ## -------------------- triangular -------------------- def triangular(self, low=0.0, high=1.0, mode=None): # """Triangular distribution. # # Continuous distribution bounded by given lower and upper limits, # and having a given mode value in-between. # # http://en.wikipedia.org/wiki/Triangular_distribution # """ u = self.random() c = 0.5 if mode is None else (mode - low) / (high - low) if u > c: u = 1.0 - u c = 1.0 - c low, high = high, low return low + (high - low) * (u * c) ** 0.5 ## -------------------- normal distribution -------------------- def normalvariate(self, mu, sigma): # """Normal distribution. # mu is the mean, and sigma is the standard deviation. # """ # mu = mean, sigma = standard deviation # Uses Kinderman and Monahan method. Reference: Kinderman, # A.J. and Monahan, J.F., "Computer generation of random # variables using the ratio of uniform deviates", ACM Trans # Math Software, 3, (1977), pp257-260. __random = self.random while 1: u1 = __random() u2 = 1.0 - __random() z = NV_MAGICCONST*(u1-0.5)/u2 zz = z*z/4.0 if zz <= -_log(u2): break return mu + z*sigma ## -------------------- lognormal distribution -------------------- def lognormvariate(self, mu, sigma): # """Log normal distribution. # If you take the natural logarithm of this distribution, you'll get a # normal distribution with mean mu and standard deviation sigma. # mu can have any value, and sigma must be greater than zero. # """ return _exp(self.normalvariate(mu, sigma)) ## -------------------- exponential distribution -------------------- def expovariate(self, lambd): # """Exponential distribution. # lambd is 1.0 divided by the desired mean. It should be # nonzero. (The parameter would be called "lambda", but that is # a reserved word in Python.) Returned values range from 0 to # positive infinity if lambd is positive, and from negative # infinity to 0 if lambd is negative. # """ # lambd: rate lambd = 1/mean # ('lambda' is a Python reserved word) __random = self.random u = __random() while u <= 1e-7: u = __random() return -_log(u)/lambd ## -------------------- von Mises distribution -------------------- def vonmisesvariate(self, mu, kappa): # """Circular data distribution. 
# mu is the mean angle, expressed in radians between 0 and 2*pi, and # kappa is the concentration parameter, which must be greater than or # equal to zero. If kappa is equal to zero, this distribution reduces # to a uniform random angle over the range 0 to 2*pi. # """ # mu: mean angle (in radians between 0 and 2*pi) # kappa: concentration parameter kappa (>= 0) # if kappa = 0 generate uniform random angle # Based upon an algorithm published in: Fisher, N.I., # "Statistical Analysis of Circular Data", Cambridge # University Press, 1993. # Thanks to Magnus Kessler for a correction to the # implementation of step 4. __random = self.random if kappa <= 1e-6: return TWOPI * __random() a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa) b = (a - _sqrt(2.0 * a))/(2.0 * kappa) r = (1.0 + b * b)/(2.0 * b) while 1: u1 = __random() z = _cos(_pi * u1) f = (1.0 + r * z)/(r + z) c = kappa * (r - f) u2 = __random() if u2 < c * (2.0 - c) or u2 <= c * _exp(1.0 - c): break u3 = __random() if u3 > 0.5: theta = (mu % TWOPI) + _acos(f) else: theta = (mu % TWOPI) - _acos(f) return theta ## -------------------- gamma distribution -------------------- def gammavariate(self, alpha, beta): # """Gamma distribution. Not the gamma function! # Conditions on the parameters are alpha > 0 and beta > 0. # """ # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2 # Warning: a few older sources define the gamma distribution in terms # of alpha > -1.0 if alpha <= 0.0 or beta <= 0.0: raise ValueError, 'gammavariate: alpha and beta must be > 0.0' __random = self.random if alpha > 1.0: # Uses R.C.H. Cheng, "The generation of Gamma # variables with non-integral shape parameters", # Applied Statistics, (1977), 26, No. 1, p71-74 ainv = _sqrt(2.0 * alpha - 1.0) bbb = alpha - LOG4 ccc = alpha + ainv while 1: u1 = __random() if not 1e-7 < u1 < .9999999: continue u2 = 1.0 - __random() v = _log(u1/(1.0-u1))/ainv x = alpha*_exp(v) z = u1*u1*u2 r = bbb+ccc*v-x if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z): return x * beta elif alpha == 1.0: # expovariate(1) u = __random() while u <= 1e-7: u = __random() return -_log(u) * beta else: # alpha is between 0 and 1 (exclusive) # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle while 1: u = __random() b = (_e + alpha)/_e p = b*u if p <= 1.0: x = p ** (1.0/alpha) else: x = -_log((b-p)/alpha) u1 = __random() if p > 1.0: if u1 <= x ** (alpha - 1.0): break elif u1 <= _exp(-x): break return x * beta ## -------------------- Gauss (faster alternative) -------------------- def gauss(self, mu, sigma): # """Gaussian distribution. # mu is the mean, and sigma is the standard deviation. This is # slightly faster than the normalvariate() function. # Not thread-safe without a lock around calls. # """ # When x and y are two variables from [0, 1), uniformly # distributed, then # # cos(2*pi*x)*sqrt(-2*log(1-y)) # sin(2*pi*x)*sqrt(-2*log(1-y)) # # are two *independent* variables with normal distribution # (mu = 0, sigma = 1). # (Lambert Meertens) # (corrected version; bug discovered by Mike Miller, fixed by LM) # Multithreading note: When two threads call this function # simultaneously, it is possible that they will receive the # same return value. The window is very small though. To # avoid this, you have to use a lock around all calls. (I # didn't want to slow this down in the serial case by using a # lock here.) 
__random = self.random z = self.gauss_next self.gauss_next = None if z is None: x2pi = __random() * TWOPI g2rad = _sqrt(-2.0 * _log(1.0 - __random())) z = _cos(x2pi) * g2rad self.gauss_next = _sin(x2pi) * g2rad return mu + z*sigma ## -------------------- beta -------------------- ## See ## http://sourceforge.net/bugs/?func=detailbug&bug_id=130030&group_id=5470 ## for Ivan Frohne's insightful analysis of why the original implementation: ## ## def betavariate(self, alpha, beta): ## # Discrete Event Simulation in C, pp 87-88. ## ## y = self.expovariate(alpha) ## z = self.expovariate(1.0/beta) ## return z/(y+z) ## ## was dead wrong, and how it probably got that way. def betavariate(self, alpha, beta): # """Beta distribution. # Conditions on the parameters are alpha > 0 and beta > 0. # Returned values range between 0 and 1. # """ # This version due to Janne Sinkkonen, and matches all the std # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution"). y = self.gammavariate(alpha, 1.) if y == 0: return 0.0 else: return y / (y + self.gammavariate(beta, 1.)) ## -------------------- Pareto -------------------- def paretovariate(self, alpha): # """Pareto distribution. alpha is the shape parameter.""" # Jain, pg. 495 u = 1.0 - self.random() return 1.0 / pow(u, 1.0/alpha) ## -------------------- Weibull -------------------- def weibullvariate(self, alpha, beta): # """Weibull distribution. # alpha is the scale parameter and beta is the shape parameter. # """ # Jain, pg. 499; bug fix courtesy Bill Arms u = 1.0 - self.random() return alpha * pow(-_log(u), 1.0/beta) ## -------------------- Wichmann-Hill -------------------
AttributeError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/random.py/Random._randbelow
def seed(self, a=None): # """Initialize internal state from hashable object. # # None or no argument seeds from current time or from an operating # system specific randomness source if available. # # If a is not None or an int or long, hash(a) is used instead. # # If a is an int or long, a is used directly. Distinct values between # 0 and 27814431486575L inclusive are guaranteed to yield distinct # internal states (this guarantee is specific to the default # Wichmann-Hill generator). # """ if a is None: try: a = long(_hexlify(_urandom(16)), 16) except __HOLE__: import time a = long(time.time() * 256) # use fractional seconds if not isinstance(a, (int, long)): a = hash(a) a, x = divmod(a, 30268) a, y = divmod(a, 30306) a, z = divmod(a, 30322) self._seed = int(x)+1, int(y)+1, int(z)+1 self.gauss_next = None
NotImplementedError
dataset/ETHPy150Open anandology/pyjamas/pyjs/src/pyjs/lib/random.py/WichmannHill.seed
def guess_external_ip(): gateways = netifaces.gateways() try: ifnet = gateways['default'][netifaces.AF_INET][1] return netifaces.ifaddresses(ifnet)[netifaces.AF_INET][0]['addr'] except (__HOLE__, IndexError): return
KeyError
dataset/ETHPy150Open deliveryhero/lymph/lymph/utils/sockets.py/guess_external_ip
def create_socket(host, family=socket.AF_INET, type=socket.SOCK_STREAM, backlog=2048, blocking=True, inheritable=False): if family == socket.AF_UNIX and not host.startswith('unix:'): raise ValueError('Your host needs to have the unix:/path form') if host.startswith('unix:'): family = socket.AF_UNIX if host.startswith('fd://'): fd = int(host[5:]) sock = socket.fromfd(fd, family, type) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) else: sock = socket.socket(family, type) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) if host.startswith('unix:'): filename = host[len('unix:'):] try: os.remove(filename) except __HOLE__: pass sock.bind(filename) else: if ':' in host: host, port = host.rsplit(':', 1) port = int(port) else: host, port = '0.0.0.0', int(host) sock.bind((host, port)) sock.listen(backlog) if blocking: sock.setblocking(1) else: sock.setblocking(0) # Required since Python 3.4 to be able to share a socket with a child # process. if inheritable and hasattr(os, 'set_inheritable'): os.set_inheritable(sock.fileno(), True) return sock
OSError
dataset/ETHPy150Open deliveryhero/lymph/lymph/utils/sockets.py/create_socket
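A quick usage sketch of create_socket covering the address forms it accepts; ports and paths are illustrative:

# TCP on an explicit host:port
srv = create_socket('127.0.0.1:8000')

# Bare port -> binds 0.0.0.0 on that port
# srv = create_socket('8000')

# Unix domain socket (the file is unlinked first if it already exists)
# srv = create_socket('unix:/tmp/lymph-demo.sock')

conn, addr = srv.accept()  # blocking accept, since blocking=True by default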