Columns:
  function: string (lengths 79 to 138k) - Python source with one exception type masked as __HOLE__
  label: string (20 classes) - the masked exception type
  info: string (lengths 42 to 261) - dataset path identifying the source repo, file, and function
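Each row below is one (function, label, info) triple. A minimal sketch of how a row can be consumed follows; the dict keys come from the column listing above, and the fill_hole helper is a hypothetical illustration, not part of any published loader for this dataset.

# Minimal sketch, assuming each row is a dict keyed by the three columns
# listed above; the key names and fill_hole are illustrative assumptions.

def fill_hole(row):
    """Substitute the gold label back into the masked source."""
    return row["function"].replace("__HOLE__", row["label"])

sample = {
    "function": "try:\n    x = int(s)\nexcept __HOLE__:\n    x = 0",
    "label": "ValueError",
    "info": "example/path.py/module.func",
}

print(fill_hole(sample))  # prints the snippet with ValueError restored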
def test_raise_exception_when_edge_added_from_non_existing_node(self):
    gr = hypergraph()
    gr.add_nodes([0, 1])
    try:
        gr.link(3, 0)
    except __HOLE__:
        pass
    else:
        fail()
    assert gr.neighbors(0) == []
KeyError
dataset/ETHPy150Open pmatiello/python-graph/tests/unittests-hypergraph.py/test_hypergraph.test_raise_exception_when_edge_added_from_non_existing_node
def test_raise_exception_when_edge_added_to_non_existing_node(self):
    gr = hypergraph()
    gr.add_nodes([0, 1])
    try:
        gr.link(0, 3)
    except __HOLE__:
        pass
    else:
        fail()
    assert gr.neighbors(0) == []
KeyError
dataset/ETHPy150Open pmatiello/python-graph/tests/unittests-hypergraph.py/test_hypergraph.test_raise_exception_when_edge_added_to_non_existing_node
def __init__(self, value, strip=True, *args, **kw):
    """Initialisation

    Check that the value is a bool

    In:
      - ``v`` -- value to validate
    """
    try:
        if isinstance(value, basestring):
            value = value.strip()
            if value.lower() in ('yes', 'on', 'true', '1'):
                self.value = True
            elif value.lower() in ('no', 'off', 'false', '0'):
                self.value = False
            else:
                self.value = False
        else:
            self.value = bool(value)
    except (__HOLE__, TypeError):
        raise ValueError(i18n._(u'Must be a boolean'))
ValueError
dataset/ETHPy150Open Net-ng/kansha/kansha/validator.py/BoolValidator.__init__
def read_config_file():
    global USERNAME, TOKEN
    config_parser = configparser.ConfigParser()
    config_filename = get_config_filename()
    try:
        with open(config_filename, 'r') as f:
            config_parser.readfp(f)
    except configparser.Error as e:
        utils.die("""The following error was encountered while attempting
to parse the configuration file.
%s
This may indicate a mal-formed configuration file.
Recreate the file by invoking
cligh configure
""" % str(e))
    except __HOLE__ as e:
        utils.die("""The following error occurred while trying to open the configuration file.
%s.
If you have not already done so, create the configuration file using
cligh configure
at your shell prompt.
""" % str(e))
    try:
        USERNAME = config_parser.get('credentials', 'username')
        TOKEN = config_parser.get('credentials', 'token')
    except configparser.Error as e:
        utils.die("""The config file is missing one or more expected options.
You should probably recreate it using these two commands:
rm %s
cligh configure
""" % config_filename)
IOError
dataset/ETHPy150Open CMB/cligh/cligh/config.py/read_config_file
def get_tests(app_module):
    try:
        app_path = app_module.__name__.split('.')[:-1]
        test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE)
    except ImportError, e:
        # Couldn't import tests.py. Was it due to a missing file, or
        # due to an import error in a tests.py that actually exists?
        import os.path
        from imp import find_module
        try:
            mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)])
        except __HOLE__:
            # 'tests' module doesn't exist. Move on.
            test_module = None
        else:
            # The module exists, so there must be an import error in the
            # test module itself. We don't need the module; so if the
            # module was a single file module (i.e., tests.py), close the file
            # handle returned by find_module. Otherwise, the test module
            # is a directory, and there is nothing to close.
            if mod[0]:
                mod[0].close()
            raise
    return test_module
ImportError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/test/simple.py/get_tests
def build_suite(app_module):
    "Create a complete Django test suite for the provided application module"
    suite = unittest.TestSuite()

    # Load unit and doctests in the models.py module. If module has
    # a suite() method, use it. Otherwise build the test suite ourselves.
    if hasattr(app_module, 'suite'):
        suite.addTest(app_module.suite())
    else:
        suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module))
        try:
            suite.addTest(doctest.DocTestSuite(app_module,
                                               checker=doctestOutputChecker,
                                               runner=DocTestRunner))
        except __HOLE__:
            # No doc tests in models.py
            pass

    # Check to see if a separate 'tests' module exists parallel to the
    # models module
    test_module = get_tests(app_module)
    if test_module:
        # Load unit and doctests in the tests.py module. If module has
        # a suite() method, use it. Otherwise build the test suite ourselves.
        if hasattr(test_module, 'suite'):
            suite.addTest(test_module.suite())
        else:
            suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module))
            try:
                suite.addTest(doctest.DocTestSuite(test_module,
                                                   checker=doctestOutputChecker,
                                                   runner=DocTestRunner))
            except ValueError:
                # No doc tests in tests.py
                pass
    return suite
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/test/simple.py/build_suite
def build_test(label):
    """Construct a test case a test with the specified label.

    Label should be of the form model.TestClass or
    model.TestClass.test_method. Returns an instantiated test or test
    suite corresponding to the label provided.
    """
    parts = label.split('.')
    if len(parts) < 2 or len(parts) > 3:
        raise ValueError("Test label '%s' should be of the form app.TestCase or app.TestCase.test_method" % label)

    app_module = get_app(parts[0])
    TestClass = getattr(app_module, parts[1], None)

    # Couldn't find the test class in models.py; look in tests.py
    if TestClass is None:
        test_module = get_tests(app_module)
        if test_module:
            TestClass = getattr(test_module, parts[1], None)

    if len(parts) == 2:  # label is app.TestClass
        try:
            return unittest.TestLoader().loadTestsFromTestCase(TestClass)
        except __HOLE__:
            raise ValueError("Test label '%s' does not refer to a test class" % label)
    else:  # label is app.TestClass.test_method
        if not TestClass:
            raise ValueError("Test label '%s' does not refer to a test class" % label)
        return TestClass(parts[2])

# Python 2.3 compatibility: TestSuites were made iterable in 2.4.
# We need to iterate over them, so we add the missing method when
# necessary.
TypeError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/test/simple.py/build_test
@gen.coroutine
def wait_for_http_server(url, timeout=10):
    """Wait for an HTTP Server to respond at url

    Any non-5XX response code will do, even 404.
    """
    loop = ioloop.IOLoop.current()
    tic = loop.time()
    client = AsyncHTTPClient()
    while loop.time() - tic < timeout:
        try:
            r = yield client.fetch(url, follow_redirects=False)
        except __HOLE__ as e:
            if e.code >= 500:
                # failed to respond properly, wait and try again
                if e.code != 599:
                    # we expect 599 for no connection,
                    # but 502 or other proxy error is conceivable
                    app_log.warning("Server at %s responded with error: %s", url, e.code)
                yield gen.sleep(0.1)
            else:
                app_log.debug("Server at %s responded with %s", url, e.code)
                return
        except (OSError, socket.error) as e:
            if e.errno not in {errno.ECONNABORTED, errno.ECONNREFUSED, errno.ECONNRESET}:
                app_log.warning("Failed to connect to %s (%s)", url, e)
            yield gen.sleep(0.1)
        else:
            return

    raise TimeoutError("Server at {url} didn't respond in {timeout} seconds".format(
        **locals()
    ))

# Decorators for authenticated Handlers
HTTPError
dataset/ETHPy150Open jupyterhub/jupyterhub/jupyterhub/utils.py/wait_for_http_server
@property
def bytes(self):
    try:
        return os.stat(self.filename).st_size
    except (OSError, __HOLE__):
        return 0
IOError
dataset/ETHPy150Open lektor/lektor-archive/lektor/filecontents.py/FileContents.bytes
def __repr__(self):
    classname = self.__class__.__name__
    try:
        s = """<lifelines.%s: fitted with %d observations, %d censored>""" % (
            classname, self.event_observed.shape[0],
            (1 - self.event_observed).sum())
    except __HOLE__:
        s = """<lifelines.%s>""" % classname
    return s
AttributeError
dataset/ETHPy150Open CamDavidsonPilon/lifelines/lifelines/fitters/__init__.py/BaseFitter.__repr__
def _predict(self, estimate, label):
    class_name = self.__class__.__name__
    doc_string = """
      Predict the %s at certain point in time.

      Parameters:
        time: a scalar or an array of times to predict the value of %s at.

      Returns:
        predictions: a scalar if time is a scalar, a numpy array if time in an array.
      """ % (class_name, class_name)

    def predict(time):
        predictor = lambda t: getattr(self, estimate).ix[:t].iloc[-1][label]
        try:
            return np.array([predictor(t) for t in time])
        except __HOLE__:
            return predictor(time)

    predict.__doc__ = doc_string
    return predict
TypeError
dataset/ETHPy150Open CamDavidsonPilon/lifelines/lifelines/fitters/__init__.py/UnivariateFitter._predict
def ProcessPlugin(self, plugin_dir, plugin, status={}):
    """Process a plugin from running to ranking.

    :param str plugin_dir: Path to the plugin directory.
    :param dict plugin: The plugin dictionary with all the information.
    :param dict status: Running status of the plugin.
    :return: The output generated by the plugin when run.
    :return: None if the plugin was not run.
    :rtype: list
    """
    # Ensure that the plugin CAN be run before starting anything.
    if not self.plugin_can_run(plugin, show_reason=True):
        return None
    # Save how long it takes for the plugin to run.
    self.timer.start_timer('Plugin')
    plugin['start'] = self.timer.get_start_date_time('Plugin')
    # Use relative path from targets folders while saving
    plugin['output_path'] = os.path.relpath(
        self.GetPluginOutputDir(plugin),
        self.config.GetOutputDirForTargets())
    status['AllSkipped'] = False  # A plugin is going to be run.
    plugin['status'] = 'Running'
    self.PluginCount += 1
    logging.info(
        '_' * 10 + ' %d - Target: %s -> Plugin: %s (%s/%s) ' + '_' * 10,
        self.PluginCount, self.target.GetTargetURL(), plugin['title'],
        plugin['group'], plugin['type'])
    # Skip processing in simulation mode, but show until line above
    # to illustrate what will run
    if self.Simulation:
        return None
    # DB empty => grep plugins will fail, skip!!
    if ('grep' == plugin['type'] and
            self.transaction.NumTransactions() == 0):
        logging.info(
            'Skipped - Cannot run grep plugins: '
            'The Transaction DB is empty')
        return None
    output = None
    status_msg = ''
    partial_output = []
    abort_reason = ''
    try:
        output = self.RunPlugin(plugin_dir, plugin)
        status_msg = 'Successful'
        status['SomeSuccessful'] = True
    except __HOLE__:
        # Just explain why crashed.
        status_msg = 'Aborted'
        abort_reason = 'Aborted by User'
        status['SomeAborted (Keyboard Interrupt)'] = True
    except SystemExit:
        # Abort plugin processing and get out to external exception
        # handling, information saved elsewhere.
        raise SystemExit
    except PluginAbortException as PartialOutput:
        status_msg = 'Aborted (by user)'
        partial_output = PartialOutput.parameter
        abort_reason = 'Aborted by User'
        status['SomeAborted'] = True
    except UnreachableTargetException as PartialOutput:
        status_msg = 'Unreachable Target'
        partial_output = PartialOutput.parameter
        abort_reason = 'Unreachable Target'
        status['SomeAborted'] = True
    except FrameworkAbortException as PartialOutput:
        status_msg = 'Aborted (Framework Exit)'
        partial_output = PartialOutput.parameter
        abort_reason = 'Framework Aborted'
    # TODO: Handle this gracefully
    # except:
    #     Plugin["status"] = "Crashed"
    #     cprint("Crashed")
    #     self.SavePluginInfo(self.Core.Error.Add("Plugin "+Plugin['Type']+"/"+Plugin['File']+" failed for target "+self.Core.Config.Get('TARGET')), Plugin)  # Try to save something
    # TODO: http://blog.tplus1.com/index.php/2007/09/28/the-python-logging-module-is-much-better-than-print-statements/
    finally:
        plugin['status'] = status_msg
        plugin['end'] = self.timer.get_end_date_time('Plugin')
        plugin['owtf_rank'] = self.rank_plugin(output, self.GetPluginOutputDir(plugin))
        if status_msg == 'Successful':
            self.plugin_output.SavePluginOutput(plugin, output)
        else:
            self.plugin_output.SavePartialPluginOutput(
                plugin, partial_output, abort_reason)
        if status_msg == 'Aborted':
            self.error_handler.UserAbort('Plugin')
        if abort_reason == 'Framework Aborted':
            self.Core.finish()
    return output
KeyboardInterrupt
dataset/ETHPy150Open owtf/owtf/framework/plugin/plugin_handler.py/PluginHandler.ProcessPlugin
def test_unicode(self):
    G = nx.Graph()
    try:  # Python 3.x
        name1 = chr(2344) + chr(123) + chr(6543)
        name2 = chr(5543) + chr(1543) + chr(324)
    except __HOLE__:  # Python 2.6+
        name1 = unichr(2344) + unichr(123) + unichr(6543)
        name2 = unichr(5543) + unichr(1543) + unichr(324)
    G.add_edge(name1, 'Radiohead', attr_dict={name2: 3})
    fd, fname = tempfile.mkstemp()
    nx.write_edgelist(G, fname)
    H = nx.read_edgelist(fname)
    assert_equal(G.adj, H.adj)
    os.close(fd)
    os.unlink(fname)
ValueError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/tests/test_edgelist.py/TestEdgelist.test_unicode
def test_latin1_error(self):
    G = nx.Graph()
    try:  # Python 3.x
        name1 = chr(2344) + chr(123) + chr(6543)
        name2 = chr(5543) + chr(1543) + chr(324)
    except __HOLE__:  # Python 2.6+
        name1 = unichr(2344) + unichr(123) + unichr(6543)
        name2 = unichr(5543) + unichr(1543) + unichr(324)
    G.add_edge(name1, 'Radiohead', attr_dict={name2: 3})
    fd, fname = tempfile.mkstemp()
    assert_raises(UnicodeEncodeError,
                  nx.write_edgelist,
                  G, fname, encoding='latin-1')
    os.close(fd)
    os.unlink(fname)
ValueError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/tests/test_edgelist.py/TestEdgelist.test_latin1_error
def test_latin1(self):
    G = nx.Graph()
    try:  # Python 3.x
        blurb = chr(1245)  # just to trigger the exception
        name1 = 'Bj' + chr(246) + 'rk'
        name2 = chr(220) + 'ber'
    except __HOLE__:  # Python 2.6+
        name1 = 'Bj' + unichr(246) + 'rk'
        name2 = unichr(220) + 'ber'
    G.add_edge(name1, 'Radiohead', attr_dict={name2: 3})
    fd, fname = tempfile.mkstemp()
    nx.write_edgelist(G, fname, encoding='latin-1')
    H = nx.read_edgelist(fname, encoding='latin-1')
    assert_equal(G.adj, H.adj)
    os.close(fd)
    os.unlink(fname)
ValueError
dataset/ETHPy150Open gkno/gkno_launcher/src/networkx/readwrite/tests/test_edgelist.py/TestEdgelist.test_latin1
def runtests(*test_args):
    # Setup settings
    if not settings.configured:
        settings.configure(**SETTINGS)

    # New Django 1.7 app registry setup
    try:
        from django import setup
        setup()
    except __HOLE__:
        pass

    # New Django 1.8 test runner
    try:
        from django.test.runner import DiscoverRunner as TestRunner
    except ImportError:
        from django.test.simple import DjangoTestSuiteRunner as TestRunner

    test_runner = TestRunner(verbosity=1)
    failures = test_runner.run_tests(['memcache_status'])
    if failures:
        sys.exit(failures)
ImportError
dataset/ETHPy150Open bartTC/django-memcache-status/runtests.py/runtests
def to_python(self, value):
    # object case
    if value is None:
        return None
    if isinstance(value, dict):
        return value

    # string case
    if value and value[0] in '[{':  # JSON
        try:
            d = json.loads(value)
        except __HOLE__:
            d = None
    else:  # phpserialize data
        try:
            if isinstance(value, unicode):
                value = value.encode('utf8')
            d = php.unserialize(value, decode_strings=True)
        except ValueError:
            d = None
    if isinstance(d, dict):
        return d
    return None
ValueError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/stats/db.py/StatsDictField.to_python
def get_db_prep_value(self, value, connection, prepared=False):
    if value is None or value == '':
        return value
    try:
        value = json.dumps(dict(value))
    except __HOLE__:
        value = None
    return value
TypeError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/stats/db.py/StatsDictField.get_db_prep_value
def db_add_object(self, object, parentObjType=None, parentObjId=None):
    if parentObjType is None or parentObjId is None:
        parentObj = self
    else:
        try:
            parentObj = self.objects[(parentObjType, parentObjId)]
        except __HOLE__:
            msg = "Cannot find object of type '%s' with id '%s'" % \
                (parentObjType, parentObjId)
            raise Exception(msg)
    funname = 'db_add_%s' % object.vtType
    objCopy = copy.copy(object)
    getattr(parentObj, funname)(objCopy)
    self.addToIndex(objCopy)
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/versions/v0_7_0/domain/workflow.py/DBWorkflow.db_add_object
def db_change_object(self, object, parentObjType=None, parentObjId=None):
    if parentObjType is None or parentObjId is None:
        parentObj = self
    else:
        try:
            parentObj = self.objects[(parentObjType, parentObjId)]
        except __HOLE__:
            msg = "Cannot find object of type '%s' with id '%s'" % \
                (parentObjType, parentObjId)
            raise Exception(msg)
    funname = 'db_change_%s' % object.vtType
    objCopy = copy.copy(object)
    getattr(parentObj, funname)(objCopy)
    self.addToIndex(objCopy)
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/versions/v0_7_0/domain/workflow.py/DBWorkflow.db_change_object
def db_delete_object(self, objId, objType, parentObjType=None, parentObjId=None):
    if parentObjType is None or parentObjId is None:
        parentObj = self
    else:
        try:
            parentObj = self.objects[(parentObjType, parentObjId)]
        except KeyError:
            msg = "Cannot find object of type '%s' with id '%s'" % \
                (parentObjType, parentObjId)
            raise Exception(msg)
    funname = 'db_get_%s' % objType
    try:
        object = getattr(parentObj, funname)(objId)
    except __HOLE__:
        attr_name = 'db_%s' % objType
        object = getattr(parentObj, attr_name)
    funname = 'db_delete_%s' % objType
    getattr(parentObj, funname)(object)
    self.deleteFromIndex(object)
AttributeError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/db/versions/v0_7_0/domain/workflow.py/DBWorkflow.db_delete_object
def get_user(name, exact=False):
    users = settings.SELENIUM_USERS
    try:
        user = users[name]
    except __HOLE__:
        if name == 'WEB_USER' and not exact:
            user = users['ADMIN']
        else:
            raise
    return attrdict(**user)
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/hqwebapp/selenium.py/get_user
def _get_char_relative_to_cursor(self, offset=0):
    """
    Return character relative to cursor position, or empty string
    """
    try:
        return self.text[self.cursor_position + offset]
    except __HOLE__:
        return ''
IndexError
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document._get_char_relative_to_cursor
def translate_row_col_to_index(self, row, col):
    """
    Given a (row, col) tuple, return the corresponding index.
    (Row and col params are 0-based.)

    Negative row/col values are turned into zero.
    """
    try:
        result = self._line_start_indexes[row]
        line = self.lines[row]
    except __HOLE__:
        if row < 0:
            result = self._line_start_indexes[0]
            line = self.lines[0]
        else:
            result = self._line_start_indexes[-1]
            line = self.lines[-1]

    result += max(0, min(col, len(line)))

    # Keep in range. (len(self.text) is included, because the cursor can be
    # right after the end of the text as well.)
    result = max(0, min(result, len(self.text)))
    return result
IndexError
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.translate_row_col_to_index
def find(self, sub, in_current_line=False, include_current_position=False,
         ignore_case=False, count=1):
    """
    Find `text` after the cursor, return position relative to the cursor
    position. Return `None` if nothing was found.

    :param count: Find the n-th occurance.
    """
    assert isinstance(ignore_case, bool)

    if in_current_line:
        text = self.current_line_after_cursor
    else:
        text = self.text_after_cursor

    if not include_current_position:
        if len(text) == 0:
            return  # (Otherwise, we always get a match for the empty string.)
        else:
            text = text[1:]

    flags = re.IGNORECASE if ignore_case else 0
    iterator = re.finditer(re.escape(sub), text, flags)

    try:
        for i, match in enumerate(iterator):
            if i + 1 == count:
                if include_current_position:
                    return match.start(0)
                else:
                    return match.start(0) + 1
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find
def find_backwards(self, sub, in_current_line=False, ignore_case=False, count=1):
    """
    Find `text` before the cursor, return position relative to the cursor
    position. Return `None` if nothing was found.

    :param count: Find the n-th occurance.
    """
    if in_current_line:
        before_cursor = self.current_line_before_cursor[::-1]
    else:
        before_cursor = self.text_before_cursor[::-1]

    flags = re.IGNORECASE if ignore_case else 0
    iterator = re.finditer(re.escape(sub[::-1]), before_cursor, flags)

    try:
        for i, match in enumerate(iterator):
            if i + 1 == count:
                return - match.start(0) - len(sub)
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find_backwards
def find_start_of_previous_word(self, count=1, WORD=False):
    """
    Return an index relative to the cursor position pointing to the start
    of the previous word. Return `None` if nothing was found.
    """
    # Reverse the text before the cursor, in order to do an efficient
    # backwards search.
    text_before_cursor = self.text_before_cursor[::-1]

    regex = _FIND_BIG_WORD_RE if WORD else _FIND_WORD_RE
    iterator = regex.finditer(text_before_cursor)

    try:
        for i, match in enumerate(iterator):
            if i + 1 == count:
                return - match.end(1)
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find_start_of_previous_word
def find_next_word_beginning(self, count=1, WORD=False):
    """
    Return an index relative to the cursor position pointing to the start
    of the next word. Return `None` if nothing was found.
    """
    regex = _FIND_BIG_WORD_RE if WORD else _FIND_WORD_RE
    iterator = regex.finditer(self.text_after_cursor)

    try:
        for i, match in enumerate(iterator):
            # Take first match, unless it's the word on which we're right now.
            if i == 0 and match.start(1) == 0:
                count += 1

            if i + 1 == count:
                return match.start(1)
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find_next_word_beginning
def find_next_word_ending(self, include_current_position=False, count=1, WORD=False):
    """
    Return an index relative to the cursor position pointing to the end
    of the next word. Return `None` if nothing was found.
    """
    if include_current_position:
        text = self.text_after_cursor
    else:
        text = self.text_after_cursor[1:]

    regex = _FIND_BIG_WORD_RE if WORD else _FIND_WORD_RE
    iterable = regex.finditer(text)

    try:
        for i, match in enumerate(iterable):
            if i + 1 == count:
                value = match.end(1)

                if include_current_position:
                    return value
                else:
                    return value + 1
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find_next_word_ending
def find_previous_word_beginning(self, count=1, WORD=False):
    """
    Return an index relative to the cursor position pointing to the start
    of the previous word. Return `None` if nothing was found.
    """
    regex = _FIND_BIG_WORD_RE if WORD else _FIND_WORD_RE
    iterator = regex.finditer(self.text_before_cursor[::-1])

    try:
        for i, match in enumerate(iterator):
            if i + 1 == count:
                return - match.end(1)
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find_previous_word_beginning
def find_previous_word_ending(self, count=1, WORD=False):
    """
    Return an index relative to the cursor position pointing to the end
    of the previous word. Return `None` if nothing was found.
    """
    text_before_cursor = self.text_after_cursor[:1] + self.text_before_cursor[::-1]

    regex = _FIND_BIG_WORD_RE if WORD else _FIND_WORD_RE
    iterator = regex.finditer(text_before_cursor)

    try:
        for i, match in enumerate(iterator):
            # Take first match, unless it's the word on which we're right now.
            if i == 0 and match.start(1) == 0:
                count += 1

            if i + 1 == count:
                return -match.start(1) + 1
    except __HOLE__:
        pass
StopIteration
dataset/ETHPy150Open jonathanslenders/python-prompt-toolkit/prompt_toolkit/document.py/Document.find_previous_word_ending
def setUp(self):
    """Run before each test method to initialize test environment."""
    super(TestCase, self).setUp()
    test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
    try:
        test_timeout = int(test_timeout)
    except __HOLE__:
        # If timeout value is invalid do not set a timeout.
        test_timeout = 0

    if self.TIMEOUT_SCALING_FACTOR >= 0:
        test_timeout *= self.TIMEOUT_SCALING_FACTOR
    else:
        raise ValueError('TIMEOUT_SCALING_FACTOR value must be >= 0')

    if test_timeout > 0:
        self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
    self.useFixture(fixtures.NestedTempfile())
    self.useFixture(fixtures.TempHomeDir())
    self.useFixture(TranslationFixture())
    self.useFixture(log_fixture.get_logging_handle_error_fixture())

    if os.environ.get('OS_STDOUT_CAPTURE') in _TRUE_VALUES:
        stdout = self.useFixture(fixtures.StringStream('stdout')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
    if os.environ.get('OS_STDERR_CAPTURE') in _TRUE_VALUES:
        stderr = self.useFixture(fixtures.StringStream('stderr')).stream
        self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))

    fs = '%(levelname)s [%(name)s] %(message)s'
    self.log_fixture = self.useFixture(fixtures.FakeLogger(
        level=logging.DEBUG, format=fs))
    self.useFixture(conf_fixture.ConfFixture(CONF))

    self.messaging_conf = messaging_conffixture.ConfFixture(CONF)
    self.messaging_conf.transport_driver = 'fake'
    self.messaging_conf.response_timeout = 15
    self.useFixture(self.messaging_conf)

    if self.USES_DB:
        global _DB_CACHE
        if not _DB_CACHE:
            _DB_CACHE = Database(session, migration,
                                 sql_connection=CONF.database.connection,
                                 sqlite_db=CONF.database.sqlite_db,
                                 sqlite_clean_db=CONF.sqlite_clean_db)
        self.useFixture(_DB_CACHE)

    mox_fixture = self.useFixture(moxstubout.MoxStubout())
    self.mox = mox_fixture.mox
    self.stubs = mox_fixture.stubs
    self.addCleanup(self._clear_attrs)
    self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
    self.policy = self.useFixture(policy_fixture.PolicyFixture())
    CONF.set_override('fatal_exception_format_errors', True)
ValueError
dataset/ETHPy150Open openstack/rack/rack/test.py/TestCase.setUp
@_skip_under_py3k
def test_mixed_metas(self):
    from zope.interface.tests.advisory_testing import ping

    class Metaclass1(type):
        pass

    class Metaclass2(type):
        pass

    class Base1:
        __metaclass__ = Metaclass1

    class Base2:
        __metaclass__ = Metaclass2

    try:
        class Derived(Base1, Base2):
            ping([], 1)
    except __HOLE__:
        pass
    else:
        raise AssertionError("Should have gotten incompatibility error")

    class Metaclass3(Metaclass1, Metaclass2):
        pass

    class Derived(Base1, Base2):
        __metaclass__ = Metaclass3
        ping([], 1)

    self.failUnless(isinstance(Derived, list))
    Derived, = Derived
    self.failUnless(isinstance(Derived, Metaclass3))
TypeError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/tests/test_advice.py/AdviceTests.test_mixed_metas
@_skip_under_py3k
def test_meta_no_bases(self):
    from zope.interface.tests.advisory_testing import ping
    try:
        from types import ClassType
    except __HOLE__:
        return

    class Thing:
        ping([], 1)

    klass, = Thing  # unpack list created by pong
    self.assertEqual(type(klass), ClassType)
ImportError
dataset/ETHPy150Open nlloyd/SubliminalCollaborator/libs/zope/interface/tests/test_advice.py/AdviceTests.test_meta_no_bases
def _reloader_loop():
    mtimes = {}
    while 1:
        for filename in _iter_module_files():
            try:
                mtime = os.stat(filename).st_mtime
            except __HOLE__:
                continue
            old_time = mtimes.get(filename)
            if old_time is None:
                mtimes[filename] = mtime
                continue
            elif mtime > old_time:
                print ' * Detected change, reloading'
                sys.exit(3)
        time.sleep(1)
OSError
dataset/ETHPy150Open fengsp/batpod/exts/serving.py/_reloader_loop
def run_server(host, port, app):
    def inner():
        httpd = make_server(host, port, app)
        httpd.serve_forever()

    if os.environ.get('BATPOD_RELOADER') != 'true':
        print " * Serving on port " + str(port) + "..."
    else:
        try:
            import thread
            thread.start_new_thread(inner, ())
            _reloader_loop()
        except __HOLE__:
            sys.exit(0)
    try:
        restart_with_reloader()
    except KeyboardInterrupt:
        sys.exit(0)
    sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open fengsp/batpod/exts/serving.py/run_server
def finalize_options(self):
    # have to finalize 'plat_name' before 'bdist_base'
    if self.plat_name is None:
        if self.skip_build:
            self.plat_name = get_platform()
        else:
            self.plat_name = self.get_finalized_command('build').plat_name

    # 'bdist_base' -- parent of per-built-distribution-format
    # temporary directories (eg. we'll probably have
    # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
    if self.bdist_base is None:
        build_base = self.get_finalized_command('build').build_base
        self.bdist_base = os.path.join(build_base,
                                       'bdist.' + self.plat_name)

    self.ensure_string_list('formats')
    if self.formats is None:
        try:
            self.formats = [self.default_format[os.name]]
        except __HOLE__:
            raise DistutilsPlatformError, \
                  "don't know how to create built distributions " + \
                  "on platform %s" % os.name

    if self.dist_dir is None:
        self.dist_dir = "dist"
KeyError
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/distutils/command/bdist.py/bdist.finalize_options
def run(self):
    # Figure out which sub-commands we need to run.
    commands = []
    for format in self.formats:
        try:
            commands.append(self.format_command[format][0])
        except __HOLE__:
            raise DistutilsOptionError, "invalid format '%s'" % format

    # Reinitialize and run each command.
    for i in range(len(self.formats)):
        cmd_name = commands[i]
        sub_cmd = self.reinitialize_command(cmd_name)
        if cmd_name not in self.no_format_option:
            sub_cmd.format = self.formats[i]

        # passing the owner and group names for tar archiving
        if cmd_name == 'bdist_dumb':
            sub_cmd.owner = self.owner
            sub_cmd.group = self.group

        # If we're going to need to run this command again, tell it to
        # keep its temporary files around so subsequent runs go faster.
        if cmd_name in commands[i+1:]:
            sub_cmd.keep_temp = 1
        self.run_command(cmd_name)
KeyError
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/distutils/command/bdist.py/bdist.run
def is_stable(self, state):
    """Is the state stable?

    :param state: the state of interest
    :raises: InvalidState if the state is invalid
    :returns True if it is a stable state; False otherwise
    """
    try:
        return self._states[state]['stable']
    except __HOLE__:
        raise excp.InvalidState(_("State '%s' does not exist") % state)
KeyError
dataset/ETHPy150Open openstack/ironic/ironic/common/fsm.py/FSM.is_stable
def start():
    try:
        command = sys.argv[1]
    except IndexError:
        command = 'help'
    if '--help' in sys.argv:
        command = 'help'
    args = sys.argv[2:]
    instance = SftpCli()
    callback = getattr(instance, "command_%s" % command, None)
    if isinstance(callback, collections.Callable):
        try:
            callback(*args)
        except __HOLE__ as e:
            sys.stderr.write('Bad call for %s: %s' % (command, str(e)))
    else:
        instance.command_help()
TypeError
dataset/ETHPy150Open spantaleev/sftpman/sftpman/cli.py/start
def test_optoins_configure():
    # Options have no defaults yet -- configure() was never called.
    with pytest.raises(AttributeError):
        _Options.foo

    configure(foo='bar')

    try:
        assert _Options.foo == 'bar'
    except __HOLE__:
        pytest.fail('Options.foo should\'ve been set.')

    del _Options.foo
AttributeError
dataset/ETHPy150Open slacy/minimongo/minimongo/tests/test_utils.py/test_optoins_configure
@classmethod
def apply(cls, args, run):
    """Adjust the loglevel of the root-logger of this run."""
    try:
        lvl = int(args)
    except __HOLE__:
        lvl = args
    run.root_logger.setLevel(lvl)
ValueError
dataset/ETHPy150Open IDSIA/sacred/sacred/commandline_options.py/LoglevelOption.apply
def auth_info_view(request):
    # TODO: insecure URL, must be protected behind a firewall
    storage = request.registry.velruse_store
    token = request.GET.get('token')
    try:
        return storage.retrieve(token)
    except __HOLE__:
        log.info('auth_info requested invalid token "%s"')
        request.response.status = 400
        return None
KeyError
dataset/ETHPy150Open bbangert/velruse/velruse/app/__init__.py/auth_info_view
def service(self, name):
    service_name = 'Service%s' % name.title()
    try:
        self._service = getattr(services, service_name)(self._proxy)
    except __HOLE__:
        raise AttributeError('Unrecognized service name %s' % service_name)
    return self
AttributeError
dataset/ETHPy150Open smsapi/smsapi-python-client/smsapi/client.py/SmsAPI.service
def test_context_is_reset_after_request_has_finished(self):
    context = {'foo': 'bar'}

    def responseCls(connection, response):
        connection.called = True
        self.assertEqual(connection.context, context)

    con = Connection()
    con.called = False
    con.connection = Mock()
    con.responseCls = responseCls

    con.set_context(context)
    self.assertEqual(con.context, context)

    con.request('/')

    # Context should have been reset
    self.assertTrue(con.called)
    self.assertEqual(con.context, {})

    # Context should also be reset if a method inside request throws
    con = Connection(timeout=1, retry_delay=0.1)
    con.connection = Mock()

    con.set_context(context)
    self.assertEqual(con.context, context)

    con.connection.request = Mock(side_effect=ssl.SSLError())

    try:
        con.request('/')
    except ssl.SSLError:
        pass

    self.assertEqual(con.context, {})

    con.connection = Mock()
    con.set_context(context)
    self.assertEqual(con.context, context)

    con.responseCls = Mock(side_effect=ValueError())

    try:
        con.request('/')
    except __HOLE__:
        pass

    self.assertEqual(con.context, {})
ValueError
dataset/ETHPy150Open apache/libcloud/libcloud/test/test_connection.py/ConnectionClassTestCase.test_context_is_reset_after_request_has_finished
def findDependencies(filename):
    filenames = commands.getoutput('otool -l "%s" | grep " name" | sort | uniq | sed -e "s/ *name //g" | grep -v "@" | sed "s/ (offset.*)//"' % filename).splitlines()
    if 'executable' not in getMachOType(filename):
        try:
            fileId = getMachOId(filename)
        except __HOLE__:
            pass
        else:
            if fileId in filenames:
                filenames.remove(fileId)
    return filenames
RuntimeError
dataset/ETHPy150Open RobotLocomotion/director/distro/package/fixup_mach_o.py/findDependencies
def get_filename(self, filename, ext, remove_existing=False):
    filename = join(dirname(__file__), "files", "%s.%s" % (filename, ext))
    if remove_existing:
        try:
            unlink(filename)
        except __HOLE__:
            pass
    return filename
OSError
dataset/ETHPy150Open wq/wq.io/tests/base.py/IoTestCase.get_filename
def check_instance(self, instance):
    self.assertEqual(len(instance), len(self.data))

    for row, data in zip(instance, self.data):
        for key in data:
            val = getattr(row, key)
            try:
                val = int(float(val))
            except __HOLE__:
                pass
            self.assertEqual(val, data[key])
ValueError
dataset/ETHPy150Open wq/wq.io/tests/base.py/IoTestCase.check_instance
def render(self, context):
    try:
        output = self.filter_expression.resolve(context)
        output = template_localtime(output, use_tz=context.use_tz)
        output = localize(output, use_l10n=context.use_l10n)
        output = force_text(output)
    except __HOLE__:
        return ''
    except Exception as e:
        if not hasattr(e, 'django_template_source'):
            e.django_template_source = self.source
        raise
    if (context.autoescape and not isinstance(output, SafeData)) or isinstance(output, EscapeData):
        return escape(output)
    else:
        return output
UnicodeDecodeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/template/debug.py/DebugVariableNode.render
def prompt(self):
    """
    Prompt the user, and store their input as `self.input`.
    """
    attempt = 0
    while self.input is None:
        if attempt >= int(self._meta.max_attempts):
            if self._meta.max_attempts_exception is True:
                raise FrameworkError("Maximum attempts exceeded getting "
                                     "valid user input")
            else:
                return self.input

        attempt += 1
        self._prompt()

        if self.input is None:
            continue
        elif self._meta.options is not None:
            if self._meta.numbered:
                try:
                    self.input = self._meta.options[int(self.input) - 1]
                except (IndexError, __HOLE__) as e:
                    self.input = None
                    continue
            else:
                if self._meta.case_insensitive is True:
                    lower_options = [x.lower() for x in self._meta.options]
                    if not self.input.lower() in lower_options:
                        self.input = None
                        continue
                else:
                    if self.input not in self._meta.options:
                        self.input = None
                        continue

    self.process_input()
    return self.input
ValueError
dataset/ETHPy150Open datafolklabs/cement/cement/utils/shell.py/Prompt.prompt
def __init__(self, namestr, valstr):
    for name, val in zip(namestr.split(), valstr.split()):
        try:
            self[name] = long(val)
        except __HOLE__:  # catches heading
            self._name = name
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/OS/Linux/proc/net/snmp.py/_Row.__init__
def __init__(self, lock=None):
    if lock is None:
        lock = RLock()
    self.__lock = lock
    # Export the lock's acquire() and release() methods
    self.acquire = lock.acquire
    self.release = lock.release
    try:
        self._release_save = lock._release_save
    except __HOLE__:
        pass
    try:
        self._acquire_restore = lock._acquire_restore
    except AttributeError:
        pass
    try:
        self._is_owned = lock._is_owned
    except AttributeError:
        pass
    self.__waiters = []
AttributeError
dataset/ETHPy150Open ooici/pyon/pyon/util/threading.py/Condition.__init__
def wait(self, timeout=None):
    if not self._is_owned():
        raise RuntimeError("cannot wait on un-acquired lock")
    waiter = Lock()
    waiter.acquire()
    self.__waiters.append(waiter)
    saved_state = self._release_save()
    try:  # restore state no matter what (e.g., KeyboardInterrupt)
        if timeout is None:
            waiter.acquire()
        else:
            # Balancing act: We can't afford a pure busy loop, so we have
            # to sleep; but if we sleep the whole timeout time, we'll be
            # unresponsive. The scheme her sleeps very little at first,
            # longer as time goes on, but never longer than 20 times per
            # second (or the timeout time remaining).
            endtime = _time() + timeout
            delay = 0.0005
            while True:
                gotit = waiter.acquire(0)
                if gotit:
                    break
                remaining = endtime - _time()
                if remaining <= 0:
                    break
                # The delay is the least between 2x the delay, the time remaining and/or .05
                delay = min(delay * 2, remaining, 0.05)
                _sleep(delay)
            if not gotit:
                try:
                    self.__waiters.remove(waiter)
                except __HOLE__:
                    pass
    finally:
        self._acquire_restore(saved_state)
ValueError
dataset/ETHPy150Open ooici/pyon/pyon/util/threading.py/Condition.wait
def notify(self, n=1):
    if not self._is_owned():
        raise RuntimeError("cannot notify on an un-acquired lock")
    __waiters = self.__waiters
    waiters = __waiters[:n]
    if not waiters:
        return
    for waiter in waiters:
        waiter.release()
        try:
            __waiters.remove(waiter)
        except __HOLE__:
            pass
ValueError
dataset/ETHPy150Open ooici/pyon/pyon/util/threading.py/Condition.notify
def load_workbook(filename, read_only=False, use_iterators=False,
                  keep_vba=KEEP_VBA, guess_types=False, data_only=False):
    """Open the given filename and return the workbook

    :param filename: the path to open or a file-like object
    :type filename: string or a file-like object open in binary mode c.f., :class:`zipfile.ZipFile`

    :param read_only: optimised for reading, content cannot be edited
    :type read_only: bool

    :param use_iterators: use lazy load for cells
    :type use_iterators: bool

    :param keep_vba: preseve vba content (this does NOT mean you can use it)
    :type keep_vba: bool

    :param guess_types: guess cell content type and do not read it from the file
    :type guess_types: bool

    :param data_only: controls whether cells with formulae have either the
        formula (default) or the value stored the last time Excel read the sheet
    :type data_only: bool

    :rtype: :class:`openpyxl.workbook.Workbook`

    .. note::

        When using lazy load, all worksheets will be
        :class:`openpyxl.worksheet.iter_worksheet.IterableWorksheet`
        and the returned workbook will be read-only.

    """
    archive = _validate_archive(filename)
    read_only = read_only or use_iterators

    wb = Workbook(guess_types=guess_types, data_only=data_only, read_only=read_only)

    if read_only and guess_types:
        warnings.warn('Data types are not guessed when using iterator reader')

    valid_files = archive.namelist()

    # If are going to preserve the vba then attach a copy of the archive to the
    # workbook so that is available for the save.
    if keep_vba:
        try:
            f = open(filename, 'rb')
            s = f.read()
            f.close()
        except:
            pos = filename.tell()
            filename.seek(0)
            s = filename.read()
            filename.seek(pos)
        wb.vba_archive = ZipFile(BytesIO(s), 'r')

    if read_only:
        wb._archive = ZipFile(filename)

    # get workbook-level information
    try:
        wb.properties = read_properties(archive.read(ARC_CORE))
    except __HOLE__:
        wb.properties = DocumentProperties()
    wb.active = read_workbook_settings(archive.read(ARC_WORKBOOK)) or 0

    # what content types do we have?
    cts = dict(read_content_types(archive))

    strings_path = cts.get(SHARED_STRINGS)
    if strings_path is not None:
        if strings_path.startswith("/"):
            strings_path = strings_path[1:]
        shared_strings = read_string_table(archive.read(strings_path))
    else:
        shared_strings = []

    wb.is_template = XLTX in cts or XLTM in cts

    try:
        # some writers don't output a theme, live with it (fixes #160)
        wb.loaded_theme = archive.read(ARC_THEME)
    except KeyError:
        assert wb.loaded_theme == None, "even though the theme information is missing there is a theme object ?"

    parsed_styles = read_style_table(archive)
    if parsed_styles is not None:
        wb._differential_styles = parsed_styles.differential_styles
        wb._cell_styles = parsed_styles.cell_styles
        wb._named_styles = parsed_styles.named_styles
        wb._colors = parsed_styles.color_index
        wb._borders = parsed_styles.border_list
        wb._fonts = parsed_styles.font_list
        wb._fills = parsed_styles.fill_list
        wb._number_formats = parsed_styles.number_formats
        wb._protections = parsed_styles.protections
        wb._alignments = parsed_styles.alignments
        wb._colors = parsed_styles.color_index

    wb.excel_base_date = read_excel_base_date(archive)

    # get worksheets
    wb.worksheets = []  # remove preset worksheet
    for sheet in detect_worksheets(archive):
        sheet_name = sheet['title']
        worksheet_path = sheet['path']
        if not worksheet_path in valid_files:
            continue

        if read_only:
            new_ws = ReadOnlyWorksheet(wb, sheet_name, worksheet_path, None, shared_strings)
            wb._add_sheet(new_ws)
        else:
            parser = WorkSheetParser(wb, sheet_name, archive.read(worksheet_path), shared_strings)
            parser.parse()
            new_ws = wb[sheet_name]
        new_ws.sheet_state = sheet['state']

        if not read_only:
            # load comments into the worksheet cells
            comments_file = get_comments_file(worksheet_path, archive, valid_files)
            if comments_file is not None:
                read_comments(new_ws, archive.read(comments_file))

    wb._differential_styles = []  # reset
    wb._named_ranges = list(read_named_ranges(archive.read(ARC_WORKBOOK), wb))

    wb.code_name = read_workbook_code_name(archive.read(ARC_WORKBOOK))

    if EXTERNAL_LINK in cts:
        rels = read_rels(archive)
        wb._external_links = list(detect_external_links(rels, archive))

    archive.close()
    return wb
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/openpyxl-2.3.0-b2/openpyxl/reader/excel.py/load_workbook
def test_formset_iteration(self):
    # Regression tests for #16455 -- formset instances are iterable
    ChoiceFormset = formset_factory(Choice, extra=3)
    formset = ChoiceFormset()

    # confirm iterated formset yields formset.forms
    forms = list(formset)
    self.assertEqual(forms, formset.forms)
    self.assertEqual(len(formset), len(forms))

    # confirm indexing of formset
    self.assertEqual(formset[0], forms[0])
    try:
        formset[3]
        self.fail('Requesting an invalid formset index should raise an exception')
    except __HOLE__:
        pass

    # Formets can override the default iteration order
    class BaseReverseFormSet(BaseFormSet):
        def __iter__(self):
            return reversed(self.forms)

        def __getitem__(self, idx):
            return super(BaseReverseFormSet, self).__getitem__(len(self) - idx - 1)

    ReverseChoiceFormset = formset_factory(Choice, BaseReverseFormSet, extra=3)
    reverse_formset = ReverseChoiceFormset()

    # confirm that __iter__ modifies rendering order
    # compare forms from "reverse" formset with forms from original formset
    self.assertEqual(str(reverse_formset[0]), str(forms[-1]))
    self.assertEqual(str(reverse_formset[1]), str(forms[-2]))
    self.assertEqual(len(reverse_formset), len(forms))
IndexError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/tests/forms_tests/tests/test_formsets.py/FormsFormsetTestCase.test_formset_iteration
def finish_starttag(self, tag, attrs):
    try:
        method = getattr(self, 'start_' + tag)
    except AttributeError:
        try:
            method = getattr(self, 'do_' + tag)
        except __HOLE__:
            self.unknown_starttag(tag, attrs)
            return -1
        else:
            self.handle_starttag(tag, method, attrs)
            return 0
    else:
        self.stack.append(tag)
        self.handle_starttag(tag, method, attrs)
        return 1

# Internal -- finish processing of end tag
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/sgmllib.py/SGMLParser.finish_starttag
def finish_endtag(self, tag):
    if not tag:
        found = len(self.stack) - 1
        if found < 0:
            self.unknown_endtag(tag)
            return
    else:
        if tag not in self.stack:
            try:
                method = getattr(self, 'end_' + tag)
            except __HOLE__:
                self.unknown_endtag(tag)
            else:
                self.report_unbalanced(tag)
            return
        found = len(self.stack)
        for i in range(found):
            if self.stack[i] == tag:
                found = i
    while len(self.stack) > found:
        tag = self.stack[-1]
        try:
            method = getattr(self, 'end_' + tag)
        except AttributeError:
            method = None
        if method:
            self.handle_endtag(tag, method)
        else:
            self.unknown_endtag(tag)
        del self.stack[-1]

# Overridable -- handle start tag
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/sgmllib.py/SGMLParser.finish_endtag
def convert_charref(self, name):
    """Convert character reference, may be overridden."""
    try:
        n = int(name)
    except __HOLE__:
        return
    if not 0 <= n <= 127:
        return
    return self.convert_codepoint(n)
ValueError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/sgmllib.py/SGMLParser.convert_charref
def test(args = None):
    import sys
    if args is None:
        args = sys.argv[1:]

    if args and args[0] == '-s':
        args = args[1:]
        klass = SGMLParser
    else:
        klass = TestSGMLParser

    if args:
        file = args[0]
    else:
        file = 'test.html'

    if file == '-':
        f = sys.stdin
    else:
        try:
            f = open(file, 'r')
        except __HOLE__, msg:
            print file, ":", msg
            sys.exit(1)

    data = f.read()
    if f is not sys.stdin:
        f.close()

    x = klass()
    for c in data:
        x.feed(c)
    x.close()
IOError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/sgmllib.py/test
@plugins_cmd.command('add', short_help='Adds a new plugin to the project.')
@click.argument('name')
@pass_context
def plugins_add_cmd(ctx, name):
    """This command can add a new plugion to the project.  If just given
    the name of the plugin the latest version of that plugin is added to
    the project.

    The argument is either the name of the plugin or the name of the plugin
    suffixed with `@version` with the version.  For instance to install
    the version 0.1 of the plugin demo you would do `demo@0.1`.
    """
    project = ctx.get_project()
    from .packages import add_package_to_project
    try:
        info = add_package_to_project(project, name)
    except __HOLE__ as e:
        click.echo('Error: %s' % e, err=True)
    else:
        click.echo('Package %s (%s) was added to the project' % (
            info['name'],
            info['version'],
        ))
RuntimeError
dataset/ETHPy150Open lektor/lektor-archive/lektor/cli.py/plugins_add_cmd
@plugins_cmd.command('remove', short_help='Removes a plugin from the project.')
@click.argument('name')
@pass_context
def plugins_remove_cmd(ctx, name):
    """This command can remove a plugion to the project again.  The name
    of the plugin is the only argument to the function.
    """
    project = ctx.get_project()
    from .packages import remove_package_from_project
    try:
        old_info = remove_package_from_project(project, name)
    except __HOLE__ as e:
        click.echo('Error: %s' % e, err=True)
    else:
        if old_info is None:
            click.echo('Package was not registered with the project. '
                       'Nothing was removed.')
        else:
            click.echo('Removed package %s (%s)' % (
                old_info['name'],
                old_info['version'],
            ))
RuntimeError
dataset/ETHPy150Open lektor/lektor-archive/lektor/cli.py/plugins_remove_cmd
def clean(self):
    count = 0
    for form in self.forms:
        try:
            if form.cleaned_data and not form.cleaned_data.get('DELETE', False):
                count += 1
        except __HOLE__:
            # annoyingly, if a subform is invalid Django explicity raises
            # an AttributeError for cleaned_data
            pass
    if count < 1:
        raise ValidationError('You must have at least one of these.')
AttributeError
dataset/ETHPy150Open iiilx/django-addressbook/addressbook/forms.py/MandatoryInlineFormSet.clean
def handle_module_upgrade_request(controller, module_id, pipeline):
    def add_keyname(fname, module):
        new_function = controller.create_function(module,
                                                  "key_name",
                                                  ["_key"])
        return [('add', new_function, 'module', module.id)]

    module_remap = {
        'read|csv|CSVFile': [
            (None, '0.1.1', 'read|CSVFile', {
                'src_port_remap': {'self': 'value'},
            })
        ],
        'read|numpy|NumPyArray': [
            (None, '0.1.1', 'read|NumPyArray', {
                'src_port_remap': {'self': 'value'},
            })
        ],
        'read|CSVFile': [
            ('0.1.1', '0.1.2', None, {
                'src_port_remap': {'self': 'value'},
            }),
            ('0.1.3', '0.1.5', None, {})
        ],
        'read|NumPyArray': [
            ('0.1.1', '0.1.2', None, {
                'src_port_remap': {'self': 'value'},
            })
        ],
        'read|ExcelSpreadsheet': [
            ('0.1.1', '0.1.2', None, {
                'src_port_remap': {'self': 'value'},
            }),
            ('0.1.3', '0.1.4', None, {})
        ],
        'read|JSONFile': [
            (None, '0.1.5', 'read|JSONObject', {
                'function_remap': {None: add_keyname},
            })
        ],
    }

    try:
        from vistrails.packages.spreadsheet.init import upgrade_cell_to_output
    except __HOLE__:
        pass
    else:
        module_remap = upgrade_cell_to_output(
            module_remap, module_id, pipeline,
            'TableCell', 'TableOutput', '0.1.6', 'table')

    return UpgradeWorkflowHandler.remap_module(controller, module_id,
                                               pipeline, module_remap)
ImportError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/packages/tabledata/init.py/handle_module_upgrade_request
def remove_output():
    for f in ['test_featureset.nc']:
        try:
            os.remove(pjoin(TEMP_DIR, f))
        except __HOLE__:
            pass
OSError
dataset/ETHPy150Open cesium-ml/cesium/cesium/tests/test_featurize.py/remove_output
@classmethod
@jsonify
def read_list(cls):
    try:
        return {'ok': True, 'data': {
            'services': ServiceStatus.get_status_list(),
            'pollInterval': ServiceStatus.get_poll_interval()
        }}
    except __HOLE__:
        response.status_int = 400
        return {'ok': False, 'msg': 'Missing service name'}
KeyError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/controllers/apiv1/servicestatus.py/ServicestatusController.read_list
@classmethod
#@postonly
@jsonify
def set_poll_interval(cls):
    try:
        poll_interval = float(request.params['value'])
        if poll_interval <= 0:
            raise ValueError
        ServiceStatus.set_poll_interval(poll_interval)
        return {'ok': True}
    except (KeyError, __HOLE__):
        response.status_int = 400
        return {'ok': False, 'msg': 'Polling interval must be a positive value'}
ValueError
dataset/ETHPy150Open turbulenz/turbulenz_local/turbulenz_local/controllers/apiv1/servicestatus.py/ServicestatusController.set_poll_interval
def __bool__(self):
    if self._prefetch_related_lookups and not self._prefetch_done:
        # We need all the results in order to be able to do the prefetch
        # in one go. To minimize code duplication, we use the __len__
        # code path which also forces this, and also does the prefetch
        len(self)

    if self._result_cache is not None:
        return bool(self._result_cache)
    try:
        next(iter(self))
    except __HOLE__:
        return False
    return True
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/QuerySet.__bool__
def iterator(self):
    """
    An iterator over the results from applying this QuerySet
    to the database.
    """
    fill_cache = False
    if connections[self.db].features.supports_select_related:
        fill_cache = self.query.select_related
    if isinstance(fill_cache, dict):
        requested = fill_cache
    else:
        requested = None
    max_depth = self.query.max_depth

    extra_select = list(self.query.extra_select)
    aggregate_select = list(self.query.aggregate_select)

    only_load = self.query.get_loaded_field_names()
    if not fill_cache:
        fields = self.model._meta.fields

    load_fields = []
    # If only/defer clauses have been specified,
    # build the list of fields that are to be loaded.
    if only_load:
        for field, model in self.model._meta.get_fields_with_model():
            if model is None:
                model = self.model
            try:
                if field.name in only_load[model]:
                    # Add a field that has been explicitly included
                    load_fields.append(field.name)
            except __HOLE__:
                # Model wasn't explicitly listed in the only_load table
                # Therefore, we need to load all fields from this model
                load_fields.append(field.name)

    index_start = len(extra_select)
    aggregate_start = index_start + len(load_fields or self.model._meta.fields)

    skip = None
    if load_fields and not fill_cache:
        # Some fields have been deferred, so we have to initialise
        # via keyword arguments.
        skip = set()
        init_list = []
        for field in fields:
            if field.name not in load_fields:
                skip.add(field.attname)
            else:
                init_list.append(field.attname)
        model_cls = deferred_class_factory(self.model, skip)

    # Cache db and model outside the loop
    db = self.db
    model = self.model
    compiler = self.query.get_compiler(using=db)
    if fill_cache:
        klass_info = get_klass_info(model, max_depth=max_depth,
                                    requested=requested, only_load=only_load)
    for row in compiler.results_iter():
        if fill_cache:
            obj, _ = get_cached_row(row, index_start, db, klass_info,
                                    offset=len(aggregate_select))
        else:
            # Omit aggregates in object creation.
            row_data = row[index_start:aggregate_start]
            if skip:
                obj = model_cls(**dict(zip(init_list, row_data)))
            else:
                obj = model(*row_data)

            # Store the source database of the object
            obj._state.db = db
            # This object came from the database; it's not being added.
            obj._state.adding = False

        if extra_select:
            for i, k in enumerate(extra_select):
                setattr(obj, k, row[i])

        # Add the aggregates to the model
        if aggregate_select:
            for i, aggregate in enumerate(aggregate_select):
                setattr(obj, aggregate, row[i + aggregate_start])

        # Add the known related objects to the model, if there are any
        if self._known_related_objects:
            for field, rel_objs in self._known_related_objects.items():
                pk = getattr(obj, field.get_attname())
                try:
                    rel_obj = rel_objs[pk]
                except KeyError:
                    pass  # may happen in qs1 | qs2 scenarios
                else:
                    setattr(obj, field.name, rel_obj)

        yield obj
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/QuerySet.iterator
def _safe_iterator(self, iterator):
    # ensure result cache is cleared when iterating over a queryset
    # raises an exception
    try:
        for item in iterator:
            yield item
    except __HOLE__:
        raise
    except Exception:
        self._result_cache = None
        raise
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/QuerySet._safe_iterator
def _fill_cache(self, num=None):
    """
    Fills the result cache with 'num' more entries (or until the results
    iterator is exhausted).
    """
    if self._iter:
        try:
            for i in range(num or ITER_CHUNK_SIZE):
                self._result_cache.append(next(self._iter))
        except __HOLE__:
            self._iter = None
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/QuerySet._fill_cache
def get_cached_row(row, index_start, using, klass_info, offset=0):
    """
    Helper function that recursively returns an object with the specified
    related attributes already populated.

    This method may be called recursively to populate deep select_related()
    clauses.

    Arguments:
         * row - the row of data returned by the database cursor
         * index_start - the index of the row at which data for this
           object is known to start
         * offset - the number of additional fields that are known to
           exist in row for `klass`. This usually means the number of
           annotated results on `klass`.
         * using - the database alias on which the query is being executed.
         * klass_info - result of the get_klass_info function
    """
    if klass_info is None:
        return None
    klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx = klass_info

    fields = row[index_start: index_start + field_count]
    # If the pk column is None (or the Oracle equivalent ''), then the related
    # object must be non-existent - set the relation to None.
    if fields[pk_idx] == None or fields[pk_idx] == '':
        obj = None
    elif field_names:
        obj = klass(**dict(zip(field_names, fields)))
    else:
        obj = klass(*fields)

    # If an object was retrieved, set the database state.
    if obj:
        obj._state.db = using
        obj._state.adding = False

    # Instantiate related fields
    index_end = index_start + field_count + offset
    # Iterate over each related object, populating any
    # select_related() fields
    for f, klass_info in related_fields:
        # Recursively retrieve the data for the related object
        cached_row = get_cached_row(row, index_end, using, klass_info)
        # If the recursive descent found an object, populate the
        # descriptor caches relevant to the object
        if cached_row:
            rel_obj, index_end = cached_row
            if obj is not None:
                # If the base object exists, populate the
                # descriptor cache
                setattr(obj, f.get_cache_name(), rel_obj)
            if f.unique and rel_obj is not None:
                # If the field is unique, populate the
                # reverse descriptor cache on the related object
                setattr(rel_obj, f.related.get_cache_name(), obj)

    # Now do the same, but for reverse related objects.
    # Only handle the restricted case - i.e., don't do a depth
    # descent into reverse relations unless explicitly requested
    for f, klass_info in reverse_related_fields:
        # Recursively retrieve the data for the related object
        cached_row = get_cached_row(row, index_end, using, klass_info)
        # If the recursive descent found an object, populate the
        # descriptor caches relevant to the object
        if cached_row:
            rel_obj, index_end = cached_row
            if obj is not None:
                # If the field is unique, populate the
                # reverse descriptor cache
                setattr(obj, f.related.get_cache_name(), rel_obj)
            if rel_obj is not None:
                # If the related object exists, populate
                # the descriptor cache.
                setattr(rel_obj, f.get_cache_name(), obj)
                # Now populate all the non-local field values on the related
                # object. If this object has deferred fields, we need to use
                # the opts from the original model to get non-local fields
                # correctly.
                opts = rel_obj._meta
                if getattr(rel_obj, '_deferred'):
                    opts = opts.proxy_for_model._meta
                for rel_field, rel_model in opts.get_fields_with_model():
                    if rel_model is not None:
                        setattr(rel_obj, rel_field.attname, getattr(obj, rel_field.attname))
                        # populate the field cache for any related object
                        # that has already been retrieved
                        if rel_field.rel:
                            try:
                                cached_obj = getattr(obj, rel_field.get_cache_name())
                                setattr(rel_obj, rel_field.get_cache_name(), cached_obj)
                            except __HOLE__:
                                # Related object hasn't been cached yet
                                pass
    return obj, index_end
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/get_cached_row
@property
def columns(self):
    """
    A list of model field names in the order they'll appear in the
    query results.
    """
    if not hasattr(self, '_columns'):
        self._columns = self.query.get_columns()
        # Adjust any column names which don't match field names
        for (query_name, model_name) in self.translations.items():
            try:
                index = self._columns.index(query_name)
                self._columns[index] = model_name
            except __HOLE__:
                # Ignore translations for non-existent column names
                pass
    return self._columns
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/RawQuerySet.columns
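The ValueError handled above comes from list.index, which raises when the value is absent. A minimal standalone sketch of the same skip-on-miss idiom (function and names here are illustrative, not part of Django):

def rename_columns(columns, translations):
    # Replace query-level column names with model field names where a
    # translation exists; a name missing from the list raises ValueError
    # from list.index and is silently skipped, as in the property above.
    columns = list(columns)
    for query_name, model_name in translations.items():
        try:
            index = columns.index(query_name)
        except ValueError:
            continue
        columns[index] = model_name
    return columns

# rename_columns(['id', 'first'], {'first': 'first_name', 'ghost': 'x'})
# -> ['id', 'first_name']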
def prefetch_related_objects(result_cache, related_lookups):
    """
    Helper function for prefetch_related functionality

    Populates prefetched objects caches for a list of results
    from a QuerySet
    """
    if len(result_cache) == 0:
        return  # nothing to do

    model = result_cache[0].__class__

    # We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below). So we need some bookkeeping to
    # ensure we don't do duplicate work.
    done_lookups = set()  # list of lookups like foo__bar__baz
    done_queries = {}     # dictionary of things like 'foo__bar': [results]

    auto_lookups = []  # we add to this as we go through.
    followed_descriptors = set()  # recursion protection

    all_lookups = itertools.chain(related_lookups, auto_lookups)
    for lookup in all_lookups:
        if lookup in done_lookups:
            # We've done exactly this already, skip the whole thing
            continue
        done_lookups.add(lookup)

        # Top level, the list of objects to decorate is the result cache
        # from the primary QuerySet. It won't be for deeper levels.
        obj_list = result_cache

        attrs = lookup.split(LOOKUP_SEP)
        for level, attr in enumerate(attrs):
            # Prepare main instances
            if len(obj_list) == 0:
                break

            good_objects = True
            for obj in obj_list:
                if not hasattr(obj, '_prefetched_objects_cache'):
                    try:
                        obj._prefetched_objects_cache = {}
                    except __HOLE__:
                        # Must be in a QuerySet subclass that is not returning
                        # Model instances, either in Django or 3rd
                        # party. prefetch_related() doesn't make sense, so quit
                        # now.
                        good_objects = False
                        break
                else:
                    # We already did this list
                    break
            if not good_objects:
                break

            # Descend down tree

            # We assume that objects retrieved are homogeneous (which is the
            # premise of prefetch_related), so what applies to the first
            # object applies to all.
            first_obj = obj_list[0]
            prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, attr)

            if not attr_found:
                raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
                                     "parameter to prefetch_related()" %
                                     (attr, first_obj.__class__.__name__, lookup))

            if level == len(attrs) - 1 and prefetcher is None:
                # Last one, this *must* resolve to something that supports
                # prefetching, otherwise there is no point adding it and the
                # developer asking for it has made a mistake.
                raise ValueError("'%s' does not resolve to an item that supports "
                                 "prefetching - this is an invalid parameter to "
                                 "prefetch_related()." % lookup)

            if prefetcher is not None and not is_fetched:
                # Check we didn't do this already
                current_lookup = LOOKUP_SEP.join(attrs[0:level+1])
                if current_lookup in done_queries:
                    obj_list = done_queries[current_lookup]
                else:
                    obj_list, additional_prl = prefetch_one_level(obj_list, prefetcher, attr)
                    # We need to ensure we don't keep adding lookups from the
                    # same relationships to stop infinite recursion. So, if we
                    # are already on an automatically added lookup, don't add
                    # the new lookups from relationships we've seen already.
                    if not (lookup in auto_lookups and descriptor in followed_descriptors):
                        for f in additional_prl:
                            new_prl = LOOKUP_SEP.join([current_lookup, f])
                            auto_lookups.append(new_prl)
                    done_queries[current_lookup] = obj_list
                followed_descriptors.add(descriptor)
            else:
                # Either a singly related object that has already been fetched
                # (e.g. via select_related), or hopefully some other property
                # that doesn't support prefetching but needs to be traversed.
                # We replace the current list of parent objects with the list
                # of related objects, filtering out empty or missing values so
                # that we can continue with nullable or reverse relations.
                new_obj_list = []
                for obj in obj_list:
                    try:
                        new_obj = getattr(obj, attr)
                    except exceptions.ObjectDoesNotExist:
                        continue
                    if new_obj is None:
                        continue
                    new_obj_list.append(new_obj)
                obj_list = new_obj_list
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/prefetch_related_objects
def get_prefetcher(instance, attr):
    """
    For the attribute 'attr' on the given instance, finds
    an object that has a get_prefetch_query_set().
    Returns a 4 tuple containing:
    (the object with get_prefetch_query_set (or None),
     the descriptor object representing this relationship (or None),
     a boolean that is False if the attribute was not found at all,
     a boolean that is True if the attribute has already been fetched)
    """
    prefetcher = None
    attr_found = False
    is_fetched = False

    # For singly related objects, we have to avoid getting the attribute
    # from the object, as this will trigger the query. So we first try
    # on the class, in order to get the descriptor object.
    rel_obj_descriptor = getattr(instance.__class__, attr, None)
    if rel_obj_descriptor is None:
        try:
            rel_obj = getattr(instance, attr)
            attr_found = True
        except __HOLE__:
            pass
    else:
        attr_found = True
        if rel_obj_descriptor:
            # singly related object, descriptor object has the
            # get_prefetch_query_set() method.
            if hasattr(rel_obj_descriptor, 'get_prefetch_query_set'):
                prefetcher = rel_obj_descriptor
                if rel_obj_descriptor.is_cached(instance):
                    is_fetched = True
            else:
                # descriptor doesn't support prefetching, so we go ahead and get
                # the attribute on the instance rather than the class to
                # support many related managers
                rel_obj = getattr(instance, attr)
                if hasattr(rel_obj, 'get_prefetch_query_set'):
                    prefetcher = rel_obj
    return prefetcher, rel_obj_descriptor, attr_found, is_fetched
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/db/models/query.py/get_prefetcher
def cleanup(self):
    try:
        self.shutdown()
    except __HOLE__, e:
        pass
KeyboardInterrupt
dataset/ETHPy150Open chromium/web-page-replay/trafficshaper_test.py/TimedTcpServer.cleanup
def get_user_channel(text):
    tokens = text.split('/')
    try:
        user = tokens[0]
        channel = tokens[1]
    except __HOLE__:
        channel = "testing"
    return user, channel
IndexError
dataset/ETHPy150Open conan-io/conan/conans/client/manager.py/get_user_channel
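A quick usage sketch of the IndexError fallback above; when the '/' separator is absent, tokens[1] does not exist and the default channel is used:

assert get_user_channel("conan/stable") == ("conan", "stable")
assert get_user_channel("conan") == ("conan", "testing")  # IndexError path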
@staticmethod
def VersionNumber():
    """Return the version of ebq."""
    try:
        import pkg_resources  # pylint: disable=g-import-not-at-top
        version = pkg_resources.get_distribution('encrypted_bigquery').version
        return 'v%s' % (version,)
    except __HOLE__:
        return '<unknown>'
ImportError
dataset/ETHPy150Open google/encrypted-bigquery-client/src/ebq.py/_Version.VersionNumber
def yum(s):
    """
    Walk the dependency tree of all the packages in set s all the way to
    the leaves. Return the set of s plus all their dependencies.
    """
    logging.debug('searching for Yum dependencies')
    if not hasattr(yum, '_cache'):
        yum._cache = {}
        try:
            p = subprocess.Popen(['rpm', '-qa', '--qf=%{NAME}\x1E[%{PROVIDES}\x1F]\n'],
                                 close_fds=True,
                                 stdout=subprocess.PIPE)
            for line in p.stdout:
                name, caps = line.rstrip().split('\x1E')
                yum._cache.update([(cap, name) for cap in caps.split('\x1F')])
        except OSError:
            pass
    if not isinstance(s, set):
        s = set([s])
    tmp_s = s
    while 1:
        new_s = set()
        for package in tmp_s:
            try:
                p = subprocess.Popen(['rpm', '-qR', package],
                                     close_fds=True,
                                     stdout=subprocess.PIPE)
            except OSError:
                continue
            for line in p.stdout:
                cap = line.rstrip()[0:line.find(' ')]
                if 'rpmlib' == cap[0:6]:
                    continue
                try:
                    new_s.add(yum._cache[cap])
                except KeyError:
                    try:
                        p2 = subprocess.Popen(['rpm', '-q', '--qf=%{NAME}',
                                               '--whatprovides', cap],
                                              close_fds=True,
                                              stdout=subprocess.PIPE)
                        stdout, stderr = p2.communicate()
                        yum._cache[cap] = stdout
                        new_s.add(stdout)
                    except __HOLE__:
                        pass
        # If there is to be a next iteration, `new_s` must contain some
        # packages not yet in `s`.
        tmp_s = new_s - s
        if 0 == len(tmp_s):
            break
        s |= new_s
    return s
OSError
dataset/ETHPy150Open devstructure/blueprint/blueprint/deps.py/yum
def get(self, url, is_file=False):
    result = []
    if is_file:
        r = self._get(url, is_file=True)
    else:
        r = self._get(url)
    raise_errors(r)
    content = r.headers['Content-Type']
    if content == 'application/json':
        data = r.json()
        for k in list(data):
            if not k == 'version':
                key = k
        if key not in PLURALS:
            try:
                datatype = JSON_MAP[key]
            except __HOLE__:
                raise NotFound(key)
            if datatype is None:
                result = data[key]
            else:
                result = JsonObjectFactory.create(datatype, data[key])
        else:
            datatype = PLURALS[key]
            for d in data[key]:
                result.append(JsonObjectFactory.create(datatype, d))
    elif content == 'text/plain':
        result = r.text
    elif r.headers['Content-Type'] == 'application/zip':
        # Strip the 'attachment; filename='' from Content-Disposition
        filename = r.headers["Content-Disposition"][21:]
        # Save the data to a file
        with open(filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    if type(chunk) == bytes:
                        f.write(chunk)
                    else:
                        f.write(chunk.encode("UTF-8"))
                    f.flush()
        return filename
    else:
        result = None
    return result
KeyError
dataset/ETHPy150Open dave-tucker/hp-sdn-client/hpsdnclient/rest.py/RestClient.get
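The KeyError branch above translates a missing dispatch-table entry into a domain-specific NotFound error. A self-contained sketch of that idiom (the table contents and exception class here are illustrative only, not the hpsdnclient API):

class NotFound(Exception):
    pass

TYPE_MAP = {'datapath': 'Datapath', 'stats': None}  # illustrative entries

def lookup_type(key):
    # Convert the low-level KeyError into an API-level NotFound,
    # mirroring the JSON_MAP lookup in RestClient.get.
    try:
        return TYPE_MAP[key]
    except KeyError:
        raise NotFound(key)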
def process_lines(self, haml_lines):
    root = RootNode(**self.options_dict)
    line_iter = iter(haml_lines)
    haml_node = None
    for line_number, line in enumerate(line_iter):
        node_lines = line
        if not root.parent_of(HamlNode(line)).inside_filter_node():
            if line.count('{') - line.count('}') == 1:
                start_multiline = line_number  # For exception handling
                while line.count('{') - line.count('}') != -1:
                    try:
                        line = line_iter.next()
                    except __HOLE__:
                        raise Exception('No closing brace found for multi-line HAML beginning at line %s' % (start_multiline + 1))
                    node_lines += line
        # Blank lines
        if haml_node is not None and len(node_lines.strip()) == 0:
            haml_node.newlines += 1
        else:
            haml_node = create_node(node_lines)
            if haml_node:
                root.add_node(haml_node)
    if self.options_dict and self.options_dict.get('debug_tree'):
        return root.debug_tree()
    else:
        return root.render()
StopIteration
dataset/ETHPy150Open jessemiller/HamlPy/hamlpy/hamlpy.py/Compiler.process_lines
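The StopIteration handling above guards against an unterminated multi-line HAML attribute dict. A rough standalone sketch of the same join-until-braces-balance loop (simplified, and using Python 3's next() rather than the .next() above):

def join_multiline(lines):
    # Accumulate physical lines until '{' and '}' counts balance,
    # turning exhaustion of the iterator into a clear error.
    it = iter(lines)
    for line in it:
        buf = line
        while buf.count('{') > buf.count('}'):
            try:
                buf += next(it)
            except StopIteration:
                raise Exception('No closing brace found for %r' % buf)
        yield buf

# list(join_multiline(["%p{'a': 1,", "'b': 2}", "%div"]))
# -> ["%p{'a': 1,'b': 2}", "%div"]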
def target_function(self, running, data):
    module_verbosity = boolify(self.verbosity)
    name = threading.current_thread().name
    url = sanitize_url("{}:{}{}".format(self.target, self.port, self.path))
    headers = {u'Content-Type': u'application/x-www-form-urlencoded'}

    print_status(name, 'process is starting...', verbose=module_verbosity)

    while running.is_set():
        try:
            line = data.next().split(":")
            user = line[0].strip()
            password = line[1].strip()

            postdata = self.data.replace("{{USER}}", user).replace("{{PASS}}", password)
            r = requests.post(url, headers=headers, data=postdata, verify=False)

            l = len(r.text)
            if l < self.invalid["min"] or l > self.invalid["max"]:
                if boolify(self.stop_on_success):
                    running.clear()
                print_success("Target: {}:{} {}: Authentication Succeeded - Username: '{}' Password: '{}'".format(self.target, self.port, name, user, password), verbose=module_verbosity)
                self.credentials.append((self.target, self.port, user, password))
            else:
                print_error("Target: {}:{} {}: Authentication Failed - Username: '{}' Password: '{}'".format(self.target, self.port, name, user, password), verbose=module_verbosity)
        except __HOLE__:
            break

    print_status(name, 'process is terminated.', verbose=module_verbosity)
StopIteration
dataset/ETHPy150Open reverse-shell/routersploit/routersploit/modules/creds/http_form_default.py/Exploit.target_function
def _register_update_callbacks(self, update_callbacks):
    """
    .. todo::

        WRITEME
    """
    if update_callbacks is None:
        update_callbacks = []
    # If it's iterable, we're fine. If not, it's a single callback,
    # so wrap it in a list.
    try:
        iter(update_callbacks)
        self.update_callbacks = update_callbacks
    except __HOLE__:
        self.update_callbacks = [update_callbacks]
TypeError
dataset/ETHPy150Open lisa-lab/pylearn2/pylearn2/training_algorithms/training_algorithm.py/TrainingAlgorithm._register_update_callbacks
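The TypeError probe above is the usual duck-typing test for iterability: iter() raises TypeError on non-iterables. A minimal sketch of the same normalization (names are illustrative):

def as_callback_list(callbacks):
    # None -> [], iterable -> list of callbacks, single callable -> [it].
    if callbacks is None:
        return []
    try:
        return list(iter(callbacks))
    except TypeError:
        return [callbacks]

assert as_callback_list(None) == []
assert as_callback_list([len, max]) == [len, max]
assert as_callback_list(len) == [len]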
@classmethod
def fromVisualization(cls, target, others):
    """
    Create a L{Description} from a L{Thing} and some L{Paths} visually
    related to that L{Thing}.

    @param target: The L{IThing} being described by this L{Description}.
    @type target: L{IThing}

    @param others: Paths to items that are visible as portions of the
        target.
    @type others: L{list} of L{Path <imaginary.idea.Path>}s.

    @return: A L{Description} comprising C{target} and C{others}.
    """
    exits = []
    for other in others:
        # All of others are paths that go through target so just
        # using targetAs won't accidentally include any exits that aren't
        # for the target room except for the bug mentioned below.
        #
        # TODO: This might show too many exits. There might be exits to
        # rooms with exits to other rooms; they'll all show up on some
        # path here as IExit targets. Check the exit's source to make sure
        # it is target.
        anExit = other.targetAs(IExit)
        if anExit is not None:
            exits.append(anExit)
    exits.sort(key=lambda anExit: anExit.name)

    descriptionConcepts = []
    for pup in target.powerupsFor(iimaginary.IDescriptionContributor):
        descriptionConcepts.append(pup.contributeDescriptionFrom(others))

    def index(c):
        # https://github.com/twisted/imaginary/issues/63
        preferredOrder = [
            'ExpressCondition',
            'ExpressClothing',
        ]
        try:
            return preferredOrder.index(c.__class__.__name__)
        except __HOLE__:
            # Anything unrecognized goes after anything recognized.
            return len(preferredOrder)
    descriptionConcepts.sort(key=index)

    return cls(
        title=Noun(target).shortName(),
        exits=exits,
        description=target.description,
        components=descriptionConcepts,
        target=target,
    )
ValueError
dataset/ETHPy150Open twisted/imaginary/imaginary/language.py/Description.fromVisualization
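The local index function above is a sort key that orders known concept classes first and pushes anything unrecognized to the end, by way of list.index raising ValueError. The same pattern in isolation (the class-name list is a placeholder):

def preferred_order_key(preferred):
    # Known names sort by their position in `preferred`; unknown names
    # all get len(preferred) and therefore sort after every known one.
    def key(obj):
        try:
            return preferred.index(type(obj).__name__)
        except ValueError:
            return len(preferred)
    return key

# sorted(concepts, key=preferred_order_key(['ExpressCondition', 'ExpressClothing']))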
def expand(self, values):
    """
    Generate concepts based on the template.

    @param values: A L{dict} mapping substitution markers to application
        objects from which to take values for those substitutions. For
        example, a key might be C{u"target"}. The associated value will
        be substituted each place C{u"{target}"} appears in the template
        string. Or, the value's name will be substituted each place
        C{u"{target:name}"} appears in the template string.
    @type values: L{dict} mapping L{unicode} to L{object}

    @return: An iterator the combined elements of which represent the
        result of expansion of the template. The elements are adaptable
        to L{IConcept}.
    """
    parts = Formatter().parse(self.templateText)
    for (literalText, fieldName, formatSpec, conversion) in parts:
        if literalText:
            yield ExpressString(literalText)
        if fieldName:
            try:
                target = values[fieldName.lower()]
            except KeyError:
                extra = u""
                if formatSpec:
                    extra = u" '%s'" % (formatSpec,)
                yield u"<missing target '%s' for%s expansion>" % (
                    fieldName, extra)
            else:
                if formatSpec:
                    # A nice enhancement would be to delegate this logic to
                    # target
                    try:
                        expander = getattr(
                            self, '_expand_' + formatSpec.upper())
                    except __HOLE__:
                        yield u"<'%s' unsupported by target '%s'>" % (
                            formatSpec, fieldName)
                    else:
                        yield expander(target)
                else:
                    yield target
AttributeError
dataset/ETHPy150Open twisted/imaginary/imaginary/language.py/ConceptTemplate.expand
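The AttributeError branch above implements getattr-based dispatch: a format spec such as '{target:name}' is routed to a _expand_NAME method if one exists. A stripped-down sketch of that dispatch (the method and message text are illustrative):

class SpecDispatcher(object):
    # '{field:spec}' resolves to a _expand_<SPEC> method; a missing
    # method falls through to an 'unsupported' marker instead of crashing.
    def _expand_NAME(self, target):
        return getattr(target, 'name', str(target))

    def expand_spec(self, spec, target):
        try:
            expander = getattr(self, '_expand_' + spec.upper())
        except AttributeError:
            return u"<'%s' unsupported>" % (spec,)
        return expander(target)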
def _request_instance_property():
    """
    Set and retrieve the request instance.

    This forces consistency between the request path and self.vars
    (i.e. api_version, account, container, obj), even if they are
    unexpectedly assigned separately.
    """
    def getter(self):
        return self._request

    def setter(self, request):
        self._request = request
        try:
            self._extract_vaco()
        except __HOLE__:
            raise NotStorletRequest()

    return property(getter, setter,
                    doc="Tie the request to the acc/con/obj vars")
ValueError
dataset/ETHPy150Open openstack/storlets/Engine/swift/storlet_middleware/storlet_handler.py/_request_instance_property
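_request_instance_property is a property factory: the closures become the getter and setter of a class-level property, and the setter converts a low-level ValueError into the middleware's own NotStorletRequest. A generic sketch of the same shape (all names are illustrative):

def _validated_property(name, validate):
    # Build a property whose setter runs a validation hook; the hook's
    # ValueError can then be translated into the caller's own error type.
    attr = '_' + name

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        validate(value)  # may raise ValueError
        setattr(self, attr, value)

    return property(getter, setter, doc="validated %s attribute" % name)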
def _validate_registration(self, req):
    params = self._parse_storlet_params(req.headers)
    try:
        if self.container == self.storlet_container:
            self.logger.debug('updating object in storlet container. '
                              'Sanity check')
            self.gateway_class.validate_storlet_registration(
                params, self.obj)
        else:
            self.logger.debug('updating object in storlet dependency. '
                              'Sanity check')
            self.gateway_class.validate_dependency_registration(
                params, self.obj)
    except __HOLE__ as e:
        self.logger.exception('Bad parameter')
        raise HTTPBadRequest(e.message)
ValueError
dataset/ETHPy150Open openstack/storlets/Engine/swift/storlet_middleware/storlet_handler.py/StorletProxyHandler._validate_registration
def testFindTrovesWithFlavorPreferences4(self):
    repos = self.openRepository()
    db = self.openDatabase()
    Flavor = deps.parseFlavor

    def _find(spec, getLeaves=True, bestFlavor=True):
        cfg = self.cfg
        spec = cmdline.parseTroveSpec(spec)
        tupList = repos.findTrove(cfg.installLabelPath, spec,
                                  cfg.flavor, affinityDatabase=db,
                                  getLeaves=getLeaves, bestFlavor=bestFlavor)
        tupList = sorted([('%s=%s[%s]' % (x[0],
                                          x[1].trailingRevision().getVersion(),
                                          x[2])) for x in tupList])
        return tupList

    repos.setFlavorPreferenceList([Flavor('is: x86_64'), Flavor('is:x86')])
    self.cfg.flavor = [Flavor('is:x86(~sse,~sse2) x86_64')]
    self.addComponent('foo:run=1[is:x86(sse,sse2)]')
    self.addComponent('foo:run=1[is:x86 x86_64]')
    self.addComponent('foo:run=2[is:x86(sse,sse2)]')
    self.addComponent('foo:run=2[is:x86 x86_64]')
    self.addComponent('foo:run=3[is:x86(sse,sse2)]')
    self.addComponent('foo:run=3[is:x86 x86_64]')
    # get the best flavor for each version.
    expectedResults = ['foo:run=1[is: x86 x86_64]',
                       'foo:run=2[is: x86 x86_64]',
                       'foo:run=3[is: x86 x86_64]']
    try:
        results = _find('foo:run', getLeaves=False)
        assert(results == expectedResults)
    except __HOLE__:
        print "\nexpected: ['%s']" % "', '".join(expectedResults)
        print "got ['%s']" % "', '".join(results)
        raise
AssertionError
dataset/ETHPy150Open sassoftware/conary/conary_test/repositorytest/findtrovetest.py/FindTroveTest.testFindTrovesWithFlavorPreferences4
def __init__(self, backend, key, algorithm, ctx=None):
    self._algorithm = algorithm
    self._backend = backend
    if ctx is None:
        ctx = self._backend._ffi.new("CCHmacContext *")
        try:
            alg = self._backend._supported_hmac_algorithms[algorithm.name]
        except __HOLE__:
            raise UnsupportedAlgorithm(
                "{0} is not a supported HMAC hash on this backend.".format(
                    algorithm.name),
                _Reasons.UNSUPPORTED_HASH
            )
        self._backend._lib.CCHmacInit(ctx, alg, key, len(key))

    self._ctx = ctx
    self._key = key
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cryptography-1.3.1/src/cryptography/hazmat/backends/commoncrypto/hmac.py/_HMACContext.__init__
def runTests(self):
    if (self.catchbreak
            and getattr(unittest, 'installHandler', None) is not None):
        unittest.installHandler()
    if self.testRunner is None:
        self.testRunner = runner.TextTestRunner
    if isinstance(self.testRunner, classtypes()):
        try:
            testRunner = self.testRunner(verbosity=self.verbosity,
                                         failfast=self.failfast,
                                         buffer=self.buffer)
        except __HOLE__:
            # didn't accept the verbosity, buffer or failfast arguments
            testRunner = self.testRunner()
    else:
        # it is assumed to be a TestRunner instance
        testRunner = self.testRunner
    self.result = testRunner.run(self.test)
    if self.exit:
        sys.exit(not self.result.wasSuccessful())
################
TypeError
dataset/ETHPy150Open byt3bl33d3r/pth-toolkit/lib/python2.7/site-packages/samba/external/testtools/run.py/TestProgram.runTests
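The TypeError fallback above keeps compatibility with test runners whose constructors predate the verbosity/failfast/buffer arguments. The same two-step construction in isolation:

def construct_runner(runner_class, **kwargs):
    # Prefer the modern signature; an older runner class that rejects the
    # keyword arguments raises TypeError and gets a bare constructor call.
    try:
        return runner_class(**kwargs)
    except TypeError:
        return runner_class()

Note that, as in the original, a TypeError raised inside a modern constructor is indistinguishable here from a signature mismatch; that is the known weakness of this idiom.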
def handle(self, server_sock, client_sock, addr):
    """Handle client connection

    The client may send one or more requests.
    """
    req = None
    try:
        parser = http.RequestParser(self.cfg, client_sock)
        try:
            server_name = server_sock.getsockname()
            if not self.cfg.keepalive:
                req = next(parser)
                self.handle_request(server_name, req, client_sock, addr)
            else:
                # keepalive loop
                while True:
                    req = None
                    with self.timeout_ctx():
                        req = next(parser)
                    if not req:
                        break
                    self.handle_request(server_name, req, client_sock, addr)
                    gyield()
        except http.errors.NoMoreData as e:
            self.log.debug("Ignored premature client disconnection. %s", e)
        except __HOLE__ as e:
            self.log.debug("Closing connection. %s", e)
        except ssl.SSLError:
            exc_info = sys.exc_info()
            # pass to next try-except level
            reraise(exc_info[0], exc_info[1], exc_info[2])
        except socket.error:
            exc_info = sys.exc_info()
            # pass to next try-except level
            reraise(exc_info[0], exc_info[1], exc_info[2])
        except Exception as e:
            self.handle_error(req, client_sock, addr, e)
    except ssl.SSLError as e:
        if get_errno(e) == ssl.SSL_ERROR_EOF:
            self.log.debug("ssl connection closed")
            client_sock.close()
        else:
            self.log.debug("Error processing SSL request.")
            self.handle_error(req, client_sock, addr, e)
    except socket.error as e:
        if get_errno(e) not in BROKEN_SOCK:
            self.log.exception("Socket error processing request.")
        else:
            if get_errno(e) == errno.ECONNRESET:
                self.log.debug("Ignoring connection reset")
            else:
                self.log.debug("Ignoring EPIPE")
    except Exception as e:
        self.handle_error(req, client_sock, addr, e)
    finally:
        util.close(client_sock)
StopIteration
dataset/ETHPy150Open veegee/guv/guv/support/gunicorn_worker.py/AsyncWorker.handle
def handle_request(self, listener_name, req, sock, addr):
    request_start = datetime.now()
    environ = {}
    resp = None
    try:
        self.cfg.pre_request(self, req)
        resp, environ = wsgi.create(req, sock, addr, listener_name, self.cfg)
        environ["wsgi.multithread"] = True
        self.nr += 1
        if self.alive and self.nr >= self.max_requests:
            self.log.info("Autorestarting worker after current request.")
            resp.force_close()
            self.alive = False
        if not self.cfg.keepalive:
            resp.force_close()
        respiter = self.wsgi(environ, resp.start_response)
        if respiter == ALREADY_HANDLED:
            return False
        try:
            if isinstance(respiter, environ['wsgi.file_wrapper']):
                resp.write_file(respiter)
            else:
                for item in respiter:
                    resp.write(item)
            resp.close()
            request_time = datetime.now() - request_start
            self.log.access(resp, req, environ, request_time)
        except socket.error as e:
            # BROKEN_SOCK not interesting here
            if not get_errno(e) in BROKEN_SOCK:
                raise
        finally:
            if hasattr(respiter, "close"):
                respiter.close()
        if resp.should_close():
            raise StopIteration()
    except __HOLE__:
        raise
    except Exception:
        if resp and resp.headers_sent:
            # If the requests have already been sent, we should close the
            # connection to indicate the error.
            self.log.exception("Error handling request")
            try:
                sock.shutdown(socket.SHUT_RDWR)
                sock.close()
            except socket.error:
                pass
            raise StopIteration()
        raise
    finally:
        try:
            self.cfg.post_request(self, req, environ, resp)
        except Exception:
            self.log.exception("Exception in post_request hook")
    return True
StopIteration
dataset/ETHPy150Open veegee/guv/guv/support/gunicorn_worker.py/AsyncWorker.handle_request
def _guv_sendfile(fdout, fdin, offset, nbytes):
    while True:
        try:
            return o_sendfile(fdout, fdin, offset, nbytes)
        except __HOLE__ as e:
            if get_errno(e) == errno.EAGAIN:
                if not isinstance(fdout, int):
                    fd = fdout.fileno()
                else:
                    fd = fdout
                trampoline(fd, WRITE)
            else:
                raise
OSError
dataset/ETHPy150Open veegee/guv/guv/support/gunicorn_worker.py/_guv_sendfile
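_guv_sendfile retries on EAGAIN, suspending the greenlet via trampoline until the fd is writable again. The generic shape of that nonblocking retry loop, independent of guv (op and wait are placeholder callables):

import errno

def retry_on_eagain(op, wait):
    # Retry a syscall-style callable whenever it fails with EAGAIN,
    # blocking via wait() in between; any other OSError propagates.
    while True:
        try:
            return op()
        except OSError as e:
            if e.errno != errno.EAGAIN:
                raise
            wait()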
def run(self):
    acceptors = []
    for sock in self.sockets:
        gsock = gsocket(sock.FAMILY, socket.SOCK_STREAM, fileno=sock.fileno())
        gsock.setblocking(1)
        hfun = partial(self.handle, gsock)
        acceptor = guv.spawn(_guv_serve, gsock, hfun, self.worker_connections)
        acceptors.append(acceptor)
        guv.gyield()

    try:
        while self.alive:
            self.notify()
            guv.sleep(self.timeout / 2)
    except (KeyboardInterrupt, __HOLE__):
        log.debug('KeyboardInterrupt, exiting')

    self.notify()
    try:
        with guv.Timeout(self.cfg.graceful_timeout) as t:
            for a in acceptors:
                a.kill(guv.StopServe())
            for a in acceptors:
                a.wait()
    except guv.Timeout as te:
        if te != t:
            raise
        for a in acceptors:
            a.kill()

    log.debug('GuvWorker exited')
SystemExit
dataset/ETHPy150Open veegee/guv/guv/support/gunicorn_worker.py/GuvWorker.run
def load_class(path):
    """
    Load class from path.
    """
    try:
        mod_name, klass_name = path.rsplit('.', 1)
        mod = import_module(mod_name)
    except AttributeError as e:
        raise ImproperlyConfigured(u'Error importing %s: "%s"' % (mod_name, e))
    try:
        klass = getattr(mod, klass_name)
    except __HOLE__:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" class' %
                                   (mod_name, klass_name))
    return klass
AttributeError
dataset/ETHPy150Open niwinz/django-greenqueue/greenqueue/utils.py/load_class
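A quick usage sketch of load_class; any importable dotted path behaves the same way:

OrderedDict = load_class('collections.OrderedDict')
assert OrderedDict(a=1)['a'] == 1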
def resolve_aliases(self, pipeline, customAliases=None):
    # We don't build the alias dictionary anymore: since we no longer
    # perform expression evaluation, the values won't change.
    # We only care about custom aliases, because they might have a value
    # different from what is stored.
    aliases = {}
    if customAliases:
        # customAliases can be only a subset of the aliases,
        # so we always need to build the alias dictionary
        for k, v in customAliases.iteritems():
            aliases[k] = v
        # no support for expression evaluation. The code that does that is
        # ugly and dangerous.
        for alias in aliases:
            try:
                info = pipeline.aliases[alias]
                param = pipeline.db_get_object(info[0], info[1])
                param.strValue = str(aliases[alias])
            except __HOLE__:
                pass
    return aliases
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/core/interpreter/base.py/BaseInterpreter.resolve_aliases
def cs_graph_components(x):
    """
    Determine connected components of a graph stored as a compressed
    sparse row or column matrix.

    For speed reasons, the symmetry of the matrix x is not checked. A
    nonzero at index `(i, j)` means that node `i` is connected to node
    `j` by an edge. The number of rows/columns of the matrix thus
    corresponds to the number of nodes in the graph.

    Parameters
    ----------
    x : array_like or sparse matrix, 2 dimensions
        The adjacency matrix of the graph. Only the upper triangular part
        is used.

    Returns
    -------
    n_comp : int
        The number of connected components.
    label : ndarray (ints, 1 dimension)
        The label array of each connected component (-2 is used to
        indicate empty rows in the matrix: 0 everywhere, including
        diagonal). This array has the length of the number of nodes,
        i.e. one label for each node of the graph. Nodes having the same
        label belong to the same connected component.

    Notes
    -----
    The matrix is assumed to be symmetric and the upper triangular part
    of the matrix is used. The matrix is converted to a CSR matrix unless
    it is already a CSR.

    Examples
    --------
    >>> from scipy.sparse.csgraph import cs_graph_components
    >>> D = np.eye(4)
    >>> D[0,1] = D[1,0] = 1
    >>> cs_graph_components(D)
    (3, array([0, 0, 1, 2]))
    >>> from scipy.sparse import dok_matrix
    >>> cs_graph_components(dok_matrix(D))
    (3, array([0, 0, 1, 2]))

    """
    try:
        shape = x.shape
    except __HOLE__:
        raise ValueError(_msg0)

    if not ((len(x.shape) == 2) and (x.shape[0] == x.shape[1])):
        raise ValueError(_msg1 % x.shape)

    if isspmatrix(x):
        x = x.tocsr()
    else:
        x = csr_matrix(x)

    label = np.empty((shape[0],), dtype=x.indptr.dtype)

    n_comp = _cs_graph_components(shape[0], x.indptr, x.indices, label)

    return n_comp, label
AttributeError
dataset/ETHPy150Open scipy/scipy/scipy/sparse/csgraph/_components.py/cs_graph_components
def url_for(*a, **kw):
    """Generate external URLs with HTTPS (if configured)."""
    try:
        kw['_external'] = True
        if get_config('PREFERRED_URL_SCHEME'):
            kw['_scheme'] = get_config('PREFERRED_URL_SCHEME')
        return flask_url_for(*a, **kw)
    except __HOLE__:
        return None
RuntimeError
dataset/ETHPy150Open pudo/aleph/aleph/core.py/url_for
def call(self, *args, **kwargs):
    """Call the function inside the rpc thread."""
    with self._lock:
        if not self._running:
            raise RPCNotRunning()

        # Create an object to hold the result of the function call.
        result = _Result()

        # Since we will block while waiting for our function to get called, we
        # can cache our events per thread.
        try:
            event = self._events.event
        except __HOLE__:
            event = self._events.event = threading.Event()
        else:
            event.clear()

        msg = (event, result, args, kwargs)
        self._queue.put(msg)

    # Wait for the message to be processed.
    event.wait()

    assert result.result is not _NULL, "function failed to get called!"

    # Raise any exceptions we've received.
    if isinstance(result.result, BaseException):
        raise result.result
    else:
        return result.result
AttributeError
dataset/ETHPy150Open felix-lang/fbuild/lib/fbuild/rpc.py/RPC.call
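The AttributeError probe above lazily creates one Event per calling thread on a threading.local(). The same caching idiom in isolation:

import threading

_tls = threading.local()

def get_thread_event():
    # The first call in a thread raises AttributeError and creates the
    # Event; later calls reuse and clear it, as in RPC.call above.
    try:
        event = _tls.event
    except AttributeError:
        event = _tls.event = threading.Event()
    else:
        event.clear()
    return event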
def run(self):
    """Run the rpc thread."""
    self._started.set()
    self._running = True
    try:
        while True:
            msg = self._queue.get()
            try:
                # If we received a null value, then we're supposed to shut
                # down.
                if msg is _NULL:
                    sys.stdout.flush()
                    break
                self._process(msg)
            finally:
                # ... and let the queue know we finished.
                self._queue.task_done()
    except __HOLE__:
        # let the main thread know we got a SIGINT
        import _thread
        _thread.interrupt_main()
        raise
    except BaseException as err:
        raise
    finally:
        self._running = False
KeyboardInterrupt
dataset/ETHPy150Open felix-lang/fbuild/lib/fbuild/rpc.py/RPC.run