Columns:
  function: string (length 79 to 138k)
  label: string (20 classes)
  info: string (length 42 to 261)
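Each row below is a (function, label, info) triple: a Python function whose exception type is masked as __HOLE__, the exception class that fills the hole, and the path of the source file in the ETHPy150Open corpus. A minimal sketch of how rows with this schema could be consumed, assuming they have been loaded as dicts (the loading step and the `rows` name are hypothetical):

    from collections import Counter

    def label_distribution(rows):
        # Tally how often each exception class fills the __HOLE__ slot.
        # Each row is assumed to be a dict with the three string fields
        # described above: 'function', 'label' and 'info'.
        return Counter(row['label'] for row in rows).most_common()

    # Hypothetical usage once rows are loaded from the dump:
    # for label, count in label_distribution(rows):
    #     print('{0}\t{1}'.format(label, count))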
def test():
    import sys
    recursive = 0
    if sys.argv[1:] and sys.argv[1] == '-r':
        del sys.argv[1:2]
        recursive = 1
    try:
        if sys.argv[1:]:
            testall(sys.argv[1:], recursive, 1)
        else:
            testall(['.'], recursive, 1)
    except __HOLE__:
        sys.stderr.write('\n[Interrupted]\n')
        sys.exit(1)
KeyboardInterrupt
dataset/ETHPy150Open babble/babble/include/jython/Lib/sndhdr.py/test
def testall(list, recursive, toplevel):
    import sys
    import os
    for filename in list:
        if os.path.isdir(filename):
            print filename + '/:',
            if recursive or toplevel:
                print 'recursing down:'
                import glob
                names = glob.glob(os.path.join(filename, '*'))
                testall(names, recursive, 0)
            else:
                print '*** directory (use -r) ***'
        else:
            print filename + ':',
            sys.stdout.flush()
            try:
                print what(filename)
            except __HOLE__:
                print '*** not found ***'
IOError
dataset/ETHPy150Open babble/babble/include/jython/Lib/sndhdr.py/testall
def program(args, env, log_error):
    """
    The main program without error handling

    :param args: parsed args (argparse.Namespace)
    :type env: Environment
    :param log_error: error log function
    :return: status code
    """
    exit_status = ExitStatus.OK
    downloader = None
    show_traceback = args.debug or args.traceback

    try:
        if args.download:
            args.follow = True  # --download implies --follow.
            downloader = Downloader(
                output_file=args.output_file,
                progress_file=env.stderr,
                resume=args.download_resume
            )
            downloader.pre_request(args.headers)

        final_response = get_response(args, config_dir=env.config.directory)
        if args.all:
            responses = final_response.history + [final_response]
        else:
            responses = [final_response]

        for response in responses:

            if args.check_status or downloader:
                exit_status = get_exit_status(
                    http_status=response.status_code,
                    follow=args.follow
                )
                if not env.stdout_isatty and exit_status != ExitStatus.OK:
                    log_error(
                        'HTTP %s %s', response.raw.status, response.raw.reason,
                        level='warning'
                    )

            write_stream_kwargs = {
                'stream': build_output_stream(
                    args=args,
                    env=env,
                    request=response.request,
                    response=response,
                    output_options=(
                        args.output_options
                        if response is final_response
                        else args.output_options_others
                    )
                ),
                # NOTE: `env.stdout` will in fact be `stderr` with `--download`
                'outfile': env.stdout,
                'flush': env.stdout_isatty or args.stream
            }
            try:
                if env.is_windows and is_py3 and 'colors' in args.prettify:
                    write_stream_with_colors_win_py3(**write_stream_kwargs)
                else:
                    write_stream(**write_stream_kwargs)
            except __HOLE__ as e:
                if not show_traceback and e.errno == errno.EPIPE:
                    # Ignore broken pipes unless --traceback.
                    env.stderr.write('\n')
                else:
                    raise

        if downloader and exit_status == ExitStatus.OK:
            # Last response body download.
            download_stream, download_to = downloader.start(final_response)
            write_stream(
                stream=download_stream,
                outfile=download_to,
                flush=False,
            )
            downloader.finish()
            if downloader.interrupted:
                exit_status = ExitStatus.ERROR
                log_error('Incomplete download: size=%d; downloaded=%d' % (
                    downloader.status.total_size,
                    downloader.status.downloaded
                ))
        return exit_status

    finally:
        if downloader and not downloader.finished:
            downloader.failed()
        if (not isinstance(args, list) and args.output_file
                and args.output_file_specified):
            args.output_file.close()
IOError
dataset/ETHPy150Open jkbrzt/httpie/httpie/core.py/program
def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
    """
    The main function.

    Pre-process args, handle some special types of invocations,
    and run the main program with error handling.

    Return exit status code.
    """
    args = decode_args(args, env.stdin_encoding)
    plugin_manager.load_installed_plugins()

    def log_error(msg, *args, **kwargs):
        msg = msg % args
        level = kwargs.get('level', 'error')
        assert level in ['error', 'warning']
        env.stderr.write('\nhttp: %s: %s\n' % (level, msg))

    from httpie.cli import parser

    if env.config.default_options:
        args = env.config.default_options + args

    if custom_log_error:
        log_error = custom_log_error

    include_debug_info = '--debug' in args
    include_traceback = include_debug_info or '--traceback' in args

    if include_debug_info:
        print_debug_info(env)
        if args == ['--debug']:
            return ExitStatus.OK

    exit_status = ExitStatus.OK

    try:
        parsed_args = parser.parse_args(args=args, env=env)
    except KeyboardInterrupt:
        env.stderr.write('\n')
        if include_traceback:
            raise
        exit_status = ExitStatus.ERROR
    except SystemExit as e:
        if e.code != ExitStatus.OK:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR
    else:
        try:
            exit_status = program(
                args=parsed_args,
                env=env,
                log_error=log_error,
            )
        except __HOLE__:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR
        except SystemExit as e:
            if e.code != ExitStatus.OK:
                env.stderr.write('\n')
                if include_traceback:
                    raise
                exit_status = ExitStatus.ERROR
        except requests.Timeout:
            exit_status = ExitStatus.ERROR_TIMEOUT
            log_error('Request timed out (%ss).', parsed_args.timeout)
        except requests.TooManyRedirects:
            exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
            log_error('Too many redirects (--max-redirects=%s).',
                      parsed_args.max_redirects)
        except Exception as e:
            # TODO: Further distinction between expected and unexpected errors.
            msg = str(e)
            if hasattr(e, 'request'):
                request = e.request
                if hasattr(request, 'url'):
                    msg += ' while doing %s request to URL: %s' % (
                        request.method, request.url)
            log_error('%s: %s', type(e).__name__, msg)
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR

    return exit_status
KeyboardInterrupt
dataset/ETHPy150Open jkbrzt/httpie/httpie/core.py/main
def __new__(cls, ctypesArray, shape, dtype=float, strides=None, offset=0,
            order=None):
    # some magic (copied from numpy.ctypeslib) to make sure the ctypes array
    # has the array interface
    tp = type(ctypesArray)
    try:
        tp.__array_interface__
    except __HOLE__:
        ctypeslib.prep_array(tp)

    obj = numpy.ndarray.__new__(cls, shape, dtype, ctypesArray, offset,
                                strides, order)

    # keep track of the underlying storage
    # this may not be strictly necessary as the same info should be stored in .base
    obj.ctypesArray = ctypesArray

    return obj
AttributeError
dataset/ETHPy150Open aheadley/pynemap/shmem.py/shmarray.__new__
def parse_mapreduce_yaml(contents):
    """Parses mapreduce.yaml file contents.

    Args:
      contents: mapreduce.yaml file contents.

    Returns:
      MapReduceYaml object with all the data from original file.

    Raises:
      errors.BadYamlError: when contents is not a valid mapreduce.yaml file.
    """
    try:
        builder = yaml_object.ObjectBuilder(MapReduceYaml)
        handler = yaml_builder.BuilderHandler(builder)
        listener = yaml_listener.EventListener(handler)
        listener.Parse(contents)

        mr_info = handler.GetResults()
    except (__HOLE__, yaml_errors.EventError), e:
        raise errors.BadYamlError(e)

    if len(mr_info) < 1:
        raise errors.BadYamlError("No configs found in mapreduce.yaml")
    if len(mr_info) > 1:
        raise errors.MultipleDocumentsInMrYaml("Found %d YAML documents" %
                                               len(mr_info))

    jobs = mr_info[0]
    job_names = set(j.name for j in jobs.mapreduce)
    if len(jobs.mapreduce) != len(job_names):
        raise errors.BadYamlError(
            "Overlapping mapreduce names; names must be unique")

    return jobs
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/mapreduce/status.py/parse_mapreduce_yaml
def __getitem__(self, name):
    "Returns a BoundField with the given name."
    try:
        field = self.fields[name]
    except __HOLE__:
        raise KeyError('Key %r not found in Form' % name)
    return BoundField(self, field, name)
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/newforms/forms.py/BaseForm.__getitem__
def full_clean(self):
    """
    Cleans all of self.data and populates self.__errors and self.clean_data.
    """
    errors = ErrorDict()
    if not self.is_bound:  # Stop further processing.
        self.__errors = errors
        return
    self.clean_data = {}
    for name, field in self.fields.items():
        # value_from_datadict() gets the data from the dictionary.
        # Each widget type knows how to retrieve its own data, because some
        # widgets split data over several HTML fields.
        value = field.widget.value_from_datadict(self.data, self.add_prefix(name))
        try:
            value = field.clean(value)
            self.clean_data[name] = value
            if hasattr(self, 'clean_%s' % name):
                value = getattr(self, 'clean_%s' % name)()
                self.clean_data[name] = value
        except ValidationError, e:
            errors[name] = e.messages
    try:
        self.clean_data = self.clean()
    except __HOLE__, e:
        errors[NON_FIELD_ERRORS] = e.messages
    if errors:
        delattr(self, 'clean_data')
    self.__errors = errors
ValidationError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/newforms/forms.py/BaseForm.full_clean
def handle_bot_message(event, server):
    try:
        bot = server.slack.server.bots[event["bot_id"]]
    except __HOLE__:
        logger.debug("bot_message event {0} has no bot".format(event))
        return

    return "\n".join(run_hook(server.hooks, "bot_message", event, server))
KeyError
dataset/ETHPy150Open serverdensity/sdbot/limbo/handlers/__init__.py/handle_bot_message
def handle_message(event, server):
    subtype = event.get("subtype", "")
    if subtype == "message_changed":
        return

    if subtype == "bot_message":
        return handle_bot_message(event, server)

    try:
        msguser = server.slack.server.users[event["user"]]
    except __HOLE__:
        logger.debug("event {0} has no user".format(event))
        return

    return "\n".join(run_hook(server.hooks, "message", event, server))
KeyError
dataset/ETHPy150Open serverdensity/sdbot/limbo/handlers/__init__.py/handle_message
def collect(self):
    metrics = {}
    raw = {}

    data = self.getData()

    data = data.split('\n\x00')
    for d in data:
        matches = re.search("([A-Z]+)\s+:\s+(.*)$", d)
        if matches:
            value = matches.group(2).strip()
            raw[matches.group(1)] = matches.group(2).strip()
            vmatch = re.search("([0-9.]+)", value)
            if not vmatch:
                continue
            try:
                value = float(vmatch.group(1))
            except __HOLE__:
                continue
            metrics[matches.group(1)] = value

    for metric in self.config['metrics']:
        if metric not in metrics:
            continue

        metric_name = "%s.%s" % (raw['UPSNAME'], metric)

        value = metrics[metric]
        if metric in ['TONBATT', 'CUMONBATT', 'NUMXFERS']:
            value = self.derivative(metric_name, metrics[metric])

        self.publish(metric_name, value)

    return True
ValueError
dataset/ETHPy150Open BrightcoveOS/Diamond/src/collectors/apcupsd/apcupsd.py/ApcupsdCollector.collect
def test_unresolvable_specification_raises(self):
    try:
        Specification.from_object(None)
    except __HOLE__:
        pass
    else:
        assert False, "Failed to raise TypeError."
TypeError
dataset/ETHPy150Open marrow/script/test/test_schema.py/TestSpecification.test_unresolvable_specification_raises
def _request(self, url, params=None, files=None, method='GET', check_for_errors=True):
    url = self._resolve_url(url)
    self.log.info("{method} {url!r} with params {params!r}".format(method=method, url=url, params=params))
    if params is None:
        params = {}
    if self.access_token:
        params['access_token'] = self.access_token

    methods = {'GET': self.rsession.get,
               'POST': functools.partial(self.rsession.post, files=files),
               'PUT': self.rsession.put,
               'DELETE': self.rsession.delete}

    try:
        requester = methods[method.upper()]
    except __HOLE__:
        raise ValueError("Invalid/unsupported request method specified: {0}".format(method))

    raw = requester(url, params=params)
    if check_for_errors:
        self._handle_protocol_error(raw)

    # 204 = No content
    if raw.status_code in [204]:
        resp = []
    else:
        resp = raw.json()

    # TODO: We should parse the response to get the rate limit details and
    # update our rate limiter.
    # see: http://strava.github.io/api/#access

    # At this stage we should assume that request was successful and we should invoke
    # our rate limiter.  (Note that this may need to be reviewed; some failures may
    # also count toward the limit?)
    self.rate_limiter()

    return resp
KeyError
dataset/ETHPy150Open hozn/stravalib/stravalib/protocol.py/ApiV3._request
def _handle_protocol_error(self, response):
    """
    Parses the raw response from the server, raising a :class:`stravalib.exc.Fault`
    if the server returned an error.

    :param response: The response object.
    :raises Fault: If the response contains an error.
    """
    error_str = None
    try:
        json_response = response.json()
    except __HOLE__:
        pass
    else:
        if 'message' in json_response or 'errors' in json_response:
            error_str = '{0}: {1}'.format(json_response.get('message', 'Undefined error'),
                                          json_response.get('errors'))

    x = None
    if 400 <= response.status_code < 500:
        x = requests.exceptions.HTTPError('%s Client Error: %s [%s]' %
                                          (response.status_code, response.reason, error_str))
    elif 500 <= response.status_code < 600:
        x = requests.exceptions.HTTPError('%s Server Error: %s [%s]' %
                                          (response.status_code, response.reason, error_str))
    elif error_str:
        x = exc.Fault(error_str)

    if x is not None:
        raise x

    return response
ValueError
dataset/ETHPy150Open hozn/stravalib/stravalib/protocol.py/ApiV3._handle_protocol_error
def _extract_referenced_vars(self, s):
    """
    Utility method to find the referenced format variables in a string.
    (Assumes string.format() format vars.)

    :param s: The string that contains format variables. (e.g. "{foo}-text")
    :return: The list of referenced variable names. (e.g. ['foo'])
    :rtype: list
    """
    d = {}
    while True:
        try:
            s.format(**d)
        except __HOLE__ as exc:
            # exc.args[0] contains the name of the key that was not found;
            # 0 is used because it appears to work with all types of placeholders.
            d[exc.args[0]] = 0
        else:
            break
    return d.keys()
KeyError
dataset/ETHPy150Open hozn/stravalib/stravalib/protocol.py/ApiV3._extract_referenced_vars
def _get_settings_from_cmd_line(self):
    for arg in sys.argv[1:]:
        for lib_arg in self.COMMAND_LINE_ARGS:
            if arg.startswith(lib_arg):
                try:
                    return arg.split('=')[1]
                except __HOLE__:
                    return
    return
IndexError
dataset/ETHPy150Open drgarcia1986/simple-settings/simple_settings/core.py/LazySettings._get_settings_from_cmd_line
def __getattr__(self, attr):
    self.setup()
    try:
        return self._dict[attr]
    except __HOLE__:
        raise AttributeError('You do not set {} setting'.format(attr))
KeyError
dataset/ETHPy150Open drgarcia1986/simple-settings/simple_settings/core.py/LazySettings.__getattr__
def get_in(keys, coll, default=None, no_default=False):
    """ Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys.

    If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless
    ``no_default`` is specified, then it raises KeyError or IndexError.

    ``get_in`` is a generalization of ``operator.getitem`` for nested data
    structures such as dictionaries and lists.

    >>> transaction = {'name': 'Alice',
    ...                'purchase': {'items': ['Apple', 'Orange'],
    ...                             'costs': [0.50, 1.25]},
    ...                'credit card': '5555-1234-1234-1234'}
    >>> get_in(['purchase', 'items', 0], transaction)
    'Apple'
    >>> get_in(['name'], transaction)
    'Alice'
    >>> get_in(['purchase', 'total'], transaction)
    >>> get_in(['purchase', 'items', 'apple'], transaction)
    >>> get_in(['purchase', 'items', 10], transaction)
    >>> get_in(['purchase', 'total'], transaction, 0)
    0
    >>> get_in(['y'], {}, no_default=True)
    Traceback (most recent call last):
        ...
    KeyError: 'y'

    See Also:
        itertoolz.get
        operator.getitem
    """
    try:
        return reduce(operator.getitem, keys, coll)
    except (KeyError, __HOLE__, TypeError):
        if no_default:
            raise
        return default
IndexError
dataset/ETHPy150Open pytoolz/toolz/toolz/dicttoolz.py/get_in
def __get_raw_model(self, model_id):
    try:
        doc = views.model_definitions(self._db, key=model_id).rows[0]
        return doc.value
    except __HOLE__:
        raise backend_exceptions.ModelNotFound(model_id)
IndexError
dataset/ETHPy150Open spiral-project/daybed/daybed/backends/couchdb/__init__.py/CouchDBBackend.__get_raw_model
def __get_raw_record(self, model_id, record_id):
    key = u'-'.join((model_id, record_id))
    try:
        return views.records_all(self._db, key=key).rows[0].value
    except __HOLE__:
        raise backend_exceptions.RecordNotFound(
            u'(%s, %s)' % (model_id, record_id)
        )
IndexError
dataset/ETHPy150Open spiral-project/daybed/daybed/backends/couchdb/__init__.py/CouchDBBackend.__get_raw_record
def delete_model(self, model_id):
    """DELETE ALL THE THINGS"""

    # Delete the associated data if any.
    records = self.delete_records(model_id)

    try:
        doc = views.model_definitions(self._db, key=model_id).rows[0].value
    except __HOLE__:
        raise backend_exceptions.ModelNotFound(model_id)

    # Delete the model definition if it exists.
    self._db.delete(doc)

    return {"definition": doc["definition"],
            "permissions": doc["permissions"],
            "records": records}
IndexError
dataset/ETHPy150Open spiral-project/daybed/daybed/backends/couchdb/__init__.py/CouchDBBackend.delete_model
def __get_raw_token(self, credentials_id):
    try:
        return views.tokens(self._db, key=credentials_id).rows[0].value
    except __HOLE__:
        raise backend_exceptions.CredentialsNotFound(credentials_id)
IndexError
dataset/ETHPy150Open spiral-project/daybed/daybed/backends/couchdb/__init__.py/CouchDBBackend.__get_raw_token
def sshagent_run(command, use_sudo=False):
    """
    Helper function.
    Runs a command with SSH agent forwarding enabled.

    Note:: Fabric (and paramiko) can't forward your SSH agent.
    This helper uses your system's ssh to do so.
    """
    if use_sudo:
        command = 'sudo %s' % command

    cwd = env.get('cwd', '')
    if cwd:
        cwd = 'cd %s && ' % cwd
    real_command = cwd + command

    with settings(cwd=''):
        if env.port:
            port = env.port
            host = env.host
        else:
            try:
                # catch the port number to pass to ssh
                host, port = env.host.split(':')
            except __HOLE__:
                port = None
                host = env.host

        if port:
            local('ssh -p %s -A %s "%s"' % (port, host, real_command))
        else:
            local('ssh -A %s "%s"' % (env.host, real_command))
ValueError
dataset/ETHPy150Open bueda/ops/buedafab/operations.py/sshagent_run
def register_storage_use(storage_path, hostname):
    """Identify the id of this instance storage."""

    LOCK_PATH = os.path.join(CONF.instances_path, 'locks')

    @utils.synchronized('storage-registry-lock', external=True,
                        lock_path=LOCK_PATH)
    def do_register_storage_use(storage_path, hostname):
        # NOTE(mikal): this is required to determine if the instance storage is
        # shared, which is something that the image cache manager needs to
        # know. I can imagine other uses as well though.
        d = {}
        id_path = os.path.join(storage_path, 'compute_nodes')
        if os.path.exists(id_path):
            with open(id_path) as f:
                try:
                    d = jsonutils.loads(f.read())
                except __HOLE__:
                    LOG.warning(_LW("Cannot decode JSON from %(id_path)s"),
                                {"id_path": id_path})

        d[hostname] = time.time()

        with open(id_path, 'w') as f:
            f.write(jsonutils.dumps(d))

    return do_register_storage_use(storage_path, hostname)
ValueError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/storage_users.py/register_storage_use
def get_storage_users(storage_path):
    """Get a list of all the users of this storage path."""
    # See comments above method register_storage_use
    LOCK_PATH = os.path.join(CONF.instances_path, 'locks')

    @utils.synchronized('storage-registry-lock', external=True,
                        lock_path=LOCK_PATH)
    def do_get_storage_users(storage_path):
        d = {}
        id_path = os.path.join(storage_path, 'compute_nodes')
        if os.path.exists(id_path):
            with open(id_path) as f:
                try:
                    d = jsonutils.loads(f.read())
                except __HOLE__:
                    LOG.warning(_LW("Cannot decode JSON from %(id_path)s"),
                                {"id_path": id_path})

        recent_users = []
        for node in d:
            if time.time() - d[node] < TWENTY_FOUR_HOURS:
                recent_users.append(node)

        return recent_users

    return do_get_storage_users(storage_path)
ValueError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/virt/storage_users.py/get_storage_users
def lookup(self, key):
    try:
        return self.__bindings.get(key)
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open spinnaker/spinnaker/pylib/spinnaker/transform_old_config.py/Processor.lookup
def update_remaining_keys(self):
    stack = [('', self.__bindings.map)]
    while stack:
        prefix, root = stack.pop()
        for name, value in root.items():
            key = '{prefix}{child}'.format(prefix=prefix, child=name)
            if isinstance(value, dict):
                stack.append((key + '.', value))
            elif not key in self.__environ_keys:
                try:
                    self.update_in_place(key)
                except __HOLE__:
                    pass
ValueError
dataset/ETHPy150Open spinnaker/spinnaker/pylib/spinnaker/transform_old_config.py/Processor.update_remaining_keys
def datetime_factory(**kwargs):
    second = kwargs.get('second')
    if second is not None:
        f, i = math.modf(second)
        kwargs['second'] = int(i)
        kwargs['microsecond'] = int(f * 1000000)
    try:
        return datetime.datetime(**kwargs)
    except __HOLE__ as e:
        raise DateTimeRangeError(e.args[0])
ValueError
dataset/ETHPy150Open lugensa/scorched/scorched/dates.py/datetime_factory
def __lt__(self, other):
    try:
        other = other._dt_obj
    except __HOLE__:
        pass
    return self._dt_obj < other
AttributeError
dataset/ETHPy150Open lugensa/scorched/scorched/dates.py/solr_date.__lt__
def __eq__(self, other):
    try:
        other = other._dt_obj
    except __HOLE__:
        pass
    return self._dt_obj == other
AttributeError
dataset/ETHPy150Open lugensa/scorched/scorched/dates.py/solr_date.__eq__
def _get_plugins():
    """
    Get list of available host discovery plugin module names
    """
    plugins = []

    conf_file = os.path.expanduser('~/.smokercli.yaml')
    if not os.path.exists(conf_file):
        conf_file = CONFIG_FILE

    if not os.path.exists(conf_file):
        return plugins

    with open(conf_file) as f:
        config = yaml.safe_load(f)

    if config and 'plugin_paths' in config:
        paths = config['plugin_paths']
    else:
        raise Exception('Invalid config file')

    for path in paths:
        try:
            module = __import__(path)
        except __HOLE__:
            raise Exception('Invalid config file')
        toplevel = os.path.dirname(module.__file__)
        submodule = '/'.join(path.split('.')[1:])
        plugin_dir = os.path.join(toplevel, submodule, '*.py')
        modules = [os.path.basename(name)[:-3]
                   for name in glob.glob(plugin_dir)]
        modules.remove('__init__')
        plugins += ['%s.%s' % (path, name) for name in modules]

    return plugins
ImportError
dataset/ETHPy150Open gooddata/smoker/smoker/client/cli.py/_get_plugins
def _get_plugin_arguments(name):
    """
    Get list of host discovery plugin specific cmdline arguments

    :param name: plugin module name
    """
    try:
        plugin = __import__(name, globals(), locals(), ['HostDiscoveryPlugin'])
    except __HOLE__ as e:
        lg.error("Can't load module %s: %s" % (name, e))
        raise

    return plugin.HostDiscoveryPlugin.arguments
ImportError
dataset/ETHPy150Open gooddata/smoker/smoker/client/cli.py/_get_plugin_arguments
def _run_discovery_plugin(name, args):
    """
    Run the host discovery plugin

    :param name: plugin module name
    :param args: attribute namespace
    :return: discovered hosts list
    """
    try:
        this_plugin = __import__(name, globals(), locals(),
                                 ['HostDiscoveryPlugin'])
    except __HOLE__ as e:
        lg.error("Can't load module %s: %s" % (name, e))
        raise

    plugin = this_plugin.HostDiscoveryPlugin()
    return plugin.get_hosts(args)
ImportError
dataset/ETHPy150Open gooddata/smoker/smoker/client/cli.py/_run_discovery_plugin
def release(self, connection):
    try:
        self.__active.remove(connection)
    except __HOLE__:
        pass
    if len(self.__passive) < 2:
        self.__passive.append(connection)
    else:
        connection.close()
ValueError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/ConnectionPuddle.release
def submit(method, uri, body, headers):
    """ Submit one HTTP request.
    """
    for key, value in headers.items():
        del headers[key]
        headers[xstr(key)] = xstr(value)
    headers["Host"] = xstr(uri.host_port)
    if uri.user_info:
        credentials = uri.user_info.encode("UTF-8")
        value = "Basic " + b64encode(credentials).decode("ASCII")
        headers["Authorization"] = value
    try:
        http = ConnectionPool.acquire(uri.scheme, uri.host_port)
    except KeyError:
        raise ValueError("Unsupported URI scheme " + repr(uri.scheme))

    def send(reconnect=None):
        if reconnect:
            log.info("~ Reconnecting (%s)", reconnect)
            http.close()
            http.connect()
        if (method == "GET" or method == "DELETE") and not body:
            log.info("> %s %s", method, uri.string)
        elif body:
            log.info("> %s %s [%s]", method, uri.string, len(body))
        else:
            log.info("> %s %s [%s]", method, uri.string, 0)
        for key, value in headers.items():
            log.debug("> %s: %s", key, value)
        http.request(xstr(method), xstr(uri.absolute_path_reference),
                     body, headers)
        return http.getresponse(**getresponse_args)

    try:
        try:
            response = send()
        except BadStatusLine as err:
            if err.line == "''":
                response = send("peer closed connection")
            else:
                raise
        except ResponseNotReady:
            response = send("response not ready")
        except timeout:
            response = send("timeout")
        except error as err:
            if isinstance(err.args[0], tuple):
                code = err.args[0][0]
            else:
                code = err.args[0]
            if code in retry_codes:
                response = send(retry_codes[code])
            else:
                raise
    except (gaierror, herror) as err:
        raise NetworkAddressError(err.args[1], host_port=uri.host_port)
    except error as err:
        if isinstance(err.args[0], tuple):
            code, description = err.args[0]
        elif isinstance(err.args[0], int):
            code = err.args[0]
            try:
                description = strerror(code)
            except __HOLE__:
                description = None
        else:
            code, description = None, err.args[0]
        if code == 2:
            # Workaround for Linux bug with incorrect error message on
            # host resolution
            # ----
            # https://bugs.launchpad.net/ubuntu/+source/eglibc/+bug/1154599
            raise NetworkAddressError("Name or service not known",
                                      host_port=uri.host_port)
        else:
            raise SocketError(code, description, host_port=uri.host_port)
    else:
        return http, response
ValueError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/submit
@property
def reason(self):
    """ The reason phrase attached to this response.
    """
    if self.__reason:
        return self.__reason
    else:
        try:
            return responses[self.status_code]
        except __HOLE__:
            if self.status_code == 422:
                return "Unprocessable Entity"
            else:
                raise SystemError("HTTP status code %s is not known by the "
                                  "Python standard library" % self.status_code)
KeyError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/Response.reason
@property
def content_type(self):
    """ The type of content as provided by the `Content-Type` header field.
    """
    try:
        content_type = [
            _.strip()
            for _ in self.__response.getheader("Content-Type").split(";")
        ]
    except __HOLE__:
        return None
    return content_type[0]
AttributeError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/Response.content_type
@property
def encoding(self):
    """ The content character set encoding.
    """
    try:
        content_type = dict(
            _.strip().partition("=")[0::2]
            for _ in self.__response.getheader("Content-Type").split(";")
        )
    except __HOLE__:
        return default_encoding
    return content_type.get("charset", default_encoding)
AttributeError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/Response.encoding
@property
def filename(self):
    """ The suggested filename from the `Content-Disposition` header field
    or the final segment of the path name if no such header is available.
    """
    default_filename = self.uri.path.segments[-1]
    try:
        content_type = dict(
            _.strip().partition("=")[0::2]
            for _ in self.__response.getheader("Content-Disposition").split(";")
        )
    except __HOLE__:
        return default_filename
    return content_type.get("filename", default_filename)
AttributeError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/Response.filename
def chunks(self, chunk_size=None):
    """ Iterate through the content as chunks of text. Chunk sizes may vary
    slightly from that specified due to multi-byte characters. If no chunk
    size is specified, a default of 4096 is used.
    """
    try:
        if not chunk_size:
            chunk_size = self.chunk_size
        pending = bytearray()
        data = True
        while data:
            data = self.read(chunk_size)
            pending.extend(data)
            decoded = None
            while data and not decoded:
                try:
                    decoded = pending.decode(self.encoding)
                except __HOLE__:
                    data = self.read(1)
                    pending.extend(data)
                else:
                    del pending[:]
                    yield decoded
    finally:
        self.close()
UnicodeDecodeError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/TextResponse.chunks
@property
def __content(self):
    try:
        from bs4 import BeautifulSoup
    except __HOLE__:
        return super(HTMLResponse, self).content
    else:
        return BeautifulSoup(super(HTMLResponse, self).content)
ImportError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/HTMLResponse.__content
def __iter__(self):
    """ Iterate through the content as individual JSON values.
    """
    try:
        from jsonstream import JSONStream
    except __HOLE__:
        from ..jsonstream import JSONStream
    return iter(JSONStream(self.chunks()))
ImportError
dataset/ETHPy150Open nigelsmall/httpstream/httpstream/http.py/JSONResponse.__iter__
def handleNewGraphRequestApply(self, graph):
    '''Handles a graph submission request and closes the given ticket
    according to the result of the process.
    '''
    prevTimer = time.time()
    nodes = self.dispatchTree.registerNewGraph(graph)

    logging.getLogger('main.dispatcher').info("%.2f ms --> graph registered" % ((time.time() - prevTimer) * 1000))
    prevTimer = time.time()

    # handles the case of post job with paused status
    for node in nodes:
        try:
            if node.tags['paused'] == 'true' or node.tags['paused'] == True:
                node.setPaused(True)
        except __HOLE__:
            continue

    logging.getLogger('main.dispatcher').info("%.2f ms --> jobs set in pause if needed" % ((time.time() - prevTimer) * 1000))
    prevTimer = time.time()

    logging.getLogger('main.dispatcher').info('Added graph "%s" to the model.' % graph['name'])
    return nodes
KeyError
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/dispatcher.py/Dispatcher.handleNewGraphRequestApply
def updateCommandApply(self, dct):
    '''
    Called from a RN with a json desc of a command (ie rendernode info, command info etc).
    Raise an execption to tell caller to send a HTTP404 response to RN, if not error a HTTP200 will be send instead
    '''
    log = logging.getLogger('main.dispatcher')

    commandId = dct['id']
    renderNodeName = dct['renderNodeName']

    try:
        command = self.dispatchTree.commands[commandId]
    except __HOLE__:
        raise KeyError("Command not found: %d" % commandId)

    if not command.renderNode:
        # souldn't we reassign the command to the rn??
        raise KeyError("Command %d (%d) is no longer registered on rendernode %s" % (commandId, int(dct['status']), renderNodeName))
    elif command.renderNode.name != renderNodeName:
        # in this case, kill the command running on command.renderNode.name
        # rn = command.renderNode
        # rn.clearAssignment(command)
        # rn.request("DELETE", "/commands/" + str(commandId) + "/")
        log.warning("The emitting RN %s is different from the RN assigned to the command in pulimodel: %s." % (renderNodeName, command.renderNode.name))
        raise KeyError("Command %d is running on a different rendernode (%s) than the one in puli's model (%s)." % (commandId, renderNodeName, command.renderNode.name))

    rn = command.renderNode
    rn.lastAliveTime = max(time.time(), rn.lastAliveTime)

    # if command is no more in the rn's list, it means the rn was reported as timeout or asynchronously removed from RN
    if commandId not in rn.commands:
        if len(rn.commands) == 0 and command.status is not enums.CMD_CANCELED:
            # in this case, re-add the command to the list of the rendernode
            rn.commands[commandId] = command
            # we should re-reserve the lic
            rn.reserveLicense(command, self.licenseManager)
            log.warning("re-assigning command %d on %s. (TIMEOUT?)" % (commandId, rn.name))

        # Command is already remove from RN at this point (it happens when receiving a CANCEL order from external GUI)
        # else:
        #     # The command has been cancelled on the dispatcher but update from RN only arrives now
        #     log.warning("Status update for %d (%d) from %s but command is currently assigned." % (commandId, int(dct['status']), renderNodeName))

    if "status" in dct:
        command.status = int(dct['status'])

    if "completion" in dct and command.status == enums.CMD_RUNNING:
        command.completion = float(dct['completion'])

    command.message = dct['message']

    if "validatorMessage" in dct:
        command.validatorMessage = dct['validatorMessage']
        command.errorInfos = dct['errorInfos']
        if command.validatorMessage:
            command.status = enums.CMD_ERROR

    # Stats info received and not none. Means we need to update it on the command.
    # If stats received is none, no change on the worker, we do not update the command.
    if "stats" in dct and dct["stats"] is not None:
        command.stats = dct["stats"]
KeyError
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/dispatcher.py/Dispatcher.updateCommandApply
def __getattr__(self, name):
    try:
        return object.__getattribute__(self, name)
    except __HOLE__:
        try:
            return self.get(name)
        except:
            raise AttributeError(name)
AttributeError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/core/configuration.py/ConfigurationObject.__getattr__
def check_enough_semaphores():
    """Check that the system supports enough semaphores to run the test."""
    # minimum number of semaphores available according to POSIX
    nsems_min = 256
    try:
        nsems = os.sysconf("SC_SEM_NSEMS_MAX")
    except (AttributeError, __HOLE__):
        # sysconf not available or setting not available
        return
    if nsems == -1 or nsems >= nsems_min:
        return
    raise unittest.SkipTest("The OS doesn't support enough semaphores "
                            "to run the test (required: %d)." % nsems_min)

#
# Creates a wrapper for a function which records the time it takes to finish
#
ValueError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/check_enough_semaphores
def assertReturnsIfImplemented(self, value, func, *args):
    try:
        res = func(*args)
    except __HOLE__:
        pass
    else:
        return self.assertEqual(value, res)

# For the sanity of Windows users, rather than crashing or freezing in
# multiple ways.
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/BaseTestCase.assertReturnsIfImplemented
def get_value(self):
    try:
        return self.get_value()
    except AttributeError:
        try:
            return self._Semaphore__value
        except __HOLE__:
            try:
                return self._value
            except AttributeError:
                raise NotImplementedError

#
# Testcases
#
AttributeError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/get_value
def test_cpu_count(self):
    try:
        cpus = multiprocessing.cpu_count()
    except __HOLE__:
        cpus = 1
    self.assertTrue(type(cpus) is int)
    self.assertTrue(cpus >= 1)
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestProcess.test_cpu_count
def test_qsize(self):
    q = self.Queue()
    try:
        self.assertEqual(q.qsize(), 0)
    except __HOLE__:
        return
    q.put(1)
    self.assertEqual(q.qsize(), 1)
    q.put(5)
    self.assertEqual(q.qsize(), 2)
    q.get()
    self.assertEqual(q.qsize(), 1)
    q.get()
    self.assertEqual(q.qsize(), 0)
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestQueue.test_qsize
def check_invariant(self, cond):
    # this is only supposed to succeed when there are no sleepers
    if self.TYPE == 'processes':
        try:
            sleepers = (cond._sleeping_count.get_value() -
                        cond._woken_count.get_value())
            self.assertEqual(sleepers, 0)
            self.assertEqual(cond._wait_semaphore.get_value(), 0)
        except __HOLE__:
            pass
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestCondition.check_invariant
def test_notify_all(self):
    cond = self.Condition()
    sleeping = self.Semaphore(0)
    woken = self.Semaphore(0)

    # start some threads/processes which will timeout
    for i in range(3):
        p = self.Process(target=self.f,
                         args=(cond, sleeping, woken, TIMEOUT1))
        p.daemon = True
        p.start()

        t = threading.Thread(target=self.f,
                             args=(cond, sleeping, woken, TIMEOUT1))
        t.daemon = True
        t.start()

    # wait for them all to sleep
    for i in range(6):
        sleeping.acquire()

    # check they have all timed out
    for i in range(6):
        woken.acquire()
    self.assertReturnsIfImplemented(0, get_value, woken)

    # check state is not mucked up
    self.check_invariant(cond)

    # start some more threads/processes
    for i in range(3):
        p = self.Process(target=self.f, args=(cond, sleeping, woken))
        p.daemon = True
        p.start()

        t = threading.Thread(target=self.f, args=(cond, sleeping, woken))
        t.daemon = True
        t.start()

    # wait for them to all sleep
    for i in range(6):
        sleeping.acquire()

    # check no process/thread has woken up
    time.sleep(DELTA)
    self.assertReturnsIfImplemented(0, get_value, woken)

    # wake them all up
    cond.acquire()
    cond.notify_all()
    cond.release()

    # check they have all woken
    for i in range(10):
        try:
            if get_value(woken) == 6:
                break
        except __HOLE__:
            break
        time.sleep(DELTA)
    self.assertReturnsIfImplemented(6, get_value, woken)

    # check state is not mucked up
    self.check_invariant(cond)
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestCondition.test_notify_all
@classmethod
def multipass(cls, barrier, results, n):
    m = barrier.parties
    assert m == cls.N
    for i in range(n):
        results[0].append(True)
        assert len(results[1]) == i * m
        barrier.wait()
        results[1].append(True)
        assert len(results[0]) == (i + 1) * m
        barrier.wait()
    try:
        assert barrier.n_waiting == 0
    except __HOLE__:
        pass
    assert not barrier.broken
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestBarrier.multipass
@classmethod
def _test_abort_f(cls, barrier, results1, results2):
    try:
        i = barrier.wait()
        if i == cls.N//2:
            raise RuntimeError
        barrier.wait()
        results1.append(True)
    except threading.BrokenBarrierError:
        results2.append(True)
    except __HOLE__:
        barrier.abort()
RuntimeError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestBarrier._test_abort_f
@classmethod
def _test_abort_and_reset_f(cls, barrier, barrier2,
                            results1, results2, results3):
    try:
        i = barrier.wait()
        if i == cls.N//2:
            raise RuntimeError
        barrier.wait()
        results1.append(True)
    except threading.BrokenBarrierError:
        results2.append(True)
    except __HOLE__:
        barrier.abort()
    # Synchronize and reset the barrier. Must synchronize first so
    # that everyone has left it when we reset, and after so that no
    # one enters it before the reset.
    if barrier2.wait() == cls.N//2:
        barrier.reset()
    barrier2.wait()
    barrier.wait()
    results3.append(True)
RuntimeError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestBarrier._test_abort_and_reset_f
def test_rapid_restart(self):
    authkey = os.urandom(32)
    manager = QueueManager(
        address=(test.support.HOST, 0), authkey=authkey, serializer=SERIALIZER)
    srvr = manager.get_server()
    addr = srvr.address
    # Close the connection.Listener socket which gets opened as a part
    # of manager.get_server(). It's not needed for the test.
    srvr.listener.close()
    manager.start()

    p = self.Process(target=self._putter, args=(manager.address, authkey))
    p.daemon = True
    p.start()
    queue = manager.get_queue()
    self.assertEqual(queue.get(), 'hello world')
    del queue
    manager.shutdown()
    manager = QueueManager(
        address=addr, authkey=authkey, serializer=SERIALIZER)
    try:
        manager.start()
    except __HOLE__ as e:
        if e.errno != errno.EADDRINUSE:
            raise
        # Retry after some time, in case the old socket was lingering
        # (sporadic failure on buildbots)
        time.sleep(1.0)
        manager = QueueManager(
            address=addr, authkey=authkey, serializer=SERIALIZER)
    manager.shutdown()

#
#
#
OSError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestManagerRestart.test_rapid_restart
@classmethod
def _is_fd_assigned(cls, fd):
    try:
        os.fstat(fd)
    except __HOLE__ as e:
        if e.errno == errno.EBADF:
            return False
        raise
    else:
        return True
OSError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/_TestConnection._is_fd_assigned
def setUpModule():
    if sys.platform.startswith("linux"):
        try:
            lock = multiprocessing.RLock()
        except __HOLE__:
            raise unittest.SkipTest("OSError raises on RLock creation, "
                                    "see issue 3111!")
    check_enough_semaphores()
    util.get_temp_dir()     # creates temp directory for use by all processes
    multiprocessing.get_logger().setLevel(LOG_LEVEL)
OSError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_multiprocessing.py/setUpModule
def add_user_picture(orig_filename, new_prefix, up_dir, image_file):
    import time
    import os
    import tempfile

    new_filename = "{0}-{1}.jpg".format(new_prefix, time.time())
    full_path = os.path.join(up_dir, new_filename)

    import hashlib
    skip_seek = False

    try:
        image_file.seek(0)
    except __HOLE__:
        # we want a file, so if this isn't a file, make one.
        tmp_f = tempfile.TemporaryFile()
        # urllib2.urlopen object passed, read is implemented
        # or maybe not, and then just assume the string is the binary data
        # and ready to be written directly
        if hasattr(image_file, 'read'):
            # im_b for "image binary"
            im_b = image_file.read()
        else:
            im_b = image_file
        tmp_f.write(im_b)
        image_file = tmp_f
        image_file.seek(0)

    sha = hashlib.sha1()
    sha.update(image_file.read())
    sha = sha.hexdigest()

    if not skip_seek:
        image_file.seek(0)

    f = image_file

    from PIL import Image
    im = Image.open(f)
    im.thumbnail((50, 50), Image.ANTIALIAS)
    im.save(full_path, 'JPEG')

    from raggregate.models.userpicture import UserPicture
    up = UserPicture(orig_filename, new_filename, sha, 0)
    dbsession.add(up)
    dbsession.flush()

    return up.id
AttributeError
dataset/ETHPy150Open sjuxax/raggregate/raggregate/queries/users.py/add_user_picture
def _parseURL(url):
    try:
        url = urinorm.urinorm(url)
    except __HOLE__:
        return None
    proto, netloc, path, params, query, frag = urlparse(url)
    if not path:
        # Python <2.4 does not parse URLs with no path properly
        if not query and '?' in netloc:
            netloc, query = netloc.split('?', 1)
        path = '/'

    path = urlunparse(('', '', path, params, query, frag))

    if ':' in netloc:
        try:
            host, port = netloc.split(':')
        except ValueError:
            return None

        if not re.match(r'\d+$', port):
            return None
    else:
        host = netloc
        port = ''

    host = host.lower()
    if not host_segment_re.match(host):
        return None

    return proto, host, port, path
ValueError
dataset/ETHPy150Open openid/python-openid/openid/server/trustroot.py/_parseURL
def get(self, cmd, args, options):
    '''Get content blob.

Usage: exo [options] content get <model> <id> <file>'''
    pop = options['pop']
    exoconfig = options['config']
    ExoException = options['exception']
    key = exoconfig.config['vendortoken']

    # This should be in the pyonep.provision class. It is not.
    # This should loop through chunks, not fully pull the file into RAM.
    path = '/provision/manage/content/' + args['<model>'] + '/' + args['<id>'] + '?download=true'
    headers = {"Accept": "*"}
    mlist = pop._request(path, key, '', 'GET', False, headers)
    try:
        if args['<file>'] == '-':
            sys.stdout.write(mlist.body)
        else:
            with open(args['<file>'], 'wb') as f:
                #print('debug', mlist.response.content)
                f.write(mlist.response.content)
    except __HOLE__ as ex:
        raise ExoException("Could not write {0}".format(args['<file>']))
IOError
dataset/ETHPy150Open exosite/exoline/exoline/plugins/provision.py/Plugin.content.get
def put(self, cmd, args, options):
    '''Upload content for a model.

Usage: exo [options] content put <model> <id> <file> [--mime=type] [--meta=meta] [--protected=<bool>]

Command options:
    --mime=type         Set the mime type of the uploaded data. Will autodetect if omitted
    --protected=<bool>  Set to true to make this content unavailable to other model serial numbers [default: false]'''
    pop = options['pop']
    exoconfig = options['config']
    ExoException = options['exception']
    ProvisionException = options['provision-exception']
    key = exoconfig.config['vendortoken']

    # if not exist, create.
    try:
        mlist = pop.content_info(key, args['<model>'], args['<id>'])
    except ProvisionException as pe:
        if pe.response.status() == 404:
            meta = args['--meta']
            if meta is None:
                meta = ''
            mlist = pop.content_create(
                key,
                args['<model>'],
                args['<id>'],
                meta,
                protected=args['--protected'] == 'true')
        else:
            raise

    # whats the max size? Are we going to be ok with the pull it
    # all into RAM method? Short term, yes. Long term, No.
    data = ''
    try:
        if args['<file>'] == '-':
            data = sys.stdin.read()
        else:
            with open(args['<file>']) as f:
                data = f.read()
    except __HOLE__ as ex:
        raise ExoException("Could not read {0}".format(args['<file>']))

    if args['--mime'] is None:
        url = pathname2url(args['<file>'])
        mime, encoding = mimetypes.guess_type(url)
    else:
        mime = args['--mime']

    mlist = pop.content_upload(key, args['<model>'], args['<id>'], data, mime)
    if len(mlist.body.strip()) > 0:
        print(mlist.body)

########################
IOError
dataset/ETHPy150Open exosite/exoline/exoline/plugins/provision.py/Plugin.content.put
def add(self, cmd, args, options):
    '''Add an individual serial number to a model.

Usage: exo [options] sn add <model> (--file=<file> | <sn>...)'''
    pop = options['pop']
    exoconfig = options['config']
    ExoException = options['exception']
    key = exoconfig.config['vendortoken']
    if args['--file'] is None:
        mlist = pop.serialnumber_add_batch(key, args['<model>'], args['<sn>'])
        print(mlist.body)
    else:
        # This should chunk the file from disk to socket.
        data = ''
        try:
            if args['--file'] == '-':
                data = sys.stdin.read()
            else:
                with open(args['--file']) as f:
                    data = f.read()
        except __HOLE__ as ex:
            raise ExoException("Could not read {0}".format(args['--file']))

        # This should be in the pyonep.provision class. It is not.
        path = '/provision/manage/model/' + args['<model>'] + '/'
        headers = {"Content-Type": "text/csv; charset=utf-8"}
        mlist = pop._request(path, key, data, 'POST', False, headers)
        print(mlist.body)
IOError
dataset/ETHPy150Open exosite/exoline/exoline/plugins/provision.py/Plugin.sn.add
def delete(self, cmd, args, options):
    '''Delete an individual serial number from a model.

Usage: exo [options] sn delete <model> (--file=<file> | <sn>...)'''
    pop = options['pop']
    exoconfig = options['config']
    ExoException = options['exception']
    key = exoconfig.config['vendortoken']
    if args['--file'] is None:
        mlist = pop.serialnumber_remove_batch(key, args['<model>'], args['<sn>'])
        print(mlist.body.strip())
    else:
        # ??? should this raise or trim columns beyond the first???
        # This should chunk the file from disk to socket.
        data = ''
        try:
            if args['--file'] == '-':
                data = sys.stdin.read()
            else:
                with open(args['--file']) as f:
                    data = f.read()
        except __HOLE__ as ex:
            raise ExoException("Could not read {0}".format(args['--file']))

        # This should be in the pyonep.provision class. It is not.
        path = '/provision/manage/model/' + args['<model>'] + '/'
        headers = {"Content-Type": "text/csv; charset=utf-8"}
        mlist = pop._request(path, key, data, 'DELETE', False, headers)
        print(mlist.body)
IOError
dataset/ETHPy150Open exosite/exoline/exoline/plugins/provision.py/Plugin.sn.delete
def run(self, cmd, args, options):
    cik = options['cik']
    rpc = options['rpc']
    ProvisionException = options['provision-exception']
    ExoException = options['exception']
    ExoUtilities = options['utils']
    exoconfig = options['config']
    options['pop'] = options['provision']

    err = "This command requires 'vendor' and 'vendortoken' in your Exoline config. See http://github.com/exosite/exoline#provisioning"
    if 'vendortoken' not in exoconfig.config or exoconfig.config['vendortoken'] is None:
        raise ExoException(err)
    if 'vendor' not in exoconfig.config or exoconfig.config['vendor'] is None:
        raise ExoException(err)

    argv = [cmd, args['<command>']] + args['<args>']
    methodInfo = self.digMethod(argv, self)
    if len(methodInfo) == 3:
        meth, obj, name = methodInfo
        if meth is not None and obj is not None:
            if args['<command>'] in obj.subcommands:
                doc = meth.__doc__
                try:
                    args_cmd = docopt(doc, argv=argv)
                except __HOLE__ as ex:
                    return ExoUtilities.handleSystemExit(ex)
                return meth(obj(), name, args_cmd, options)
            else:
                raise ExoException('Unknown command {0}. Try "exo --help"'.format(args['<command>']))
        else:
            raise ExoException("Could not find requested sub command {0}".format(args['<command>']))
    else:
        # did not find method. Detect help request manually or fail
        if ('-h' in argv or '--help' in argv):
            cls = self.findSubcommandClass(argv, self)
            print(cls.doc())
            return 0
        else:
            raise ExoException("Could not find requested sub command {0}".format(args['<command>']))

# vim: set ai noet sw=4 ts=4 :
SystemExit
dataset/ETHPy150Open exosite/exoline/exoline/plugins/provision.py/Plugin.run
def main(self):
    try:
        engine, metadata = sql.get_connection(self.args.connection_string)
    except __HOLE__:
        raise ImportError('You don\'t appear to have the necessary database backend installed for connection string you\'re trying to use.. Available backends include:\n\nPostgresql:\tpip install psycopg2\nMySQL:\t\tpip install MySQL-python\n\nFor details on connection strings and other backends, please see the SQLAlchemy documentation on dialects at: \n\nhttp://www.sqlalchemy.org/docs/dialects/\n\n')

    conn = engine.connect()

    if self.args.query:
        query = self.args.query.strip()
    else:
        query = ""

        for line in self.args.file:
            query += line

    # Must escape '%'.
    # @see https://github.com/onyxfish/csvkit/issues/440
    # @see https://bitbucket.org/zzzeek/sqlalchemy/commits/5bc1f17cb53248e7cea609693a3b2a9bb702545b
    rows = conn.execute(query.replace('%', '%%'))
    output = agate.csv.writer(self.output_file, **self.writer_kwargs)

    if rows.returns_rows:
        if not self.args.no_header_row:
            output.writerow(rows._metadata.keys)

        for row in rows:
            output.writerow(row)

    conn.close()
ImportError
dataset/ETHPy150Open wireservice/csvkit/csvkit/utilities/sql2csv.py/SQL2CSV.main
@cache_control(must_revalidate=True, max_age=3600, private=True)
def page_detail(request, slug, template_name='wiki/page_detail.html'):
    try:
        page = Page.objects.filter(slug=slug).get()
    except __HOLE__:
        page = Page(title=title(slug), slug=slug)

    if not page.has_read_permission(request.user):
        if request.user.is_anonymous():
            return HttpResponseRedirect('%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
        else:
            return render_to_response('wiki/permission_denied.html', context_instance=RequestContext(request))

    return render_to_response(template_name, {'page': page, 'page_exists': bool(page.pk)}, RequestContext(request))
ObjectDoesNotExist
dataset/ETHPy150Open kylef-archive/lithium/lithium/wiki/views.py/page_detail
def page_edit(request, slug):
    try:
        page = Page.objects.filter(slug=slug).get()
    except __HOLE__:
        page = Page(title=title(slug), slug=slug)

    if not page.user_can_edit(request.user):
        if request.user.is_anonymous():
            return HttpResponseRedirect('%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
        else:
            return render_to_response('wiki/permission_denied.html', context_instance=RequestContext(request))

    if request.method == 'POST':
        form = EditForm(request, page, request.POST)

        if form.is_valid():
            form.save()
            return HttpResponseRedirect(form.instance.get_absolute_url())
    else:
        if page.content:
            form = EditForm(request, page, {'text': page.content})
        else:
            form = EditForm(request, page)

    template_context = {
        'page': page,
        'form': form,
    }

    return render_to_response('wiki/page_edit.html', template_context, RequestContext(request))
ObjectDoesNotExist
dataset/ETHPy150Open kylef-archive/lithium/lithium/wiki/views.py/page_edit
def page_history(request, slug, **kwargs):
    try:
        page = Page.objects.filter(slug=slug).get()
    except __HOLE__:
        raise Http404, "No page found matching the query"

    if not page.has_read_permission(request.user):
        if request.user.is_anonymous():
            return HttpResponseRedirect('%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
        else:
            return render_to_response('wiki/permission_denied.html', context_instance=RequestContext(request))

    kwargs['queryset'] = Revision.objects.filter(page=page)
    kwargs['extra_context'] = {'page': page}
    kwargs['template_name'] = 'wiki/page_history.html'
    kwargs['template_object_name'] = 'revision'
    kwargs['paginate_by'] = settings.WIKI_HISTORY_PAGINATE_BY

    return object_list(request, **kwargs)
ObjectDoesNotExist
dataset/ETHPy150Open kylef-archive/lithium/lithium/wiki/views.py/page_history
def revision_detail(request, slug, pk):
    try:
        revision = Revision.objects.filter(page__slug=slug, pk=pk).get()
    except __HOLE__:
        raise Http404, "No revision found matching the query"

    if not revision.page.has_read_permission(request.user):
        if request.user.is_anonymous():
            return HttpResponseRedirect('%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
        else:
            return render_to_response('wiki/permission_denied.html', context_instance=RequestContext(request))

    return render_to_response('wiki/revision_detail.html', {'revision': revision,}, RequestContext(request))
ObjectDoesNotExist
dataset/ETHPy150Open kylef-archive/lithium/lithium/wiki/views.py/revision_detail
def revision_revert(request, slug, pk):
    try:
        revision = Revision.objects.filter(page__slug=slug, pk=pk).get()
    except __HOLE__:
        raise Http404, "No revision found matching the query"

    if not revision.page.user_can_edit(request.user):
        if request.user.is_anonymous():
            return HttpResponseRedirect('%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
        else:
            return render_to_response('wiki/permission_denied.html', context_instance=RequestContext(request))

    r = Revision(text=revision.text, page=revision.page)
    r.comment = _(u'Reverted to revision %(revision)s by %(author)s') % {'revision': revision.pk, 'author': revision.author or revision.author_ip}

    if request.user.is_anonymous():
        r.author_ip = request.META['REMOTE_ADDR']
    else:
        r.author = request.user

    r.save()

    return HttpResponseRedirect(r.get_absolute_url())
ObjectDoesNotExist
dataset/ETHPy150Open kylef-archive/lithium/lithium/wiki/views.py/revision_revert
def revision_diff(request, slug):
    if request.GET.get('a', '').isdigit() and request.GET.get('b', '').isdigit():
        a = int(request.GET.get('a'))
        b = int(request.GET.get('b'))
    else:
        return HttpResponseBadRequest(u'You must select two revisions.')

    try:
        page = Page.objects.filter(slug=slug).get()
        revisionA = Revision.objects.filter(page=page, pk=a).get()
        revisionB = Revision.objects.filter(page=page, pk=b).get()
    except __HOLE__:
        raise Http404, "No revision found matching the query"

    if not page.has_read_permission(request.user):
        if request.user.is_anonymous():
            return HttpResponseRedirect('%s?%s=%s' % (settings.LOGIN_URL, REDIRECT_FIELD_NAME, urlquote(request.get_full_path())))
        else:
            return render_to_response('wiki/permission_denied.html', context_instance=RequestContext(request))

    if revisionA.content != revisionB.content:
        d = difflib.unified_diff(
            revisionA.content.splitlines(),
            revisionB.content.splitlines(),
            'Revision %s' % revisionA.pk,
            'Revision %s' % revisionB.pk,
            lineterm=''
        )
        difftext = '\n'.join(d)
        difftext = pygments_diff(difftext)
    else:
        difftext = _('No changes were made between this two revisions.')

    template_context = {
        'page': page,
        'revisionA': revisionA,
        'revisionB': revisionB,
        'difftext': difftext,
    }

    return render_to_response('wiki/revision_diff.html', template_context, RequestContext(request))
ObjectDoesNotExist
dataset/ETHPy150Open kylef-archive/lithium/lithium/wiki/views.py/revision_diff
def testFindEntries(self):
    """
    Given a list of users and a starting point, entries should generate
    a list of all entries for each user from that time until now.
    """
    start = check_entries.Command().find_start()
    if start.day == 1:
        start += relativedelta(days=1)
    all_users = check_entries.Command().find_users()
    entries = check_entries.Command().find_entries(all_users, start)
    # Determine the number of days checked
    today = timezone.now() - \
        relativedelta(hour=0, minute=0, second=0, microsecond=0)
    diff = today - start
    days_checked = diff.days
    total_entries = 0
    while True:
        try:
            user_entries = next(entries)
            for entry in user_entries:
                total_entries += 1
        except __HOLE__:
            # Verify that every entry from the start point was returned
            expected_total = days_checked * len(self.all_users)
            self.assertEqual(total_entries, expected_total)
            return
StopIteration
dataset/ETHPy150Open caktus/django-timepiece/timepiece/tests/test_management.py/CheckEntries.testFindEntries
def testCheckEntry(self):
    """
    Given lists of entries from users, check_entry should return all
    overlapping entries.
    """
    start = check_entries.Command().find_start()
    all_users = check_entries.Command().find_users()
    entries = check_entries.Command().find_entries(all_users, start)
    total_overlaps = 0
    # make some bad entries
    num_days = 5
    self.make_entry_bulk(self.all_users, num_days)
    while True:
        try:
            user_entries = next(entries)
            user_overlaps = check_entries.Command().check_entry(
                user_entries, verbosity=0)
            total_overlaps += user_overlaps
        except __HOLE__:
            self.assertEqual(
                total_overlaps, num_days * len(self.all_users))
            return
StopIteration
dataset/ETHPy150Open caktus/django-timepiece/timepiece/tests/test_management.py/CheckEntries.testCheckEntry
def _get_authz_info(self):
    try:
        mtime = os.path.getmtime(self.authz_file)
    except __HOLE__ as e:
        if self._authz is not None:
            self.log.error('Error accessing authz file: %s',
                           exception_to_unicode(e))
        self._mtime = mtime = 0
        self._authz = None
        self._users = set()
    if mtime != self._mtime:
        self._mtime = mtime
        rm = RepositoryManager(self.env)
        modules = set(repos.reponame
                      for repos in rm.get_real_repositories())
        if '' in modules and self.authz_module_name:
            modules.add(self.authz_module_name)
        modules.add('')
        self.log.info('Parsing authz file: %s', self.authz_file)
        try:
            self._authz = parse(read_file(self.authz_file), modules)
            self._users = set(user for paths in self._authz.itervalues()
                              for path in paths.itervalues()
                              for user, result in path.iteritems()
                              if result)
        except Exception as e:
            self._authz = None
            self._users = set()
            self.log.error('Error parsing authz file: %s',
                           exception_to_unicode(e))
    return self._authz, self._users
OSError
dataset/ETHPy150Open edgewall/trac/trac/versioncontrol/svn_authz.py/AuthzSourcePolicy._get_authz_info
def labeled_neighbor(self, obj, judge, back=False):
    """Returns the id of the "closest" labeled object to the one provided.

    Notes:
        - By "closest", it's mean the distance of the id numbers.
        - Works both for TextSegment and for IEDocument
        - If back is True, it's picked the previous item, otherwise, the next one.
        - It's assumed that the obj provided HAS labeled evidence already. If not,
          it's not possible to determine what is next. In such case, the id of the
          last labeled object will be returned.
        - If asking "next" and obj is currently the last, his id will be returned.
        - If asking "prev" and obj is currently the first, his id will be returned.
    """
    filters = dict(
        judge__isnull=False,
        label__isnull=False,
        relation=self,
    )
    if judge is not None:
        filters["judge"] = judge
    judge_labels = EvidenceLabel.objects.filter(**filters)
    if isinstance(obj, TextSegment):
        segments = self._matching_text_segments()
        segments = segments.filter(evidence_relations__labels__relation=self)
        candidates_with_label = judge_labels.values_list(
            "evidence_candidate__segment", flat=True)
        segments = segments.filter(id__in=candidates_with_label).distinct()
        ids = list(segments.values_list('id', flat=True).order_by('id'))
    elif isinstance(obj, IEDocument):
        ids = sorted(set(judge_labels.values_list(
            'evidence_candidate__segment__document_id', flat=True)
        ))
    else:
        ids = []
    if not ids:
        return None
    try:
        base_idx = ids.index(obj.id)
    except __HOLE__:
        # the base-object provided is not listed... Returning the base-object
        # Returning last in list
        return ids[-1]
    else:
        if back:
            if base_idx == 0:
                # there is no previous one. Returning same.
                return obj.id
            else:
                return ids[base_idx - 1]
        else:
            if base_idx == len(ids) - 1:
                # there is no next one. Returning same.
                return obj.id
            else:
                return ids[base_idx + 1]
ValueError
dataset/ETHPy150Open machinalis/iepy/iepy/data/models.py/Relation.labeled_neighbor
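The prev/next lookup above hinges on list.index raising ValueError when the id is absent; a minimal sketch of that neighbor-selection logic, detached from the ORM:

def neighbor(ids, current, back=False):
    """Return the previous/next id, clamping at the ends."""
    try:
        i = ids.index(current)
    except ValueError:
        # current is not in the list; fall back to the last known id
        return ids[-1] if ids else None
    if back:
        return ids[i - 1] if i > 0 else current
    return ids[i + 1] if i < len(ids) - 1 else current

assert neighbor([1, 4, 9], 4) == 9
assert neighbor([1, 4, 9], 1, back=True) == 1
assert neighbor([1, 4, 9], 7) == 9   # unknown id -> last in list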
def get_next_segment_to_label(self, judge): # We'll first pick those Segments that already have questions created but # with an empty answer (label=None). After finishing those, we'll look for # Segments never considered (i.e., that don't have any question created). # Finally, those with answers in place, but where some answers are # "ASK-ME-LATER". segments = self._matching_text_segments().order_by('id') never_considered_segm = segments.exclude(evidence_relations__labels__relation=self) evidences = EvidenceCandidate.objects.filter( labels__relation=self ).order_by('segment_id') never_considered_ev = evidences.filter(labels__isnull=True) existent_labels = EvidenceLabel.objects.filter( evidence_candidate__in=evidences, labeled_by_machine=False ).order_by('evidence_candidate__segment_id') none_labels = existent_labels.filter(label__isnull=True) own_none_labels = none_labels.filter(judge=judge) # requires re-answer if there's no good answer at all (not just for this judge) NOT_NEED_RELABEL = [k for k, name in EvidenceLabel.LABEL_CHOICES if k not in EvidenceLabel.NEED_RELABEL] to_re_answer = evidences.exclude(labels__label__in=NOT_NEED_RELABEL) for qset in [own_none_labels, never_considered_ev, never_considered_segm, to_re_answer, none_labels]: try: obj = qset[0] except __HOLE__: pass else: if isinstance(obj, TextSegment): return obj elif isinstance(obj, EvidenceCandidate): return obj.segment elif isinstance(obj, EvidenceLabel): return obj.evidence_candidate.segment else: raise ValueError return None
IndexError
dataset/ETHPy150Open machinalis/iepy/iepy/data/models.py/Relation.get_next_segment_to_label
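Indexing a Django queryset as qset[0] raises IndexError when it is empty, which the method above uses to fall through a priority list of candidate querysets. The same "first non-empty source wins" shape, sketched with plain sequences:

def first_available(*sources):
    """Return the first element of the first non-empty source."""
    for source in sources:
        try:
            return source[0]
        except IndexError:
            continue  # this source is empty; try the next one
    return None

assert first_available([], [], ['segment-7'], ['segment-1']) == 'segment-7'
assert first_available([], []) is None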
def get_commits_by_date(self, d): if str(d) in self.symbol_map: return self.symbol_map[d] else: try: rtn = self.commit_log[str(d)] except __HOLE__: rtn = 'empty' else: if rtn > 9: rtn = 'more' return self.symbol_map[str(rtn)]
KeyError
dataset/ETHPy150Open littleq0903/git-calendar/git_calendar/utils.py/GitCalendar.get_commits_by_date
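A condensed sketch of the lookup-with-fallback idiom above: a missing date maps to a default symbol via KeyError, and counts above nine are clamped into a single bucket before the second lookup. The symbol values are invented.

SYMBOLS = {str(n): '*' for n in range(1, 10)}
SYMBOLS.update({'empty': ' ', 'more': '#'})

def symbol_for(commit_log, day):
    try:
        count = commit_log[day]
    except KeyError:
        return SYMBOLS['empty']   # no commits recorded for that day
    if count > 9:
        count = 'more'            # clamp busy days to a single bucket
    return SYMBOLS[str(count)]

assert symbol_for({'2015-01-02': 4}, '2015-01-02') == '*'
assert symbol_for({}, '2015-01-03') == ' '
assert symbol_for({'2015-01-04': 12}, '2015-01-04') == '#'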
def fix_call(callable, *args, **kw): """ Call ``callable(*args, **kw)`` fixing any type errors that come out. """ try: val = callable(*args, **kw) except __HOLE__: exc_info = fix_type_error(None, callable, args, kw) reraise(*exc_info) return val
TypeError
dataset/ETHPy150Open galaxyproject/pulsar/pulsar/util/pastescript/loadwsgi.py/fix_call
def test_class_for_name(self): cls = class_for_name('grizzled.config.Configuration') got_name = '%s.%s' % (cls.__module__, cls.__name__) assert got_name == 'grizzled.config.Configuration' try: class_for_name('grizzled.foo.bar.baz') assert False except NameError: pass except __HOLE__: pass
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/grizzled/grizzled/test/system/Test.py/TestSys.test_class_for_name
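The test above exercises a class_for_name helper; a minimal version, assuming dotted 'package.module.ClassName' paths, shows why ImportError (bad module path) and an attribute failure (bad class name) are the plausible errors:

import importlib

def class_for_name(dotted):
    """Resolve 'package.module.ClassName' to the class object."""
    module_name, _, cls_name = dotted.rpartition('.')
    module = importlib.import_module(module_name)  # may raise ImportError
    return getattr(module, cls_name)               # may raise AttributeError

assert class_for_name('collections.OrderedDict').__name__ == 'OrderedDict'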
def _session_accessed(self, request): try: return request.session.accessed except __HOLE__: return False
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.3/django/middleware/cache.py/UpdateCacheMiddleware._session_accessed
def __init__(self, cache_timeout=None, cache_anonymous_only=None, **kwargs): # We need to differentiate between "provided, but using default value", # and "not provided". If the value is provided using a default, then # we fall back to system defaults. If it is not provided at all, # we need to use middleware defaults. cache_kwargs = {} try: self.key_prefix = kwargs['key_prefix'] if self.key_prefix is not None: cache_kwargs['KEY_PREFIX'] = self.key_prefix else: self.key_prefix = '' except __HOLE__: self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX cache_kwargs['KEY_PREFIX'] = self.key_prefix try: self.cache_alias = kwargs['cache_alias'] if self.cache_alias is None: self.cache_alias = DEFAULT_CACHE_ALIAS if cache_timeout is not None: cache_kwargs['TIMEOUT'] = cache_timeout except KeyError: self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS if cache_timeout is None: cache_kwargs['TIMEOUT'] = settings.CACHE_MIDDLEWARE_SECONDS else: cache_kwargs['TIMEOUT'] = cache_timeout if cache_anonymous_only is None: self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False) else: self.cache_anonymous_only = cache_anonymous_only self.cache = get_cache(self.cache_alias, **cache_kwargs) self.cache_timeout = self.cache.default_timeout
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.3/django/middleware/cache.py/CacheMiddleware.__init__
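The constructor above leans on a subtle distinction: kwargs['key_prefix'] raising KeyError means "caller said nothing", while an explicit None means "caller asked for the built-in default". A stripped-down sketch of that three-way resolution:

GLOBAL_DEFAULT = 'site'

def resolve_prefix(**kwargs):
    try:
        prefix = kwargs['key_prefix']   # caller passed something (maybe None)
        return prefix if prefix is not None else ''
    except KeyError:
        return GLOBAL_DEFAULT           # caller passed nothing: use settings

assert resolve_prefix(key_prefix='v2') == 'v2'
assert resolve_prefix(key_prefix=None) == ''
assert resolve_prefix() == 'site'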
def __getattr__(self, name): try: return self[name] except __HOLE__: raise AttributeError(name)
KeyError
dataset/ETHPy150Open sashka/flask-googleauth/flask_googleauth.py/ObjectDict.__getattr__
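The three-line class above is the classic attribute-style dict; translating KeyError into AttributeError keeps getattr(obj, name, default) and hasattr() behaving normally. A usable copy, with a symmetric setter added as an extrapolation not present in the original:

class ObjectDict(dict):
    """dict whose keys are also readable/writable as attributes."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            # AttributeError (not KeyError) so hasattr()/getattr() behave.
            raise AttributeError(name)

    def __setattr__(self, name, value):
        # extrapolated setter: attribute writes land in the dict
        self[name] = value

o = ObjectDict(a=1)
assert o.a == 1 and getattr(o, 'b', 'missing') == 'missing'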
def test_start(self): try: # KeyboardInterrupt will be raised after two iterations self.proxy(reset_master_mock=True).start() except __HOLE__: pass master_calls = self.proxy_start_calls([ mock.call.connection.drain_events(timeout=self.de_period), mock.call.connection.drain_events(timeout=self.de_period), mock.call.connection.drain_events(timeout=self.de_period), ], exc_type=KeyboardInterrupt) self.master_mock.assert_has_calls(master_calls)
KeyboardInterrupt
dataset/ETHPy150Open openstack/taskflow/taskflow/tests/unit/worker_based/test_proxy.py/TestProxy.test_start
def test_start_with_on_wait(self): try: # KeyboardInterrupt will be raised after two iterations self.proxy(reset_master_mock=True, on_wait=self.on_wait_mock).start() except __HOLE__: pass master_calls = self.proxy_start_calls([ mock.call.connection.drain_events(timeout=self.de_period), mock.call.on_wait(), mock.call.connection.drain_events(timeout=self.de_period), mock.call.on_wait(), mock.call.connection.drain_events(timeout=self.de_period), ], exc_type=KeyboardInterrupt) self.master_mock.assert_has_calls(master_calls)
KeyboardInterrupt
dataset/ETHPy150Open openstack/taskflow/taskflow/tests/unit/worker_based/test_proxy.py/TestProxy.test_start_with_on_wait
def test_start_with_on_wait_raises(self): self.on_wait_mock.side_effect = RuntimeError('Woot!') try: # KeyboardInterrupt will be raised after two iterations self.proxy(reset_master_mock=True, on_wait=self.on_wait_mock).start() except __HOLE__: pass master_calls = self.proxy_start_calls([ mock.call.connection.drain_events(timeout=self.de_period), mock.call.on_wait(), ], exc_type=RuntimeError) self.master_mock.assert_has_calls(master_calls)
KeyboardInterrupt
dataset/ETHPy150Open openstack/taskflow/taskflow/tests/unit/worker_based/test_proxy.py/TestProxy.test_start_with_on_wait_raises
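All three tests above stop an otherwise endless drain loop by letting a mocked call raise after a few iterations. The same technique in isolation, using unittest.mock's side_effect iterable (the standalone mock package on Python 2); the loop body is illustrative:

from unittest import mock

def run_loop(drain):
    while True:
        drain(timeout=1)

drain = mock.Mock(side_effect=[None, None, KeyboardInterrupt])
try:
    run_loop(drain)
except KeyboardInterrupt:
    pass
assert drain.call_count == 3      # third call raised, ending the loop
drain.assert_called_with(timeout=1)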
def _get_doc_by_id(self, namespace, aliases_dict): try: nid = aliases_dict[namespace][0] kwargs = {namespace:nid, "view":'stats'} doc = self.session.catalog.by_identifier(**kwargs) except (__HOLE__, MendeleyException): doc = None return doc
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/mendeley.py/Mendeley._get_doc_by_id
def _get_doc_by_title(self, aliases_dict): try: biblio = aliases_dict["biblio"][0] biblio_title = self.remove_punctuation(biblio["title"]).lower() biblio_year = str(biblio["year"]) if biblio_title and biblio_year: doc = self.session.catalog.advanced_search( title=biblio_title, min_year=biblio_year, max_year=biblio_year, view='stats').list(page_size=1).items[0] mendeley_title = self.remove_punctuation(doc.title).lower() if biblio_title != mendeley_title: logger.debug(u"Mendeley: titles don't match so not using this match /biblio_print %s and %s" %( biblio_title, mendeley_title)) doc = None except (__HOLE__, MendeleyException): doc = None return doc
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/mendeley.py/Mendeley._get_doc_by_title
def metrics(self, aliases, provider_url_template=None, # ignore this because multiple url steps cache_enabled=True): metrics_and_drilldown = {} doc = self._get_doc(aliases) if doc: try: drilldown_url = doc.link metrics_and_drilldown["mendeley:readers"] = (doc.reader_count, drilldown_url) metrics_and_drilldown["mendeley:career_stage"] = (doc.reader_count_by_academic_status, drilldown_url) by_discipline = {} by_subdiscipline = doc.reader_count_by_subdiscipline for discipline, subdiscipline_breakdown in by_subdiscipline.iteritems(): by_discipline[discipline] = sum(subdiscipline_breakdown.values()) metrics_and_drilldown["mendeley:discipline"] = (by_discipline, drilldown_url) by_country_iso = {} by_country_names = doc.reader_count_by_country if by_country_names: for country_name, country_breakdown in by_country_names.iteritems(): if country_name in country_iso_by_name: iso = country_iso_by_name[country_name] by_country_iso[iso] = country_breakdown else: logger.error(u"Can't find country {country} in lookup".format( country=country_name)) if by_country_iso: metrics_and_drilldown["mendeley:countries"] = (by_country_iso, drilldown_url) except __HOLE__: pass return metrics_and_drilldown
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/mendeley.py/Mendeley.metrics
def main(): version = pkg_resources.require("octohatrack")[0].version parser = argparse.ArgumentParser() parser.add_argument("repo_name", help="githubuser/repo") parser.add_argument("-l", "--limit", help="Limit to the last x Issues/Pull Requests", type=int, default=0) parser.add_argument("--no-cache", action='store_false', help='Disable local caching of API results') parser.add_argument("-w", "--wiki", action='store_true', help="Experimental: Show wiki contributions, if available") parser.add_argument("-v", "--version", action='version', version="octohatrack version %s" % version) # Deprecated parser.add_argument("-c", "--show-contributors", action='store_true', help="DEPRECATED - Output the code contributors") parser.add_argument("-n", "--show-names", action='store_true', help="DEPRECATED - Show the user's display name") parser.add_argument("-g", "--generate-html", action='store_true', help="DEPRECATED - Generate output as HTML") args = parser.parse_args() if args.show_contributors: print("The --show-contributors (-c) flag is deprecated. Ignoring.") if args.show_names: print("The --show-names (-n) flag is deprecated. Ignoring.") if args.generate_html: print("The --generate-html (-g) flag is deprecated. Ignoring.") repo_name = args.repo_name try: if not repo_exists(repo_name): print("Repo does not exist: %s" % repo_name) sys.exit(1) code_contributors = get_code_contributors(repo_name) code_commentors = get_code_commentors(repo_name, args.limit) except __HOLE__ as e: print(e) sys.exit(1) non_code_contributors = consolidate(code_contributors, code_commentors) if args.wiki: wiki_contributors = get_wiki_contributors(repo_name, code_contributors, non_code_contributors) display_users(code_contributors, "Code contributors") display_users(non_code_contributors, "Non-coding contributors") if args.wiki: display_users(wiki_contributors, "Wiki contributors")
ValueError
dataset/ETHPy150Open LABHR/octohatrack/octohatrack/__init__.py/main
def render(self, name, value, attrs=None): output = [] if value and getattr(value, "url", None): # defining the size size = '200x200' x, y = [int(x) for x in size.split('x')] try: # defining the filename and the miniature filename filehead, filetail = os.path.split(value.path) basename, format = os.path.splitext(filetail) miniature = basename + '_' + size + format filename = value.path miniature_filename = os.path.join(filehead, miniature) filehead, filetail = os.path.split(value.url) miniature_url = filehead + '/' + miniature # make sure that the thumbnail is a version of the current original sized image if os.path.exists(miniature_filename) and os.path.getmtime(filename) > os.path.getmtime(miniature_filename): os.unlink(miniature_filename) # if the image wasn't already resized, resize it if not os.path.exists(miniature_filename): image = Image.open(filename) image.thumbnail([x, y], Image.ANTIALIAS) try: image.save(miniature_filename, image.format, quality=100, optimize=1) except: image.save(miniature_filename, image.format, quality=100) output.append(u' <div><a href="%s" target="_blank"><img src="%s" alt="%s" /></a></div>' % (miniature_url, miniature_url, miniature_filename)) except __HOLE__: pass output.append(super(AdminFileWidget, self).render(name, value, attrs)) return mark_safe(u''.join(output))
IOError
dataset/ETHPy150Open jtuz/django-events-calendar/events/widgets.py/AdminImageWidget.render
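The widget above regenerates a thumbnail whenever the original is newer; a compact sketch of that regenerate-if-stale logic with Pillow, where IOError (an alias of OSError on Python 3, and a parent of Pillow's UnidentifiedImageError) covers unreadable or non-image files. Paths and size are illustrative, and LANCZOS stands in for the ANTIALIAS constant used in the original, which newer Pillow removed:

import os
from PIL import Image

def thumbnail_path(path, size=(200, 200)):
    head, tail = os.path.split(path)
    base, ext = os.path.splitext(tail)
    thumb = os.path.join(head, '%s_%dx%d%s' % (base, size[0], size[1], ext))
    try:
        if (not os.path.exists(thumb)
                or os.path.getmtime(path) > os.path.getmtime(thumb)):
            image = Image.open(path)
            image.thumbnail(size, Image.LANCZOS)
            image.save(thumb)
        return thumb
    except IOError:
        return None   # unreadable or not an image: show no preview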
def get_google_access_token(email): # TODO: This should be cacheable try: me = UserSocialAuth.objects.get(uid=email, provider="google-oauth2") return me.extra_data['access_token'] except (UserSocialAuth.DoesNotExist, __HOLE__): raise AccessTokenNotFound
KeyError
dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/google_utils.py/get_google_access_token
def update_google_extra_data(email, extra_data): try: me = UserSocialAuth.objects.get(uid=email, provider="google-oauth2") me.extra_data = extra_data me.save() except (UserSocialAuth.DoesNotExist, __HOLE__): raise AccessTokenNotFound
KeyError
dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/google_utils.py/update_google_extra_data
def get_google_refresh_token(email): try: me = UserSocialAuth.objects.get(uid=email, provider="google-oauth2") return me.extra_data['refresh_token'] except (UserSocialAuth.DoesNotExist, __HOLE__): raise RefreshTokenNotFound
KeyError
dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/google_utils.py/get_google_refresh_token
def google_api_get(email, url): headers = dict( Authorization="Bearer %s" % get_google_access_token(email), ) r = requests.get(url, headers=headers) logger.info("I got a %s", r.status_code) if r.status_code == 401: # Go use the refresh token, then rebuild the Authorization header # before retrying (the stale token would just 401 again) refresh_authorization(email) headers['Authorization'] = "Bearer %s" % get_google_access_token(email) r = requests.get(url, headers=headers) logger.info("I got a %s", r.status_code) if r.status_code == 200: try: return r.json() except __HOLE__: return r.text
ValueError
dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/google_utils.py/google_api_get
def google_api_post(email, url, post_data, authorized=True): # TODO: Make this a lot less ugly, especially the 401 handling headers = dict() if authorized is True: headers.update(dict( Authorization="Bearer %s" % get_google_access_token(email), )) r = requests.post(url, headers=headers, data=post_data) if r.status_code == 401: # Refresh, then rebuild the Authorization header before retrying refresh_authorization(email) if authorized is True: headers['Authorization'] = "Bearer %s" % get_google_access_token(email) r = requests.post(url, headers=headers, data=post_data) if r.status_code == 200: try: return r.json() except __HOLE__: return r.text
ValueError
dataset/ETHPy150Open coddingtonbear/django-mailbox/django_mailbox/google_utils.py/google_api_post
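Both helpers above end with the same idiom: requests' Response.json() raises ValueError on a non-JSON body, so the helpers fall back to raw text. Isolated below with a fake response object, so no network or requests install is needed:

def body_of(response):
    """Return parsed JSON if the body is JSON, else the raw text."""
    try:
        return response.json()
    except ValueError:
        return response.text

class FakeResponse(object):
    text = 'not json'
    def json(self):
        raise ValueError('No JSON object could be decoded')

assert body_of(FakeResponse()) == 'not json'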
def NormalizeAndTypeCheck(arg, types): """Normalizes and type checks the given argument. Args: arg: an instance, tuple, list, iterator, or generator of the given type(s) types: allowed type or tuple of types Returns: A (list, bool) tuple. The list is a normalized, shallow copy of the argument. The boolean is True if the argument was a sequence, False if it was a single object. Raises: AssertionError: types includes list or tuple. BadArgumentError: arg is not an instance or sequence of one of the given types. """ if not isinstance(types, (list, tuple)): types = (types,) assert list not in types and tuple not in types if isinstance(arg, types): return ([arg], False) else: try: for val in arg: if not isinstance(val, types): raise datastore_errors.BadArgumentError( 'Expected one of %s; received %s (a %s).' % (types, val, typename(val))) except __HOLE__: raise datastore_errors.BadArgumentError( 'Expected an instance or sequence of %s; received %s (a %s).' % (types, arg, typename(arg))) return (list(arg), True)
TypeError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/datastore.py/NormalizeAndTypeCheck
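The helper above normalizes "one value or a sequence of values" arguments; iterating a non-iterable raises TypeError, which gets converted into the API's own error type. A dependency-free sketch of the same shape:

def normalize(arg, types):
    """Return (list_of_values, was_sequence) or raise TypeError."""
    if isinstance(arg, types):
        return [arg], False
    try:
        values = list(arg)            # TypeError if arg is not iterable
    except TypeError:
        raise TypeError('expected %r or a sequence thereof, got %r'
                        % (types, arg))
    for v in values:
        if not isinstance(v, types):
            raise TypeError('bad element %r' % (v,))
    return values, True

assert normalize(3, int) == ([3], False)
assert normalize((1, 2), int) == ([1, 2], True)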
@staticmethod def _FromPb(pb, require_valid_key=True): """Static factory method. Returns the Entity representation of the given protocol buffer (datastore_pb.Entity). Not intended to be used by application developers. The Entity PB's key must be complete. If it isn't, an AssertionError is raised. Args: # a protocol buffer Entity pb: datastore_pb.Entity Returns: # the Entity representation of the argument Entity """ assert pb.key().path().element_size() > 0 last_path = pb.key().path().element_list()[-1] if require_valid_key: assert last_path.has_id() ^ last_path.has_name() if last_path.has_id(): assert last_path.id() != 0 else: assert last_path.has_name() assert last_path.name() unindexed_properties = [p.name() for p in pb.raw_property_list()] e = Entity(unicode(last_path.type().decode('utf-8')), unindexed_properties=unindexed_properties, _app=pb.key().app()) ref = e.__key._Key__reference ref.CopyFrom(pb.key()) temporary_values = {} for prop_list in (pb.property_list(), pb.raw_property_list()): for prop in prop_list: try: value = datastore_types.FromPropertyPb(prop) except (AssertionError, AttributeError, TypeError, __HOLE__), e: raise datastore_errors.Error( 'Property %s is corrupt in the datastore. %s: %s' % (prop.name(), e.__class__, e)) multiple = prop.multiple() if multiple: value = [value] name = prop.name() cur_value = temporary_values.get(name) if cur_value is None: temporary_values[name] = value elif not multiple: raise datastore_errors.Error( 'Property %s is corrupt in the datastore; it has multiple ' 'values, but is not marked as multiply valued.' % name) else: cur_value.extend(value) for name, value in temporary_values.iteritems(): decoded_name = unicode(name.decode('utf-8')) datastore_types.ValidateReadProperty(decoded_name, value) dict.__setitem__(e, decoded_name, value) return e
ValueError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/datastore.py/Entity._FromPb
def GetCompiledQuery(self): try: return self.__compiled_query except __HOLE__: raise AssertionError('No cursor available, either this query has not ' 'been executed or there is no compilation ' 'available for this kind of query')
AttributeError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/datastore.py/Query.GetCompiledQuery
def _CheckFilter(self, filter, values): """Type check a filter string and list of values. Raises BadFilterError if the filter string is empty, not a string, or invalid. Raises BadValueError if the value type is not supported. Args: filter: String containing the filter text. values: List of associated filter values. Returns: re.MatchObject (never None) that matches the 'filter'. Group 1 is the property name, group 3 is the operator. (Group 2 is unused.) """ try: match = Query.FILTER_REGEX.match(filter) if not match: raise datastore_errors.BadFilterError( 'Could not parse filter string: %s' % str(filter)) except __HOLE__: raise datastore_errors.BadFilterError( 'Could not parse filter string: %s' % str(filter)) property = match.group(1) operator = match.group(3) if operator is None: operator = '=' if isinstance(values, tuple): values = list(values) elif not isinstance(values, list): values = [values] if isinstance(values[0], datastore_types._RAW_PROPERTY_TYPES): raise datastore_errors.BadValueError( 'Filtering on %s properties is not supported.' % typename(values[0])) if operator in self.INEQUALITY_OPERATORS: if self.__inequality_prop and property != self.__inequality_prop: raise datastore_errors.BadFilterError( 'Only one property per query may have inequality filters (%s).' % ', '.join(self.INEQUALITY_OPERATORS)) elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property: raise datastore_errors.BadFilterError( 'Inequality operators (%s) must be on the same property as the ' 'first sort order, if any sort orders are supplied' % ', '.join(self.INEQUALITY_OPERATORS)) if (self.__kind is None and property != datastore_types._KEY_SPECIAL_PROPERTY): raise datastore_errors.BadFilterError( 'Only %s filters are allowed on kindless queries.' % datastore_types._KEY_SPECIAL_PROPERTY) if property in datastore_types._SPECIAL_PROPERTIES: if property == datastore_types._KEY_SPECIAL_PROPERTY: for value in values: if not isinstance(value, Key): raise datastore_errors.BadFilterError( '%s filter value must be a Key; received %s (a %s)' % (datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value))) return match
TypeError
dataset/ETHPy150Open CollabQ/CollabQ/.google_appengine/google/appengine/api/datastore.py/Query._CheckFilter
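The try around FILTER_REGEX.match exists because re patterns raise TypeError when handed a non-string; the method folds that into the same "could not parse" error as a failed match. In miniature, with a much simpler pattern than the datastore's:

import re

FILTER_REGEX = re.compile(r'^\s*(\w+)\s*(<=|>=|<|=|>)?\s*$')

def parse_filter(text):
    try:
        match = FILTER_REGEX.match(text)
    except TypeError:
        match = None                  # non-string input: same as no match
    if not match:
        raise ValueError('Could not parse filter string: %s' % (text,))
    return match.group(1), match.group(2)

assert parse_filter('age >=') == ('age', '>=')
assert parse_filter('age') == ('age', None)   # operator defaults upstream to '='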