Dataset columns:
    function — string (lengths 79 to 138k)
    label — string (20 classes)
    info — string (lengths 42 to 261)
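Each record below has the same three fields: `function` holds Python source in which one exception class has been masked with the token `__HOLE__`, `label` names the masked exception, and `info` gives the path of the function in the corpus. As a minimal sketch of how a record could be consumed (the field names come from the schema above; the record contents and helper name here are hypothetical), restoring the masked exception is a single string substitution:

# Minimal sketch, assuming each record is a dict with the three
# schema fields above; this example record is hypothetical.
record = {
    "function": "try:\n    value = mapping[key]\nexcept __HOLE__:\n    value = None",
    "label": "KeyError",
    "info": "dataset/ETHPy150Open example/pkg/module.py/lookup",
}

def restore_exception(record):
    # Substitute the mask token with the labeled exception class.
    return record["function"].replace("__HOLE__", record["label"])

print(restore_exception(record))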
def create_order(self, order):
    """ See more:
    http://developer.oanda.com/rest-live/orders/#createNewOrder
    """
    url = "{0}/{1}/accounts/{2}/orders".format(
        self.domain,
        self.API_VERSION,
        self.account_id
    )
    try:
        return self._Client__call(
            uri=url,
            params=order.__dict__,
            method="post"
        )
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.create_order
def update_order(self, order_id, order):
    """ See more:
    http://developer.oanda.com/rest-live/orders/#modifyExistingOrder
    """
    url = "{0}/{1}/accounts/{2}/orders/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        order_id
    )
    try:
        return self._Client__call(
            uri=url,
            params=order.__dict__,
            method="patch"
        )
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.update_order
def close_order(self, order_id):
    """ See more:
    http://developer.oanda.com/rest-live/orders/#closeOrder
    """
    url = "{0}/{1}/accounts/{2}/orders/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        order_id
    )
    try:
        return self._Client__call(uri=url, method="delete")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.close_order
def get_trades(self, max_id=None, count=None, instrument=None, ids=None):
    """ Get a list of open trades

    Parameters
    ----------
    max_id : int
        The server will return trades with id less than or equal
        to this, in descending order (for pagination)
    count : int
        Maximum number of open trades to return. Default: 50
        Max value: 500
    instrument : str
        Retrieve open trades for a specific instrument only
        Default: all
    ids : list
        A list of trades to retrieve. Maximum number of ids: 50.
        No other parameter may be specified with the ids parameter.

    See more:
    http://developer.oanda.com/rest-live/trades/#getListOpenTrades
    """
    url = "{0}/{1}/accounts/{2}/trades".format(
        self.domain,
        self.API_VERSION,
        self.account_id
    )
    params = {
        "maxId": int(max_id) if max_id and max_id > 0 else None,
        "count": int(count) if count and count > 0 else None,
        "instrument": instrument,
        "ids": ','.join(ids) if ids else None
    }
    try:
        return self._Client__call(uri=url, params=params, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_trades
def get_trade(self, trade_id):
    """ Get information on a specific trade.

    Parameters
    ----------
    trade_id : int
        The id of the trade to get information on.

    See more:
    http://developer.oanda.com/rest-live/trades/#getInformationSpecificTrade
    """
    url = "{0}/{1}/accounts/{2}/trades/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        trade_id
    )
    try:
        return self._Client__call(uri=url, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_trade
def update_trade(
    self,
    trade_id,
    stop_loss=None,
    take_profit=None,
    trailing_stop=None
):
    """ Modify an existing trade.

    Note: Only the specified parameters will be modified. All other
    parameters will remain unchanged. To remove an optional parameter,
    set its value to 0.

    Parameters
    ----------
    trade_id : int
        The id of the trade to modify.
    stop_loss : number
        Stop Loss value.
    take_profit : number
        Take Profit value.
    trailing_stop : number
        Trailing Stop distance in pips, up to one decimal place

    See more:
    http://developer.oanda.com/rest-live/trades/#modifyExistingTrade
    """
    url = "{0}/{1}/accounts/{2}/trades/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        trade_id
    )
    params = {
        "stopLoss": stop_loss,
        "takeProfit": take_profit,
        "trailingStop": trailing_stop
    }
    try:
        return self._Client__call(uri=url, params=params, method="patch")
    except RequestException:
        return False
    except __HOLE__:
        return False
    raise NotImplementedError()
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.update_trade
def close_trade(self, trade_id):
    """ Close an open trade.

    Parameters
    ----------
    trade_id : int
        The id of the trade to close.

    See more:
    http://developer.oanda.com/rest-live/trades/#closeOpenTrade
    """
    url = "{0}/{1}/accounts/{2}/trades/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        trade_id
    )
    try:
        return self._Client__call(uri=url, method="delete")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.close_trade
def get_positions(self):
    """ Get a list of all open positions.

    See more:
    http://developer.oanda.com/rest-live/positions/#getListAllOpenPositions
    """
    url = "{0}/{1}/accounts/{2}/positions".format(
        self.domain,
        self.API_VERSION,
        self.account_id
    )
    try:
        return self._Client__call(uri=url, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_positions
def get_position(self, instrument):
    """ Get the position for an instrument.

    Parameters
    ----------
    instrument : string
        The instrument to get the open position for.

    See more:
    http://developer.oanda.com/rest-live/positions/#getPositionForInstrument
    """
    url = "{0}/{1}/accounts/{2}/positions/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        instrument
    )
    try:
        return self._Client__call(uri=url, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_position
def close_position(self, instrument):
    """ Close an existing position

    Parameters
    ----------
    instrument : string
        The instrument to close the position for.

    See more:
    http://developer.oanda.com/rest-live/positions/#closeExistingPosition
    """
    url = "{0}/{1}/accounts/{2}/positions/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        instrument
    )
    try:
        return self._Client__call(uri=url, method="delete")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.close_position
def get_transactions(
    self, max_id=None, count=None, instrument="all", ids=None
):
    """ Get a list of transactions.

    Parameters
    ----------
    max_id : int
        The server will return transactions with id less than or equal
        to this, in descending order (for pagination).
    count : int
        Maximum number of open transactions to return. Default: 50.
        Max value: 500.
    instrument : str
        Retrieve open transactions for a specific instrument only.
        Default: all.
    ids : list
        A list of transactions to retrieve. Maximum number of ids: 50.
        No other parameter may be specified with the ids parameter.

    See more:
    http://developer.oanda.com/rest-live/transaction-history/#getTransactionHistory
    http://developer.oanda.com/rest-live/transaction-history/#transactionTypes
    """
    url = "{0}/{1}/accounts/{2}/transactions".format(
        self.domain,
        self.API_VERSION,
        self.account_id
    )
    params = {
        "maxId": int(max_id) if max_id and max_id > 0 else None,
        "count": int(count) if count and count > 0 else None,
        "instrument": instrument,
        "ids": ','.join(ids) if ids else None
    }
    try:
        return self._Client__call(uri=url, params=params, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_transactions
def get_transaction(self, transaction_id):
    """ Get information on a specific transaction.

    Parameters
    ----------
    transaction_id : int
        The id of the transaction to get information on.

    See more:
    http://developer.oanda.com/rest-live/transaction-history/#getInformationForTransaction
    http://developer.oanda.com/rest-live/transaction-history/#transactionTypes
    """
    url = "{0}/{1}/accounts/{2}/transactions/{3}".format(
        self.domain,
        self.API_VERSION,
        self.account_id,
        transaction_id
    )
    try:
        return self._Client__call(uri=url, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_transaction
def request_transaction_history(self):
    """ Request full account history.

    Submit a request for a full transaction history. A successfully
    accepted submission results in a response containing a URL in the
    Location header to a file that will be available once the request
    is served. Response for the URL will be HTTP 404 until the file is
    ready. Once served the URL will be valid for a certain amount of time.

    See more:
    http://developer.oanda.com/rest-live/transaction-history/#getFullAccountHistory
    http://developer.oanda.com/rest-live/transaction-history/#transactionTypes
    """
    url = "{0}/{1}/accounts/{2}/alltransactions".format(
        self.domain,
        self.API_VERSION,
        self.account_id
    )
    try:
        resp = self.__get_response(url)
        return resp.headers['location']
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.request_transaction_history
def create_account(self, currency=None):
    """ Create a new account.

    This call is only available on the sandbox system. Please create
    accounts on fxtrade.oanda.com on our production system.

    See more:
    http://developer.oanda.com/rest-sandbox/accounts/#-a-name-createtestaccount-a-create-a-test-account
    """
    url = "{0}/{1}/accounts".format(self.domain, self.API_VERSION)
    params = {"currency": currency}
    try:
        return self._Client__call(uri=url, params=params, method="post")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.create_account
def get_accounts(self, username=None):
    """ Get a list of accounts owned by the user.

    Parameters
    ----------
    username : string
        The name of the user. Note: This is only required on the
        sandbox, on production systems your access token will
        identify you.

    See more:
    http://developer.oanda.com/rest-sandbox/accounts/#-a-name-getaccountsforuser-a-get-accounts-for-a-user
    """
    url = "{0}/{1}/accounts".format(self.domain, self.API_VERSION)
    params = {"username": username}
    try:
        return self._Client__call(uri=url, params=params, method="get")
    except RequestException:
        return False
    except __HOLE__:
        return False
AssertionError
dataset/ETHPy150Open toloco/pyoanda/pyoanda/client.py/Client.get_accounts
@staticmethod
def draw_spectrum_analyzer(all_frames, thresh_frames):
    time.sleep(1)  # Wait just one second
    pw = pg.plot(title="Spectrum Analyzer")  # Window title
    pg.setConfigOptions(antialias=True)  # Enable antialias for better resolution
    pw.win.resize(800, 300)  # Define window size
    pw.win.move(540 * SCREEN_WIDTH / 1920, 500 * SCREEN_HEIGHT / 1080)  # Define window position
    while True:  # Loop over the frames of the audio / data chunks
        data = ''.join(all_frames[-1:])  # Get only the last frame of all frames
        data = numpy.fromstring(data, 'int16')  # Binary string to numpy int16 data format
        pw.setMouseEnabled(y=False)  # Disable mouse
        pw.setYRange(0, 1000)  # Set Y range of graph
        pw.setXRange(-(RATE / 16), (RATE / 16), padding=0)  # Set X range of graph relative to Bit Rate
        pwAxis = pw.getAxis("bottom")  # Get bottom axis
        pwAxis.setLabel("Frequency [Hz]")  # Set bottom axis label
        f, Pxx = HearingPerception.find_frequency(data)  # Call find frequency function
        f = f.tolist()  # Numpy array to list
        Pxx = (numpy.absolute(Pxx)).tolist()  # Numpy array to list
        try:  # Try this block
            if thresh_frames[-1:][0] == EMPTY_CHUNK:  # If last thresh frame is equal to EMPTY CHUNK
                pw.plot(x=f, y=Pxx, clear=True, pen=pg.mkPen('w', width=1.0, style=QtCore.Qt.SolidLine))  # Then plot with white pen
            else:  # If last thresh frame is not equal to EMPTY CHUNK
                pw.plot(x=f, y=Pxx, clear=True, pen=pg.mkPen('y', width=1.0, style=QtCore.Qt.SolidLine))  # Then plot with yellow pen
        except __HOLE__:  # If we are getting an IndexError because of this -> thresh_frames[-1:][0]
            pw.plot(x=f, y=Pxx, clear=True, pen=pg.mkPen('w', width=1.0, style=QtCore.Qt.SolidLine))  # Then plot with white pen
        pg.QtGui.QApplication.processEvents()  # ???
        time.sleep(0.05)  # Wait a few miliseconds

# A function that will draw a waveform graphic to screen (PyQtGraph)
IndexError
dataset/ETHPy150Open mertyildiran/Cerebrum/cerebrum/hearing/perception.py/HearingPerception.draw_spectrum_analyzer
def _get_model_from_node(self, node, attr):
    """
    Helper to look up a model from a <object model=...> or a <field
    rel=... to=...> node.
    """
    model_identifier = node.getAttribute(attr)
    if not model_identifier:
        raise base.DeserializationError(
            "<%s> node is missing the required '%s' attribute"
            % (node.nodeName, attr))
    try:
        Model = models.get_model(*model_identifier.split("."))
    except __HOLE__:
        Model = None
    if Model is None:
        raise base.DeserializationError(
            "<%s> node has invalid model identifier: '%s'"
            % (node.nodeName, model_identifier))
    return Model
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/django/core/serializers/xml_serializer.py/Deserializer._get_model_from_node
def __init__(self, name, variable, options=None):
    self.name = name
    self.variable = template.Variable(variable)
    self.options = options or {}

    for name, value in self.options.items():
        try:
            self.options[name] = ast.literal_eval(value)
        except __HOLE__:
            self.options[name] = template.Variable(value)
        except SyntaxError as e:
            raise template.TemplateSyntaxError(e)
ValueError
dataset/ETHPy150Open mher/chartkick.py/chartkick/templatetags/chartkick.py/ChartNode.__init__
def chart(name, parser, token):
    args = token.split_contents()
    if len(args) < 2:
        raise template.TemplateSyntaxError(
            '%r statement requires at least one argument' %
            token.split_contents()[0])

    options = None
    if len(args) > 2:
        if args[2] != 'with':
            raise template.TemplateSyntaxError("Expected 'with' statement")
        try:
            options = parse_options(' '.join(args[3:]))
        except __HOLE__:
            raise template.TemplateSyntaxError('Invalid options')

    return ChartNode(name=name, variable=args[1], options=options)
ValueError
dataset/ETHPy150Open mher/chartkick.py/chartkick/templatetags/chartkick.py/chart
def autorun_commands(cmds, my_globals=None, verb=0):
    sv = conf.verb
    import builtins
    try:
        try:
            if my_globals is None:
                my_globals = __import__("scapy.all").all.__dict__
            conf.verb = verb
            interp = ScapyAutorunInterpreter(my_globals)
            cmd = ""
            cmds = cmds.splitlines()
            cmds.append("")  # ensure we finish multiline commands
            cmds.reverse()
            builtins.__dict__["_"] = None
            while 1:
                if cmd:
                    sys.stderr.write(sys.__dict__.get("ps2", "... "))
                else:
                    sys.stderr.write(str(sys.__dict__.get("ps1", ColorPrompt())))
                l = cmds.pop()
                print(l)
                cmd += "\n" + l
                if interp.runsource(cmd):
                    continue
                if interp.error:
                    return 0
                cmd = ""
                if len(cmds) <= 1:
                    break
        except __HOLE__:
            pass
    finally:
        conf.verb = sv
    return _
SystemExit
dataset/ETHPy150Open phaethon/scapy/scapy/autorun.py/autorun_commands
def _get_account_policy(name):
    '''
    Get the entire accountPolicy and return it as a dictionary. For use by
    this module only

    :param str name: The user name

    :return: a dictionary containing all values for the accountPolicy
    :rtype: dict

    :raises: CommandExecutionError on user not found or any other unknown error
    '''
    cmd = 'pwpolicy -u {0} -getpolicy'.format(name)
    try:
        ret = salt.utils.mac_utils.execute_return_result(cmd)
    except CommandExecutionError as exc:
        if 'Error: user <{0}> not found'.format(name) in exc.strerror:
            raise CommandExecutionError('User not found: {0}'.format(name))
        raise CommandExecutionError('Unknown error: {0}'.format(exc.strerror))

    try:
        policy_list = ret.split('\n')[1].split(' ')
        policy_dict = {}
        for policy in policy_list:
            if '=' in policy:
                key, value = policy.split('=')
                policy_dict[key] = value
        return policy_dict
    except __HOLE__:
        return {}
IndexError
dataset/ETHPy150Open saltstack/salt/salt/modules/mac_shadow.py/_get_account_policy
def _convert_to_datetime(unix_timestamp):
    '''
    Converts a unix timestamp to a human readable date/time

    :param float unix_timestamp: A unix timestamp

    :return: A date/time in the format YYYY-mm-dd HH:MM:SS
    :rtype: str
    '''
    try:
        unix_timestamp = float(unix_timestamp)
        return datetime.fromtimestamp(unix_timestamp).strftime('%Y-%m-%d %H:%M:%S')
    except (ValueError, __HOLE__):
        return 'Invalid Timestamp'
TypeError
dataset/ETHPy150Open saltstack/salt/salt/modules/mac_shadow.py/_convert_to_datetime
def info(name):
    '''
    Return information for the specified user

    :param str name: the username

    :return: A dictionary containing the user's shadow information
    :rtype: dict

    CLI Example:

    .. code-block:: bash

        salt '*' shadow.info admin
    '''
    try:
        data = pwd.getpwnam(name)
        return {'name': data.pw_name,
                'passwd': data.pw_passwd,
                'account_created': get_account_created(name),
                'login_failed_count': get_login_failed_count(name),
                'login_failed_last': get_login_failed_last(name),
                'lstchg': get_last_change(name),
                'max': get_maxdays(name),
                'expire': get_expire(name),
                'change': get_change(name),
                'min': 'Unavailable',
                'warn': 'Unavailable',
                'inact': 'Unavailable'}
    except __HOLE__:
        log.debug('User not found: {0}'.format(name))
        return {'name': '',
                'passwd': '',
                'account_created': '',
                'login_failed_count': '',
                'login_failed_last': '',
                'lstchg': '',
                'max': '',
                'expire': '',
                'change': '',
                'min': '',
                'warn': '',
                'inact': ''}
KeyError
dataset/ETHPy150Open saltstack/salt/salt/modules/mac_shadow.py/info
def get_minion_data(minion, opts):
    '''
    Get the grains/pillar for a specific minion.  If minion is None, it
    will return the grains/pillar for the first minion it finds.

    Return value is a tuple of the minion ID, grains, and pillar
    '''
    if opts.get('minion_data_cache', False):
        serial = salt.payload.Serial(opts)
        cdir = os.path.join(opts['cachedir'], 'minions')
        if not os.path.isdir(cdir):
            return minion if minion else None, None, None
        minions = os.listdir(cdir)
        if minion is None:
            # If no minion specified, take first one with valid grains
            for id_ in minions:
                datap = os.path.join(cdir, id_, 'data.p')
                try:
                    with salt.utils.fopen(datap, 'rb') as fp_:
                        miniondata = serial.load(fp_)
                except (IOError, __HOLE__):
                    continue
                grains = miniondata.get('grains')
                pillar = miniondata.get('pillar')
                return id_, grains, pillar
        else:
            # Search for specific minion
            datap = os.path.join(cdir, minion, 'data.p')
            try:
                with salt.utils.fopen(datap, 'rb') as fp_:
                    miniondata = serial.load(fp_)
            except (IOError, OSError):
                return minion, None, None
            grains = miniondata.get('grains')
            pillar = miniondata.get('pillar')
            return minion, grains, pillar
    # No cache dir, return empty dict
    return minion if minion else None, None, None
OSError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/get_minion_data
def _check_glob_minions(self, expr, greedy):  # pylint: disable=unused-argument
    '''
    Return the minions found by looking via globs
    '''
    pki_dir = os.path.join(self.opts['pki_dir'], self.acc)
    try:
        files = []
        for fn_ in salt.utils.isorted(os.listdir(pki_dir)):
            if not fn_.startswith('.') and os.path.isfile(os.path.join(pki_dir, fn_)):
                files.append(fn_)
        return fnmatch.filter(files, expr)
    except __HOLE__:
        return []
OSError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/CkMinions._check_glob_minions
def _check_pcre_minions(self, expr, greedy):  # pylint: disable=unused-argument
    '''
    Return the minions found by looking via regular expressions
    '''
    try:
        minions = []
        for fn_ in salt.utils.isorted(os.listdir(os.path.join(self.opts['pki_dir'], self.acc))):
            if not fn_.startswith('.') and os.path.isfile(os.path.join(self.opts['pki_dir'], self.acc, fn_)):
                minions.append(fn_)
        reg = re.compile(expr)
        return [m for m in minions if reg.match(m)]
    except __HOLE__:
        return []
OSError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/CkMinions._check_pcre_minions
def _check_ipcidr_minions(self, expr, greedy):
    '''
    Return the minions found by looking via ipcidr
    '''
    cache_enabled = self.opts.get('minion_data_cache', False)

    if greedy:
        mlist = []
        for fn_ in salt.utils.isorted(os.listdir(os.path.join(self.opts['pki_dir'], self.acc))):
            if not fn_.startswith('.') and os.path.isfile(os.path.join(self.opts['pki_dir'], self.acc, fn_)):
                mlist.append(fn_)
        minions = set(mlist)
    elif cache_enabled:
        minions = os.listdir(os.path.join(self.opts['cachedir'], 'minions'))
    else:
        return []

    if cache_enabled:
        cdir = os.path.join(self.opts['cachedir'], 'minions')
        if not os.path.isdir(cdir):
            return list(minions)

        tgt = expr
        try:
            # Target is an address?
            tgt = ipaddress.ip_address(tgt)
        except:  # pylint: disable=bare-except
            try:
                # Target is a network?
                tgt = ipaddress.ip_network(tgt)
            except:  # pylint: disable=bare-except
                log.error('Invalid IP/CIDR target: {0}'.format(tgt))
                return []
        proto = 'ipv{0}'.format(tgt.version)

        for id_ in os.listdir(cdir):
            if not greedy and id_ not in minions:
                continue
            datap = os.path.join(cdir, id_, 'data.p')
            if not os.path.isfile(datap):
                if not greedy and id_ in minions:
                    minions.remove(id_)
                continue
            try:
                with salt.utils.fopen(datap, 'rb') as fp_:
                    grains = self.serial.load(fp_).get('grains')
            except (__HOLE__, OSError):
                continue

            if proto not in grains:
                match = False
            elif isinstance(tgt, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
                match = str(tgt) in grains[proto]
            else:
                match = salt.utils.network.in_subnet(tgt, grains[proto])

            if not match and id_ in minions:
                minions.remove(id_)

    return list(minions)
IOError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/CkMinions._check_ipcidr_minions
def connected_ids(self, subset=None, show_ipv4=False, include_localhost=False):
    '''
    Return a set of all connected minion ids, optionally within a subset
    '''
    minions = set()
    if self.opts.get('minion_data_cache', False):
        cdir = os.path.join(self.opts['cachedir'], 'minions')
        if not os.path.isdir(cdir):
            return minions
        addrs = salt.utils.network.local_port_tcp(int(self.opts['publish_port']))
        if '127.0.0.1' in addrs or '0.0.0.0' in addrs:
            # Add in possible ip addresses of a locally connected minion
            addrs.discard('127.0.0.1')
            addrs.discard('0.0.0.0')
            addrs.update(set(salt.utils.network.ip_addrs()))
        if subset:
            search = subset
        else:
            search = os.listdir(cdir)
        for id_ in search:
            datap = os.path.join(cdir, id_, 'data.p')
            try:
                with salt.utils.fopen(datap, 'rb') as fp_:
                    grains = self.serial.load(fp_).get('grains', {})
            except (AttributeError, IOError, __HOLE__):
                continue
            for ipv4 in grains.get('ipv4', []):
                if ipv4 == '127.0.0.1' and not include_localhost:
                    continue
                if ipv4 == '0.0.0.0':
                    continue
                if ipv4 in addrs:
                    if show_ipv4:
                        minions.add((id_, ipv4))
                    else:
                        minions.add(id_)
                    break
    return minions
OSError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/CkMinions.connected_ids
def auth_check_expanded(self,
                        auth_list,
                        funs,
                        args,
                        tgt,
                        tgt_type='glob',
                        groups=None,
                        publish_validate=False):

    # Here's my thinking
    # 1. Retrieve anticipated targeted minions
    # 2. Iterate through each entry in the auth_list
    # 3. If it is a minion_id, check to see if any targeted minions match.
    #    If there is a match, check to make sure funs are permitted
    #    (if it's not a match we don't care about this auth entry and can
    #     move on)
    #    a. If funs are permitted, Add this minion_id to a new set of allowed minion_ids
    #       If funs are NOT permitted, can short-circuit and return FALSE
    #    b. At the end of the auth_list loop, make sure all targeted IDs
    #       are in the set of allowed minion_ids.  If not, return FALSE
    # 4. If it is a target (glob, pillar, etc), retrieve matching minions
    #    and make sure that ALL targeted minions are in the set.
    #    then check to see if the funs are permitted
    #    a. If ALL targeted minions are not in the set, then return FALSE
    #    b. If the desired fun doesn't mass the auth check with any
    #       auth_entry's fun, then return FALSE

    # NOTE we are not going to try to allow functions to run on partial
    # sets of minions.  If a user targets a group of minions and does not
    # have access to run a job on ALL of these minions then the job will
    # fail with 'Eauth Failed'.

    # The recommended workflow in that case will be for the user to narrow
    # his target.

    # This should cover adding the AD LDAP lookup functionality while
    # preserving the existing auth behavior.

    # Recommend we config-get this behind an entry called
    # auth.enable_expanded_auth_matching
    # and default to False
    v_tgt_type = tgt_type
    if tgt_type.lower() in ('pillar', 'pillar_pcre'):
        v_tgt_type = 'pillar_exact'
    elif tgt_type.lower() == 'compound':
        v_tgt_type = 'compound_pillar_exact'
    v_minions = set(self.check_minions(tgt, v_tgt_type))
    minions = set(self.check_minions(tgt, tgt_type))
    mismatch = bool(minions.difference(v_minions))
    # If the non-exact match gets more minions than the exact match
    # then pillar globbing or PCRE is being used, and we have a
    # problem
    if publish_validate:
        if mismatch:
            return False
    # compound commands will come in a list so treat everything as a list
    if not isinstance(funs, list):
        funs = [funs]
        args = [args]

    # Take the auth list and get all the minion names inside it
    allowed_minions = set()

    auth_dictionary = {}

    # Make a set, so we are guaranteed to have only one of each minion
    # Also iterate through the entire auth_list and create a dictionary
    # so it's easy to look up what functions are permitted
    for auth_list_entry in auth_list:
        if isinstance(auth_list_entry, six.string_types):
            for fun in funs:
                # represents toplevel auth entry is a function.
                # so this fn is permitted by all minions
                if self.match_check(auth_list_entry, fun):
                    return True
        if isinstance(auth_list_entry, dict):
            if len(auth_list_entry) != 1:
                log.info('Malformed ACL: {0}'.format(auth_list_entry))
                continue
        allowed_minions.update(set(auth_list_entry.keys()))
        for key in auth_list_entry.keys():
            for match in self._expand_matching(key):
                if match in auth_dictionary:
                    auth_dictionary[match].extend(auth_list_entry[key])
                else:
                    auth_dictionary[match] = auth_list_entry[key]

    allowed_minions_from_auth_list = set()
    for next_entry in allowed_minions:
        allowed_minions_from_auth_list.update(self._expand_matching(next_entry))
    # 'minions' here are all the names of minions matched by the target
    # if we take out all the allowed minions, and there are any left, then
    # the target includes minions that are not allowed by eauth
    # so we can give up here.
    if len(minions - allowed_minions_from_auth_list) > 0:
        return False

    try:
        for minion in minions:
            results = []
            for num, fun in enumerate(auth_dictionary[minion]):
                results.append(self.match_check(fun, funs))
            if not any(results):
                return False
        return True
    except __HOLE__:
        return False
    return False
TypeError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/CkMinions.auth_check_expanded
def auth_check(self,
               auth_list,
               funs,
               args,
               tgt,
               tgt_type='glob',
               groups=None,
               publish_validate=False):
    '''
    Returns a bool which defines if the requested function is authorized.
    Used to evaluate the standard structure under external master
    authentication interfaces, like eauth, peer, peer_run, etc.
    '''
    if self.opts.get('auth.enable_expanded_auth_matching', False):
        return self.auth_check_expanded(auth_list, funs, args, tgt, tgt_type, groups, publish_validate)
    if publish_validate:
        v_tgt_type = tgt_type
        if tgt_type.lower() in ('pillar', 'pillar_pcre'):
            v_tgt_type = 'pillar_exact'
        elif tgt_type.lower() == 'compound':
            v_tgt_type = 'compound_pillar_exact'
        v_minions = set(self.check_minions(tgt, v_tgt_type))
        minions = set(self.check_minions(tgt, tgt_type))
        mismatch = bool(minions.difference(v_minions))
        # If the non-exact match gets more minions than the exact match
        # then pillar globbing or PCRE is being used, and we have a
        # problem
        if mismatch:
            return False
    # compound commands will come in a list so treat everything as a list
    if not isinstance(funs, list):
        funs = [funs]
        args = [args]
    try:
        for num, fun in enumerate(funs):
            for ind in auth_list:
                if isinstance(ind, six.string_types):
                    # Allowed for all minions
                    if self.match_check(ind, fun):
                        return True
                elif isinstance(ind, dict):
                    if len(ind) != 1:
                        # Invalid argument
                        continue
                    valid = next(six.iterkeys(ind))
                    # Check if minions are allowed
                    if self.validate_tgt(
                            valid,
                            tgt,
                            tgt_type):
                        # Minions are allowed, verify function in allowed list
                        if isinstance(ind[valid], six.string_types):
                            if self.match_check(ind[valid], fun):
                                return True
                        elif isinstance(ind[valid], list):
                            for cond in ind[valid]:
                                # Function name match
                                if isinstance(cond, six.string_types):
                                    if self.match_check(cond, fun):
                                        return True
                                # Function and args match
                                elif isinstance(cond, dict):
                                    if len(cond) != 1:
                                        # Invalid argument
                                        continue
                                    fcond = next(six.iterkeys(cond))
                                    # cond: {
                                    #   'mod.func': {
                                    #       'args': [
                                    #           'one.*', 'two\\|three'],
                                    #       'kwargs': {
                                    #           'functioin': 'teach\\|feed',
                                    #           'user': 'mother\\|father'
                                    #           }
                                    #       }
                                    #   }
                                    if self.match_check(fcond, fun):  # check key that is function name match
                                        acond = cond[fcond]
                                        if not isinstance(acond, dict):
                                            # Invalid argument
                                            continue
                                        # whitelist args, kwargs
                                        arg_list = args[num]
                                        cond_args = acond.get('args', [])
                                        good = True
                                        for i, cond_arg in enumerate(cond_args):
                                            if len(arg_list) <= i:
                                                good = False
                                                break
                                            if cond_arg is None:  # None == '.*' i.e. allow any
                                                continue
                                            if not self.match_check(cond_arg, arg_list[i]):
                                                good = False
                                                break
                                        if not good:
                                            continue
                                        # Check kwargs
                                        cond_kwargs = acond.get('kwargs', {})
                                        arg_kwargs = {}
                                        for a in arg_list:
                                            if isinstance(a, dict) and '__kwarg__' in a:
                                                arg_kwargs = a
                                                break
                                        for k, v in six.iteritems(cond_kwargs):
                                            if k not in arg_kwargs:
                                                good = False
                                                break
                                            if v is None:  # None == '.*' i.e. allow any
                                                continue
                                            if not self.match_check(v, arg_kwargs[k]):
                                                good = False
                                                break
                                        if good:
                                            return True
    except __HOLE__:
        return False
    return False
TypeError
dataset/ETHPy150Open saltstack/salt/salt/utils/minions.py/CkMinions.auth_check
def test_add_placeholder(self):
    # create page
    page = create_page("Add Placeholder", "nav_playground.html", "en",
                       position="last-child", published=True, in_navigation=True)
    page.template = 'add_placeholder.html'
    page.save()
    page.publish('en')
    url = page.get_absolute_url()
    response = self.client.get(url)
    self.assertEqual(200, response.status_code)
    try:
        path = os.path.join(settings.TEMPLATE_DIRS[0], 'add_placeholder.html')
    except __HOLE__:
        path = os.path.join(settings.TEMPLATES[0]['DIRS'][0], 'add_placeholder.html')
    with open(path, 'r') as fobj:
        old = fobj.read()
    try:
        new = old.replace(
            '<!-- SECOND_PLACEHOLDER -->',
            '{% placeholder second_placeholder %}'
        )
        with open(path, 'w') as fobj:
            fobj.write(new)
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
    finally:
        with open(path, 'w') as fobj:
            fobj.write(old)
IndexError
dataset/ETHPy150Open divio/django-cms/cms/tests/test_page.py/PagesTestCase.test_add_placeholder
def test_slug_url_overwrite_clash(self):
    """ Tests if a URL-Override clashes with a normal page url """
    with self.settings(CMS_PERMISSION=False):
        create_page('home', 'nav_playground.html', 'en', published=True)
        bar = create_page('bar', 'nav_playground.html', 'en', published=False)
        foo = create_page('foo', 'nav_playground.html', 'en', published=True)
        # Tests to assure is_valid_url is ok on plain pages
        self.assertTrue(is_valid_url(bar.get_absolute_url('en'), bar))
        self.assertTrue(is_valid_url(foo.get_absolute_url('en'), foo))

        # Set url_overwrite for page foo
        title = foo.get_title_obj(language='en')
        title.has_url_overwrite = True
        title.path = '/bar/'
        title.save()
        foo.publish('en')

        try:
            url = is_valid_url(bar.get_absolute_url('en'), bar)
        except __HOLE__:
            url = False
        if url:
            bar.save()
            bar.publish('en')
        self.assertFalse(bar.is_published('en'))
ValidationError
dataset/ETHPy150Open divio/django-cms/cms/tests/test_page.py/PagesTestCase.test_slug_url_overwrite_clash
def check(fn):
    try:
        checker = KivyStyleChecker(fn)
    except __HOLE__:
        # File couldn't be opened, so was deleted apparently.
        # Don't check deleted files.
        return 0
    return checker.check_all()
IOError
dataset/ETHPy150Open kivy/kivy/kivy/tools/pep8checker/pep8kivy.py/check
def hash_or_str(obj):
    try:
        return hash((type(obj).__name__, obj))
    except __HOLE__:
        ## Adds the type name to make sure two object of different type but
        ## identical string representation get distinguished.
        return type(obj).__name__ + str(obj)

##
## All purpose object
##
TypeError
dataset/ETHPy150Open vaab/colour/colour.py/hash_or_str
def __getattr__(self, label):
    if label.startswith("get_"):
        raise AttributeError("'%s' not found" % label)
    try:
        return getattr(self, 'get_' + label)()
    except __HOLE__:
        raise AttributeError("'%s' not found" % label)
AttributeError
dataset/ETHPy150Open vaab/colour/colour.py/Color.__getattr__
def run(self, action='primary', force_update=False, kill_only=False):
    '''
    @action
      One of: primary, secondary
    '''
    self.raise_event(self, EventSource.ON_DART_RUN)

    try:
        view = self.window.active_view()
    except TypeError:
        return

    if force_update or DartSmartRunCommand.last_run_file[0] is None:
        try:
            DartSmartRunCommand.last_run_file = (
                DartFile(view).is_pubspec,
                view.file_name())
        except __HOLE__:
            return

    if DartSmartRunCommand.last_run_file[0]:
        self.window.run_command('dart_run_pubspec', {
            'action': action,
            'file_name': DartSmartRunCommand.last_run_file[1]
        })
        return

    self.window.run_command('dart_run_file', {
        'action': action,
        'file_name': DartSmartRunCommand.last_run_file[1],
        'kill_only': kill_only,
    })

# This class will be instantiated for each view/window, so we need to
# ensure that only one function will be registered as event handler.
# Therefore, we use a function whose id is the same across all instances.
TypeError
dataset/ETHPy150Open guillermooo/dart-sublime-bundle/run.py/DartSmartRunCommand.run
def run(self, file_name=None, action='primary', kill_only=False):
    '''
    @action
      One of: [primary, secondary]

    @kill_only
      If `True`, simply kill any running processes we've started.
    '''
    assert kill_only or file_name, 'wrong call'

    self._cleanup()

    if kill_only:
        self._kill()
        return

    working_dir = None
    try:
        working_dir = os.path.dirname(find_pubspec(file_name))
    except (__HOLE__, AttributeError):
        try:
            if not working_dir:
                working_dir = os.path.dirname(file_name)
        except TypeError as e:
            _logger.debug('cannot run an unsaved file')
            _logger.debug(e)
            return
        except Exception as e:
            _logger.error('programmer error: this exception needs to be handled')
            _logger.error(e)
            return
    except Exception as e:
        _logger.error('programmer error: this exception needs to be handled')
        _logger.error(e)
        return

    dart_view = DartFile.from_path(file_name)

    if dart_view.is_server_app:
        self.run_server_app(file_name, working_dir, action)
        return

    if dart_view.is_web_app:
        self.run_web_app(dart_view, working_dir, action)
        return

    # TODO: improve detection of runnable file (for example, don't attempt
    # to run a part of a library).
    # At this point, we are looking at a file that either:
    #   - is not a .dart or .html file
    #   - is outside of a pub package
    # As a last resort, try to run the file as a script.
    if action != 'primary' or not dart_view.is_dart_file:
        print("Dart: Cannot determine best action for {}".format(
            dart_view.path
        ))
        _logger.debug("cannot determine best run action for %s",
                      dart_view.path)
        return

    self.run_server_app(file_name, working_dir, action)
TypeError
dataset/ETHPy150Open guillermooo/dart-sublime-bundle/run.py/DartRunFileCommand.run
def el_iter(el):
    """
    Go through all elements
    """
    try:
        for child in el.iter():
            yield child
    except __HOLE__:
        # iter isn't available in < python 2.7
        for child in el.getiterator():
            yield child
AttributeError
dataset/ETHPy150Open CenterForOpenScience/pydocx/pydocx/util/xml.py/el_iter
def findroot(ctx, f, x0, solver=Secant, tol=None, verbose=False,
             verify=True, **kwargs):
    r"""
    Find a solution to `f(x) = 0`, using *x0* as starting point or
    interval for *x*.

    Multidimensional overdetermined systems are supported.
    You can specify them using a function or a list of functions.

    If the found root does not satisfy `|f(x)|^2 \leq \mathrm{tol}`,
    an exception is raised (this can be disabled with *verify=False*).

    **Arguments**

    *f*
        one dimensional function
    *x0*
        starting point, several starting points or interval (depends on solver)
    *tol*
        the returned solution has an error smaller than this
    *verbose*
        print additional information for each iteration if true
    *verify*
        verify the solution and raise a ValueError if `|f(x)|^2 > \mathrm{tol}`
    *solver*
        a generator for *f* and *x0* returning approximative solution and error
    *maxsteps*
        after how many steps the solver will cancel
    *df*
        first derivative of *f* (used by some solvers)
    *d2f*
        second derivative of *f* (used by some solvers)
    *multidimensional*
        force multidimensional solving
    *J*
        Jacobian matrix of *f* (used by multidimensional solvers)
    *norm*
        used vector norm (used by multidimensional solvers)

    solver has to be callable with ``(f, x0, **kwargs)`` and return an
    generator yielding pairs of approximative solution and estimated error
    (which is expected to be positive).
    You can use the following string aliases:
    'secant', 'mnewton', 'halley', 'muller', 'illinois', 'pegasus',
    'anderson', 'ridder', 'anewton', 'bisect'

    See mpmath.calculus.optimization for their documentation.

    **Examples**

    The function :func:`~mpmath.findroot` locates a root of a given function
    using the secant method by default. A simple example use of the secant
    method is to compute `\pi` as the root of `\sin x` closest to `x_0 = 3`::

        >>> from mpmath import *
        >>> mp.dps = 30; mp.pretty = True
        >>> findroot(sin, 3)
        3.14159265358979323846264338328

    The secant method can be used to find complex roots of analytic functions,
    although it must in that case generally be given a nonreal starting value
    (or else it will never leave the real line)::

        >>> mp.dps = 15
        >>> findroot(lambda x: x**3 + 2*x + 1, j)
        (0.226698825758202 + 1.46771150871022j)

    A nice application is to compute nontrivial roots of the Riemann zeta
    function with many digits (good initial values are needed for
    convergence)::

        >>> mp.dps = 30
        >>> findroot(zeta, 0.5+14j)
        (0.5 + 14.1347251417346937904572519836j)

    The secant method can also be used as an optimization algorithm, by
    passing it a derivative of a function. The following example locates the
    positive minimum of the gamma function::

        >>> mp.dps = 20
        >>> findroot(lambda x: diff(gamma, x), 1)
        1.4616321449683623413

    Finally, a useful application is to compute inverse functions, such as
    the Lambert W function which is the inverse of `w e^w`, given the first
    term of the solution's asymptotic expansion as the initial value. In
    basic cases, this gives identical results to mpmath's built-in
    ``lambertw`` function::

        >>> def lambert(x):
        ...     return findroot(lambda w: w*exp(w) - x, log(1+x))
        ...
        >>> mp.dps = 15
        >>> lambert(1); lambertw(1)
        0.567143290409784
        0.567143290409784
        >>> lambert(1000); lambert(1000)
        5.2496028524016
        5.2496028524016

    Multidimensional functions are also supported::

        >>> f = [lambda x1, x2: x1**2 + x2,
        ...      lambda x1, x2: 5*x1**2 - 3*x1 + 2*x2 - 3]
        >>> findroot(f, (0, 0))
        [-0.618033988749895]
        [-0.381966011250105]
        >>> findroot(f, (10, 10))
        [ 1.61803398874989]
        [-2.61803398874989]

    You can verify this by solving the system manually.

    Please note that the following (more general) syntax also works::

        >>> def f(x1, x2):
        ...     return x1**2 + x2, 5*x1**2 - 3*x1 + 2*x2 - 3
        ...
        >>> findroot(f, (0, 0))
        [-0.618033988749895]
        [-0.381966011250105]

    **Multiple roots**

    For multiple roots all methods of the Newtonian family (including secant)
    converge slowly. Consider this example::

        >>> f = lambda x: (x - 1)**99
        >>> findroot(f, 0.9, verify=False)
        0.918073542444929

    Even for a very close starting point the secant method converges very
    slowly. Use ``verbose=True`` to illustrate this.

    It is possible to modify Newton's method to make it converge regardless
    of the root's multiplicity::

        >>> findroot(f, -10, solver='mnewton')
        1.0

    This variant uses the first and second derivative of the function, which
    is not very efficient.

    Alternatively you can use an experimental Newtonian solver that keeps
    track of the speed of convergence and accelerates it using Steffensen's
    method if necessary::

        >>> findroot(f, -10, solver='anewton', verbose=True)
        x:     -9.88888888888888888889
        error: 0.111111111111111111111
        converging slowly
        x:     -9.77890011223344556678
        error: 0.10998877665544332211
        converging slowly
        x:     -9.67002233332199662166
        error: 0.108877778911448945119
        converging slowly
        accelerating convergence
        x:     -9.5622443299551077669
        error: 0.107778003366888854764
        converging slowly
        x:     0.99999999999999999214
        error: 10.562244329955107759
        x:     1.0
        error: 7.8598304758094664213e-18
        ZeroDivisionError: canceled with x = 1.0
        1.0

    **Complex roots**

    For complex roots it's recommended to use Muller's method as it converges
    even for real starting points very fast::

        >>> findroot(lambda x: x**4 + x + 1, (0, 1, 2), solver='muller')
        (0.727136084491197 + 0.934099289460529j)

    **Intersection methods**

    When you need to find a root in a known interval, it's highly recommended
    to use an intersection-based solver like ``'anderson'`` or ``'ridder'``.
    Usually they converge faster and more reliable. They have however problems
    with multiple roots and usually need a sign change to find a root::

        >>> findroot(lambda x: x**3, (-1, 1), solver='anderson')
        0.0

    Be careful with symmetric functions::

        >>> findroot(lambda x: x**2, (-1, 1), solver='anderson') #doctest:+ELLIPSIS
        Traceback (most recent call last):
          ...
        ZeroDivisionError

    It fails even for better starting points, because there is no sign
    change::

        >>> findroot(lambda x: x**2, (-1, .5), solver='anderson')
        Traceback (most recent call last):
          ...
        ValueError: Could not find root within given tolerance. (1 > 2.1684e-19)
        Try another starting point or tweak arguments.

    """
    prec = ctx.prec
    try:
        ctx.prec += 20

        # initialize arguments
        if tol is None:
            tol = ctx.eps * 2**10

        kwargs['verbose'] = kwargs.get('verbose', verbose)

        if 'd1f' in kwargs:
            kwargs['df'] = kwargs['d1f']

        kwargs['tol'] = tol
        if isinstance(x0, (list, tuple)):
            x0 = [ctx.convert(x) for x in x0]
        else:
            x0 = [ctx.convert(x0)]

        if isinstance(solver, str):
            try:
                solver = str2solver[solver]
            except KeyError:
                raise ValueError('could not recognize solver')

        # accept list of functions
        if isinstance(f, (list, tuple)):
            f2 = copy(f)
            def tmp(*args):
                return [fn(*args) for fn in f2]
            f = tmp

        # detect multidimensional functions
        try:
            fx = f(*x0)
            multidimensional = isinstance(fx, (list, tuple, ctx.matrix))
        except __HOLE__:
            fx = f(x0[0])
            multidimensional = False
        if 'multidimensional' in kwargs:
            multidimensional = kwargs['multidimensional']
        if multidimensional:
            # only one multidimensional solver available at the moment
            solver = MDNewton
            if not 'norm' in kwargs:
                norm = lambda x: ctx.norm(x, 'inf')
                kwargs['norm'] = norm
            else:
                norm = kwargs['norm']
        else:
            norm = abs

        # happily return starting point if it's a root
        if norm(fx) == 0:
            if multidimensional:
                return ctx.matrix(x0)
            else:
                return x0[0]

        # use solver
        iterations = solver(ctx, f, x0, **kwargs)
        if 'maxsteps' in kwargs:
            maxsteps = kwargs['maxsteps']
        else:
            maxsteps = iterations.maxsteps
        i = 0
        for x, error in iterations:
            if verbose:
                print_('x:    ', x)
                print_('error:', error)
            i += 1
            if error < tol * max(1, norm(x)) or i >= maxsteps:
                break
        if not isinstance(x, (list, tuple, ctx.matrix)):
            xl = [x]
        else:
            xl = x
        if verify and norm(f(*xl))**2 > tol:  # TODO: better condition?
            raise ValueError('Could not find root within given tolerance. '
                             '(%g > %g)\n'
                             'Try another starting point or tweak arguments.'
                             % (norm(f(*xl))**2, tol))
        return x
    finally:
        ctx.prec = prec
TypeError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/calculus/optimization.py/findroot
def _rm_containers(cids):
    rm_containers(cids)
    for c in cids:
        try:
            models.node.delete_eru_instance(c)
        except __HOLE__ as e:
            logging.exception(e)
ValueError
dataset/ETHPy150Open HunanTV/redis-ctl/daemonutils/auto_balance.py/_rm_containers
def dialogRssWatchDBextender(dialog, frame, row, options, cntlr, openFileImage, openDatabaseImage):
    from tkinter import PhotoImage, N, S, E, W
    from tkinter.simpledialog import askstring
    from arelle.CntlrWinTooltip import ToolTip
    from arelle.UiUtil import gridCell, label
    try:
        from tkinter.ttk import Button
    except __HOLE__:
        from ttk import Button

    def enterConnectionString():
        from arelle.DialogUserPassword import askDatabase
        # (user, password, host, port, database)
        db = askDatabase(cntlr.parent, dialog.cellDBconnection.value.split(',') if dialog.cellDBconnection.value else None)
        if db:
            dbConnectionString = ','.join(db)
            dialog.options["xbrlDBconnection"] = dbConnectionString
            dialog.cellDBconnection.setValue(dbConnectionString)
        else:  # deleted
            dialog.options.pop("xbrlDBconnection", "")  # remove entry

    label(frame, 1, row, "DB Connection:")
    dialog.cellDBconnection = gridCell(frame, 2, row, options.get("xbrlDBconnection", ""))
    ToolTip(dialog.cellDBconnection, text=_("Enter an XBRL Database (Postgres) connection string. "
                                            "E.g., host,port,user,password,db[,timeout]. "), wraplength=240)
    enterDBconnectionButton = Button(frame, image=openDatabaseImage, width=12, command=enterConnectionString)
    enterDBconnectionButton.grid(row=row, column=3, sticky=W)
ImportError
dataset/ETHPy150Open Arelle/Arelle/arelle/plugin/xbrlDB/DialogRssWatchExtender.py/dialogRssWatchDBextender
@csrf_exempt_m
def ipn(self, request):
    OGONE = settings.OGONE

    try:
        parameters_repr = repr(request.POST.copy()).encode('utf-8')
        logger.info('IPN: Processing request data %s' % parameters_repr)

        try:
            orderID = request.POST['orderID']
            currency = request.POST['currency']
            amount = request.POST['amount']
            STATUS = request.POST['STATUS']
            PAYID = request.POST['PAYID']
            BRAND = request.POST['BRAND']
            SHASIGN = request.POST['SHASIGN']
        except __HOLE__:
            logger.error('IPN: Missing data in %s' % parameters_repr)
            return HttpResponseForbidden('Missing data')

        value_strings = [
            u'{0}={1}{2}'.format(key.upper(), value, OGONE['SHA1_OUT'])
            for key, value in request.POST.items()
            if value and not key == 'SHASIGN']

        sha1_out = sha1(
            (u''.join(sorted(value_strings))).encode('utf-8')).hexdigest()

        if sha1_out.lower() != SHASIGN.lower():
            logger.error('IPN: Invalid hash in %s' % parameters_repr)
            return HttpResponseForbidden('Hash did not validate')

        try:
            order, order_id, payment_id = orderID.split('-')
        except ValueError:
            logger.error('IPN: Error getting order for %s' % orderID)
            return HttpResponseForbidden('Malformed order ID')

        # Try fetching the order and order payment objects
        # We create a new order payment object in case the old one
        # cannot be found.
        try:
            order = self.shop.order_model.objects.get(pk=order_id)
        except self.shop.order_model.DoesNotExist:
            logger.error('IPN: Order %s does not exist' % order_id)
            return HttpResponseForbidden(
                'Order %s does not exist' % order_id)

        try:
            payment = order.payments.get(pk=payment_id)
        except order.payments.model.DoesNotExist:
            payment = order.payments.model(
                order=order,
                payment_module='%s' % self.name,
            )

        payment.status = OrderPayment.PROCESSED
        payment.currency = currency
        payment.amount = Decimal(amount)
        payment.data = request.POST.copy()
        payment.transaction_id = PAYID
        payment.payment_method = BRAND
        payment.notes = STATUS_DICT.get(STATUS)

        if STATUS in ('5', '9'):
            payment.authorized = timezone.now()
            payment.status = OrderPayment.AUTHORIZED

        payment.save()
        order = order.reload()

        logger.info(
            'IPN: Successfully processed IPN request for %s' % order)

        if payment.authorized and plata.settings.PLATA_STOCK_TRACKING:
            StockTransaction = plata.stock_model()
            self.create_transactions(
                order, _('sale'),
                type=StockTransaction.SALE, negative=True, payment=payment)

        if not order.balance_remaining:
            self.order_paid(order, payment=payment, request=request)

        return HttpResponse('OK')
    except Exception as e:
        logger.error('IPN: Processing failure %s' % e)
        raise
KeyError
dataset/ETHPy150Open matthiask/plata/plata/payment/modules/ogone.py/PaymentProcessor.ipn
def _spawn_n_impl(self, func, args, kwargs, coro):
    try:
        try:
            func(*args, **kwargs)
        except (__HOLE__, SystemExit, greenlet.GreenletExit):
            raise
        except:
            if DEBUG:
                traceback.print_exc()
    finally:
        if coro is None:
            return
        else:
            coro = greenlet.getcurrent()
            self._spawn_done(coro)
KeyboardInterrupt
dataset/ETHPy150Open veegee/guv/guv/greenpool.py/GreenPool._spawn_n_impl
def main():
    current_dir = os.path.dirname(__file__)
    app_name = os.path.basename(current_dir)
    sys.path.insert(0, os.path.join(current_dir, '..'))

    if not settings.configured:
        settings.configure(
            INSTALLED_APPS=('django.contrib.auth', 'django.contrib.contenttypes', app_name),
            DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3'}},
            MIDDLEWARE_CLASSES=global_settings.MIDDLEWARE_CLASSES,  # Prevents Django 1.7 warning.
            ROOT_URLCONF = 'siteblocks.tests',
        )

    try:  # Django 1.7 +
        from django import setup
        setup()
    except __HOLE__:
        pass

    from django.test.utils import get_runner
    runner = get_runner(settings)()
    failures = runner.run_tests((app_name,))
    sys.exit(failures)
ImportError
dataset/ETHPy150Open idlesign/django-siteblocks/siteblocks/runtests.py/main
def _translate_floating_ip_view(floating_ip):
    result = {
        'id': floating_ip['id'],
        'ip': floating_ip['address'],
        'pool': floating_ip['pool'],
    }
    try:
        result['fixed_ip'] = floating_ip['fixed_ip']['address']
    except (TypeError, KeyError, AttributeError):
        result['fixed_ip'] = None
    try:
        result['instance_id'] = floating_ip['fixed_ip']['instance_uuid']
    except (TypeError, KeyError, __HOLE__):
        result['instance_id'] = None
    return {'floating_ip': result}
AttributeError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/floating_ips.py/_translate_floating_ip_view
@extensions.expected_errors((400, 403, 404))
@wsgi.action('addFloatingIp')
@validation.schema(floating_ips.add_floating_ip)
def _add_floating_ip(self, req, id, body):
    """Associate floating_ip to an instance."""
    context = req.environ['nova.context']
    authorize(context)

    address = body['addFloatingIp']['address']

    instance = common.get_instance(self.compute_api, context, id,
                                   expected_attrs=['flavor'])
    cached_nwinfo = compute_utils.get_nw_info_for_instance(instance)
    if not cached_nwinfo:
        LOG.warning(
            _LW('Info cache is %r during associate with no nw_info cache'),
            instance.info_cache, instance=instance)
        msg = _('No nw_info cache associated with instance')
        raise webob.exc.HTTPBadRequest(explanation=msg)

    fixed_ips = cached_nwinfo.fixed_ips()
    if not fixed_ips:
        msg = _('No fixed IPs associated to instance')
        raise webob.exc.HTTPBadRequest(explanation=msg)

    fixed_address = None
    if 'fixed_address' in body['addFloatingIp']:
        fixed_address = body['addFloatingIp']['fixed_address']
        for fixed in fixed_ips:
            if fixed['address'] == fixed_address:
                break
        else:
            msg = _('Specified fixed address not assigned to instance')
            raise webob.exc.HTTPBadRequest(explanation=msg)

    if not fixed_address:
        try:
            fixed_address = next(ip['address'] for ip in fixed_ips
                                 if netaddr.valid_ipv4(ip['address']))
        except __HOLE__:
            msg = _('Unable to associate floating IP %(address)s '
                    'to any fixed IPs for instance %(id)s. '
                    'Instance has no fixed IPv4 addresses to '
                    'associate.') % (
                {'address': address, 'id': id})
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if len(fixed_ips) > 1:
            LOG.warning(_LW('multiple fixed_ips exist, using the first '
                            'IPv4 fixed_ip: %s'), fixed_address)

    try:
        self.network_api.associate_floating_ip(context, instance,
                                               floating_address=address,
                                               fixed_address=fixed_address)
    except exception.FloatingIpAssociated:
        msg = _('floating IP is already associated')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    except exception.NoFloatingIpInterface:
        msg = _('l3driver call to add floating IP failed')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    except exception.InstanceUnknownCell as e:
        raise webob.exc.HTTPNotFound(explanation=e.format_message())
    except exception.FloatingIpNotFoundForAddress:
        msg = _('floating IP not found')
        raise webob.exc.HTTPNotFound(explanation=msg)
    except exception.Forbidden as e:
        raise webob.exc.HTTPForbidden(explanation=e.format_message())
    except Exception as e:
        msg = _('Unable to associate floating IP %(address)s to '
                'fixed IP %(fixed_address)s for instance %(id)s. '
                'Error: %(error)s') % (
            {'address': address, 'fixed_address': fixed_address,
             'id': id, 'error': e})
        LOG.exception(msg)
        raise webob.exc.HTTPBadRequest(explanation=msg)

    return webob.Response(status_int=202)
StopIteration
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/api/openstack/compute/floating_ips.py/FloatingIPActionController._add_floating_ip
def _route_choices():
    # Only type='3' for buses at the moment
    ids = []
    for route in Route.objects.filter(type='3').only('name'):
        try:
            name = int(route.name)
        except __HOLE__:
            name = route.name
        ids.append(name)
    return [(str(x), str(x)) for x in sorted(ids)]
ValueError
dataset/ETHPy150Open shaunduncan/breezeminder/breezeminder/forms/marta.py/_route_choices
def ensure_dir_notexists(path):
    """
    helper function, removes dir if it exists
    :returns: True if dir does not exist after this function
    :raises: OSError if dir exists and removal failed for non-trivial reasons
    """
    try:
        if os.path.exists(path):
            os.rmdir(path)
        return True
    except __HOLE__ as ose:
        # ignore if directory
        if ose.errno not in [errno.ENOENT, errno.ENOTEMPTY, errno.ENOTDIR]:
            return False
OSError
dataset/ETHPy150Open vcstools/vcstools/src/vcstools/common.py/ensure_dir_notexists
def urlopen_netrc(uri, *args, **kwargs):
    '''
    wrapper to urlopen, using netrc on 401 as fallback

    Since this wraps both python2 and python3 urlopen, accepted arguments vary

    :returns: file-like object as urllib.urlopen
    :raises: IOError and urlopen errors
    '''
    try:
        return urlopen(uri, *args, **kwargs)
    except __HOLE__ as ioe:
        if hasattr(ioe, 'code') and ioe.code == 401:
            # 401 means authentication required, we try netrc credentials
            result = _netrc_open(uri)
            if result is not None:
                return result
        raise
IOError
dataset/ETHPy150Open vcstools/vcstools/src/vcstools/common.py/urlopen_netrc
def _netrc_open(uri, filename=None):
    '''
    open uri using netrc credentials.

    :param uri: uri to open
    :param filename: optional, path to non-default netrc config file
    :returns: file-like object from opening a socket to uri, or None
    :raises IOError: if opening .netrc file fails (unless file not found)
    '''
    if not uri:
        return None
    parsed_uri = urlparse(uri)
    machine = parsed_uri.netloc
    if not machine:
        return None
    opener = None
    try:
        info = netrc.netrc(filename).authenticators(machine)
        if info is not None:
            (username, _, password) = info
            if username and password:
                pass_man = HTTPPasswordMgrWithDefaultRealm()
                pass_man.add_password(None, machine, username, password)
                authhandler = HTTPBasicAuthHandler(pass_man)
                opener = build_opener(authhandler)
                return opener.open(uri)
        else:
            # caught below, like other netrc parse errors
            raise netrc.NetrcParseError('No authenticators for "%s"' % machine)
    except __HOLE__ as ioe:
        if ioe.errno != 2:
            # if = 2, User probably has no .netrc, this is not an error
            raise
    except netrc.NetrcParseError as neterr:
        logger = logging.getLogger('vcstools')
        logger.warn('WARNING: parsing .netrc: %s' % str(neterr))
    # we could install_opener() here, but prefer to keep
    # default opening clean. Client can do that, though.
    return None
IOError
dataset/ETHPy150Open vcstools/vcstools/src/vcstools/common.py/_netrc_open
def run_shell_command(cmd, cwd=None, shell=False, us_env=True,
                      show_stdout=False, verbose=False, timeout=None,
                      no_warn=False, no_filter=False):
    """
    executes a command and hides the stdout output, loggs stderr
    output when command result is not zero. Make sure to sanitize
    arguments in the command.

    :param cmd: A string to execute.
    :param shell: Whether to use os shell.
    :param us_env: changes env var LANG before running command, can influence program output
    :param show_stdout: show some of the output (except for discarded lines in _discard_line()), ignored if no_filter
    :param no_warn: hides warnings
    :param verbose: show all output, overrides no_warn, ignored if no_filter
    :param timeout: time allocated to the subprocess
    :param no_filter: does not wrap stdout, so invoked command prints
      everything outside our knowledge; this is DANGEROUS, as vulnerable
      to shell injection.
    :returns: ( returncode, stdout, stderr); stdout is None if no_filter==True
    :raises: VcsError on OSError
    """
    try:
        env = copy.copy(os.environ)
        if us_env:
            env["LANG"] = "en_US.UTF-8"
        if no_filter:
            # in no_filter mode, we cannot pipe stdin, as this
            # causes some prompts to be hidden (e.g. mercurial over
            # http)
            stdout_target = None
            stderr_target = None
        else:
            stdout_target = subprocess.PIPE
            stderr_target = subprocess.PIPE
        # additional parameters to Popen when using a timeout
        crflags = {}
        if timeout is not None:
            if hasattr(os.sys, 'winver'):
                crflags['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
            else:
                crflags['preexec_fn'] = os.setsid
        proc = subprocess.Popen(cmd,
                                shell=shell,
                                cwd=cwd,
                                stdout=stdout_target,
                                stderr=stderr_target,
                                env=env,
                                **crflags)
        # using a queue to enable usage in a separate thread
        q = Queue()
        if timeout is None:
            _read_shell_output(proc, no_filter, verbose, show_stdout, q)
        else:
            t = threading.Thread(target=_read_shell_output,
                                 args=[proc, no_filter, verbose, show_stdout, q])
            t.start()
            t.join(timeout)
            if t.isAlive():
                if hasattr(os.sys, 'winver'):
                    os.kill(proc.pid, signal.CTRL_BREAK_EVENT)
                else:
                    os.killpg(proc.pid, signal.SIGTERM)
                t.join()
        (stdout, stderr) = q.get()
        stdout_buf = q.get()
        stderr_buf = q.get()
        if stdout is not None:
            stdout_buf.append(stdout.decode('utf-8'))
        stdout = "\n".join(stdout_buf)
        if stderr is not None:
            stderr_buf.append(stderr.decode('utf-8'))
        stderr = "\n".join(stderr_buf)
        message = None
        if proc.returncode != 0 and stderr is not None and stderr != '':
            logger = logging.getLogger('vcstools')
            message = "Command failed: '%s'" % (cmd)
            if cwd is not None:
                message += "\n run at: '%s'" % (cwd)
            message += "\n errcode: %s:\n%s" % (proc.returncode, stderr)
            if not no_warn:
                logger.warn(message)
        result = stdout
        if result is not None:
            result = result.rstrip()
        return (proc.returncode, result, message)
    except __HOLE__ as ose:
        logger = logging.getLogger('vcstools')
        message = "Command failed with OSError. '%s' <%s, %s>:\n%s" % (cmd, shell, cwd, ose)
        logger.error(message)
        raise VcsError(message)
OSError
dataset/ETHPy150Open vcstools/vcstools/src/vcstools/common.py/run_shell_command
def list_or_args(keys, args):
    # returns a single list combining keys and args
    try:
        iter(keys)
        # a string can be iterated, but indicates
        # keys wasn't passed as a list
        if isinstance(keys, basestring):
            keys = [keys]
    except __HOLE__:
        keys = [keys]
    if args:
        keys.extend(args)
    return keys
TypeError
dataset/ETHPy150Open zhihu/redis-shard/redis_shard/shard.py/list_or_args
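A small sketch of the normalization pattern above (editorial addition): iter() raises TypeError for non-iterables, which is how a bare scalar key gets wrapped into a one-element list.

def normalize_keys(keys, args):
    # mirrors list_or_args; str stands in for the Python 2 basestring
    try:
        iter(keys)
        if isinstance(keys, str):
            keys = [keys]
    except TypeError:
        keys = [keys]
    if args:
        keys = list(keys) + list(args)
    return keys

assert normalize_keys('k1', ('k2',)) == ['k1', 'k2']
assert normalize_keys(42, None) == [42]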
def __contains__(self, key):
    try:
        self[key]
    except __HOLE__:
        return False
    return True
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/test/utils.py/ContextList.__contains__
def setup_test_template_loader(templates_dict, use_cached_loader=False):
    """
    Changes Django to only find templates from within a dictionary (where each
    key is the template name and each value is the corresponding template
    content to return).

    Use meth:`restore_template_loaders` to restore the original loaders.
    """
    if hasattr(loader, RESTORE_LOADERS_ATTR):
        raise Exception("loader.%s already exists" % RESTORE_LOADERS_ATTR)

    def test_template_loader(template_name, template_dirs=None):
        "A custom template loader that loads templates from a dictionary."
        try:
            return (templates_dict[template_name], "test:%s" % template_name)
        except __HOLE__:
            raise TemplateDoesNotExist(template_name)

    if use_cached_loader:
        template_loader = cached.Loader(('test_template_loader',))
        template_loader._cached_loaders = (test_template_loader,)
    else:
        template_loader = test_template_loader

    setattr(loader, RESTORE_LOADERS_ATTR, loader.template_source_loaders)
    loader.template_source_loaders = (template_loader,)
    return template_loader
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/test/utils.py/setup_test_template_loader
def _sso_location(self, entityid=None, binding=BINDING_HTTP_REDIRECT):
    if entityid:
        # verify that it's in the metadata
        srvs = self.metadata.single_sign_on_service(entityid, binding)
        if srvs:
            return destinations(srvs)[0]
        else:
            logger.info("_sso_location: %s, %s" % (entityid, binding))
            raise IdpUnspecified("No IdP to send to given the premises")

    # get the idp location from the metadata. If there is more than one
    # IdP in the configuration raise exception
    eids = self.metadata.with_descriptor("idpsso")
    if len(eids) > 1:
        raise IdpUnspecified("Too many IdPs to choose from: %s" % eids)
    try:
        srvs = self.metadata.single_sign_on_service(eids.keys()[0], binding)
        return destinations(srvs)[0]
    except __HOLE__:
        raise IdpUnspecified("No IdP to send to given the premises")
IndexError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base._sso_location
def add_vo_information_about_user(self, name_id):
    """ Add information to the knowledge I have about the user. This is
    for Virtual organizations.

    :param name_id: The subject identifier
    :return: A possibly extended knowledge.
    """
    ava = {}
    try:
        (ava, _) = self.users.get_identity(name_id)
    except __HOLE__:
        pass

    # is this a Virtual Organization situation
    if self.vorg:
        if self.vorg.do_aggregation(name_id):
            # Get the extended identity
            ava = self.users.get_identity(name_id)[0]
    return ava

#noinspection PyUnusedLocal
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.add_vo_information_about_user
def create_authn_request(self, destination, vorg="", scoping=None,
                         binding=saml2.BINDING_HTTP_POST,
                         nameid_format=None,
                         service_url_binding=None, message_id=0,
                         consent=None, extensions=None, sign=None,
                         allow_create=False, sign_prepare=False, **kwargs):
    """ Creates an authentication request.

    :param destination: Where the request should be sent.
    :param vorg: The virtual organization the service belongs to.
    :param scoping: The scope of the request
    :param binding: The protocol to use for the Response !!
    :param nameid_format: Format of the NameID
    :param service_url_binding: Where the reply should be sent dependent
        on reply binding.
    :param message_id: The identifier for this request
    :param consent: Whether the principal has given her consent
    :param extensions: Possible extensions
    :param sign: Whether the request should be signed or not.
    :param sign_prepare: Whether the signature should be prepared or not.
    :param allow_create: If the identity provider is allowed, in the course
        of fulfilling the request, to create a new identifier to represent
        the principal.
    :param kwargs: Extra key word arguments
    :return: tuple of request ID and <samlp:AuthnRequest> instance
    """
    client_crt = None
    if "client_crt" in kwargs:
        client_crt = kwargs["client_crt"]

    args = {}
    try:
        args["assertion_consumer_service_url"] = kwargs[
            "assertion_consumer_service_urls"][0]
        del kwargs["assertion_consumer_service_urls"]
    except KeyError:
        try:
            args["assertion_consumer_service_url"] = kwargs[
                "assertion_consumer_service_url"]
            del kwargs["assertion_consumer_service_url"]
        except __HOLE__:
            try:
                args["attribute_consuming_service_index"] = str(kwargs[
                    "attribute_consuming_service_index"])
                del kwargs["attribute_consuming_service_index"]
            except KeyError:
                if service_url_binding is None:
                    service_urls = self.service_urls(binding)
                else:
                    service_urls = self.service_urls(service_url_binding)
                args["assertion_consumer_service_url"] = service_urls[0]

    try:
        args["provider_name"] = kwargs["provider_name"]
    except KeyError:
        if binding == BINDING_PAOS:
            pass
        else:
            args["provider_name"] = self._my_name()

    # Allow argument values either as class instances or as dictionaries
    # all of these have cardinality 0..1
    _msg = AuthnRequest()
    for param in ["scoping", "requested_authn_context", "conditions",
                  "subject", "scoping"]:
        try:
            _item = kwargs[param]
        except KeyError:
            pass
        else:
            del kwargs[param]
            # either class instance or argument dictionary
            if isinstance(_item, _msg.child_class(param)):
                args[param] = _item
            elif isinstance(_item, dict):
                args[param] = RequestedAuthnContext(**_item)
            else:
                raise ValueError("%s or wrong type expected %s" % (
                    _item, param))

    try:
        args["name_id_policy"] = kwargs["name_id_policy"]
        del kwargs["name_id_policy"]
    except KeyError:
        if allow_create:
            allow_create = "true"
        else:
            allow_create = "false"

        if nameid_format == "":
            name_id_policy = None
        else:
            if nameid_format is None:
                nameid_format = self.config.getattr("name_id_format", "sp")

                if nameid_format is None:
                    nameid_format = NAMEID_FORMAT_TRANSIENT
                elif isinstance(nameid_format, list):
                    # NameIDPolicy can only have one format specified
                    nameid_format = nameid_format[0]

            name_id_policy = samlp.NameIDPolicy(allow_create=allow_create,
                                                format=nameid_format)

        if name_id_policy and vorg:
            try:
                name_id_policy.sp_name_qualifier = vorg
                name_id_policy.format = saml.NAMEID_FORMAT_PERSISTENT
            except KeyError:
                pass

        args["name_id_policy"] = name_id_policy

    try:
        nsprefix = kwargs["nsprefix"]
    except KeyError:
        nsprefix = None

    if kwargs:
        _args, extensions = self._filter_args(AuthnRequest(), extensions,
                                              **kwargs)
        args.update(_args)

    try:
        del args["id"]
    except KeyError:
        pass

    if sign is None:
        sign = self.authn_requests_signed

    if (sign and self.sec.cert_handler.generate_cert()) or \
            client_crt is not None:
        with self.lock:
            self.sec.cert_handler.update_cert(True, client_crt)
            if client_crt is not None:
                sign_prepare = True
            return self._message(AuthnRequest, destination, message_id,
                                 consent, extensions, sign, sign_prepare,
                                 protocol_binding=binding,
                                 scoping=scoping, nsprefix=nsprefix, **args)
    return self._message(AuthnRequest, destination, message_id, consent,
                         extensions, sign, sign_prepare,
                         protocol_binding=binding,
                         scoping=scoping, nsprefix=nsprefix, **args)
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.create_authn_request
def create_attribute_query(self, destination, name_id=None,
                           attribute=None, message_id=0, consent=None,
                           extensions=None, sign=False, sign_prepare=False,
                           **kwargs):
    """ Constructs an AttributeQuery

    :param destination: To whom the query should be sent
    :param name_id: The identifier of the subject
    :param attribute: A dictionary of attributes and values that is
        asked for. The key are one of 4 variants:
        3-tuple of name_format, name and friendly_name,
        2-tuple of name_format and name,
        1-tuple with name or
        just the name as a string.
    :param sp_name_qualifier: The unique identifier of the
        service provider or affiliation of providers for whom the
        identifier was generated.
    :param name_qualifier: The unique identifier of the identity
        provider that generated the identifier.
    :param format: The format of the name ID
    :param message_id: The identifier of the session
    :param consent: Whether the principal has given her consent
    :param extensions: Possible extensions
    :param sign: Whether the query should be signed or not.
    :param sign_prepare: Whether the Signature element should be added.
    :return: Tuple of request ID and an AttributeQuery instance
    """
    if name_id is None:
        if "subject_id" in kwargs:
            name_id = saml.NameID(text=kwargs["subject_id"])
            for key in ["sp_name_qualifier", "name_qualifier", "format"]:
                try:
                    setattr(name_id, key, kwargs[key])
                except KeyError:
                    pass
        else:
            raise AttributeError("Missing required parameter")
    elif isinstance(name_id, basestring):
        name_id = saml.NameID(text=name_id)
        for key in ["sp_name_qualifier", "name_qualifier", "format"]:
            try:
                setattr(name_id, key, kwargs[key])
            except KeyError:
                pass

    subject = saml.Subject(name_id=name_id)

    if attribute:
        attribute = do_attributes(attribute)

    try:
        nsprefix = kwargs["nsprefix"]
    except __HOLE__:
        nsprefix = None

    return self._message(AttributeQuery, destination, message_id, consent,
                         extensions, sign, sign_prepare, subject=subject,
                         attribute=attribute, nsprefix=nsprefix)

# MUST use SOAP for
# AssertionIDRequest, SubjectQuery,
# AuthnQuery, AttributeQuery, or AuthzDecisionQuery
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.create_attribute_query
def parse_authn_request_response(self, xmlstr, binding, outstanding=None,
                                 outstanding_certs=None):
    """ Deal with an AuthnResponse

    :param xmlstr: The reply as a xml string
    :param binding: Which binding that was used for the transport
    :param outstanding: A dictionary with session IDs as keys and
        the original web request from the user before redirection
        as values.
    :return: An response.AuthnResponse or None
    """
    try:
        _ = self.config.entityid
    except __HOLE__:
        raise SAMLError("Missing entity_id specification")

    resp = None
    if xmlstr:
        kwargs = {
            "outstanding_queries": outstanding,
            "outstanding_certs": outstanding_certs,
            "allow_unsolicited": self.allow_unsolicited,
            "want_assertions_signed": self.want_assertions_signed,
            "want_response_signed": self.want_response_signed,
            "return_addrs": self.service_urls(binding=binding),
            "entity_id": self.config.entityid,
            "attribute_converters": self.config.attribute_converters,
            "allow_unknown_attributes":
                self.config.allow_unknown_attributes,
        }
        try:
            resp = self._parse_response(xmlstr, AuthnResponse,
                                        "assertion_consumer_service",
                                        binding, **kwargs)
        except StatusError as err:
            logger.error("SAML status error: %s" % err)
            raise
        except UnravelError:
            return None
        except Exception as err:
            logger.error("XML parse error: %s" % err)
            raise

        #logger.debug(">> %s", resp)

        if resp is None:
            return None
        elif isinstance(resp, AuthnResponse):
            self.users.add_information_about_person(resp.session_info())
            logger.info("--- ADDED person info ----")
        else:
            logger.error("Response type not supported: %s" % (
                saml2.class_name(resp),))
    return resp

# ------------------------------------------------------------------------
# SubjectQuery, AuthnQuery, RequestedAuthnContext, AttributeQuery,
# AuthzDecisionQuery all get Response as response
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.parse_authn_request_response
def create_ecp_authn_request(self, entityid=None, relay_state="",
                             sign=False, **kwargs):
    """ Makes an authentication request.

    :param entityid: The entity ID of the IdP to send the request to
    :param relay_state: A token that can be used by the SP to know
        where to continue the conversation with the client
    :param sign: Whether the request should be signed or not.
    :return: SOAP message with the AuthnRequest
    """

    # ----------------------------------------
    # <paos:Request>
    # ----------------------------------------
    my_url = self.service_urls(BINDING_PAOS)[0]

    # must_understand and act according to the standard
    #
    paos_request = paos.Request(must_understand="1", actor=ACTOR,
                                response_consumer_url=my_url,
                                service=ECP_SERVICE)

    # ----------------------------------------
    # <ecp:RelayState>
    # ----------------------------------------
    relay_state = ecp.RelayState(actor=ACTOR, must_understand="1",
                                 text=relay_state)

    # ----------------------------------------
    # <samlp:AuthnRequest>
    # ----------------------------------------
    try:
        authn_req = kwargs["authn_req"]
        try:
            req_id = authn_req.id
        except __HOLE__:
            req_id = 0  # Unknown but since it's SOAP it doesn't matter
    except KeyError:
        try:
            _binding = kwargs["binding"]
        except KeyError:
            _binding = BINDING_SOAP
            kwargs["binding"] = _binding

        logger.debug("entityid: %s, binding: %s" % (entityid, _binding))

        # The IDP publishes support for ECP by using the SOAP binding on
        # SingleSignOnService
        _, location = self.pick_binding("single_sign_on_service",
                                        [_binding], entity_id=entityid)
        req_id, authn_req = self.create_authn_request(
            location, service_url_binding=BINDING_PAOS, **kwargs)

    # ----------------------------------------
    # The SOAP envelope
    # ----------------------------------------
    soap_envelope = make_soap_enveloped_saml_thingy(authn_req,
                                                    [paos_request,
                                                     relay_state])

    return req_id, "%s" % soap_envelope
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.create_ecp_authn_request
@staticmethod
def can_handle_ecp_response(response):
    try:
        accept = response.headers["accept"]
    except KeyError:
        try:
            accept = response.headers["Accept"]
        except __HOLE__:
            return False

    if MIME_PAOS in accept:
        return True
    else:
        return False

# ----------------------------------------------------------------------
# IDP discovery
# ----------------------------------------------------------------------
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.can_handle_ecp_response
@staticmethod
def create_discovery_service_request(url, entity_id, **kwargs):
    """
    Creates the HTTP redirect URL needed to send the user to the
    discovery service.

    :param url: The URL of the discovery service
    :param entity_id: The unique identifier of the service provider
    :param return: The discovery service MUST redirect the user agent
        to this location in response to this request
    :param policy: A parameter name used to indicate the desired behavior
        controlling the processing of the discovery service
    :param returnIDParam: A parameter name used to return the unique
        identifier of the selected identity provider to the original
        requester.
    :param isPassive: A boolean value True/False that controls
        whether the discovery service is allowed to visibly interact
        with the user agent.
    :return: A URL
    """
    args = {"entityID": entity_id}
    for key in ["policy", "returnIDParam"]:
        try:
            args[key] = kwargs[key]
        except KeyError:
            pass

    try:
        args["return"] = kwargs["return_url"]
    except __HOLE__:
        try:
            args["return"] = kwargs["return"]
        except KeyError:
            pass

    if "isPassive" in kwargs:
        if kwargs["isPassive"]:
            args["isPassive"] = "true"
        else:
            args["isPassive"] = "false"

    params = urlencode(args)
    return "%s?%s" % (url, params)
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.create_discovery_service_request
@staticmethod
def parse_discovery_service_response(url="", query="",
                                     returnIDParam="entityID"):
    """
    Deal with the response url from a Discovery Service

    :param url: the url the user was redirected back to or
    :param query: just the query part of the URL.
    :param returnIDParam: This is where the identifier of the IdP is
        placed if it was specified in the query. Default is 'entityID'
    :return: The IdP identifier or "" if none was given
    """
    if url:
        part = urlparse(url)
        qsd = parse_qs(part[4])
    elif query:
        qsd = parse_qs(query)
    else:
        qsd = {}

    try:
        return qsd[returnIDParam][0]
    except __HOLE__:
        return ""
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/client_base.py/Base.parse_discovery_service_response
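A quick sketch of the query handling above (editorial addition): parse_qs maps each parameter name to a list of values, so a missing returnIDParam surfaces as the masked KeyError.

try:
    from urllib.parse import urlparse, parse_qs  # Python 3
except ImportError:
    from urlparse import urlparse, parse_qs      # Python 2

def idp_from_response(url, returnIDParam='entityID'):
    qsd = parse_qs(urlparse(url)[4])  # index 4 is the query component
    try:
        return qsd[returnIDParam][0]
    except KeyError:
        return ""

assert idp_from_response('https://sp.example.org/?entityID=https%3A%2F%2Fidp') == 'https://idp'
assert idp_from_response('https://sp.example.org/') == ''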
def parseLine(self, line):
    """Override this.

    By default, this will split the line on whitespace and call
    self.parseFields (catching any errors).
    """
    try:
        self.parseFields(*line.split())
    except __HOLE__:
        raise InvalidInetdConfError, 'Invalid line: ' + repr(line)
ValueError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/runner/inetdconf.py/SimpleConfFile.parseLine
def choice(a, size=None, replace=True, p=None, random_state=None):
    """
    choice(a, size=None, replace=True, p=None)

    Generates a random sample from a given 1-D array

    .. versionadded:: 1.7.0

    Parameters
    -----------
    a : 1-D array-like or int
        If an ndarray, a random sample is generated from its elements.
        If an int, the random sample is generated as if a was np.arange(n)
    size : int or tuple of ints, optional
        Output shape. Default is None, in which case a single value is
        returned.
    replace : boolean, optional
        Whether the sample is with or without replacement.
    p : 1-D array-like, optional
        The probabilities associated with each entry in a.
        If not given the sample assumes a uniform distribution over all
        entries in a.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    Returns
    --------
    samples : 1-D ndarray, shape (size,)
        The generated random samples

    Raises
    -------
    ValueError
        If a is an int and less than zero, if a or p are not 1-dimensional,
        if a is an array-like of size 0, if p is not a vector of
        probabilities, if a and p have different lengths, or if
        replace=False and the sample size is greater than the population
        size

    See Also
    ---------
    randint, shuffle, permutation

    Examples
    ---------
    Generate a uniform random sample from np.arange(5) of size 3:

    >>> np.random.choice(5, 3)  # doctest: +SKIP
    array([0, 3, 4])
    >>> #This is equivalent to np.random.randint(0,5,3)

    Generate a non-uniform random sample from np.arange(5) of size 3:

    >>> np.random.choice(5, 3, p=[0.1, 0, 0.3, 0.6, 0])  # doctest: +SKIP
    array([3, 3, 0])

    Generate a uniform random sample from np.arange(5) of size 3 without
    replacement:

    >>> np.random.choice(5, 3, replace=False)  # doctest: +SKIP
    array([3,1,0])
    >>> #This is equivalent to np.random.shuffle(np.arange(5))[:3]

    Generate a non-uniform random sample from np.arange(5) of size
    3 without replacement:

    >>> np.random.choice(5, 3, replace=False, p=[0.1, 0, 0.3, 0.6, 0])
    ... # doctest: +SKIP
    array([2, 3, 0])

    Any of the above can be repeated with an arbitrary array-like
    instead of just integers. For instance:

    >>> aa_milne_arr = ['pooh', 'rabbit', 'piglet', 'Christopher']
    >>> np.random.choice(aa_milne_arr, 5, p=[0.5, 0.1, 0.1, 0.3])
    ... # doctest: +SKIP
    array(['pooh', 'pooh', 'pooh', 'Christopher', 'piglet'], dtype='|S11')

    """
    random_state = check_random_state(random_state)

    # Format and Verify input
    a = np.array(a, copy=False)
    if a.ndim == 0:
        try:
            # __index__ must return an integer by python rules.
            pop_size = operator.index(a.item())
        except __HOLE__:
            raise ValueError("a must be 1-dimensional or an integer")
        if pop_size <= 0:
            raise ValueError("a must be greater than 0")
    elif a.ndim != 1:
        raise ValueError("a must be 1-dimensional")
    else:
        pop_size = a.shape[0]
        if pop_size is 0:
            raise ValueError("a must be non-empty")

    if None != p:
        p = np.array(p, dtype=np.double, ndmin=1, copy=False)
        if p.ndim != 1:
            raise ValueError("p must be 1-dimensional")
        if p.size != pop_size:
            raise ValueError("a and p must have same size")
        if np.any(p < 0):
            raise ValueError("probabilities are not non-negative")
        if not np.allclose(p.sum(), 1):
            raise ValueError("probabilities do not sum to 1")

    shape = size
    if shape is not None:
        size = np.prod(shape, dtype=np.intp)
    else:
        size = 1

    # Actual sampling
    if replace:
        if None != p:
            cdf = p.cumsum()
            cdf /= cdf[-1]
            uniform_samples = random_state.random_sample(shape)
            idx = cdf.searchsorted(uniform_samples, side='right')
            # searchsorted returns a scalar
            idx = np.array(idx, copy=False)
        else:
            idx = random_state.randint(0, pop_size, size=shape)
    else:
        if size > pop_size:
            raise ValueError("Cannot take a larger sample than "
                             "population when 'replace=False'")

        if None != p:
            if np.sum(p > 0) < size:
                raise ValueError("Fewer non-zero entries in p than size")
            n_uniq = 0
            p = p.copy()
            found = np.zeros(shape, dtype=np.int)
            flat_found = found.ravel()
            while n_uniq < size:
                x = random_state.rand(size - n_uniq)
                if n_uniq > 0:
                    p[flat_found[0:n_uniq]] = 0
                cdf = np.cumsum(p)
                cdf /= cdf[-1]
                new = cdf.searchsorted(x, side='right')
                _, unique_indices = np.unique(new, return_index=True)
                unique_indices.sort()
                new = new.take(unique_indices)
                flat_found[n_uniq:n_uniq + new.size] = new
                n_uniq += new.size
            idx = found
        else:
            idx = random_state.permutation(pop_size)[:size]
            if shape is not None:
                idx.shape = shape

    if shape is None and isinstance(idx, np.ndarray):
        # In most cases a scalar will have been made an array
        idx = idx.item(0)

    # Use samples as indices for a if a is array-like
    if a.ndim == 0:
        return idx

    if shape is not None and idx.ndim == 0:
        # If size == () then the user requested a 0-d array as opposed to
        # a scalar object when size is None. However a[idx] is always a
        # scalar and not an array. So this makes sure the result is an
        # array, taking into account that np.array(item) may not work
        # for object arrays.
        res = np.empty((), dtype=a.dtype)
        res[()] = a[idx]
        return res

    return a[idx]
TypeError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/utils/random.py/choice
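The masked TypeError above comes from operator.index, which rejects any scalar that is not integer-like; a reduced sketch (editorial addition) of that input check:

import operator
import numpy as np

def population_size(a):
    # scalar inputs must be integer-like; operator.index raises
    # TypeError for floats, strings, and other non-integers
    a = np.asarray(a)
    if a.ndim == 0:
        try:
            return operator.index(a.item())
        except TypeError:
            raise ValueError("a must be 1-dimensional or an integer")
    return a.shape[0]

print(population_size(5))       # 5
print(population_size([1, 2]))  # 2
# population_size(2.5) raises ValueError (TypeError from operator.index)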
def main():
    try:
        command = COMMANDS.get(args.all.pop(0).lower(), usage)
    except:
        command = usage  # fallback to usage.

    if command is usage:
        return command()

    # retrieve lang
    try:
        lang = args.grouped.get('lang', []).pop(0)
        if lang not in LANGS:
            raise ValueError(u"Unknown lang code")
    except __HOLE__ as e:
        puts(colored.red(e.message))
        usage()
    except IndexError:
        lang = None

    # update cmd requires more args.
    if command is update:
        # extract user & password
        try:
            user = args.grouped.get('--user', []).pop(0)
            password = args.grouped.get('--password', []).pop(0)
        except:
            raise
            user = password = None
        if not user or not password:
            print(colored.red(u"You need to provide Transifex.com credentials"))
            usage()
        return command(user, password, lang)

    # execute command with lang argument.
    return command(lang)
ValueError
dataset/ETHPy150Open kobotoolbox/kobocat/script/i18ntool.py/main
def initialiseShapeLearners(self):
    self.shapeLearners_currentCollection = []
    self.settings_shapeLearners_currentCollection = []
    self.shapeLearnersSeenBefore_currentCollection = []
    for i in range(len(self.currentCollection)):
        shapeType = self.currentCollection[i]

        # check if shape has been learnt before
        try:
            shapeType_index = self.shapesLearnt.index(shapeType)
            newShape = False
        except __HOLE__:
            newShape = True

        self.shapeLearnersSeenBefore_currentCollection.append(not newShape)
        if newShape:
            settings = self.generateSettings(shapeType)

            shapeLearner = ShapeLearner(settings)
            self.shapesLearnt.append(shapeType)
            self.shapeLearners_all.append(shapeLearner)
            self.settings_shapeLearners_all.append(settings)
            self.shapeLearners_currentCollection.append(self.shapeLearners_all[-1])
            self.settings_shapeLearners_currentCollection.append(self.settings_shapeLearners_all[-1])
        else:
            # use the bounds determined last time
            previousBounds = self.shapeLearners_all[shapeType_index].getParameterBounds()
            newInitialBounds = previousBounds
            newInitialBounds[0, 0] -= boundExpandingAmount  # USE ONLY FIRST PARAM FOR SELF-LEARNING ALGORITHM ATM
            newInitialBounds[0, 1] += boundExpandingAmount  # USE ONLY FIRST PARAM FOR SELF-LEARNING ALGORITHM ATM
            self.shapeLearners_all[shapeType_index].setParameterBounds(newInitialBounds)
            self.shapeLearners_currentCollection.append(self.shapeLearners_all[shapeType_index])
            self.settings_shapeLearners_currentCollection.append(self.settings_shapeLearners_all[shapeType_index])
ValueError
dataset/ETHPy150Open chili-epfl/shape_learning/src/shape_learning/shape_learner_manager.py/ShapeLearnerManager.initialiseShapeLearners
def indexOfShapeInCurrentCollection(self, shapeType):
    try:
        shapeType_index = self.currentCollection.index(shapeType)
    except __HOLE__:
        # unknown shape
        shapeType_index = -1
    return shapeType_index
ValueError
dataset/ETHPy150Open chili-epfl/shape_learning/src/shape_learning/shape_learner_manager.py/ShapeLearnerManager.indexOfShapeInCurrentCollection
def indexOfShapeInAllShapesLearnt(self, shapeType):
    try:
        shapeType_index = self.shapesLearnt.index(shapeType)
    except __HOLE__:
        # unknown shape
        shapeType_index = -1
    return shapeType_index
ValueError
dataset/ETHPy150Open chili-epfl/shape_learning/src/shape_learning/shape_learner_manager.py/ShapeLearnerManager.indexOfShapeInAllShapesLearnt
def shapeAtIndexInCurrentCollection(self, shapeType_index):
    try:
        shapeType = self.currentCollection[shapeType_index]
    except __HOLE__:
        # unknown shape
        shapeType = -1
    return shapeType
ValueError
dataset/ETHPy150Open chili-epfl/shape_learning/src/shape_learning/shape_learner_manager.py/ShapeLearnerManager.shapeAtIndexInCurrentCollection
def shapeAtIndexInAllShapesLearnt(self, shapeType_index):
    try:
        shapeType = self.shapesLearnt[shapeType_index]
    except __HOLE__:
        # unknown shape
        shapeType = -1
    return shapeType
ValueError
dataset/ETHPy150Open chili-epfl/shape_learning/src/shape_learning/shape_learner_manager.py/ShapeLearnerManager.shapeAtIndexInAllShapesLearnt
def newCollection(self, collection):
    self.currentCollection = ""
    # check, for each letter, that we have the corresponding dataset
    for l in collection:
        try:
            self.generateSettings(l)
        except __HOLE__:
            # no dataset for this letter!
            shapeLogger.error("No dataset available for letter <%s>. Skipping this letter." % l)
            continue
        self.currentCollection += l

    self.nextShapeLearnerToBeStarted = 0

    shapeLogger.info("Starting to work on word <%s>" % collection)

    try:
        self.collectionsLearnt.index(self.currentCollection)
        collectionSeenBefore = True
    except ValueError:
        collectionSeenBefore = False
        self.collectionsLearnt.append(self.currentCollection)

    self.initialiseShapeLearners()
    return collectionSeenBefore
RuntimeError
dataset/ETHPy150Open chili-epfl/shape_learning/src/shape_learning/shape_learner_manager.py/ShapeLearnerManager.newCollection
def _get_win_folder_with_pywin32(csidl_name):
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except __HOLE__:
                pass
    except UnicodeError:
        pass
    return dir
ImportError
dataset/ETHPy150Open Anaconda-Platform/chalmers/chalmers/utils/appdirs.py/_get_win_folder_with_pywin32
def fsinit(self):
    try:
        NfSpy.fsinit(self)
    except __HOLE__ as e:
        raise fuse.FuseError, e.message
RuntimeError
dataset/ETHPy150Open bonsaiviking/NfSpy/nfspy/fusefs.py/NFSFuse.fsinit
def release(self):
    """Flush changes and release the configuration write lock. This
    instance must not be used anymore afterwards. In Python 3, it's
    required to explicitly release locks and flush changes, as __del__
    is not called deterministically anymore."""
    # checking for the lock here makes sure we do not raise during write()
    # in case an invalid parser was created who could not get a lock
    if self.read_only or (self._lock and not self._lock._has_lock()):
        return

    try:
        try:
            self.write()
        except __HOLE__:
            log.error("Exception during destruction of GitConfigParser", exc_info=True)
        except ReferenceError:
            # This happens in PY3 ... and usually means that some state cannot
            # be written as the sections dict cannot be iterated.
            # Usually when shutting down the interpreter; don't know how to fix this
            pass
    finally:
        self._lock._release_lock()
IOError
dataset/ETHPy150Open gitpython-developers/GitPython/git/config.py/GitConfigParser.release
def read(self):
    """Reads the data stored in the files we have been initialized with.
    It will ignore files that cannot be read, possibly leaving an empty
    configuration

    :return: Nothing
    :raise IOError: if a file cannot be handled"""
    if self._is_initialized:
        return
    self._is_initialized = True

    if not isinstance(self._file_or_files, (tuple, list)):
        files_to_read = [self._file_or_files]
    else:
        files_to_read = list(self._file_or_files)
    # end assure we have a copy of the paths to handle

    seen = set(files_to_read)
    num_read_include_files = 0
    while files_to_read:
        file_path = files_to_read.pop(0)
        fp = file_path
        close_fp = False

        # assume a path if it is not a file-object
        if not hasattr(fp, "seek"):
            try:
                fp = open(file_path, 'rb')
                close_fp = True
            except __HOLE__:
                continue
        # END fp handling

        try:
            self._read(fp, fp.name)
        finally:
            if close_fp:
                fp.close()
        # END read-handling

        # Read includes and append those that we didn't handle yet
        # We expect all paths to be normalized and absolute (and will assure that is the case)
        if self._has_includes():
            for _, include_path in self.items('include'):
                if include_path.startswith('~'):
                    include_path = os.path.expanduser(include_path)
                if not os.path.isabs(include_path):
                    if not close_fp:
                        continue
                    # end ignore relative paths if we don't know the configuration file path
                    assert os.path.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                    include_path = os.path.join(os.path.dirname(file_path), include_path)
                # end make include path absolute
                include_path = os.path.normpath(include_path)
                if include_path in seen or not os.access(include_path, os.R_OK):
                    continue
                seen.add(include_path)
                files_to_read.append(include_path)
                num_read_include_files += 1
            # each include path in configuration file
        # end handle includes
    # END for each file object to read

    # If there was no file included, we can safely write back (potentially)
    # the configuration file without altering its meaning
    if num_read_include_files == 0:
        self._merge_includes = False
    # end
IOError
dataset/ETHPy150Open gitpython-developers/GitPython/git/config.py/GitConfigParser.read
def get_value(self, section, option, default=None):
    """
    :param default: If not None, the given default value will be returned
        in case the option did not exist

    :return: a properly typed value, either int, float or string

    :raise TypeError: in case the value could not be understood.
        Otherwise the exceptions known to the ConfigParser will be raised."""
    try:
        valuestr = self.get(section, option)
    except Exception:
        if default is not None:
            return default
        raise

    types = (int, float)
    for numtype in types:
        try:
            val = numtype(valuestr)

            # truncated value ?
            if val != float(valuestr):
                continue

            return val
        except (ValueError, __HOLE__):
            continue
    # END for each numeric type

    # try boolean values as git uses them
    vl = valuestr.lower()
    if vl == 'false':
        return False
    if vl == 'true':
        return True

    if not isinstance(valuestr, string_types):
        raise TypeError("Invalid value type: only int, long, float and str are allowed", valuestr)

    return valuestr
TypeError
dataset/ETHPy150Open gitpython-developers/GitPython/git/config.py/GitConfigParser.get_value
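A condensed, standalone sketch (editorial addition) of the coercion ladder above: try int, then float, then git-style booleans, and finally fall back to the raw string.

def coerce_git_value(valuestr):
    for numtype in (int, float):
        try:
            val = numtype(valuestr)
            if val != float(valuestr):  # reject truncated conversions
                continue
            return val
        except (ValueError, TypeError):
            continue
    vl = valuestr.lower()
    if vl == 'false':
        return False
    if vl == 'true':
        return True
    return valuestr

assert coerce_git_value("42") == 42
assert coerce_git_value("4.2") == 4.2
assert coerce_git_value("true") is True
assert coerce_git_value("hello") == "hello"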
@click.command(help="Open a Python shell, bootstrapping the connection to MongoDB")
@click.option('--no-startup', is_flag=True)
def shell(no_startup=False):
    """
    Open a Python shell, bootstrapping the connection to MongoDB
    """
    # Set up a dictionary to serve as the environment for the shell, so
    # that tab completion works on objects that are imported at runtime.
    imported_objects = {}
    try:
        # Try activating rlcompleter, because it's handy.
        import readline
    except __HOLE__:
        pass
    else:
        # We don't have to wrap the following import in a 'try', because
        # we already know 'readline' was imported successfully.
        import rlcompleter
        readline.set_completer(rlcompleter.Completer(imported_objects).complete)
        readline.parse_and_bind("tab:complete")

    # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
    # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
    if not no_startup:
        for pythonrc in (os.environ.get("PYTHONSTARTUP"), '~/.pythonrc.py'):
            if not pythonrc:
                continue
            pythonrc = os.path.expanduser(pythonrc)
            if not os.path.isfile(pythonrc):
                continue
            try:
                with open(pythonrc) as handle:
                    exec(compile(handle.read(), pythonrc, 'exec'),
                         imported_objects)
            except NameError:
                pass
    code.interact(local=imported_objects)
ImportError
dataset/ETHPy150Open openelections/openelections-core/openelex/tasks/shell.py/shell
@deferredAsThread
def _deserialize(self, fd):
    while True:
        try:
            obj = Entity.deserialize(fd)
        except EOFError:
            fd.close()
            break
        except __HOLE__:
            fd.close()
            break
AttributeError
dataset/ETHPy150Open OrbitzWorldwide/droned/droned/lib/droned/clients/gremlin.py/GremlinClient._deserialize
def _repr_svg_(self):
    """Show SVG representation of the transducer (IPython magic).

    >>> dg = DependencyGraph(
    ...     'John N 2\\n'
    ...     'loves V 0\\n'
    ...     'Mary N 2'
    ... )
    >>> dg._repr_svg_().split('\\n')[0]
    '<?xml version="1.0" encoding="UTF-8" standalone="no"?>'

    """
    dot_string = self.to_dot()

    try:
        process = subprocess.Popen(
            ['dot', '-Tsvg'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )
    except __HOLE__:
        raise Exception('Cannot find the dot binary from Graphviz package')
    out, err = process.communicate(dot_string)
    if err:
        raise Exception(
            'Cannot create svg representation by running dot from string: {}'
            ''.format(dot_string))
    return out
OSError
dataset/ETHPy150Open nltk/nltk/nltk/parse/dependencygraph.py/DependencyGraph._repr_svg_
def _parse(self, input_, cell_extractor=None, zero_based=False,
           cell_separator=None, top_relation_label='ROOT'):
    """Parse a sentence.

    :param extractor: a function that given a tuple of cells returns a
        7-tuple, where the values are ``word, lemma, ctag, tag, feats,
        head, rel``.

    :param str cell_separator: the cell separator. If not provided, cells
        are split by whitespace.

    :param str top_relation_label: the label by which the top relation is
        identified, for example, `ROOT`, `null` or `TOP`.
    """

    def extract_3_cells(cells, index):
        word, tag, head = cells
        return index, word, word, tag, tag, '', head, ''

    def extract_4_cells(cells, index):
        word, tag, head, rel = cells
        return index, word, word, tag, tag, '', head, rel

    def extract_7_cells(cells, index):
        line_index, word, lemma, tag, _, head, rel = cells
        try:
            index = int(line_index)
        except ValueError:
            # index can't be parsed as an integer, use default
            pass
        return index, word, lemma, tag, tag, '', head, rel

    def extract_10_cells(cells, index):
        line_index, word, lemma, ctag, tag, feats, head, rel, _, _ = cells
        try:
            index = int(line_index)
        except ValueError:
            # index can't be parsed as an integer, use default
            pass
        return index, word, lemma, ctag, tag, feats, head, rel

    extractors = {
        3: extract_3_cells,
        4: extract_4_cells,
        7: extract_7_cells,
        10: extract_10_cells,
    }

    if isinstance(input_, string_types):
        input_ = (line for line in input_.split('\n'))

    lines = (l.rstrip() for l in input_)
    lines = (l for l in lines if l)

    cell_number = None
    for index, line in enumerate(lines, start=1):
        cells = line.split(cell_separator)
        if cell_number is None:
            cell_number = len(cells)
        else:
            assert cell_number == len(cells)

        if cell_extractor is None:
            try:
                cell_extractor = extractors[cell_number]
            except KeyError:
                raise ValueError(
                    'Number of tab-delimited fields ({0}) not supported by '
                    'CoNLL(10) or Malt-Tab(4) format'.format(cell_number)
                )

        try:
            index, word, lemma, ctag, tag, feats, head, rel = cell_extractor(cells, index)
        except (TypeError, __HOLE__):
            # cell_extractor doesn't take 2 arguments or doesn't return 8
            # values; assume the cell_extractor is an older external
            # extractor and doesn't accept or return an index.
            word, lemma, ctag, tag, feats, head, rel = cell_extractor(cells)

        if head == '_':
            continue

        head = int(head)
        if zero_based:
            head += 1

        self.nodes[index].update(
            {
                'address': index,
                'word': word,
                'lemma': lemma,
                'ctag': ctag,
                'tag': tag,
                'feats': feats,
                'head': head,
                'rel': rel,
            }
        )

        # Make sure that the fake root node has labeled dependencies.
        if (cell_number == 3) and (head == 0):
            rel = top_relation_label
        self.nodes[head]['deps'][rel].append(index)

    if self.nodes[0]['deps'][top_relation_label]:
        root_address = self.nodes[0]['deps'][top_relation_label][0]
        self.root = self.nodes[root_address]
        self.top_relation_label = top_relation_label
    else:
        warnings.warn(
            "The graph doesn't contain a node "
            "that depends on the root element."
        )
ValueError
dataset/ETHPy150Open nltk/nltk/nltk/parse/dependencygraph.py/DependencyGraph._parse
def _hd(self, i):
    try:
        return self.nodes[i]['head']
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open nltk/nltk/nltk/parse/dependencygraph.py/DependencyGraph._hd
def _rel(self, i):
    try:
        return self.nodes[i]['rel']
    except __HOLE__:
        return None

# what's the return type? Boolean or list?
IndexError
dataset/ETHPy150Open nltk/nltk/nltk/parse/dependencygraph.py/DependencyGraph._rel
def _update_or_create_shard(self, step):
    """ Find or create a random shard and alter its `count` by the given step. """
    shard_index = random.randint(0, self.field.shard_count - 1)
    # Converting the set to a list introduces some randomness in the ordering, but that's fine
    shard_pks = list(self.field.value_from_object(self.instance))  # needs to be indexable
    try:
        shard_pk = shard_pks[shard_index]
    except __HOLE__:
        # We don't have this many shards yet, so create a new one
        with transaction.atomic(xg=True):
            # We must re-fetch the instance to ensure that we do this atomically, but we must
            # also update self.instance so that the calling code which is referencing
            # self.instance also gets the updated list of shard PKs
            new_shard = self._create_shard(count=step)
            new_instance = self.instance._default_manager.get(pk=self.instance.pk)
            new_instance_shard_pks = getattr(new_instance, self.field.attname, set())
            new_instance_shard_pks.add(new_shard.pk)
            setattr(self.instance, self.field.attname, new_instance_shard_pks)
            models.Model.save(new_instance)  # avoid custom save method, which might do DB lookups
    else:
        with transaction.atomic():
            from djangae.models import CounterShard
            shard = CounterShard.objects.get(pk=shard_pk)
            shard.count += step
            shard.save()
IndexError
dataset/ETHPy150Open potatolondon/djangae/djangae/fields/counting.py/RelatedShardManager._update_or_create_shard
def run_asv(args, current_repo=False):
    cwd = os.path.abspath(os.path.dirname(__file__))

    if current_repo:
        try:
            from asv.util import load_json, write_json
            conf = load_json(os.path.join(cwd, 'asv.conf.json'))
            conf['repo'] = os.path.normpath(os.path.join(cwd, '..'))
            cfg_fn = os.path.join(cwd, '.asvconf.tmp')
            write_json(cfg_fn, conf)
            args = ['--config', cfg_fn] + args
        except ImportError:
            pass

    repo_dir = os.path.join(cwd, 'scipy')
    if is_git_repo_root(repo_dir):
        if current_repo:
            url = os.path.normpath(os.path.join(cwd, '..'))
        else:
            url = "https://github.com/scipy/scipy.git"
        subprocess.call(['git', 'remote', 'set-url', "origin", url],
                        cwd=repo_dir)

    cmd = ['asv'] + list(args)
    env = dict(os.environ)

    # Inject ccache/f90cache paths
    if sys.platform.startswith('linux'):
        env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))

    # Control BLAS and CFLAGS
    env['OPENBLAS_NUM_THREADS'] = '1'
    env['CFLAGS'] = drop_bad_flags(sysconfig.get_config_var('CFLAGS'))

    # Limit memory usage
    try:
        set_mem_rlimit()
    except (ImportError, __HOLE__):
        pass

    # Check scipy version if in dev mode; otherwise clone and setup results
    # repository
    if args and (args[0] == 'dev' or '--python=same' in args):
        import scipy
        print("Running benchmarks for Scipy version %s at %s" % (scipy.__version__, scipy.__file__))

    # Override gh-pages
    if 'gh-pages' in args:
        print("gh-pages command is disabled")
        return 1

    # Run
    try:
        return subprocess.call(cmd, env=env, cwd=cwd)
    except OSError as err:
        if err.errno == 2:
            print("Error when running '%s': %s\n" % (" ".join(cmd), str(err),))
            print("You need to install Airspeed Velocity https://spacetelescope.github.io/asv/")
            print("to run Scipy benchmarks")
            return 1
        raise
RuntimeError
dataset/ETHPy150Open scipy/scipy/benchmarks/run.py/run_asv
def is_git_repo_root(path):
    try:
        p = subprocess.Popen(['git', '-C', path, 'rev-parse', '--git-dir'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        if p.returncode != 0:
            return False
        return (out.strip() == '.git')
    except __HOLE__:
        return False
OSError
dataset/ETHPy150Open scipy/scipy/benchmarks/run.py/is_git_repo_root
def __init__(self, func, when):
    #: context of invocation: one of "setup", "call",
    #: "teardown", "memocollect"
    self.when = when
    self.start = time()
    try:
        self.result = func()
    except __HOLE__:
        self.stop = time()
        raise
    except:
        self.excinfo = ExceptionInfo()
    self.stop = time()
KeyboardInterrupt
dataset/ETHPy150Open pytest-dev/pytest/_pytest/runner.py/CallInfo.__init__
def getslaveinfoline(node):
    try:
        return node._slaveinfocache
    except __HOLE__:
        d = node.slaveinfo
        ver = "%s.%s.%s" % d['version_info'][:3]
        node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
            d['id'], d['sysplatform'], ver, d['executable'])
        return s
AttributeError
dataset/ETHPy150Open pytest-dev/pytest/_pytest/runner.py/getslaveinfoline
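The AttributeError above drives a cache-on-first-use idiom; a generic, self-contained sketch (editorial addition):

class Node(object):
    def __init__(self, slaveinfo):
        self.slaveinfo = slaveinfo

    def infoline(self):
        # compute once, stash on the instance, and let AttributeError
        # signal "not cached yet" on the first call
        try:
            return self._infocache
        except AttributeError:
            self._infocache = "[%(id)s] %(sysplatform)s" % self.slaveinfo
            return self._infocache

n = Node({'id': 'gw0', 'sysplatform': 'linux'})
assert n.infoline() == n.infoline() == '[gw0] linux'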
def importorskip(modname, minversion=None):
    """ return imported module if it has at least "minversion" as its
    __version__ attribute.  If no minversion is specified, a skip is only
    triggered if the module cannot be imported.
    """
    __tracebackhide__ = True
    compile(modname, '', 'eval')  # to catch syntaxerrors
    try:
        __import__(modname)
    except __HOLE__:
        skip("could not import %r" % (modname,))
    mod = sys.modules[modname]
    if minversion is None:
        return mod
    verattr = getattr(mod, '__version__', None)
    if minversion is not None:
        try:
            from pkg_resources import parse_version as pv
        except ImportError:
            skip("we have a required version for %r but cannot import "
                 "pkg_resources to parse version strings." % (modname,))
        if verattr is None or pv(verattr) < pv(minversion):
            skip("module %r has __version__ %r, required is: %r" % (
                modname, verattr, minversion))
    return mod
ImportError
dataset/ETHPy150Open pytest-dev/pytest/_pytest/runner.py/importorskip
def lazy_gettext(string, **variables):
    """ Similar to 'gettext', but the string returned is lazy, which means
    it will be translated when it is used as an actual string."""
    try:
        from speaklater import make_lazy_string
        return make_lazy_string(gettext, string, **variables)
    except __HOLE__:
        return string % variables
ImportError
dataset/ETHPy150Open lingthio/Flask-User/flask_user/translations.py/lazy_gettext
def lookup(value, key):
    """ Return a dictionary lookup of key in value """
    try:
        return value[key]
    except __HOLE__:
        return ""
KeyError
dataset/ETHPy150Open dokterbob/satchmo/satchmo/apps/satchmo_store/shop/templatetags/satchmo_util.py/lookup
def test_rowset_tables():
    # print("Project ID:", project.id)
    # del integration._to_cleanup[:]

    cols = []
    cols.append(Column(name='name', columnType='STRING', maximumSize=1000))
    cols.append(Column(name='foo', columnType='STRING', enumValues=['foo', 'bar', 'bat']))
    cols.append(Column(name='x', columnType='DOUBLE'))
    cols.append(Column(name='age', columnType='INTEGER'))
    cols.append(Column(name='cartoon', columnType='BOOLEAN'))

    schema1 = syn.store(Schema(name='Foo Table', columns=cols, parent=project))

    print("Table Schema:", schema1.id)

    ## Get columns associated with the given table
    retrieved_cols = list(syn.getTableColumns(schema1))

    ## Test that the columns we get are the same as the ones we stored
    assert len(retrieved_cols) == len(cols)
    for retrieved_col, col in zip(retrieved_cols, cols):
        assert retrieved_col.name == col.name
        assert retrieved_col.columnType == col.columnType

    data1 = [['Chris', 'bar', 11.23, 45, False],
             ['Jen', 'bat', 14.56, 40, False],
             ['Jane', 'bat', 17.89, 6, False],
             ['Henry', 'bar', 10.12, 1, False]]
    row_reference_set1 = syn.store(
        RowSet(columns=cols, schema=schema1, rows=[Row(r) for r in data1]))
    assert len(row_reference_set1['rows']) == 4

    ## add more new rows
    ## TODO: use 'NaN', '+Infinity', '-Infinity' when supported by server
    data2 = [['Fred', 'bat', 21.45, 20, True],
             ['Daphne', 'foo', 27.89, 20, True],
             ['Shaggy', 'foo', 23.45, 20, True],
             ['Velma', 'bar', 25.67, 20, True]]
    syn.store(
        RowSet(columns=cols, schema=schema1, rows=[Row(r) for r in data2]))

    results = syn.tableQuery("select * from %s order by name" % schema1.id, resultsAs="rowset")

    assert results.count == 8
    assert results.tableId == schema1.id

    ## test that the values made the round trip
    expected = sorted(data1 + data2)
    for expected_values, row in zip(expected, results):
        assert expected_values == row['values'], 'got %s but expected %s' % (row['values'], expected_values)

    ## To modify rows, we have to select them first.
    result2 = syn.tableQuery('select * from %s where age>18 and age<30' % schema1.id, resultsAs="rowset")

    ## make a change
    rs = result2.asRowSet()
    for row in rs['rows']:
        row['values'][2] = 88.888

    ## store it
    row_reference_set = syn.store(rs)

    ## check if the change sticks
    result3 = syn.tableQuery('select name, x, age from %s' % schema1.id, resultsAs="rowset")
    for row in result3:
        if int(row['values'][2]) == 20:
            assert row['values'][1] == 88.888

    ## Add a column
    bday_column = syn.store(Column(name='birthday', columnType='DATE'))

    column = syn.getColumn(bday_column.id)
    assert column.name == "birthday"
    assert column.columnType == "DATE"

    schema1.addColumn(bday_column)
    schema1 = syn.store(schema1)

    results = syn.tableQuery('select * from %s where cartoon=false order by age' % schema1.id, resultsAs="rowset")
    rs = results.asRowSet()

    ## put data in new column
    bdays = ('2013-3-15', '2008-1-3', '1973-12-8', '1969-4-28')
    for bday, row in zip(bdays, rs.rows):
        row['values'][5] = bday
    row_reference_set = syn.store(rs)

    ## query by date and check that we get back two kids
    date_2008_jan_1 = utils.to_unix_epoch_time(datetime(2008, 1, 1))
    results = syn.tableQuery('select name from %s where birthday > %d order by birthday' % (schema1.id, date_2008_jan_1), resultsAs="rowset")
    assert ["Jane", "Henry"] == [row['values'][0] for row in results]

    try:
        import pandas as pd
        df = results.asDataFrame()
        assert all(df.ix[:, "name"] == ["Jane", "Henry"])
    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping part of test_rowset_tables.\n\n')

    results = syn.tableQuery('select birthday from %s where cartoon=false order by age' % schema1.id, resultsAs="rowset")
    for bday, row in zip(bdays, results):
        assert row['values'][0] == datetime.strptime(bday, "%Y-%m-%d"), "got %s but expected %s" % (row['values'][0], bday)

    try:
        import pandas as pd
        results = syn.tableQuery("select foo, MAX(x), COUNT(foo), MIN(age) from %s group by foo order by foo" % schema1.id, resultsAs="rowset")
        df = results.asDataFrame()
        print(df)
        assert df.shape == (3, 4)
        assert all(df.iloc[:, 0] == ["bar", "bat", "foo"])
        assert all(df.iloc[:, 1] == [88.888, 88.888, 88.888])
        assert all(df.iloc[:, 2] == [3, 3, 2])
    except ImportError as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping part of test_rowset_tables.\n\n')

    ## test delete rows by deleting cartoon characters
    syn.delete(syn.tableQuery('select name from %s where cartoon = true' % schema1.id, resultsAs="rowset"))

    results = syn.tableQuery('select name from %s order by birthday' % schema1.id, resultsAs="rowset")
    assert ["Chris", "Jen", "Jane", "Henry"] == [row['values'][0] for row in results]

    ## check what happens when query result is empty
    results = syn.tableQuery('select * from %s where age > 1000' % schema1.id, resultsAs="rowset")
    assert len(list(results)) == 0

    try:
        import pandas as pd
        results = syn.tableQuery('select * from %s where age > 1000' % schema1.id, resultsAs="rowset")
        df = results.asDataFrame()
        assert df.shape[0] == 0
    except ImportError as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping part of test_rowset_tables.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/integration/test_tables.py/test_rowset_tables
def test_tables_csv():

    ## Define schema
    cols = []
    cols.append(Column(name='Name', columnType='STRING'))
    cols.append(Column(name='Born', columnType='INTEGER'))
    cols.append(Column(name='Hipness', columnType='DOUBLE'))
    cols.append(Column(name='Living', columnType='BOOLEAN'))

    schema = Schema(name='Jazz Guys', columns=cols, parent=project)

    data = [["John Coltrane", 1926, 8.65, False],
            ["Miles Davis", 1926, 9.87, False],
            ["Bill Evans", 1929, 7.65, False],
            ["Paul Chambers", 1935, 5.14, False],
            ["Jimmy Cobb", 1929, 5.78, True],
            ["Scott LaFaro", 1936, 4.21, False],
            ["Sonny Rollins", 1930, 8.99, True],
            ["Kenny Burrel", 1931, 4.37, True]]

    ## the following creates a CSV file and uploads it to create a new table
    table = syn.store(Table(schema, data))

    ## Query and download an identical CSV
    results = syn.tableQuery("select * from %s" % table.schema.id, resultsAs="csv", includeRowIdAndRowVersion=False)

    ## Test that CSV file came back as expected
    for expected_row, row in zip(data, results):
        assert expected_row == row, "expected %s but got %s" % (expected_row, row)

    try:
        ## check if we have pandas
        import pandas as pd

        df = results.asDataFrame()
        assert all(df.columns.values == ['Name', 'Born', 'Hipness', 'Living'])
        assert list(df.iloc[1, [0, 1, 3]]) == ['Miles Davis', 1926, False]
        assert df.iloc[1, 2] - 9.87 < 0.0001
    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping test of .asDataFrame for CSV tables.\n\n')

    ## Aggregate query
    expected = {
        True: [True, 1929, 3, 6.38],
        False: [False, 1926, 5, 7.104]}

    results = syn.tableQuery('select Living, min(Born), count(Living), avg(Hipness) from %s group by Living' % table.schema.id, resultsAs="csv", includeRowIdAndRowVersion=False)
    for row in results:
        living = row[0]
        assert expected[living][1] == row[1]
        assert expected[living][2] == row[2]
        assert abs(expected[living][3] - row[3]) < 0.0001

    ## Aggregate query results to DataFrame
    try:
        ## check if we have pandas
        import pandas as pd

        df = results.asDataFrame()
        assert all(expected[df.iloc[0, 0]][0:3] == df.iloc[0, 0:3])
        assert abs(expected[df.iloc[1, 0]][3] - df.iloc[1, 3]) < 0.0001
    except ImportError as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping test of .asDataFrame for aggregate queries as CSV tables.\n\n')

    ## Append rows
    more_jazz_guys = [["Sonny Clark", 1931, 8.43, False],
                      ["Hank Mobley", 1930, 5.67, False],
                      ["Freddie Hubbard", 1938, float('nan'), False],
                      ["Thelonious Monk", 1917, float('inf'), False]]

    table = syn.store(Table(table.schema, more_jazz_guys))

    ## test that CSV file now has more jazz guys
    results = syn.tableQuery("select * from %s" % table.schema.id, resultsAs="csv")
    for expected_row, row in zip(data + more_jazz_guys, results):
        for field, expected_field in zip(row[2:], expected_row):
            if type(field) is float and math.isnan(field):
                assert type(expected_field) is float and math.isnan(expected_field)
            elif type(expected_field) is float and math.isnan(expected_field):
                assert type(field) is float and math.isnan(field)
            else:
                assert expected_field == field

    ## Update as a RowSet
    rowset = results.asRowSet()
    for row in rowset['rows']:
        if row['values'][1] == 1930:
            row['values'][2] = 8.5
    row_reference_set = syn.store(rowset)

    ## aggregate queries won't return row id and version, so we need to
    ## handle this correctly
    results = syn.tableQuery('select Born, COUNT(*) from %s group by Born order by Born' % table.schema.id, resultsAs="csv")
    assert results.includeRowIdAndRowVersion == False
    for i, row in enumerate(results):
        assert row[0] == [1917, 1926, 1929, 1930, 1931, 1935, 1936, 1938][i]
        assert row[1] == [1, 2, 2, 2, 2, 1, 1, 1][i]

    try:
        import pandas as pd

        results = syn.tableQuery("select * from %s where Born=1930" % table.schema.id, resultsAs="csv")
        df = results.asDataFrame()
        print("\nUpdated hipness to 8.5", df)
        all(df['Born'].values == 1930)
        all(df['Hipness'].values == 8.5)

        ## Update via a Data Frame
        df['Hipness'] = 9.75
        table = syn.store(Table(table.tableId, df, etag=results.etag))

        results = syn.tableQuery("select * from %s where Born=1930" % table.tableId, resultsAs="csv")
        for row in results:
            assert row[4] == 9.75
    except ImportError as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping part of test_tables_csv.\n\n')

    ## check what happens when query result is empty
    results = syn.tableQuery('select * from %s where Born=2013' % table.tableId, resultsAs="csv")
    assert len(list(results)) == 0

    try:
        import pandas as pd
        results = syn.tableQuery('select * from %s where Born=2013' % table.tableId, resultsAs="csv")
        df = results.asDataFrame()
        assert df.shape[0] == 0
    except ImportError as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping part of test_tables_csv.\n\n')

    ## delete some rows
    results = syn.tableQuery('select * from %s where Hipness < 7' % table.tableId, resultsAs="csv")
    syn.delete(results)
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/integration/test_tables.py/test_tables_csv
def test_tables_pandas():
    try:
        ## check if we have pandas
        import pandas as pd

        ## create a pandas DataFrame
        df = pd.DataFrame({
            'A': ("foo", "bar", "baz", "qux", "asdf"),
            'B': tuple(math.pi * i for i in range(5)),
            'C': (101, 202, 303, 404, 505),
            'D': (False, True, False, True, False)})

        cols = as_table_columns(df)
        cols[0].maximumSize = 20
        schema = Schema(name="Nifty Table", columns=cols, parent=project)

        ## store in Synapse
        table = syn.store(Table(schema, df))

        ## retrieve the table and verify
        results = syn.tableQuery('select * from %s' % table.schema.id)
        df2 = results.asDataFrame()

        ## simulate rowId-version rownames for comparison
        df.index = ['%s_0' % i for i in range(5)]
        assert all(df2 == df)
    except __HOLE__ as e1:
        sys.stderr.write('Pandas is apparently not installed, skipping test_tables_pandas.\n\n')
ImportError
dataset/ETHPy150Open Sage-Bionetworks/synapsePythonClient/tests/integration/test_tables.py/test_tables_pandas
def _process_command(self, cmd, sender):
    # this function is run by a worker thread
    logger = self._logger.getChild('worker')
    try:
        arg_list = cmd.split()
        logger.debug('get cmd: ' + str(arg_list))
        args, unknown_args = self._cmd_parser.parse_known_args(arg_list)
        if len(unknown_args) > 0:
            logger.debug('unknown args: ' + str(unknown_args))  # pragma: no cover
        args.proc(args, sender)
    except __HOLE__ as e:
        # TODO maybe these processes could be hidden in the cmd parser
        if e.code == 2:
            # reach here if no sub command is found in the cmd
            # direct command is processed here
            matched = self._cmd_parser.process_direct_commands(cmd, sender)
            if not matched:
                # if no direct command is matching,
                # respond to the user that we cannot recognize the command
                logger.debug('no known args found.')
                sender.send_message('Unknown commands.')
        else:
            logger.exception('Unexpected SystemExit')  # pragma: no cover
SystemExit
dataset/ETHPy150Open KavenC/Linot/linot/command_server.py/CmdServer._process_command
def _failsafe_parse(self, requirement):
    try:
        return Requirement.parse(requirement, replacement=False)
    except __HOLE__:
        return Requirement.parse(requirement)
TypeError
dataset/ETHPy150Open pantsbuild/pants/src/python/pants/backend/python/python_setup.py/PythonSetup._failsafe_parse
def main():
    try:
        hostname = raw_input("Enter remote host to test: ")
        username = raw_input("Enter remote username: ")
    except __HOLE__:
        hostname = input("Enter remote host to test: ")
        username = input("Enter remote username: ")

    linux_test = {
        'username': username,
        'use_keys': True,
        'ip': hostname,
        'device_type': 'ovs_linux',
        'key_file': '/home/{}/.ssh/test_rsa'.format(username),
        'verbose': False}

    net_connect = ConnectHandler(**linux_test)
    print()
    print(net_connect.find_prompt())

    # Test enable mode
    print()
    print("***** Testing enable mode *****")
    net_connect.enable()
    if net_connect.check_enable_mode():
        print("Success: in enable mode")
    else:
        print("Fail...")
    print(net_connect.find_prompt())
    net_connect.exit_enable_mode()
    print("Out of enable mode")
    print(net_connect.find_prompt())

    # Test config mode
    print()
    print("***** Testing config mode *****")
    net_connect.config_mode()
    if net_connect.check_config_mode():
        print("Success: in config mode")
    else:
        print("Fail...")
    print(net_connect.find_prompt())
    net_connect.exit_config_mode()
    print("Out of config mode")
    print(net_connect.find_prompt())

    # Test config mode (when already at root prompt)
    print()
    print("***** Testing config mode when already root *****")
    net_connect.enable()
    if net_connect.check_enable_mode():
        print("Success: in enable mode")
    else:
        print("Fail...")
    print(net_connect.find_prompt())
    print("Test config_mode while already at root prompt")
    net_connect.config_mode()
    if net_connect.check_config_mode():
        print("Success: still at root prompt")
    else:
        print("Fail...")
    net_connect.exit_config_mode()  # Should do nothing
    net_connect.exit_enable_mode()
    print("Out of config/enable mode")
    print(net_connect.find_prompt())

    # Send config commands
    print()
    print("***** Testing send_config_set *****")
    print(net_connect.find_prompt())
    output = net_connect.send_config_set(['ls -al'])
    print(output)
    print()
NameError
dataset/ETHPy150Open ktbyers/netmiko/tests/test_linux.py/main
def get_endpoint(self, datacenter=None, network=None):
    """Get a message queue endpoint based on datacenter/network type.

    :param datacenter: datacenter code
    :param network: network ('public' or 'private')
    """
    if datacenter is None:
        datacenter = 'dal05'
    if network is None:
        network = 'public'
    try:
        host = ENDPOINTS[datacenter][network]
        return "https://%s" % host
    except __HOLE__:
        raise TypeError('Invalid endpoint %s/%s' % (datacenter, network))
KeyError
dataset/ETHPy150Open softlayer/softlayer-python/SoftLayer/managers/messaging.py/MessagingManager.get_endpoint
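A sketch of the lookup above (editorial addition; the endpoint table values are illustrative, not SoftLayer's real hostnames): the nested dict raises KeyError for an unknown datacenter/network pair, which the method converts into the TypeError it documents.

ENDPOINTS = {'dal05': {'public': 'dal05.example.invalid'}}  # hypothetical hosts

def endpoint_url(datacenter='dal05', network='public'):
    try:
        return "https://%s" % ENDPOINTS[datacenter][network]
    except KeyError:
        raise TypeError('Invalid endpoint %s/%s' % (datacenter, network))

print(endpoint_url())  # https://dal05.example.invalid
# endpoint_url('ams01') raises TypeError: Invalid endpoint ams01/public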
def get_vcd_timescale(model):
    try:
        return model.vcd_timescale
    except __HOLE__:
        return DEFAULT_TIMESCALE

#-----------------------------------------------------------------------
# write_vcd_header
#-----------------------------------------------------------------------
AttributeError
dataset/ETHPy150Open cornell-brg/pymtl/pymtl/tools/simulation/vcd.py/get_vcd_timescale
def test02_proxy(self):
    "Testing Lazy-Geometry support (using the GeometryProxy)."

    #### Testing on a Point
    pnt = Point(0, 0)
    nullcity = City(name='NullCity', point=pnt)
    nullcity.save()

    # Making sure TypeError is thrown when trying to set with an
    # incompatible type.
    for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
        try:
            nullcity.point = bad
        except __HOLE__:
            pass
        else:
            self.fail('Should throw a TypeError')

    # Now setting with a compatible GEOS Geometry, saving, and ensuring
    # the save took, notice no SRID is explicitly set.
    new = Point(5, 23)
    nullcity.point = new

    # Ensuring that the SRID is automatically set to that of the
    # field after assignment, but before saving.
    self.assertEqual(4326, nullcity.point.srid)
    nullcity.save()

    # Ensuring the point was saved correctly after saving
    self.assertEqual(new, City.objects.get(name='NullCity').point)

    # Setting the X and Y of the Point
    nullcity.point.x = 23
    nullcity.point.y = 5
    # Checking assignments pre & post-save.
    self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
    nullcity.save()
    self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
    nullcity.delete()

    #### Testing on a Polygon
    shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
    inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))

    # Creating a State object using a built Polygon
    ply = Polygon(shell, inner)
    nullstate = State(name='NullState', poly=ply)
    self.assertEqual(4326, nullstate.poly.srid)  # SRID auto-set from None
    nullstate.save()

    ns = State.objects.get(name='NullState')
    self.assertEqual(ply, ns.poly)

    # Testing the `ogr` and `srs` lazy-geometry properties.
    if gdal.HAS_GDAL:
        self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
        self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
        self.assertEqual(True, isinstance(ns.poly.srs, gdal.SpatialReference))
        self.assertEqual('WGS 84', ns.poly.srs.name)

    # Changing the interior ring on the poly attribute.
    new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
    ns.poly[1] = new_inner
    ply[1] = new_inner
    self.assertEqual(4326, ns.poly.srid)
    ns.save()
    self.assertEqual(ply, State.objects.get(name='NullState').poly)
    ns.delete()
TypeError
dataset/ETHPy150Open dcramer/django-compositepks/django/contrib/gis/tests/geoapp/tests_mysql.py/GeoModelTest.test02_proxy