Dataset columns:

    text           string    (lengths 75 to 104k)
    code_tokens    sequence
    avg_line_len   float64   (7.91 to 980)
    score          float64   (0 to 0.18)

Each record below follows this field order: the function source (text), its token list (code_tokens), then avg_line_len and score.
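A minimal sketch of handling records in this shape, assuming they have been parsed into a pandas DataFrame; the sample rows and filter thresholds are illustrative, not values from this dump:

import pandas as pd

# Hypothetical records mirroring the four-column schema above.
rows = [
    {"text": "def f():\n    return 1",
     "code_tokens": ["def", "f", "(", ")", ":", "return", "1"],
     "avg_line_len": 12.5, "score": 0.0015},
    {"text": "def g(x):\n    return x * 2",
     "code_tokens": ["def", "g", "(", "x", ")", ":", "return", "x", "*", "2"],
     "avg_line_len": 14.0, "score": 0.012},
]
df = pd.DataFrame(rows)

# Filter on the numeric columns; both thresholds are illustrative.
selected = df[(df["avg_line_len"] < 60.0) & (df["score"] > 0.005)]
print(selected[["avg_line_len", "score"]])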
def distance_matrix(lons, lats, diameter=2*EARTH_RADIUS):
    """
    :param lons: array of m longitudes
    :param lats: array of m latitudes
    :returns: matrix of (m, m) distances
    """
    m = len(lons)
    assert m == len(lats), (m, len(lats))
    lons = numpy.radians(lons)
    lats = numpy.radians(lats)
    cos_lats = numpy.cos(lats)
    result = numpy.zeros((m, m))
    for i in range(len(lons)):
        a = numpy.sin((lats[i] - lats) / 2.0)
        b = numpy.sin((lons[i] - lons) / 2.0)
        result[i, :] = numpy.arcsin(
            numpy.sqrt(a * a + cos_lats[i] * cos_lats * b * b)) * diameter
    return numpy.matrix(result, copy=False)
[ "def", "distance_matrix", "(", "lons", ",", "lats", ",", "diameter", "=", "2", "*", "EARTH_RADIUS", ")", ":", "m", "=", "len", "(", "lons", ")", "assert", "m", "==", "len", "(", "lats", ")", ",", "(", "m", ",", "len", "(", "lats", ")", ")", "lons", "=", "numpy", ".", "radians", "(", "lons", ")", "lats", "=", "numpy", ".", "radians", "(", "lats", ")", "cos_lats", "=", "numpy", ".", "cos", "(", "lats", ")", "result", "=", "numpy", ".", "zeros", "(", "(", "m", ",", "m", ")", ")", "for", "i", "in", "range", "(", "len", "(", "lons", ")", ")", ":", "a", "=", "numpy", ".", "sin", "(", "(", "lats", "[", "i", "]", "-", "lats", ")", "/", "2.0", ")", "b", "=", "numpy", ".", "sin", "(", "(", "lons", "[", "i", "]", "-", "lons", ")", "/", "2.0", ")", "result", "[", "i", ",", ":", "]", "=", "numpy", ".", "arcsin", "(", "numpy", ".", "sqrt", "(", "a", "*", "a", "+", "cos_lats", "[", "i", "]", "*", "cos_lats", "*", "b", "*", "b", ")", ")", "*", "diameter", "return", "numpy", ".", "matrix", "(", "result", ",", "copy", "=", "False", ")" ]
avg_line_len: 35.5
score: 0.001524
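A usage sketch for the distance_matrix sample above, assuming its definition is in scope and that EARTH_RADIUS is the mean Earth radius in kilometres (the value used here is an assumption; the source module defines its own constant):

import numpy

EARTH_RADIUS = 6371.0  # km; assumed here, the source module provides its own

lons = numpy.array([0.0, 0.0, 90.0])
lats = numpy.array([0.0, 90.0, 0.0])
dm = distance_matrix(lons, lats)
# Each off-diagonal pair is a quarter of a great circle,
# about pi / 2 * 6371 ~= 10007 km; the diagonal is zero.
print(dm.round(0))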
def has_only_keys(self, keys):
    """
    Ensures :attr:`subject` is a :class:`collections.Mapping` and
    contains *keys*, and no other keys.
    """
    self.is_a(Mapping)
    self.contains_only(keys)
    return ChainInspector(self._subject)
[ "def", "has_only_keys", "(", "self", ",", "keys", ")", ":", "self", ".", "is_a", "(", "Mapping", ")", "self", ".", "contains_only", "(", "keys", ")", "return", "ChainInspector", "(", "self", ".", "_subject", ")" ]
avg_line_len: 37
score: 0.011321
def _op_generic_Ctz(self, args):
    """Count the trailing zeroes"""
    wtf_expr = claripy.BVV(self._from_size, self._from_size)
    for a in reversed(range(self._from_size)):
        bit = claripy.Extract(a, a, args[0])
        wtf_expr = claripy.If(bit == 1, claripy.BVV(a, self._from_size), wtf_expr)
    return wtf_expr
[ "def", "_op_generic_Ctz", "(", "self", ",", "args", ")", ":", "wtf_expr", "=", "claripy", ".", "BVV", "(", "self", ".", "_from_size", ",", "self", ".", "_from_size", ")", "for", "a", "in", "reversed", "(", "range", "(", "self", ".", "_from_size", ")", ")", ":", "bit", "=", "claripy", ".", "Extract", "(", "a", ",", "a", ",", "args", "[", "0", "]", ")", "wtf_expr", "=", "claripy", ".", "If", "(", "bit", "==", "1", ",", "claripy", ".", "BVV", "(", "a", ",", "self", ".", "_from_size", ")", ",", "wtf_expr", ")", "return", "wtf_expr" ]
avg_line_len: 48.857143
score: 0.008621
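The nested-If construction above can be checked standalone with claripy (assuming the library is installed; the 4-bit width and input value are illustrative):

import claripy

width = 4
value = claripy.BVV(0b1000, width)  # trailing zero count should be 3

# Same construction as the sample: default to `width` for an all-zero
# input, otherwise the index of the lowest set bit wins (it is built
# last, so it sits outermost in the nested If).
ctz = claripy.BVV(width, width)
for a in reversed(range(width)):
    bit = claripy.Extract(a, a, value)
    ctz = claripy.If(bit == 1, claripy.BVV(a, width), ctz)

print(claripy.Solver().eval(ctz, 1))  # (3,)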
def isAboveHorizon(ra, decl, mcRA, lat):
    """ Returns whether an object's 'ra' and 'decl' place it above
    the horizon at a specific latitude, given the MC's right ascension.
    """
    # This function checks if the equatorial distance from
    # the object to the MC is within its diurnal semi-arc.
    dArc, _ = dnarcs(decl, lat)
    dist = abs(angle.closestdistance(mcRA, ra))
    return dist <= dArc/2.0 + 0.0003
[ "def", "isAboveHorizon", "(", "ra", ",", "decl", ",", "mcRA", ",", "lat", ")", ":", "# This function checks if the equatorial distance from ", "# the object to the MC is within its diurnal semi-arc.", "dArc", ",", "_", "=", "dnarcs", "(", "decl", ",", "lat", ")", "dist", "=", "abs", "(", "angle", ".", "closestdistance", "(", "mcRA", ",", "ra", ")", ")", "return", "dist", "<=", "dArc", "/", "2.0", "+", "0.0003" ]
avg_line_len: 34.75
score: 0.014019
def equal(actual, expected):
    '''
    Compare actual and expected using ==

    >>> expect = Expector([])
    >>> expect(1).to_not(equal, 2)
    (True, 'equal: expect 1 == 2')
    >>> expect(1).to(equal, 1)
    (True, 'equal: expect 1 == 1')
    '''
    is_passing = (actual == expected)
    types_to_diff = (str, dict, list, tuple)
    if not is_passing and isinstance(expected, types_to_diff) and isinstance(actual, types_to_diff):
        readable_diff = difflib.unified_diff(pformat(expected).split('\n'),
                                             pformat(actual).split('\n'),
                                             n=99)
        description = '\n'.join(['equal:'] + list(readable_diff))
    else:
        description = "equal: expect {} == {}".format(actual, expected)
    outcome = (is_passing, description)
    return outcome
[ "def", "equal", "(", "actual", ",", "expected", ")", ":", "is_passing", "=", "(", "actual", "==", "expected", ")", "types_to_diff", "=", "(", "str", ",", "dict", ",", "list", ",", "tuple", ")", "if", "not", "is_passing", "and", "isinstance", "(", "expected", ",", "types_to_diff", ")", "and", "isinstance", "(", "actual", ",", "types_to_diff", ")", ":", "readable_diff", "=", "difflib", ".", "unified_diff", "(", "pformat", "(", "expected", ")", ".", "split", "(", "'\\n'", ")", ",", "pformat", "(", "actual", ")", ".", "split", "(", "'\\n'", ")", ",", "n", "=", "99", ")", "description", "=", "'\\n'", ".", "join", "(", "[", "'equal:'", "]", "+", "list", "(", "readable_diff", ")", ")", "else", ":", "description", "=", "\"equal: expect {} == {}\"", ".", "format", "(", "actual", ",", "expected", ")", "outcome", "=", "(", "is_passing", ",", "description", ")", "return", "outcome" ]
avg_line_len: 35.454545
score: 0.002497
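Called directly, outside the Expector wrapper that its doctest uses, the sample behaves as follows (assuming the `equal` definition above is in scope, along with the `difflib` and `pformat` imports its body expects):

import difflib
from pprint import pformat

print(equal(1, 1))   # (True, 'equal: expect 1 == 1')
print(equal(1, 2))   # (False, 'equal: expect 1 == 2')
ok, desc = equal([1, 2], [1, 3])
print(ok)            # False; `desc` holds a unified diff of the two lists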
def unpack_rsp(cls, rsp_pb):
    """Convert from PLS response to user response"""
    if rsp_pb.retType != RET_OK:
        return RET_ERROR, rsp_pb.retMsg, None

    raw_deal_list = rsp_pb.s2c.orderFillList
    deal_list = [DealListQuery.parse_deal(rsp_pb, deal) for deal in raw_deal_list]

    return RET_OK, "", deal_list
[ "def", "unpack_rsp", "(", "cls", ",", "rsp_pb", ")", ":", "if", "rsp_pb", ".", "retType", "!=", "RET_OK", ":", "return", "RET_ERROR", ",", "rsp_pb", ".", "retMsg", ",", "None", "raw_deal_list", "=", "rsp_pb", ".", "s2c", ".", "orderFillList", "deal_list", "=", "[", "DealListQuery", ".", "parse_deal", "(", "rsp_pb", ",", "deal", ")", "for", "deal", "in", "raw_deal_list", "]", "return", "RET_OK", ",", "\"\"", ",", "deal_list" ]
avg_line_len: 37.666667
score: 0.008646
def solve_let(expr, vars):
    """Solves a let-form by calling RHS with nested scope."""
    lhs_value = solve(expr.lhs, vars).value
    if not isinstance(lhs_value, structured.IStructured):
        raise errors.EfilterTypeError(
            root=expr.lhs, query=expr.original,
            message="The LHS of 'let' must evaluate to an IStructured. Got %r."
                    % (lhs_value,))

    return solve(expr.rhs, __nest_scope(expr.lhs, vars, lhs_value))
[ "def", "solve_let", "(", "expr", ",", "vars", ")", ":", "lhs_value", "=", "solve", "(", "expr", ".", "lhs", ",", "vars", ")", ".", "value", "if", "not", "isinstance", "(", "lhs_value", ",", "structured", ".", "IStructured", ")", ":", "raise", "errors", ".", "EfilterTypeError", "(", "root", "=", "expr", ".", "lhs", ",", "query", "=", "expr", ".", "original", ",", "message", "=", "\"The LHS of 'let' must evaluate to an IStructured. Got %r.\"", "%", "(", "lhs_value", ",", ")", ")", "return", "solve", "(", "expr", ".", "rhs", ",", "__nest_scope", "(", "expr", ".", "lhs", ",", "vars", ",", "lhs_value", ")", ")" ]
avg_line_len: 44.5
score: 0.002203
async def _connect_sentinel(self, address, timeout, pools):
    """Try to connect to specified Sentinel returning either
    connections pool or exception.
    """
    try:
        with async_timeout(timeout, loop=self._loop):
            pool = await create_pool(
                address, minsize=1, maxsize=2,
                parser=self._parser_class,
                loop=self._loop)
        pools.append(pool)
        return pool
    except asyncio.TimeoutError as err:
        sentinel_logger.debug(
            "Failed to connect to Sentinel(%r) within %ss timeout",
            address, timeout)
        return err
    except Exception as err:
        sentinel_logger.debug(
            "Error connecting to Sentinel(%r): %r", address, err)
        return err
[ "async", "def", "_connect_sentinel", "(", "self", ",", "address", ",", "timeout", ",", "pools", ")", ":", "try", ":", "with", "async_timeout", "(", "timeout", ",", "loop", "=", "self", ".", "_loop", ")", ":", "pool", "=", "await", "create_pool", "(", "address", ",", "minsize", "=", "1", ",", "maxsize", "=", "2", ",", "parser", "=", "self", ".", "_parser_class", ",", "loop", "=", "self", ".", "_loop", ")", "pools", ".", "append", "(", "pool", ")", "return", "pool", "except", "asyncio", ".", "TimeoutError", "as", "err", ":", "sentinel_logger", ".", "debug", "(", "\"Failed to connect to Sentinel(%r) within %ss timeout\"", ",", "address", ",", "timeout", ")", "return", "err", "except", "Exception", "as", "err", ":", "sentinel_logger", ".", "debug", "(", "\"Error connecting to Sentinel(%r): %r\"", ",", "address", ",", "err", ")", "return", "err" ]
avg_line_len: 39.380952
score: 0.002361
def read(cls, proto):
    """ capnp deserialization method for the anomaly likelihood object

    :param proto: (Object) capnp proto object specified in
                  nupic.regions.anomaly_likelihood.capnp

    :returns: (Object) the deserialized AnomalyLikelihood object
    """
    # pylint: disable=W0212
    anomalyLikelihood = object.__new__(cls)
    anomalyLikelihood._iteration = proto.iteration

    anomalyLikelihood._historicalScores = collections.deque(
        maxlen=proto.historicWindowSize)
    for i, score in enumerate(proto.historicalScores):
        anomalyLikelihood._historicalScores.append((i, score.value,
                                                    score.anomalyScore))

    if proto.distribution.name:  # is "" when there is no distribution.
        anomalyLikelihood._distribution = dict()
        anomalyLikelihood._distribution['distribution'] = dict()
        anomalyLikelihood._distribution['distribution']["name"] = proto.distribution.name
        anomalyLikelihood._distribution['distribution']["mean"] = proto.distribution.mean
        anomalyLikelihood._distribution['distribution']["variance"] = proto.distribution.variance
        anomalyLikelihood._distribution['distribution']["stdev"] = proto.distribution.stdev

        anomalyLikelihood._distribution["movingAverage"] = {}
        anomalyLikelihood._distribution["movingAverage"]["windowSize"] = proto.distribution.movingAverage.windowSize
        anomalyLikelihood._distribution["movingAverage"]["historicalValues"] = []
        for value in proto.distribution.movingAverage.historicalValues:
            anomalyLikelihood._distribution["movingAverage"]["historicalValues"].append(value)
        anomalyLikelihood._distribution["movingAverage"]["total"] = proto.distribution.movingAverage.total

        anomalyLikelihood._distribution["historicalLikelihoods"] = []
        for likelihood in proto.distribution.historicalLikelihoods:
            anomalyLikelihood._distribution["historicalLikelihoods"].append(likelihood)
    else:
        anomalyLikelihood._distribution = None

    anomalyLikelihood._probationaryPeriod = proto.probationaryPeriod
    anomalyLikelihood._learningPeriod = proto.learningPeriod
    anomalyLikelihood._reestimationPeriod = proto.reestimationPeriod
    # pylint: enable=W0212

    return anomalyLikelihood
[ "def", "read", "(", "cls", ",", "proto", ")", ":", "# pylint: disable=W0212", "anomalyLikelihood", "=", "object", ".", "__new__", "(", "cls", ")", "anomalyLikelihood", ".", "_iteration", "=", "proto", ".", "iteration", "anomalyLikelihood", ".", "_historicalScores", "=", "collections", ".", "deque", "(", "maxlen", "=", "proto", ".", "historicWindowSize", ")", "for", "i", ",", "score", "in", "enumerate", "(", "proto", ".", "historicalScores", ")", ":", "anomalyLikelihood", ".", "_historicalScores", ".", "append", "(", "(", "i", ",", "score", ".", "value", ",", "score", ".", "anomalyScore", ")", ")", "if", "proto", ".", "distribution", ".", "name", ":", "# is \"\" when there is no distribution.", "anomalyLikelihood", ".", "_distribution", "=", "dict", "(", ")", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "=", "dict", "(", ")", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"name\"", "]", "=", "proto", ".", "distribution", ".", "name", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"mean\"", "]", "=", "proto", ".", "distribution", ".", "mean", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"variance\"", "]", "=", "proto", ".", "distribution", ".", "variance", "anomalyLikelihood", ".", "_distribution", "[", "'distribution'", "]", "[", "\"stdev\"", "]", "=", "proto", ".", "distribution", ".", "stdev", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "=", "{", "}", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"windowSize\"", "]", "=", "proto", ".", "distribution", ".", "movingAverage", ".", "windowSize", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"historicalValues\"", "]", "=", "[", "]", "for", "value", "in", "proto", ".", "distribution", ".", "movingAverage", ".", "historicalValues", ":", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"historicalValues\"", "]", ".", "append", "(", "value", ")", "anomalyLikelihood", ".", "_distribution", "[", "\"movingAverage\"", "]", "[", "\"total\"", "]", "=", "proto", ".", "distribution", ".", "movingAverage", ".", "total", "anomalyLikelihood", ".", "_distribution", "[", "\"historicalLikelihoods\"", "]", "=", "[", "]", "for", "likelihood", "in", "proto", ".", "distribution", ".", "historicalLikelihoods", ":", "anomalyLikelihood", ".", "_distribution", "[", "\"historicalLikelihoods\"", "]", ".", "append", "(", "likelihood", ")", "else", ":", "anomalyLikelihood", ".", "_distribution", "=", "None", "anomalyLikelihood", ".", "_probationaryPeriod", "=", "proto", ".", "probationaryPeriod", "anomalyLikelihood", ".", "_learningPeriod", "=", "proto", ".", "learningPeriod", "anomalyLikelihood", ".", "_reestimationPeriod", "=", "proto", ".", "reestimationPeriod", "# pylint: enable=W0212", "return", "anomalyLikelihood" ]
avg_line_len: 51.363636
score: 0.010855
def context_exclude(zap_helper, name, pattern):
    """Exclude a pattern from a given context."""
    console.info('Excluding regex {0} from context with name: {1}'.format(pattern, name))
    with zap_error_handler():
        result = zap_helper.zap.context.exclude_from_context(contextname=name, regex=pattern)

        if result != 'OK':
            raise ZAPError('Excluding regex from context failed: {}'.format(result))
[ "def", "context_exclude", "(", "zap_helper", ",", "name", ",", "pattern", ")", ":", "console", ".", "info", "(", "'Excluding regex {0} from context with name: {1}'", ".", "format", "(", "pattern", ",", "name", ")", ")", "with", "zap_error_handler", "(", ")", ":", "result", "=", "zap_helper", ".", "zap", ".", "context", ".", "exclude_from_context", "(", "contextname", "=", "name", ",", "regex", "=", "pattern", ")", "if", "result", "!=", "'OK'", ":", "raise", "ZAPError", "(", "'Excluding regex from context failed: {}'", ".", "format", "(", "result", ")", ")" ]
avg_line_len: 52.125
score: 0.009434
def extract(what, calc_id, webapi=True):
    """
    Extract an output from the datastore and save it into an .hdf5 file.
    By default uses the WebAPI, otherwise the extraction is done locally.
    """
    with performance.Monitor('extract', measuremem=True) as mon:
        if webapi:
            obj = WebExtractor(calc_id).get(what)
        else:
            obj = Extractor(calc_id).get(what)
        fname = '%s_%d.hdf5' % (what.replace('/', '-').replace('?', '-'),
                                calc_id)
        obj.save(fname)
        print('Saved', fname)
    if mon.duration > 1:
        print(mon)
[ "def", "extract", "(", "what", ",", "calc_id", ",", "webapi", "=", "True", ")", ":", "with", "performance", ".", "Monitor", "(", "'extract'", ",", "measuremem", "=", "True", ")", "as", "mon", ":", "if", "webapi", ":", "obj", "=", "WebExtractor", "(", "calc_id", ")", ".", "get", "(", "what", ")", "else", ":", "obj", "=", "Extractor", "(", "calc_id", ")", ".", "get", "(", "what", ")", "fname", "=", "'%s_%d.hdf5'", "%", "(", "what", ".", "replace", "(", "'/'", ",", "'-'", ")", ".", "replace", "(", "'?'", ",", "'-'", ")", ",", "calc_id", ")", "obj", ".", "save", "(", "fname", ")", "print", "(", "'Saved'", ",", "fname", ")", "if", "mon", ".", "duration", ">", "1", ":", "print", "(", "mon", ")" ]
avg_line_len: 37.25
score: 0.001637
def jdToDate(jd):
    '''Convert a Julian day number to day/month/year. jd is an integer.'''
    if jd > 2299160:  # After 5/10/1582, Gregorian calendar
        a = jd + 32044
        b = int((4 * a + 3) / 146097.)
        c = a - int((b * 146097) / 4.)
    else:
        b = 0
        c = jd + 32082
    d = int((4 * c + 3) / 1461.)
    e = c - int((1461 * d) / 4.)
    m = int((5 * e + 2) / 153.)
    day = e - int((153 * m + 2) / 5.) + 1
    month = m + 3 - 12 * int(m / 10.)
    year = b * 100 + d - 4800 + int(m / 10.)
    return [day, month, year]
[ "def", "jdToDate", "(", "jd", ")", ":", "if", "(", "jd", ">", "2299160", ")", ":", "# After 5/10/1582, Gregorian calendar", "a", "=", "jd", "+", "32044", "b", "=", "int", "(", "(", "4", "*", "a", "+", "3", ")", "/", "146097.", ")", "c", "=", "a", "-", "int", "(", "(", "b", "*", "146097", ")", "/", "4.", ")", "else", ":", "b", "=", "0", "c", "=", "jd", "+", "32082", "d", "=", "int", "(", "(", "4", "*", "c", "+", "3", ")", "/", "1461.", ")", "e", "=", "c", "-", "int", "(", "(", "1461", "*", "d", ")", "/", "4.", ")", "m", "=", "int", "(", "(", "5", "*", "e", "+", "2", ")", "/", "153.", ")", "day", "=", "e", "-", "int", "(", "(", "153", "*", "m", "+", "2", ")", "/", "5.", ")", "+", "1", "month", "=", "m", "+", "3", "-", "12", "*", "int", "(", "m", "/", "10.", ")", "year", "=", "b", "*", "100", "+", "d", "-", "4800", "+", "int", "(", "m", "/", "10.", ")", "return", "[", "day", ",", "month", ",", "year", "]" ]
avg_line_len: 32.555556
score: 0.001658
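Two worked checks of the jdToDate sample, assuming its definition above is in scope; both values can be verified by tracing the arithmetic by hand:

print(jdToDate(2451545))  # [1, 1, 2000]   (the J2000.0 reference day)
print(jdToDate(2299161))  # [15, 10, 1582] (first day of the Gregorian calendar)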
def authenticate_search_bind(self, username, password):
    """
    Performs a search bind to authenticate a user. This is
    required when the login attribute is not the same
    as the RDN, since we cannot string together their DN on
    the fly, instead we have to find it in the LDAP, then attempt
    to bind with their credentials.

    Args:
        username (str): Username of the user to bind (the field specified
            as LDAP_BIND_LOGIN_ATTR)
        password (str): User's password to bind with when we find their dn.

    Returns:
        AuthenticationResponse
    """
    connection = self._make_connection(
        bind_user=self.config.get('LDAP_BIND_USER_DN'),
        bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'),
    )

    try:
        connection.bind()
        log.debug("Successfully bound to LDAP as '{0}' for search_bind method".format(
            self.config.get('LDAP_BIND_USER_DN') or 'Anonymous'
        ))
    except Exception as e:
        self.destroy_connection(connection)
        log.error(e)
        return AuthenticationResponse()

    # Find the user in the search path.
    user_filter = '({search_attr}={username})'.format(
        search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'),
        username=username
    )
    search_filter = '(&{0}{1})'.format(
        self.config.get('LDAP_USER_OBJECT_FILTER'),
        user_filter,
    )

    log.debug(
        "Performing an LDAP Search using filter '{0}', base '{1}', "
        "and scope '{2}'".format(
            search_filter,
            self.full_user_search_dn,
            self.config.get('LDAP_USER_SEARCH_SCOPE')
        ))

    connection.search(
        search_base=self.full_user_search_dn,
        search_filter=search_filter,
        search_scope=getattr(
            ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')),
        attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES')
    )

    response = AuthenticationResponse()

    if len(connection.response) == 0 or \
            (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and
             len(connection.response) > 1):
        # Don't allow them to log in.
        log.debug(
            "Authentication was not successful for user '{0}'".format(username))
    else:
        for user in connection.response:
            # Attempt to bind with each user we find until we can find
            # one that works.
            if 'type' not in user or user.get('type') != 'searchResEntry':
                # Issue #13 - Don't return non-entry results.
                continue

            user_connection = self._make_connection(
                bind_user=user['dn'],
                bind_password=password
            )

            log.debug(
                "Directly binding a connection to a server with "
                "user:'{0}'".format(user['dn']))
            try:
                user_connection.bind()
                log.debug(
                    "Authentication was successful for user '{0}'".format(username))
                response.status = AuthenticationResponseStatus.success

                # Populate User Data
                user['attributes']['dn'] = user['dn']
                response.user_info = user['attributes']
                response.user_id = username
                response.user_dn = user['dn']
                if self.config.get('LDAP_SEARCH_FOR_GROUPS'):
                    response.user_groups = self.get_user_groups(
                        dn=user['dn'], _connection=connection)
                self.destroy_connection(user_connection)
                break

            except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
                log.debug(
                    "Authentication was not successful for "
                    "user '{0}'".format(username))
                response.status = AuthenticationResponseStatus.fail
            except Exception as e:  # pragma: no cover
                # This should never happen, however in case ldap3 does ever
                # throw an error here, we catch it and log it
                log.error(e)
                response.status = AuthenticationResponseStatus.fail

            self.destroy_connection(user_connection)

    self.destroy_connection(connection)
    return response
[ "def", "authenticate_search_bind", "(", "self", ",", "username", ",", "password", ")", ":", "connection", "=", "self", ".", "_make_connection", "(", "bind_user", "=", "self", ".", "config", ".", "get", "(", "'LDAP_BIND_USER_DN'", ")", ",", "bind_password", "=", "self", ".", "config", ".", "get", "(", "'LDAP_BIND_USER_PASSWORD'", ")", ",", ")", "try", ":", "connection", ".", "bind", "(", ")", "log", ".", "debug", "(", "\"Successfully bound to LDAP as '{0}' for search_bind method\"", ".", "format", "(", "self", ".", "config", ".", "get", "(", "'LDAP_BIND_USER_DN'", ")", "or", "'Anonymous'", ")", ")", "except", "Exception", "as", "e", ":", "self", ".", "destroy_connection", "(", "connection", ")", "log", ".", "error", "(", "e", ")", "return", "AuthenticationResponse", "(", ")", "# Find the user in the search path.", "user_filter", "=", "'({search_attr}={username})'", ".", "format", "(", "search_attr", "=", "self", ".", "config", ".", "get", "(", "'LDAP_USER_LOGIN_ATTR'", ")", ",", "username", "=", "username", ")", "search_filter", "=", "'(&{0}{1})'", ".", "format", "(", "self", ".", "config", ".", "get", "(", "'LDAP_USER_OBJECT_FILTER'", ")", ",", "user_filter", ",", ")", "log", ".", "debug", "(", "\"Performing an LDAP Search using filter '{0}', base '{1}', \"", "\"and scope '{2}'\"", ".", "format", "(", "search_filter", ",", "self", ".", "full_user_search_dn", ",", "self", ".", "config", ".", "get", "(", "'LDAP_USER_SEARCH_SCOPE'", ")", ")", ")", "connection", ".", "search", "(", "search_base", "=", "self", ".", "full_user_search_dn", ",", "search_filter", "=", "search_filter", ",", "search_scope", "=", "getattr", "(", "ldap3", ",", "self", ".", "config", ".", "get", "(", "'LDAP_USER_SEARCH_SCOPE'", ")", ")", ",", "attributes", "=", "self", ".", "config", ".", "get", "(", "'LDAP_GET_USER_ATTRIBUTES'", ")", ")", "response", "=", "AuthenticationResponse", "(", ")", "if", "len", "(", "connection", ".", "response", ")", "==", "0", "or", "(", "self", ".", "config", ".", "get", "(", "'LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND'", ")", "and", "len", "(", "connection", ".", "response", ")", ">", "1", ")", ":", "# Don't allow them to log in.", "log", ".", "debug", "(", "\"Authentication was not successful for user '{0}'\"", ".", "format", "(", "username", ")", ")", "else", ":", "for", "user", "in", "connection", ".", "response", ":", "# Attempt to bind with each user we find until we can find", "# one that works.", "if", "'type'", "not", "in", "user", "or", "user", ".", "get", "(", "'type'", ")", "!=", "'searchResEntry'", ":", "# Issue #13 - Don't return non-entry results.", "continue", "user_connection", "=", "self", ".", "_make_connection", "(", "bind_user", "=", "user", "[", "'dn'", "]", ",", "bind_password", "=", "password", ")", "log", ".", "debug", "(", "\"Directly binding a connection to a server with \"", "\"user:'{0}'\"", ".", "format", "(", "user", "[", "'dn'", "]", ")", ")", "try", ":", "user_connection", ".", "bind", "(", ")", "log", ".", "debug", "(", "\"Authentication was successful for user '{0}'\"", ".", "format", "(", "username", ")", ")", "response", ".", "status", "=", "AuthenticationResponseStatus", ".", "success", "# Populate User Data", "user", "[", "'attributes'", "]", "[", "'dn'", "]", "=", "user", "[", "'dn'", "]", "response", ".", "user_info", "=", "user", "[", "'attributes'", "]", "response", ".", "user_id", "=", "username", "response", ".", "user_dn", "=", "user", "[", "'dn'", "]", "if", "self", ".", "config", ".", "get", "(", "'LDAP_SEARCH_FOR_GROUPS'", ")", ":", "response", ".", 
"user_groups", "=", "self", ".", "get_user_groups", "(", "dn", "=", "user", "[", "'dn'", "]", ",", "_connection", "=", "connection", ")", "self", ".", "destroy_connection", "(", "user_connection", ")", "break", "except", "ldap3", ".", "core", ".", "exceptions", ".", "LDAPInvalidCredentialsResult", ":", "log", ".", "debug", "(", "\"Authentication was not successful for \"", "\"user '{0}'\"", ".", "format", "(", "username", ")", ")", "response", ".", "status", "=", "AuthenticationResponseStatus", ".", "fail", "except", "Exception", "as", "e", ":", "# pragma: no cover", "# This should never happen, however in case ldap3 does ever", "# throw an error here, we catch it and log it", "log", ".", "error", "(", "e", ")", "response", ".", "status", "=", "AuthenticationResponseStatus", ".", "fail", "self", ".", "destroy_connection", "(", "user_connection", ")", "self", ".", "destroy_connection", "(", "connection", ")", "return", "response" ]
avg_line_len: 39.396552
score: 0.001067
def _parse_node(node,
                parent_matrix,
                material_map,
                meshes,
                graph,
                resolver=None):
    """
    Recursively parse COLLADA scene nodes.
    """
    # Parse mesh node
    if isinstance(node, collada.scene.GeometryNode):
        geometry = node.geometry

        # Create local material map from material symbol to actual material
        local_material_map = {}
        for mn in node.materials:
            symbol = mn.symbol
            m = mn.target
            if m.id in material_map:
                local_material_map[symbol] = material_map[m.id]
            else:
                local_material_map[symbol] = _parse_material(m, resolver)

        # Iterate over primitives of geometry
        for i, primitive in enumerate(geometry.primitives):
            if isinstance(primitive, collada.polylist.Polylist):
                primitive = primitive.triangleset()
            if isinstance(primitive, collada.triangleset.TriangleSet):
                vertex = primitive.vertex
                vertex_index = primitive.vertex_index
                vertices = vertex[vertex_index].reshape(
                    len(vertex_index) * 3, 3)

                # Get normals if present
                normals = None
                if primitive.normal is not None:
                    normal = primitive.normal
                    normal_index = primitive.normal_index
                    normals = normal[normal_index].reshape(
                        len(normal_index) * 3, 3)

                # Get colors if present
                colors = None
                s = primitive.sources
                if ('COLOR' in s and len(s['COLOR']) > 0
                        and len(primitive.index) > 0):
                    color = s['COLOR'][0][4].data
                    color_index = primitive.index[:, :, s['COLOR'][0][0]]
                    colors = color[color_index].reshape(
                        len(color_index) * 3, 3)

                faces = np.arange(
                    vertices.shape[0]).reshape(
                        vertices.shape[0] // 3, 3)

                # Get UV coordinates if possible
                vis = None
                if primitive.material in local_material_map:
                    material = copy.copy(
                        local_material_map[primitive.material])
                    uv = None
                    if len(primitive.texcoordset) > 0:
                        texcoord = primitive.texcoordset[0]
                        texcoord_index = primitive.texcoord_indexset[0]
                        uv = texcoord[texcoord_index].reshape(
                            (len(texcoord_index) * 3, 2))
                    vis = visual.texture.TextureVisuals(
                        uv=uv, material=material)

                primid = '{}.{}'.format(geometry.id, i)
                meshes[primid] = {
                    'vertices': vertices,
                    'faces': faces,
                    'vertex_normals': normals,
                    'vertex_colors': colors,
                    'visual': vis}

                graph.append({'frame_to': primid,
                              'matrix': parent_matrix,
                              'geometry': primid})

    # recurse down tree for nodes with children
    elif isinstance(node, collada.scene.Node):
        if node.children is not None:
            for child in node.children:
                # create the new matrix
                matrix = np.dot(parent_matrix, node.matrix)
                # parse the child node
                _parse_node(
                    node=child,
                    parent_matrix=matrix,
                    material_map=material_map,
                    meshes=meshes,
                    graph=graph,
                    resolver=resolver)

    elif isinstance(node, collada.scene.CameraNode):
        # TODO: convert collada cameras to trimesh cameras
        pass
    elif isinstance(node, collada.scene.LightNode):
        # TODO: convert collada lights to trimesh lights
        pass
[ "def", "_parse_node", "(", "node", ",", "parent_matrix", ",", "material_map", ",", "meshes", ",", "graph", ",", "resolver", "=", "None", ")", ":", "# Parse mesh node", "if", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "GeometryNode", ")", ":", "geometry", "=", "node", ".", "geometry", "# Create local material map from material symbol to actual material", "local_material_map", "=", "{", "}", "for", "mn", "in", "node", ".", "materials", ":", "symbol", "=", "mn", ".", "symbol", "m", "=", "mn", ".", "target", "if", "m", ".", "id", "in", "material_map", ":", "local_material_map", "[", "symbol", "]", "=", "material_map", "[", "m", ".", "id", "]", "else", ":", "local_material_map", "[", "symbol", "]", "=", "_parse_material", "(", "m", ",", "resolver", ")", "# Iterate over primitives of geometry", "for", "i", ",", "primitive", "in", "enumerate", "(", "geometry", ".", "primitives", ")", ":", "if", "isinstance", "(", "primitive", ",", "collada", ".", "polylist", ".", "Polylist", ")", ":", "primitive", "=", "primitive", ".", "triangleset", "(", ")", "if", "isinstance", "(", "primitive", ",", "collada", ".", "triangleset", ".", "TriangleSet", ")", ":", "vertex", "=", "primitive", ".", "vertex", "vertex_index", "=", "primitive", ".", "vertex_index", "vertices", "=", "vertex", "[", "vertex_index", "]", ".", "reshape", "(", "len", "(", "vertex_index", ")", "*", "3", ",", "3", ")", "# Get normals if present", "normals", "=", "None", "if", "primitive", ".", "normal", "is", "not", "None", ":", "normal", "=", "primitive", ".", "normal", "normal_index", "=", "primitive", ".", "normal_index", "normals", "=", "normal", "[", "normal_index", "]", ".", "reshape", "(", "len", "(", "normal_index", ")", "*", "3", ",", "3", ")", "# Get colors if present", "colors", "=", "None", "s", "=", "primitive", ".", "sources", "if", "(", "'COLOR'", "in", "s", "and", "len", "(", "s", "[", "'COLOR'", "]", ")", ">", "0", "and", "len", "(", "primitive", ".", "index", ")", ">", "0", ")", ":", "color", "=", "s", "[", "'COLOR'", "]", "[", "0", "]", "[", "4", "]", ".", "data", "color_index", "=", "primitive", ".", "index", "[", ":", ",", ":", ",", "s", "[", "'COLOR'", "]", "[", "0", "]", "[", "0", "]", "]", "colors", "=", "color", "[", "color_index", "]", ".", "reshape", "(", "len", "(", "color_index", ")", "*", "3", ",", "3", ")", "faces", "=", "np", ".", "arange", "(", "vertices", ".", "shape", "[", "0", "]", ")", ".", "reshape", "(", "vertices", ".", "shape", "[", "0", "]", "//", "3", ",", "3", ")", "# Get UV coordinates if possible", "vis", "=", "None", "if", "primitive", ".", "material", "in", "local_material_map", ":", "material", "=", "copy", ".", "copy", "(", "local_material_map", "[", "primitive", ".", "material", "]", ")", "uv", "=", "None", "if", "len", "(", "primitive", ".", "texcoordset", ")", ">", "0", ":", "texcoord", "=", "primitive", ".", "texcoordset", "[", "0", "]", "texcoord_index", "=", "primitive", ".", "texcoord_indexset", "[", "0", "]", "uv", "=", "texcoord", "[", "texcoord_index", "]", ".", "reshape", "(", "(", "len", "(", "texcoord_index", ")", "*", "3", ",", "2", ")", ")", "vis", "=", "visual", ".", "texture", ".", "TextureVisuals", "(", "uv", "=", "uv", ",", "material", "=", "material", ")", "primid", "=", "'{}.{}'", ".", "format", "(", "geometry", ".", "id", ",", "i", ")", "meshes", "[", "primid", "]", "=", "{", "'vertices'", ":", "vertices", ",", "'faces'", ":", "faces", ",", "'vertex_normals'", ":", "normals", ",", "'vertex_colors'", ":", "colors", ",", "'visual'", ":", "vis", "}", 
"graph", ".", "append", "(", "{", "'frame_to'", ":", "primid", ",", "'matrix'", ":", "parent_matrix", ",", "'geometry'", ":", "primid", "}", ")", "# recurse down tree for nodes with children", "elif", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "Node", ")", ":", "if", "node", ".", "children", "is", "not", "None", ":", "for", "child", "in", "node", ".", "children", ":", "# create the new matrix", "matrix", "=", "np", ".", "dot", "(", "parent_matrix", ",", "node", ".", "matrix", ")", "# parse the child node", "_parse_node", "(", "node", "=", "child", ",", "parent_matrix", "=", "matrix", ",", "material_map", "=", "material_map", ",", "meshes", "=", "meshes", ",", "graph", "=", "graph", ",", "resolver", "=", "resolver", ")", "elif", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "CameraNode", ")", ":", "# TODO: convert collada cameras to trimesh cameras", "pass", "elif", "isinstance", "(", "node", ",", "collada", ".", "scene", ".", "LightNode", ")", ":", "# TODO: convert collada lights to trimesh lights", "pass" ]
avg_line_len: 38.76699
score: 0.000244
def handle(data_type, data, data_id=None, caller=None):
    """
    execute all data handlers on the specified data according to data type

    Args:
        data_type (str): data type handle
        data (dict or list): data

    Kwargs:
        data_id (str): can be used to differentiate between different data
            sets of the same data type. If not specified will default to
            the data type
        caller (object): if specified, holds the object or function that
            is trying to handle data

    Returns:
        dict or list - data after handlers have been executed on it
    """
    if not data_id:
        data_id = data_type

    # instantiate handlers for data type if they haven't been yet
    if data_id not in _handlers:
        _handlers[data_id] = dict(
            [(h.handle, h) for h in
             handlers.instantiate_for_data_type(data_type, data_id=data_id)])

    for handler in list(_handlers[data_id].values()):
        try:
            data = handler(data, caller=caller)
        except Exception as inst:
            vodka.log.error("Data handler '%s' failed with error" % handler)
            vodka.log.error(traceback.format_exc())

    return data
[ "def", "handle", "(", "data_type", ",", "data", ",", "data_id", "=", "None", ",", "caller", "=", "None", ")", ":", "if", "not", "data_id", ":", "data_id", "=", "data_type", "# instantiate handlers for data type if they havent been yet", "if", "data_id", "not", "in", "_handlers", ":", "_handlers", "[", "data_id", "]", "=", "dict", "(", "[", "(", "h", ".", "handle", ",", "h", ")", "for", "h", "in", "handlers", ".", "instantiate_for_data_type", "(", "data_type", ",", "data_id", "=", "data_id", ")", "]", ")", "for", "handler", "in", "list", "(", "_handlers", "[", "data_id", "]", ".", "values", "(", ")", ")", ":", "try", ":", "data", "=", "handler", "(", "data", ",", "caller", "=", "caller", ")", "except", "Exception", "as", "inst", ":", "vodka", ".", "log", ".", "error", "(", "\"Data handler '%s' failed with error\"", "%", "handler", ")", "vodka", ".", "log", ".", "error", "(", "traceback", ".", "format_exc", "(", ")", ")", "return", "data" ]
avg_line_len: 33.085714
score: 0.001678
def split_box(fraction, x, y, w, h):
    """Return set of two boxes where first is the fraction given"""
    if w >= h:
        new_w = int(w * fraction)
        if new_w:
            return (x, y, new_w, h), (x + new_w, y, w - new_w, h)
        else:
            return None, None
    else:
        new_h = int(h * fraction)
        if new_h:
            return (x, y, w, new_h), (x, y + new_h, w, h - new_h)
        else:
            return None, None
[ "def", "split_box", "(", "fraction", ",", "x", ",", "y", ",", "w", ",", "h", ")", ":", "if", "w", ">=", "h", ":", "new_w", "=", "int", "(", "w", "*", "fraction", ")", "if", "new_w", ":", "return", "(", "x", ",", "y", ",", "new_w", ",", "h", ")", ",", "(", "x", "+", "new_w", ",", "y", ",", "w", "-", "new_w", ",", "h", ")", "else", ":", "return", "None", ",", "None", "else", ":", "new_h", "=", "int", "(", "h", "*", "fraction", ")", "if", "new_h", ":", "return", "(", "x", ",", "y", ",", "w", ",", "new_h", ")", ",", "(", "x", ",", "y", "+", "new_h", ",", "w", ",", "h", "-", "new_h", ")", "else", ":", "return", "None", ",", "None" ]
avg_line_len: 29.428571
score: 0.049412
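A few checks of the split_box sample, assuming its definition above is in scope:

print(split_box(0.25, 0, 0, 100, 50))
# ((0, 0, 25, 50), (25, 0, 75, 50))  -- wider than tall, so split vertically
print(split_box(0.5, 0, 0, 40, 80))
# ((0, 0, 40, 40), (0, 40, 40, 40))  -- taller than wide, so split horizontally
print(split_box(0.001, 0, 0, 10, 10))
# (None, None)  -- fraction too small to produce a non-empty first box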
def _calc_ticks(value_range, base):
    """
    Calculate tick marks within a range

    Parameters
    ----------
    value_range: tuple
        Range for which to calculate ticks.

    Returns
    -------
    out: tuple
        (major, middle, minor) tick locations
    """
    def _minor(x, mid_idx):
        return np.hstack([x[1:mid_idx], x[mid_idx+1:-1]])

    # * Calculate the low and high powers,
    # * Generate breaks for all intervals along the low-high power range
    #   The intervals are in normal space
    # * Calculate evenly spaced breaks in normal space, then convert
    #   them to log space.
    low = np.floor(value_range[0])
    high = np.ceil(value_range[1])
    arr = base ** np.arange(low, float(high+1))
    n_ticks = base - 1
    breaks = [log(np.linspace(b1, b2, n_ticks+1), base)
              for (b1, b2) in list(zip(arr, arr[1:]))]

    # Partition the breaks in the 3 groups
    major = np.array([x[0] for x in breaks] + [breaks[-1][-1]])
    if n_ticks % 2:
        mid_idx = n_ticks // 2
        middle = [x[mid_idx] for x in breaks]
        minor = np.hstack([_minor(x, mid_idx) for x in breaks])
    else:
        middle = []
        minor = np.hstack([x[1:-1] for x in breaks])

    return major, middle, minor
[ "def", "_calc_ticks", "(", "value_range", ",", "base", ")", ":", "def", "_minor", "(", "x", ",", "mid_idx", ")", ":", "return", "np", ".", "hstack", "(", "[", "x", "[", "1", ":", "mid_idx", "]", ",", "x", "[", "mid_idx", "+", "1", ":", "-", "1", "]", "]", ")", "# * Calculate the low and high powers,", "# * Generate for all intervals in along the low-high power range", "# The intervals are in normal space", "# * Calculate evenly spaced breaks in normal space, then convert", "# them to log space.", "low", "=", "np", ".", "floor", "(", "value_range", "[", "0", "]", ")", "high", "=", "np", ".", "ceil", "(", "value_range", "[", "1", "]", ")", "arr", "=", "base", "**", "np", ".", "arange", "(", "low", ",", "float", "(", "high", "+", "1", ")", ")", "n_ticks", "=", "base", "-", "1", "breaks", "=", "[", "log", "(", "np", ".", "linspace", "(", "b1", ",", "b2", ",", "n_ticks", "+", "1", ")", ",", "base", ")", "for", "(", "b1", ",", "b2", ")", "in", "list", "(", "zip", "(", "arr", ",", "arr", "[", "1", ":", "]", ")", ")", "]", "# Partition the breaks in the 3 groups", "major", "=", "np", ".", "array", "(", "[", "x", "[", "0", "]", "for", "x", "in", "breaks", "]", "+", "[", "breaks", "[", "-", "1", "]", "[", "-", "1", "]", "]", ")", "if", "n_ticks", "%", "2", ":", "mid_idx", "=", "n_ticks", "//", "2", "middle", "=", "[", "x", "[", "mid_idx", "]", "for", "x", "in", "breaks", "]", "minor", "=", "np", ".", "hstack", "(", "[", "_minor", "(", "x", ",", "mid_idx", ")", "for", "x", "in", "breaks", "]", ")", "else", ":", "middle", "=", "[", "]", "minor", "=", "np", ".", "hstack", "(", "[", "x", "[", "1", ":", "-", "1", "]", "for", "x", "in", "breaks", "]", ")", "return", "major", ",", "middle", ",", "minor" ]
avg_line_len: 33.65
score: 0.001444
def plot_points(self, ax,
                legend=None,
                field=None,
                field_function=None,
                undefined=0,
                **kwargs):
    """
    Plotting, but only for points (as opposed to intervals).
    """
    ys = [iv.top.z for iv in self]
    if field is not None:
        f = field_function or utils.null
        xs = [f(iv.data.get(field, undefined)) for iv in self]
    else:
        xs = [1 for iv in self]

    ax.set_xlim((min(xs), max(xs)))
    for x, y in zip(xs, ys):
        ax.axhline(y, color='lightgray', zorder=0)
    ax.scatter(xs, ys, clip_on=False, **kwargs)

    return ax
[ "def", "plot_points", "(", "self", ",", "ax", ",", "legend", "=", "None", ",", "field", "=", "None", ",", "field_function", "=", "None", ",", "undefined", "=", "0", ",", "*", "*", "kwargs", ")", ":", "ys", "=", "[", "iv", ".", "top", ".", "z", "for", "iv", "in", "self", "]", "if", "field", "is", "not", "None", ":", "f", "=", "field_function", "or", "utils", ".", "null", "xs", "=", "[", "f", "(", "iv", ".", "data", ".", "get", "(", "field", ",", "undefined", ")", ")", "for", "iv", "in", "self", "]", "else", ":", "xs", "=", "[", "1", "for", "iv", "in", "self", "]", "ax", ".", "set_xlim", "(", "(", "min", "(", "xs", ")", ",", "max", "(", "xs", ")", ")", ")", "for", "x", ",", "y", "in", "zip", "(", "xs", ",", "ys", ")", ":", "ax", ".", "axhline", "(", "y", ",", "color", "=", "'lightgray'", ",", "zorder", "=", "0", ")", "ax", ".", "scatter", "(", "xs", ",", "ys", ",", "clip_on", "=", "False", ",", "*", "*", "kwargs", ")", "return", "ax" ]
avg_line_len: 27.76
score: 0.009749
def scalars_impl(self, run, tag_regex_string):
    """Given a tag regex and single run, return ScalarEvents.

    Args:
        run: A run string.
        tag_regex_string: A regular expression that captures portions of tags.

    Raises:
        ValueError: if the scalars plugin is not registered.

    Returns:
        A dictionary that is the JSON-able response.
    """
    if not tag_regex_string:
        # The user provided no regex.
        return {
            _REGEX_VALID_PROPERTY: False,
            _TAG_TO_EVENTS_PROPERTY: {},
        }

    # Construct the regex.
    try:
        regex = re.compile(tag_regex_string)
    except re.error:
        return {
            _REGEX_VALID_PROPERTY: False,
            _TAG_TO_EVENTS_PROPERTY: {},
        }

    # Fetch the tags for the run. Filter for tags that match the regex.
    run_to_data = self._multiplexer.PluginRunToTagToContent(
        scalars_metadata.PLUGIN_NAME)

    tag_to_data = None
    try:
        tag_to_data = run_to_data[run]
    except KeyError:
        # The run could not be found. Perhaps a configuration specified a run
        # that TensorBoard has not read from disk yet.
        payload = {}

    if tag_to_data:
        scalars_plugin_instance = self._get_scalars_plugin()
        if not scalars_plugin_instance:
            raise ValueError(('Failed to respond to request for /scalars. '
                              'The scalars plugin is oddly not registered.'))

        form = scalars_plugin.OutputFormat.JSON
        payload = {
            tag: scalars_plugin_instance.scalars_impl(tag, run, None, form)[0]
            for tag in tag_to_data.keys()
            if regex.match(tag)
        }

    return {
        _REGEX_VALID_PROPERTY: True,
        _TAG_TO_EVENTS_PROPERTY: payload,
    }
[ "def", "scalars_impl", "(", "self", ",", "run", ",", "tag_regex_string", ")", ":", "if", "not", "tag_regex_string", ":", "# The user provided no regex.", "return", "{", "_REGEX_VALID_PROPERTY", ":", "False", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "{", "}", ",", "}", "# Construct the regex.", "try", ":", "regex", "=", "re", ".", "compile", "(", "tag_regex_string", ")", "except", "re", ".", "error", ":", "return", "{", "_REGEX_VALID_PROPERTY", ":", "False", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "{", "}", ",", "}", "# Fetch the tags for the run. Filter for tags that match the regex.", "run_to_data", "=", "self", ".", "_multiplexer", ".", "PluginRunToTagToContent", "(", "scalars_metadata", ".", "PLUGIN_NAME", ")", "tag_to_data", "=", "None", "try", ":", "tag_to_data", "=", "run_to_data", "[", "run", "]", "except", "KeyError", ":", "# The run could not be found. Perhaps a configuration specified a run that", "# TensorBoard has not read from disk yet.", "payload", "=", "{", "}", "if", "tag_to_data", ":", "scalars_plugin_instance", "=", "self", ".", "_get_scalars_plugin", "(", ")", "if", "not", "scalars_plugin_instance", ":", "raise", "ValueError", "(", "(", "'Failed to respond to request for /scalars. '", "'The scalars plugin is oddly not registered.'", ")", ")", "form", "=", "scalars_plugin", ".", "OutputFormat", ".", "JSON", "payload", "=", "{", "tag", ":", "scalars_plugin_instance", ".", "scalars_impl", "(", "tag", ",", "run", ",", "None", ",", "form", ")", "[", "0", "]", "for", "tag", "in", "tag_to_data", ".", "keys", "(", ")", "if", "regex", ".", "match", "(", "tag", ")", "}", "return", "{", "_REGEX_VALID_PROPERTY", ":", "True", ",", "_TAG_TO_EVENTS_PROPERTY", ":", "payload", ",", "}" ]
avg_line_len: 28.87931
score: 0.009238
def zSetSurfaceData(self, surfNum, radius=None, thick=None, material=None,
                    semidia=None, conic=None, comment=None):
    """Sets surface data"""
    if self.pMode == 0:  # Sequential mode
        surf = self.pLDE.GetSurfaceAt(surfNum)
        if radius is not None:
            surf.pRadius = radius
        if thick is not None:
            surf.pThickness = thick
        if material is not None:
            surf.pMaterial = material
        if semidia is not None:
            surf.pSemiDiameter = semidia
        if conic is not None:
            surf.pConic = conic
        if comment is not None:
            surf.pComment = comment
    else:
        raise NotImplementedError('Function not implemented for non-sequential mode')
[ "def", "zSetSurfaceData", "(", "self", ",", "surfNum", ",", "radius", "=", "None", ",", "thick", "=", "None", ",", "material", "=", "None", ",", "semidia", "=", "None", ",", "conic", "=", "None", ",", "comment", "=", "None", ")", ":", "if", "self", ".", "pMode", "==", "0", ":", "# Sequential mode", "surf", "=", "self", ".", "pLDE", ".", "GetSurfaceAt", "(", "surfNum", ")", "if", "radius", "is", "not", "None", ":", "surf", ".", "pRadius", "=", "radius", "if", "thick", "is", "not", "None", ":", "surf", ".", "pThickness", "=", "thick", "if", "material", "is", "not", "None", ":", "surf", ".", "pMaterial", "=", "material", "if", "semidia", "is", "not", "None", ":", "surf", ".", "pSemiDiameter", "=", "semidia", "if", "conic", "is", "not", "None", ":", "surf", ".", "pConic", "=", "conic", "if", "comment", "is", "not", "None", ":", "surf", ".", "pComment", "=", "comment", "else", ":", "raise", "NotImplementedError", "(", "'Function not implemented for non-sequential mode'", ")" ]
avg_line_len: 42.526316
score: 0.008475
async def update_pin(**payload):
    """Update the onboarding welcome message after receiving a "pin_added"
    event from Slack. Update timestamp for welcome message as well.
    """
    data = payload["data"]
    web_client = payload["web_client"]
    channel_id = data["channel_id"]
    user_id = data["user"]

    # Get the original tutorial sent.
    onboarding_tutorial = onboarding_tutorials_sent[channel_id][user_id]

    # Mark the pin task as completed.
    onboarding_tutorial.pin_task_completed = True

    # Get the new message payload
    message = onboarding_tutorial.get_message_payload()

    # Post the updated message in Slack
    updated_message = await web_client.chat_update(**message)

    # Update the timestamp saved on the onboarding tutorial object
    onboarding_tutorial.timestamp = updated_message["ts"]
[ "async", "def", "update_pin", "(", "*", "*", "payload", ")", ":", "data", "=", "payload", "[", "\"data\"", "]", "web_client", "=", "payload", "[", "\"web_client\"", "]", "channel_id", "=", "data", "[", "\"channel_id\"", "]", "user_id", "=", "data", "[", "\"user\"", "]", "# Get the original tutorial sent.", "onboarding_tutorial", "=", "onboarding_tutorials_sent", "[", "channel_id", "]", "[", "user_id", "]", "# Mark the pin task as completed.", "onboarding_tutorial", ".", "pin_task_completed", "=", "True", "# Get the new message payload", "message", "=", "onboarding_tutorial", ".", "get_message_payload", "(", ")", "# Post the updated message in Slack", "updated_message", "=", "await", "web_client", ".", "chat_update", "(", "*", "*", "message", ")", "# Update the timestamp saved on the onboarding tutorial object", "onboarding_tutorial", ".", "timestamp", "=", "updated_message", "[", "\"ts\"", "]" ]
avg_line_len: 35.26087
score: 0.0012
def display_pil_image(im):
    """Displayhook function for PIL Images, rendered as PNG."""
    from IPython.core import display
    b = BytesIO()
    im.save(b, format='png')
    data = b.getvalue()

    ip_img = display.Image(data=data, format='png', embed=True)
    return ip_img._repr_png_()
[ "def", "display_pil_image", "(", "im", ")", ":", "from", "IPython", ".", "core", "import", "display", "b", "=", "BytesIO", "(", ")", "im", ".", "save", "(", "b", ",", "format", "=", "'png'", ")", "data", "=", "b", ".", "getvalue", "(", ")", "ip_img", "=", "display", ".", "Image", "(", "data", "=", "data", ",", "format", "=", "'png'", ",", "embed", "=", "True", ")", "return", "ip_img", ".", "_repr_png_", "(", ")" ]
avg_line_len: 31
score: 0.027875
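The sample is written as an IPython displayhook. Registering such a hook goes through IPython's standard formatter API; wiring it to this particular function is an assumption, sketched here rather than taken from the sample:

from PIL import Image

# Assuming the display_pil_image definition above is in scope.
ip = get_ipython()  # available only inside an IPython session
png_formatter = ip.display_formatter.formatters['image/png']
png_formatter.for_type(Image.Image, display_pil_image)
# Evaluating a PIL image at the prompt now renders it inline as PNG.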
def _upload(self, archive, region):
    """Upload function source and return source url
    """
    # Generate source upload url
    url = self.client.execute_command(
        'generateUploadUrl',
        {'parent': 'projects/{}/locations/{}'.format(
            self.session.get_default_project(),
            region)}).get('uploadUrl')
    log.debug("uploading function code %s", url)
    http = self._get_http_client(self.client)
    headers, response = http.request(
        url, method='PUT',
        headers={
            'content-type': 'application/zip',
            'Content-Length': '%d' % archive.size,
            'x-goog-content-length-range': '0,104857600'
        },
        body=open(archive.path, 'rb')
    )
    log.info("function code uploaded")
    if headers['status'] != '200':
        raise RuntimeError("%s\n%s" % (headers, response))
    return url
[ "def", "_upload", "(", "self", ",", "archive", ",", "region", ")", ":", "# Generate source upload url", "url", "=", "self", ".", "client", ".", "execute_command", "(", "'generateUploadUrl'", ",", "{", "'parent'", ":", "'projects/{}/locations/{}'", ".", "format", "(", "self", ".", "session", ".", "get_default_project", "(", ")", ",", "region", ")", "}", ")", ".", "get", "(", "'uploadUrl'", ")", "log", ".", "debug", "(", "\"uploading function code %s\"", ",", "url", ")", "http", "=", "self", ".", "_get_http_client", "(", "self", ".", "client", ")", "headers", ",", "response", "=", "http", ".", "request", "(", "url", ",", "method", "=", "'PUT'", ",", "headers", "=", "{", "'content-type'", ":", "'application/zip'", ",", "'Content-Length'", ":", "'%d'", "%", "archive", ".", "size", ",", "'x-goog-content-length-range'", ":", "'0,104857600'", "}", ",", "body", "=", "open", "(", "archive", ".", "path", ",", "'rb'", ")", ")", "log", ".", "info", "(", "\"function code uploaded\"", ")", "if", "headers", "[", "'status'", "]", "!=", "'200'", ":", "raise", "RuntimeError", "(", "\"%s\\n%s\"", "%", "(", "headers", ",", "response", ")", ")", "return", "url" ]
avg_line_len: 39.25
score: 0.002073
def copy(self, deep=True, data=None):
    """Returns a copy of this object.

    `deep` is ignored since data is stored in the form of
    pandas.Index, which is already immutable. Dimensions, attributes
    and encodings are always copied.

    Use `data` to create a new object with the same structure as
    original but entirely new data.

    Parameters
    ----------
    deep : bool, optional
        Deep is always ignored.
    data : array_like, optional
        Data to use in the new object. Must have same shape as original.

    Returns
    -------
    object : Variable
        New object with dimensions, attributes, encodings, and optionally
        data copied from original.
    """
    if data is None:
        data = self._data
    else:
        data = as_compatible_data(data)
        if self.shape != data.shape:
            raise ValueError("Data shape {} must match shape of object {}"
                             .format(data.shape, self.shape))
    return type(self)(self.dims, data, self._attrs,
                      self._encoding, fastpath=True)
[ "def", "copy", "(", "self", ",", "deep", "=", "True", ",", "data", "=", "None", ")", ":", "if", "data", "is", "None", ":", "data", "=", "self", ".", "_data", "else", ":", "data", "=", "as_compatible_data", "(", "data", ")", "if", "self", ".", "shape", "!=", "data", ".", "shape", ":", "raise", "ValueError", "(", "\"Data shape {} must match shape of object {}\"", ".", "format", "(", "data", ".", "shape", ",", "self", ".", "shape", ")", ")", "return", "type", "(", "self", ")", "(", "self", ".", "dims", ",", "data", ",", "self", ".", "_attrs", ",", "self", ".", "_encoding", ",", "fastpath", "=", "True", ")" ]
avg_line_len: 36.03125
score: 0.001689
def add_output_option(parser):
    """output option"""
    parser.add_argument(
        "-o", "--outdir", dest="outdir", type=str, default='GSEApy_reports',
        metavar='', action="store",
        help="The GSEApy output directory. Default: the current working directory")
    parser.add_argument(
        "-f", "--format", dest="format", type=str, metavar='', action="store",
        choices=("pdf", "png", "jpeg", "eps", "svg"), default="pdf",
        help="File extensions supported by Matplotlib active backend, "
             "choose from {'pdf', 'png', 'jpeg','ps', 'eps','svg'}. Default: 'pdf'.")
    parser.add_argument(
        "--fs", "--figsize", action='store', nargs=2, dest='figsize',
        metavar=('width', 'height'), type=float, default=(6.5, 6),
        help="The figsize keyword argument needs two parameters to define it. "
             "Default: (6.5, 6)")
    parser.add_argument(
        "--graph", dest="graph", action="store", type=int, default=20,
        metavar='int', help="Numbers of top graphs produced. Default: 20")
    parser.add_argument(
        "--no-plot", action='store_true', dest='noplot', default=False,
        help="Speed up computing by suppressing the plot output. "
             "This is useful only if the data are of interest. Default: False.")
    parser.add_argument(
        "-v", "--verbose", action="store_true", default=False, dest='verbose',
        help="Increase output verbosity, print out progress of your job")
[ "def", "add_output_option", "(", "parser", ")", ":", "parser", ".", "add_argument", "(", "\"-o\"", ",", "\"--outdir\"", ",", "dest", "=", "\"outdir\"", ",", "type", "=", "str", ",", "default", "=", "'GSEApy_reports'", ",", "metavar", "=", "''", ",", "action", "=", "\"store\"", ",", "help", "=", "\"The GSEApy output directory. Default: the current working directory\"", ")", "parser", ".", "add_argument", "(", "\"-f\"", ",", "\"--format\"", ",", "dest", "=", "\"format\"", ",", "type", "=", "str", ",", "metavar", "=", "''", ",", "action", "=", "\"store\"", ",", "choices", "=", "(", "\"pdf\"", ",", "\"png\"", ",", "\"jpeg\"", ",", "\"eps\"", ",", "\"svg\"", ")", ",", "default", "=", "\"pdf\"", ",", "help", "=", "\"File extensions supported by Matplotlib active backend,\\\n choose from {'pdf', 'png', 'jpeg','ps', 'eps','svg'}. Default: 'pdf'.\"", ")", "parser", ".", "add_argument", "(", "\"--fs\"", ",", "\"--figsize\"", ",", "action", "=", "'store'", ",", "nargs", "=", "2", ",", "dest", "=", "'figsize'", ",", "metavar", "=", "(", "'width'", ",", "'height'", ")", ",", "type", "=", "float", ",", "default", "=", "(", "6.5", ",", "6", ")", ",", "help", "=", "\"The figsize keyword argument need two parameters to define. Default: (6.5, 6)\"", ")", "parser", ".", "add_argument", "(", "\"--graph\"", ",", "dest", "=", "\"graph\"", ",", "action", "=", "\"store\"", ",", "type", "=", "int", ",", "default", "=", "20", ",", "metavar", "=", "'int'", ",", "help", "=", "\"Numbers of top graphs produced. Default: 20\"", ")", "parser", ".", "add_argument", "(", "\"--no-plot\"", ",", "action", "=", "'store_true'", ",", "dest", "=", "'noplot'", ",", "default", "=", "False", ",", "help", "=", "\"Speed up computing by suppressing the plot output.\"", "+", "\"This is useful only if data are interested. Default: False.\"", ")", "parser", ".", "add_argument", "(", "\"-v\"", ",", "\"--verbose\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ",", "dest", "=", "'verbose'", ",", "help", "=", "\"Increase output verbosity, print out progress of your job\"", ",", ")" ]
avg_line_len: 77.95
score: 0.013308
def cartesian_square_centred_on_point(self, point, distance, **kwargs):
    '''
    Select earthquakes from within a square centered on a point

    :param point:
        Centre point as instance of nhlib.geo.point.Point class
    :param distance:
        Distance (km)
    :returns:
        Instance of :class:`openquake.hmtk.seismicity.catalogue.Catalogue`
        class containing only selected events
    '''
    point_surface = Point(point.longitude, point.latitude, 0.)
    # As distance is
    north_point = point_surface.point_at(distance, 0., 0.)
    east_point = point_surface.point_at(distance, 0., 90.)
    south_point = point_surface.point_at(distance, 0., 180.)
    west_point = point_surface.point_at(distance, 0., 270.)
    is_long = np.logical_and(
        self.catalogue.data['longitude'] >= west_point.longitude,
        self.catalogue.data['longitude'] < east_point.longitude)
    # np.logical_and is binary, so combine three masks with reduce
    # rather than passing a third array (numpy would treat it as `out`).
    is_surface = np.logical_and.reduce([
        is_long,
        self.catalogue.data['latitude'] >= south_point.latitude,
        self.catalogue.data['latitude'] < north_point.latitude])
    upper_depth, lower_depth = _check_depth_limits(kwargs)
    is_valid = np.logical_and.reduce([
        is_surface,
        self.catalogue.data['depth'] >= upper_depth,
        self.catalogue.data['depth'] < lower_depth])
    return self.select_catalogue(is_valid)
[ "def", "cartesian_square_centred_on_point", "(", "self", ",", "point", ",", "distance", ",", "*", "*", "kwargs", ")", ":", "point_surface", "=", "Point", "(", "point", ".", "longitude", ",", "point", ".", "latitude", ",", "0.", ")", "# As distance is", "north_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "0.", ")", "east_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "90.", ")", "south_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "180.", ")", "west_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "270.", ")", "is_long", "=", "np", ".", "logical_and", "(", "self", ".", "catalogue", ".", "data", "[", "'longitude'", "]", ">=", "west_point", ".", "longitude", ",", "self", ".", "catalogue", ".", "data", "[", "'longitude'", "]", "<", "east_point", ".", "longitude", ")", "is_surface", "=", "np", ".", "logical_and", "(", "is_long", ",", "self", ".", "catalogue", ".", "data", "[", "'latitude'", "]", ">=", "south_point", ".", "latitude", ",", "self", ".", "catalogue", ".", "data", "[", "'latitude'", "]", "<", "north_point", ".", "latitude", ")", "upper_depth", ",", "lower_depth", "=", "_check_depth_limits", "(", "kwargs", ")", "is_valid", "=", "np", ".", "logical_and", "(", "is_surface", ",", "self", ".", "catalogue", ".", "data", "[", "'depth'", "]", ">=", "upper_depth", ",", "self", ".", "catalogue", ".", "data", "[", "'depth'", "]", "<", "lower_depth", ")", "return", "self", ".", "select_catalogue", "(", "is_valid", ")" ]
avg_line_len: 40.542857
score: 0.001376
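np.logical_and is a binary ufunc: a third positional array is interpreted as its out argument, not as another condition, so three masks are combined with .reduce as in the sample above. A short demonstration:

import numpy as np

a = np.array([True, True, False, True])
b = np.array([True, False, True, True])
c = np.array([True, True, True, False])

# Elementwise AND across all three masks.
combined = np.logical_and.reduce([a, b, c])
print(combined)  # [ True False False False]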
def validateData(self, text):
    '''
    Method which validates the data from each tag, to check whether
    it is an empty string

    :param text: data to be validated
    :return: True or False depending on the result
    '''
    if text == "\n":
        return False
    for c in text:
        try:
            if str(c) != " ":
                return True
        except:
            return False
    return False
[ "def", "validateData", "(", "self", ",", "text", ")", ":", "if", "text", "==", "\"\\n\"", ":", "return", "False", "for", "c", "in", "text", ":", "try", ":", "if", "str", "(", "c", ")", "!=", "\" \"", ":", "return", "True", "except", ":", "return", "False", "return", "False" ]
avg_line_len: 30.4
score: 0.008511
def renew_service(request, pk):
    """
    renew an existing service
    :param request: request object
    :param pk: the primary key of the service to renew
    :type pk: int
    """
    default_provider.load_services()
    service = get_object_or_404(ServicesActivated, pk=pk)
    service_name = str(service.name)
    service_object = default_provider.get_service(service_name)
    lets_auth = getattr(service_object, 'auth')
    getattr(service_object, 'reset_failed')(pk=pk)
    return redirect(lets_auth(request))
[ "def", "renew_service", "(", "request", ",", "pk", ")", ":", "default_provider", ".", "load_services", "(", ")", "service", "=", "get_object_or_404", "(", "ServicesActivated", ",", "pk", "=", "pk", ")", "service_name", "=", "str", "(", "service", ".", "name", ")", "service_object", "=", "default_provider", ".", "get_service", "(", "service_name", ")", "lets_auth", "=", "getattr", "(", "service_object", ",", "'auth'", ")", "getattr", "(", "service_object", ",", "'reset_failed'", ")", "(", "pk", "=", "pk", ")", "return", "redirect", "(", "lets_auth", "(", "request", ")", ")" ]
36.714286
0.001898
def clicked(self, px, py): '''see if the image has been clicked on''' if self.hidden: return None if (abs(px - self.posx) > self.width/2 or abs(py - self.posy) > self.height/2): return None return math.sqrt((px-self.posx)**2 + (py-self.posy)**2)
[ "def", "clicked", "(", "self", ",", "px", ",", "py", ")", ":", "if", "self", ".", "hidden", ":", "return", "None", "if", "(", "abs", "(", "px", "-", "self", ".", "posx", ")", ">", "self", ".", "width", "/", "2", "or", "abs", "(", "py", "-", "self", ".", "posy", ")", ">", "self", ".", "height", "/", "2", ")", ":", "return", "None", "return", "math", ".", "sqrt", "(", "(", "px", "-", "self", ".", "posx", ")", "**", "2", "+", "(", "py", "-", "self", ".", "posy", ")", "**", "2", ")" ]
38.25
0.009585
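The hit-test arithmetic in isolation, with hypothetical widget geometry (a click inside the bounding box yields the distance from the centre, anything else yields None):

import math

posx, posy, width, height = 100, 50, 40, 20  # hypothetical widget geometry
px, py = 110, 55                             # click position

if abs(px - posx) > width / 2 or abs(py - posy) > height / 2:
    result = None
else:
    result = math.sqrt((px - posx) ** 2 + (py - posy) ** 2)
print(result)  # ~11.18, the distance from the widget centre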
def proj_l1(v, gamma, axis=None, method=None): r"""Projection operator of the :math:`\ell_1` norm. Parameters ---------- v : array_like Input array :math:`\mathbf{v}` gamma : float Parameter :math:`\gamma` axis : None or int or tuple of ints, optional (default None) Axes of `v` over which to compute the :math:`\ell_1` norm. If `None`, an entire multi-dimensional array is treated as a vector. If axes are specified, then distinct norm values are computed over the indices of the remaining axes of input array `v`. method : None or str, optional (default None) Solver method to use. If `None`, the most appropriate choice is made based on the `axis` parameter. Valid methods are - 'scalarroot' The solution is computed via the method of Sec. 6.5.2 in :cite:`parikh-2014-proximal`. - 'sortcumsum' The solution is computed via the method of :cite:`duchi-2008-efficient`. Returns ------- x : ndarray Output array """ if method is None: if axis is None: method = 'scalarroot' else: method = 'sortcumsum' if method == 'scalarroot': if axis is not None: raise ValueError('Method scalarroot only supports axis=None') return _proj_l1_scalar_root(v, gamma) elif method == 'sortcumsum': if isinstance(axis, tuple): vtr, rsi = ndto2d(v, axis) xtr = _proj_l1_sortsum(vtr, gamma, axis=1) return ndfrom2d(xtr, rsi) else: return _proj_l1_sortsum(v, gamma, axis) else: raise ValueError('Unknown solver method %s' % method)
[ "def", "proj_l1", "(", "v", ",", "gamma", ",", "axis", "=", "None", ",", "method", "=", "None", ")", ":", "if", "method", "is", "None", ":", "if", "axis", "is", "None", ":", "method", "=", "'scalarroot'", "else", ":", "method", "=", "'sortcumsum'", "if", "method", "==", "'scalarroot'", ":", "if", "axis", "is", "not", "None", ":", "raise", "ValueError", "(", "'Method scalarroot only supports axis=None'", ")", "return", "_proj_l1_scalar_root", "(", "v", ",", "gamma", ")", "elif", "method", "==", "'sortcumsum'", ":", "if", "isinstance", "(", "axis", ",", "tuple", ")", ":", "vtr", ",", "rsi", "=", "ndto2d", "(", "v", ",", "axis", ")", "xtr", "=", "_proj_l1_sortsum", "(", "vtr", ",", "gamma", ",", "axis", "=", "1", ")", "return", "ndfrom2d", "(", "xtr", ",", "rsi", ")", "else", ":", "return", "_proj_l1_sortsum", "(", "v", ",", "gamma", ",", "axis", ")", "else", ":", "raise", "ValueError", "(", "'Unknown solver method %s'", "%", "method", ")" ]
32.461538
0.000575
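For reference, a self-contained sketch of the sort-and-cumulative-sum projection of Duchi et al. (2008) that the 'sortcumsum' method refers to; this illustrates the algorithm and is not the library's own `_proj_l1_sortsum` routine:

import numpy as np

def proj_l1_ball(v, gamma):
    """Project v onto the l1 ball of radius gamma (gamma > 0 assumed)."""
    if np.abs(v).sum() <= gamma:
        return v.copy()                      # already inside the ball
    u = np.sort(np.abs(v))[::-1]             # magnitudes, descending
    cssv = np.cumsum(u) - gamma              # cumulative sums minus radius
    rho = np.nonzero(u > cssv / (np.arange(len(u)) + 1))[0][-1]
    theta = cssv[rho] / (rho + 1.0)          # soft-threshold level
    return np.sign(v) * np.maximum(np.abs(v) - theta, 0)

v = np.array([3.0, -1.0, 0.5])
x = proj_l1_ball(v, 2.0)
print(x, np.abs(x).sum())  # l1 norm of the result is 2.0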
def setOverlayTransformTrackedDeviceRelative(self, ulOverlayHandle, unTrackedDevice):
    """Sets the overlay transform to be relative to the transform of the specified tracked device."""
    fn = self.function_table.setOverlayTransformTrackedDeviceRelative
    pmatTrackedDeviceToOverlayTransform = HmdMatrix34_t()
    result = fn(ulOverlayHandle, unTrackedDevice, byref(pmatTrackedDeviceToOverlayTransform))
    return result, pmatTrackedDeviceToOverlayTransform
[ "def", "setOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ",", "unTrackedDevice", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformTrackedDeviceRelative", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "unTrackedDevice", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "pmatTrackedDeviceToOverlayTransform" ]
66.857143
0.010549
def chunk(line, mapping={None: 'text', '${': 'escape', '#{': 'bless',
                         '&{': 'args', '%{': 'format', '@{': 'json'}):
    """Chunkify and "tag" a block of text into plain text and code sections.

    The first delimiter is blank to represent text sections, and keeps the
    indexes aligned with the tags. Values are yielded in the form
    (tag, text).
    """
    skipping = 0  # How many closing braces will we need to skip?
    start = None  # Starting position of current match.
    last = 0
    i = 0
    text = line.line

    while i < len(text):
        if start is not None:
            if text[i] == '{':
                skipping += 1
            elif text[i] == '}':
                if skipping:
                    skipping -= 1
                else:
                    yield line.clone(kind=mapping[text[start-2:start]],
                                     line=text[start:i])
                    start = None
                    last = i = i + 1
                    continue
        elif text[i:i+2] in mapping:
            if last is not None and last != i:
                yield line.clone(kind=mapping[None], line=text[last:i])
                last = None
            start = i = i + 2
            continue
        i += 1

    if last is not None and last < len(text):
        yield line.clone(kind=mapping[None], line=text[last:])
[ "def", "chunk", "(", "line", ",", "mapping", "=", "{", "None", ":", "'text'", ",", "'${'", ":", "'escape'", ",", "'#{'", ":", "'bless'", ",", "'&{'", ":", "'args'", ",", "'%{'", ":", "'format'", ",", "'@{'", ":", "'json'", "}", ")", ":", "skipping", "=", "0", "# How many closing parenthesis will we need to skip?", "start", "=", "None", "# Starting position of current match.", "last", "=", "0", "i", "=", "0", "text", "=", "line", ".", "line", "while", "i", "<", "len", "(", "text", ")", ":", "if", "start", "is", "not", "None", ":", "if", "text", "[", "i", "]", "==", "'{'", ":", "skipping", "+=", "1", "elif", "text", "[", "i", "]", "==", "'}'", ":", "if", "skipping", ":", "skipping", "-=", "1", "else", ":", "yield", "line", ".", "clone", "(", "kind", "=", "mapping", "[", "text", "[", "start", "-", "2", ":", "start", "]", "]", ",", "line", "=", "text", "[", "start", ":", "i", "]", ")", "start", "=", "None", "last", "=", "i", "=", "i", "+", "1", "continue", "elif", "text", "[", "i", ":", "i", "+", "2", "]", "in", "mapping", ":", "if", "last", "is", "not", "None", "and", "last", "!=", "i", ":", "yield", "line", ".", "clone", "(", "kind", "=", "mapping", "[", "None", "]", ",", "line", "=", "text", "[", "last", ":", "i", "]", ")", "last", "=", "None", "start", "=", "i", "=", "i", "+", "2", "continue", "i", "+=", "1", "if", "last", "<", "len", "(", "text", ")", ":", "yield", "line", ".", "clone", "(", "kind", "=", "mapping", "[", "None", "]", ",", "line", "=", "text", "[", "last", ":", "]", ")" ]
24.690476
0.05102
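A usage sketch, assuming `chunk` is importable; `FakeLine` below is a made-up stand-in for the template line object, which only needs to expose `.line` and `.clone(kind=..., line=...)`:

class FakeLine:
    """Minimal stand-in for the template line object (assumed interface)."""
    def __init__(self, line, kind='text'):
        self.line = line
        self.kind = kind

    def clone(self, kind, line):
        return FakeLine(line, kind)

for piece in chunk(FakeLine("Hello ${name}, total #{count}!")):
    print(piece.kind, repr(piece.line))
# text 'Hello '
# escape 'name'
# text ', total '
# bless 'count'
# text '!'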
def show_image(self, image_id_or_slug):
    """
    This method displays the attributes of an image.

    Required parameters

    image_id_or_slug:
        Numeric id or slug of the image whose attributes you would
        like to display.
    """
    if not image_id_or_slug:
        msg = 'image_id_or_slug is required to show an image!'
        raise DOPException(msg)
    json = self.request('/images/%s' % image_id_or_slug, method='GET')
    image_json = json.get('image')
    status = json.get('status')
    if status == 'OK':
        image = Image.from_json(image_json)
        return image
    else:
        message = json.get('message')
        raise DOPException('[%s]: %s' % (status, message))
[ "def", "show_image", "(", "self", ",", "image_id_or_slug", ")", ":", "if", "not", "image_id_or_slug", ":", "msg", "=", "'image_id_or_slug is required to destroy an image!'", "raise", "DOPException", "(", "msg", ")", "json", "=", "self", ".", "request", "(", "'/images/%s'", "%", "image_id_or_slug", ",", "method", "=", "'GET'", ")", "image_json", "=", "json", ".", "get", "(", "'image'", ")", "status", "=", "json", ".", "get", "(", "'status'", ")", "if", "status", "==", "'OK'", ":", "image", "=", "Image", ".", "from_json", "(", "image_json", ")", "return", "image", "else", ":", "message", "=", "json", ".", "get", "(", "'message'", ")", "raise", "DOPException", "(", "'[%s]: %s'", "%", "(", "status", ",", "message", ")", ")" ]
33.869565
0.002497
def remove_zero_normals(self): """Removes normal vectors with a zero magnitude. Note ---- This returns nothing and updates the NormalCloud in-place. """ points_of_interest = np.where(np.linalg.norm(self._data, axis=0) != 0.0)[0] self._data = self._data[:, points_of_interest]
[ "def", "remove_zero_normals", "(", "self", ")", ":", "points_of_interest", "=", "np", ".", "where", "(", "np", ".", "linalg", ".", "norm", "(", "self", ".", "_data", ",", "axis", "=", "0", ")", "!=", "0.0", ")", "[", "0", "]", "self", ".", "_data", "=", "self", ".", "_data", "[", ":", ",", "points_of_interest", "]" ]
36
0.009036
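The masking idea in isolation, as a minimal numpy sketch over a 3 x N array of normals:

import numpy as np

normals = np.array([[0.0, 1.0, 0.0],
                    [0.0, 0.0, 1.0],
                    [0.0, 0.0, 0.0]])  # first column has zero magnitude
keep = np.where(np.linalg.norm(normals, axis=0) != 0.0)[0]
print(normals[:, keep])  # the all-zero column is dropped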
def search(self, index=None, doc_type=None, body=None, **query_params):
    """
    Make a search query on Elasticsearch
    `<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html>`_

    :param index: the index name to query
    :param doc_type: the doc type to search in
    :param body: the query
    :param query_params: params

    :arg _source: True or false to return the _source field or not, or a
        list of fields to return
    :arg _source_exclude: A list of fields to exclude from the returned
        _source field
    :arg _source_include: A list of fields to extract and return from the
        _source field
    :arg allow_no_indices: Whether to ignore if a wildcard indices
        expression resolves into no concrete indices. (This includes `_all`
        string or when no indices have been specified)
    :arg analyze_wildcard: Specify whether wildcard and prefix queries
        should be analyzed (default: false)
    :arg analyzer: The analyzer to use for the query string
    :arg default_operator: The default operator for query string query (AND
        or OR), default 'OR', valid choices are: 'AND', 'OR'
    :arg df: The field to use as default where no field prefix is given in
        the query string
    :arg expand_wildcards: Whether to expand wildcard expression to
        concrete indices that are open, closed or both, default 'open',
        valid choices are: 'open', 'closed', 'none', 'all'
    :arg explain: Specify whether to return detailed information about
        score computation as part of a hit
    :arg fielddata_fields: A comma-separated list of fields to return as
        the field data representation of a field for each hit
    :arg fields: A comma-separated list of fields to return as part of a
        hit
    :arg from\_: Starting offset (default: 0)
    :arg ignore_unavailable: Whether specified concrete indices should be
        ignored when unavailable (missing or closed)
    :arg lenient: Specify whether format-based query failures (such as
        providing text to a numeric field) should be ignored
    :arg lowercase_expanded_terms: Specify whether query terms should be
        lowercased
    :arg preference: Specify the node or shard the operation should be
        performed on (default: random)
    :arg q: Query in the Lucene query string syntax
    :arg request_cache: Specify if request cache should be used for this
        request or not, defaults to index level setting
    :arg routing: A comma-separated list of specific routing values
    :arg scroll: Specify how long a consistent view of the index should be
        maintained for scrolled search
    :arg search_type: Search operation type, valid choices are:
        'query_then_fetch', 'dfs_query_then_fetch'
    :arg size: Number of hits to return (default: 10)
    :arg sort: A comma-separated list of <field>:<direction> pairs
    :arg stats: Specific 'tag' of the request for logging and statistical
        purposes
    :arg suggest_field: Specify which field to use for suggestions
    :arg suggest_mode: Specify suggest mode, default 'missing', valid
        choices are: 'missing', 'popular', 'always'
    :arg suggest_size: How many suggestions to return in response
    :arg suggest_text: The source text for which the suggestions should be
        returned
    :arg terminate_after: The maximum number of documents to collect for
        each shard, upon reaching which the query execution will terminate
        early.
    :arg timeout: Explicit operation timeout
    :arg track_scores: Whether to calculate and return scores even if they
        are not used for sorting
    :arg version: Specify whether to return document version as part of a
        hit
    """
    path = self._es_parser.make_path(index, doc_type, EsMethods.SEARCH)
    result = yield self._perform_request(HttpMethod.POST,
                                         path,
                                         body=body,
                                         params=query_params)
    returnValue(result)
[ "def", "search", "(", "self", ",", "index", "=", "None", ",", "doc_type", "=", "None", ",", "body", "=", "None", ",", "*", "*", "query_params", ")", ":", "path", "=", "self", ".", "_es_parser", ".", "make_path", "(", "index", ",", "doc_type", ",", "EsMethods", ".", "SEARCH", ")", "result", "=", "yield", "self", ".", "_perform_request", "(", "HttpMethod", ".", "POST", ",", "path", ",", "body", "=", "body", ",", "params", "=", "query_params", ")", "returnValue", "(", "result", ")" ]
57.652778
0.001895
def set_static_dns(iface, *addrs):
    '''
    Set static DNS configuration on a Windows NIC

    Args:

        iface (str): The name of the interface to set

        addrs (*):
            One or more DNS servers to be added. To clear the list of DNS
            servers pass an empty list (``[]``). If undefined or ``None``
            no changes will be made.

    Returns:
        dict: A dictionary containing the new DNS settings

    CLI Example:

    .. code-block:: bash

        salt -G 'os_family:Windows' ip.set_static_dns 'Local Area Connection' '192.168.1.1'
        salt -G 'os_family:Windows' ip.set_static_dns 'Local Area Connection' '192.168.1.252' '192.168.1.253'
    '''
    # `addrs is ()` compared identity against a fresh tuple and could raise
    # IndexError on an empty call; `not addrs` covers both cases safely.
    if not addrs or str(addrs[0]).lower() == 'none':
        return {'Interface': iface, 'DNS Server': 'No Changes'}
    # Clear the list of DNS servers if [] is passed
    if str(addrs[0]).lower() == '[]':
        log.debug('Clearing list of DNS servers')
        cmd = ['netsh', 'interface', 'ip', 'set', 'dns',
               'name={0}'.format(iface),
               'source=static',
               'address=none']
        __salt__['cmd.run'](cmd, python_shell=False)
        return {'Interface': iface, 'DNS Server': []}
    addr_index = 1
    for addr in addrs:
        if addr_index == 1:
            cmd = ['netsh', 'interface', 'ip', 'set', 'dns',
                   'name={0}'.format(iface),
                   'source=static',
                   'address={0}'.format(addr),
                   'register=primary']
            __salt__['cmd.run'](cmd, python_shell=False)
            addr_index = addr_index + 1
        else:
            cmd = ['netsh', 'interface', 'ip', 'add', 'dns',
                   'name={0}'.format(iface),
                   'address={0}'.format(addr),
                   'index={0}'.format(addr_index)]
            __salt__['cmd.run'](cmd, python_shell=False)
            addr_index = addr_index + 1
    return {'Interface': iface, 'DNS Server': addrs}
[ "def", "set_static_dns", "(", "iface", ",", "*", "addrs", ")", ":", "if", "addrs", "is", "(", ")", "or", "str", "(", "addrs", "[", "0", "]", ")", ".", "lower", "(", ")", "==", "'none'", ":", "return", "{", "'Interface'", ":", "iface", ",", "'DNS Server'", ":", "'No Changes'", "}", "# Clear the list of DNS servers if [] is passed", "if", "str", "(", "addrs", "[", "0", "]", ")", ".", "lower", "(", ")", "==", "'[]'", ":", "log", ".", "debug", "(", "'Clearing list of DNS servers'", ")", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'set'", ",", "'dns'", ",", "'name={0}'", ".", "format", "(", "iface", ")", ",", "'source=static'", ",", "'address=none'", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "return", "{", "'Interface'", ":", "iface", ",", "'DNS Server'", ":", "[", "]", "}", "addr_index", "=", "1", "for", "addr", "in", "addrs", ":", "if", "addr_index", "==", "1", ":", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'set'", ",", "'dns'", ",", "'name={0}'", ".", "format", "(", "iface", ")", ",", "'source=static'", ",", "'address={0}'", ".", "format", "(", "addr", ")", ",", "'register=primary'", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "addr_index", "=", "addr_index", "+", "1", "else", ":", "cmd", "=", "[", "'netsh'", ",", "'interface'", ",", "'ip'", ",", "'add'", ",", "'dns'", ",", "'name={0}'", ".", "format", "(", "iface", ")", ",", "'address={0}'", ".", "format", "(", "addr", ")", ",", "'index={0}'", ".", "format", "(", "addr_index", ")", "]", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "addr_index", "=", "addr_index", "+", "1", "return", "{", "'Interface'", ":", "iface", ",", "'DNS Server'", ":", "addrs", "}" ]
37.115385
0.001514
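For reference, the netsh argument lists the function builds for two servers, reproduced as a dry-run sketch that only prints the commands:

iface, addrs = 'Local Area Connection', ['192.168.1.252', '192.168.1.253']
for index, addr in enumerate(addrs, start=1):
    if index == 1:
        cmd = ['netsh', 'interface', 'ip', 'set', 'dns',
               'name={0}'.format(iface), 'source=static',
               'address={0}'.format(addr), 'register=primary']
    else:
        cmd = ['netsh', 'interface', 'ip', 'add', 'dns',
               'name={0}'.format(iface), 'address={0}'.format(addr),
               'index={0}'.format(index)]
    print(' '.join(cmd))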
def unarchive(filename, output_dir='.'):
    '''unpacks the given archive into ``output_dir``'''
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    for archive in archive_formats:
        if filename.endswith(archive_formats[archive]['suffix']):
            return subprocess.call(
                archive_formats[archive]['command'](output_dir, filename)) == 0
    return False
[ "def", "unarchive", "(", "filename", ",", "output_dir", "=", "'.'", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "output_dir", ")", ":", "os", ".", "makedirs", "(", "output_dir", ")", "for", "archive", "in", "archive_formats", ":", "if", "filename", ".", "endswith", "(", "archive_formats", "[", "archive", "]", "[", "'suffix'", "]", ")", ":", "return", "subprocess", ".", "call", "(", "archive_formats", "[", "archive", "]", "[", "'command'", "]", "(", "output_dir", ",", "filename", ")", ")", "==", "0", "return", "False" ]
46.75
0.013123
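`archive_formats` is defined elsewhere in the module; one plausible shape for an entry, shown purely as an assumption, would be:

# Hypothetical entry; the real mapping lives elsewhere in the module.
archive_formats = {
    'tar.gz': {
        'suffix': '.tar.gz',
        'command': lambda output_dir, filename: [
            'tar', 'xzf', filename, '-C', output_dir],
    },
}
# unarchive('data.tar.gz', 'out') would then run: tar xzf data.tar.gz -C out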
def convert_destination_to_id(destination_node, destination_port, nodes): """ Convert a destination to device and port ID :param str destination_node: Destination node name :param str destination_port: Destination port name :param list nodes: list of nodes from :py:meth:`generate_nodes` :return: dict containing device ID, device name and port ID :rtype: dict """ device_id = None device_name = None port_id = None if destination_node != 'NIO': for node in nodes: if destination_node == node['properties']['name']: device_id = node['id'] device_name = destination_node for port in node['ports']: if destination_port == port['name']: port_id = port['id'] break break else: for node in nodes: if node['type'] == 'Cloud': for port in node['ports']: if destination_port.lower() == port['name'].lower(): device_id = node['id'] device_name = node['properties']['name'] port_id = port['id'] break info = {'id': device_id, 'name': device_name, 'pid': port_id} return info
[ "def", "convert_destination_to_id", "(", "destination_node", ",", "destination_port", ",", "nodes", ")", ":", "device_id", "=", "None", "device_name", "=", "None", "port_id", "=", "None", "if", "destination_node", "!=", "'NIO'", ":", "for", "node", "in", "nodes", ":", "if", "destination_node", "==", "node", "[", "'properties'", "]", "[", "'name'", "]", ":", "device_id", "=", "node", "[", "'id'", "]", "device_name", "=", "destination_node", "for", "port", "in", "node", "[", "'ports'", "]", ":", "if", "destination_port", "==", "port", "[", "'name'", "]", ":", "port_id", "=", "port", "[", "'id'", "]", "break", "break", "else", ":", "for", "node", "in", "nodes", ":", "if", "node", "[", "'type'", "]", "==", "'Cloud'", ":", "for", "port", "in", "node", "[", "'ports'", "]", ":", "if", "destination_port", ".", "lower", "(", ")", "==", "port", "[", "'name'", "]", ".", "lower", "(", ")", ":", "device_id", "=", "node", "[", "'id'", "]", "device_name", "=", "node", "[", "'properties'", "]", "[", "'name'", "]", "port_id", "=", "port", "[", "'id'", "]", "break", "info", "=", "{", "'id'", ":", "device_id", ",", "'name'", ":", "device_name", ",", "'pid'", ":", "port_id", "}", "return", "info" ]
39.297297
0.001342
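The function is pure, so it can be exercised with plain dicts (usage sketch, assuming `convert_destination_to_id` is importable):

nodes = [
    {'id': 1, 'type': 'Router',
     'properties': {'name': 'R1'},
     'ports': [{'id': 10, 'name': 'FastEthernet0/0'}]},
    {'id': 2, 'type': 'Cloud',
     'properties': {'name': 'Cloud1'},
     'ports': [{'id': 20, 'name': 'nio_gen_eth:eth0'}]},
]
print(convert_destination_to_id('R1', 'FastEthernet0/0', nodes))
# {'id': 1, 'name': 'R1', 'pid': 10}
print(convert_destination_to_id('NIO', 'nio_gen_eth:eth0', nodes))
# {'id': 2, 'name': 'Cloud1', 'pid': 20}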
def _match_pattern(filename, include, exclude, real, path, follow):
    """Match includes and excludes."""

    if real:
        symlinks = {}
        if isinstance(filename, bytes):
            curdir = os.fsencode(os.curdir)
            mount = RE_BWIN_MOUNT if util.platform() == "windows" else RE_BMOUNT
        else:
            curdir = os.curdir
            mount = RE_WIN_MOUNT if util.platform() == "windows" else RE_MOUNT

        if not mount.match(filename):
            exists = os.path.lexists(os.path.join(curdir, filename))
        else:
            exists = os.path.lexists(filename)

        if not exists:
            return False

        if path:
            return _match_real(filename, include, exclude, follow, symlinks)

    matched = False
    for pattern in include:
        if pattern.fullmatch(filename):
            matched = True
            break

    if not include and exclude:
        matched = True

    if matched and exclude:
        for pattern in exclude:
            if not pattern.fullmatch(filename):
                matched = False
                break

    return matched
[ "def", "_match_pattern", "(", "filename", ",", "include", ",", "exclude", ",", "real", ",", "path", ",", "follow", ")", ":", "if", "real", ":", "symlinks", "=", "{", "}", "if", "isinstance", "(", "filename", ",", "bytes", ")", ":", "curdir", "=", "os", ".", "fsencode", "(", "os", ".", "curdir", ")", "mount", "=", "RE_BWIN_MOUNT", "if", "util", ".", "platform", "(", ")", "==", "\"windows\"", "else", "RE_BMOUNT", "else", ":", "curdir", "=", "os", ".", "curdir", "mount", "=", "RE_WIN_MOUNT", "if", "util", ".", "platform", "(", ")", "==", "\"windows\"", "else", "RE_MOUNT", "if", "not", "mount", ".", "match", "(", "filename", ")", ":", "exists", "=", "os", ".", "path", ".", "lexists", "(", "os", ".", "path", ".", "join", "(", "curdir", ",", "filename", ")", ")", "else", ":", "exists", "=", "os", ".", "path", ".", "lexists", "(", "filename", ")", "if", "not", "exists", ":", "return", "False", "if", "path", ":", "return", "_match_real", "(", "filename", ",", "include", ",", "exclude", ",", "follow", ",", "symlinks", ")", "matched", "=", "False", "for", "pattern", "in", "include", ":", "if", "pattern", ".", "fullmatch", "(", "filename", ")", ":", "matched", "=", "True", "break", "if", "not", "include", "and", "exclude", ":", "matched", "=", "True", "if", "matched", ":", "matched", "=", "True", "if", "exclude", ":", "for", "pattern", "in", "exclude", ":", "if", "not", "pattern", ".", "fullmatch", "(", "filename", ")", ":", "matched", "=", "False", "break", "return", "matched" ]
28.820513
0.001721
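The include/exclude semantics in isolation: a name must fullmatch at least one include pattern and, because exclude patterns arrive pre-negated from the compiler, every exclude pattern as well. A toy sketch with `re`, not the module's own compiled patterns:

import re

include = [re.compile(r'.*\.py')]
exclude = [re.compile(r'(?!.*test).*')]  # pre-negated: matches non-'test' names

def toy_match(name):
    matched = any(p.fullmatch(name) for p in include)
    if matched and exclude:
        matched = all(p.fullmatch(name) for p in exclude)
    return matched

print(toy_match('setup.py'))       # True
print(toy_match('test_setup.py'))  # False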
def get(self): '''Return a dictionary that represents the Tcl array''' value = {} for (elementname, elementvar) in self._elementvars.items(): value[elementname] = elementvar.get() return value
[ "def", "get", "(", "self", ")", ":", "value", "=", "{", "}", "for", "(", "elementname", ",", "elementvar", ")", "in", "self", ".", "_elementvars", ".", "items", "(", ")", ":", "value", "[", "elementname", "]", "=", "elementvar", ".", "get", "(", ")", "return", "value" ]
38.5
0.008475
def assert_not_visible(self, selector, testid=None, **kwargs):
    """Assert that the element is not visible in the dom

    Args:
        selector (str): the selector used to find the element
        testid (str): the test id used when registering the result

    Kwargs:
        wait_until_not_visible (bool)
        highlight (bool)

    Returns:
        bool: True if the assertion succeeded; False otherwise.
    """
    self.info_log(
        "Assert not visible selector(%s) testid(%s)" % (selector, testid)
    )

    highlight = kwargs.get(
        'highlight',
        BROME_CONFIG['highlight']['highlight_on_assertion_failure']
    )
    self.debug_log("effective highlight: %s" % highlight)

    wait_until_not_visible = kwargs.get(
        'wait_until_not_visible',
        BROME_CONFIG['proxy_driver']['wait_until_not_visible_before_assert_not_visible']  # noqa
    )
    self.debug_log(
        "effective wait_until_not_visible: %s" % wait_until_not_visible
    )

    if wait_until_not_visible:
        self.wait_until_not_visible(selector, raise_exception=False)

    element = self.find(
        selector,
        raise_exception=False,
        wait_until_visible=False,
        wait_until_present=False
    )
    if element and element.is_displayed(raise_exception=False):
        data = self.execute_script(
            "return arguments[0].getBoundingClientRect();",
            element._element
        )

        if highlight:
            element.highlight(
                style=BROME_CONFIG['highlight']['style_on_assertion_failure']  # noqa
            )
        if testid is not None:
            self.create_test_result(testid, False, extra_data={
                'bounding_client_rect': data,
                'video_x_offset': self.browser_config.get('video_x_offset', 0),  # noqa
                'video_y_offset': self.browser_config.get('video_y_offset', 0)  # noqa
            })

        return False
    else:
        if testid is not None:
            self.create_test_result(testid, True)

        return True
[ "def", "assert_not_visible", "(", "self", ",", "selector", ",", "testid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "info_log", "(", "\"Assert not visible selector(%s) testid(%s)\"", "%", "(", "selector", ",", "testid", ")", ")", "highlight", "=", "kwargs", ".", "get", "(", "'highlight'", ",", "BROME_CONFIG", "[", "'highlight'", "]", "[", "'highlight_on_assertion_failure'", "]", ")", "self", ".", "debug_log", "(", "\"effective highlight: %s\"", "%", "highlight", ")", "wait_until_not_visible", "=", "kwargs", ".", "get", "(", "'wait_until_not_visible'", ",", "BROME_CONFIG", "[", "'proxy_driver'", "]", "[", "'wait_until_not_visible_before_assert_not_visible'", "]", "# noqa", ")", "self", ".", "debug_log", "(", "\"effective wait_until_not_visible: %s\"", "%", "wait_until_not_visible", ")", "if", "wait_until_not_visible", ":", "self", ".", "wait_until_not_visible", "(", "selector", ",", "raise_exception", "=", "False", ")", "element", "=", "self", ".", "find", "(", "selector", ",", "raise_exception", "=", "False", ",", "wait_until_visible", "=", "False", ",", "wait_until_present", "=", "False", ")", "if", "element", "and", "element", ".", "is_displayed", "(", "raise_exception", "=", "False", ")", ":", "data", "=", "self", ".", "execute_script", "(", "\"return arguments[0].getBoundingClientRect();\"", ",", "element", ".", "_element", ")", "if", "highlight", ":", "element", ".", "highlight", "(", "style", "=", "BROME_CONFIG", "[", "'highlight'", "]", "[", "'style_on_assertion_failure'", "]", "# noqa", ")", "if", "testid", "is", "not", "None", ":", "self", ".", "create_test_result", "(", "testid", ",", "False", ",", "extra_data", "=", "{", "'bounding_client_rect'", ":", "data", ",", "'video_x_offset'", ":", "self", ".", "browser_config", ".", "get", "(", "'video_x_offset'", ",", "0", ")", ",", "# noqa", "'video_y_offset'", ":", "self", ".", "browser_config", ".", "get", "(", "'video_y_offset'", ",", "0", ")", "# noqa", "}", ")", "return", "False", "else", ":", "if", "testid", "is", "not", "None", ":", "self", ".", "create_test_result", "(", "testid", ",", "True", ")", "return", "True" ]
33.90625
0.000896
def aggregator(name, func, *args, type=None): 'Define simple aggregator `name` that calls func(values)' def _func(col, rows): # wrap builtins so they can have a .type vals = list(col.getValues(rows)) try: return func(vals, *args) except Exception as e: if len(vals) == 0: return None return e aggregators[name] = _defaggr(name, type, _func)
[ "def", "aggregator", "(", "name", ",", "func", ",", "*", "args", ",", "type", "=", "None", ")", ":", "def", "_func", "(", "col", ",", "rows", ")", ":", "# wrap builtins so they can have a .type", "vals", "=", "list", "(", "col", ".", "getValues", "(", "rows", ")", ")", "try", ":", "return", "func", "(", "vals", ",", "*", "args", ")", "except", "Exception", "as", "e", ":", "if", "len", "(", "vals", ")", "==", "0", ":", "return", "None", "return", "e", "aggregators", "[", "name", "]", "=", "_defaggr", "(", "name", ",", "type", ",", "_func", ")" ]
34.916667
0.002326
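A hedged sketch of the registration pattern with the module-level pieces stubbed out (`aggregators` and `_defaggr` exist in the real module; the stubs here are assumptions for illustration, and `aggregator` as defined above is assumed to be in scope):

# Stubs for the module-level dependencies (assumed shapes, illustration only).
aggregators = {}

def _defaggr(name, type, func):
    func.name, func.type = name, type
    return func

class Col:  # toy column exposing getValues()
    def getValues(self, rows):
        return rows

aggregator('min', min, type=float)
print(aggregators['min'](Col(), [3, 1, 2]))  # 1
print(aggregators['min'](Col(), []))         # None (empty input)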
def is_valid_nc3_name(s):
    """Test whether an object can be validly converted to a netCDF-3
    dimension, variable or attribute name

    Earlier versions of the netCDF C-library reference implementation
    enforced a more restricted set of characters in creating new names,
    but permitted reading names containing arbitrary bytes. This
    specification extends the permitted characters in names to include
    multi-byte UTF-8 encoded Unicode and additional printing characters
    from the US-ASCII alphabet. The first character of a name must be
    alphanumeric, a multi-byte UTF-8 character, or '_' (reserved for
    special names with meaning to implementations, such as the
    "_FillValue" attribute). Subsequent characters may also include
    printing special characters, except for '/' which is not allowed in
    names. Names that have trailing space characters are also not
    permitted.
    """
    if not isinstance(s, str):
        return False
    num_bytes = len(s.encode('utf-8'))
    return ((unicodedata.normalize('NFC', s) == s) and
            (s not in _reserved_names) and
            (num_bytes >= 1) and
            ('/' not in s) and
            (s[-1] != ' ') and
            (_isalnumMUTF8(s[0]) or (s[0] == '_')) and
            all((_isalnumMUTF8(c) or c in _specialchars for c in s)))
[ "def", "is_valid_nc3_name", "(", "s", ")", ":", "if", "not", "isinstance", "(", "s", ",", "str", ")", ":", "return", "False", "if", "not", "isinstance", "(", "s", ",", "str", ")", ":", "s", "=", "s", ".", "decode", "(", "'utf-8'", ")", "num_bytes", "=", "len", "(", "s", ".", "encode", "(", "'utf-8'", ")", ")", "return", "(", "(", "unicodedata", ".", "normalize", "(", "'NFC'", ",", "s", ")", "==", "s", ")", "and", "(", "s", "not", "in", "_reserved_names", ")", "and", "(", "num_bytes", ">=", "0", ")", "and", "(", "'/'", "not", "in", "s", ")", "and", "(", "s", "[", "-", "1", "]", "!=", "' '", ")", "and", "(", "_isalnumMUTF8", "(", "s", "[", "0", "]", ")", "or", "(", "s", "[", "0", "]", "==", "'_'", ")", ")", "and", "all", "(", "(", "_isalnumMUTF8", "(", "c", ")", "or", "c", "in", "_specialchars", "for", "c", "in", "s", ")", ")", ")" ]
46.896552
0.00072
def question_default_add_related_pks(self, obj): """Add related primary keys to a Question instance.""" if not hasattr(obj, '_choice_pks'): obj._choice_pks = list(obj.choices.values_list('pk', flat=True))
[ "def", "question_default_add_related_pks", "(", "self", ",", "obj", ")", ":", "if", "not", "hasattr", "(", "obj", ",", "'_choice_pks'", ")", ":", "obj", ".", "_choice_pks", "=", "list", "(", "obj", ".", "choices", ".", "values_list", "(", "'pk'", ",", "flat", "=", "True", ")", ")" ]
57.25
0.008621
def run_async(self): """ Spawns a new thread that runs the message loop until the Pebble disconnects. ``run_async`` will call :meth:`fetch_watch_info` on your behalf, and block until it receives a response. """ thread = threading.Thread(target=self.run_sync) thread.daemon = True thread.name = "PebbleConnection" thread.start() self.fetch_watch_info()
[ "def", "run_async", "(", "self", ")", ":", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "run_sync", ")", "thread", ".", "daemon", "=", "True", "thread", ".", "name", "=", "\"PebbleConnection\"", "thread", ".", "start", "(", ")", "self", ".", "fetch_watch_info", "(", ")" ]
41.4
0.009456
def ordered_symbols(self): """ :return: list of all symbols in this model, topologically sorted so they can be evaluated in the correct order. Within each group of equal priority symbols, we sort by the order of the derivative. """ key_func = lambda s: [isinstance(s, sympy.Derivative), isinstance(s, sympy.Derivative) and s.derivative_count] symbols = [] for symbol in toposort(self.connectivity_mapping): symbols.extend(sorted(symbol, key=key_func)) return symbols
[ "def", "ordered_symbols", "(", "self", ")", ":", "key_func", "=", "lambda", "s", ":", "[", "isinstance", "(", "s", ",", "sympy", ".", "Derivative", ")", ",", "isinstance", "(", "s", ",", "sympy", ".", "Derivative", ")", "and", "s", ".", "derivative_count", "]", "symbols", "=", "[", "]", "for", "symbol", "in", "toposort", "(", "self", ".", "connectivity_mapping", ")", ":", "symbols", ".", "extend", "(", "sorted", "(", "symbol", ",", "key", "=", "key_func", ")", ")", "return", "symbols" ]
39
0.011686
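The grouping step on its own, using the `toposort` package the method appears to rely on; the dependency mapping below is a made-up stand-in for `self.connectivity_mapping` (in the real method each group is additionally sorted so plain symbols precede their derivatives):

from toposort import toposort  # PyPI 'toposort' (assumed dependency)

# y depends on x and a; a depends on x.
connectivity = {'y': {'x', 'a'}, 'a': {'x'}}
for group in toposort(connectivity):
    print(sorted(group))
# ['x']
# ['a']
# ['y']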
def print_usage(self, hint=None):
    """Usage format should be like:

    Lineno | Content
    1      | Script description (__doc__)
    2      | Usage: {script name} [COMMAND] [ARGUMENTS]
    3      | \n
    4      | Commands:
    5      | cmd1            cmd1 description.
    6      | cmd2isverylong  cmd2 description, and it is also
    7      |                 long enough to wrap.
    8      | cmd3            cmd3 description.
    """
    buf = []

    # Description
    if __doc__:
        buf.append(__doc__)

    # Usage
    script_name = sys.argv[0]
    buf.append('Usage: %s [COMMAND] [ARGUMENTS]' % script_name)
    buf.append('')
    buf.append('Commands:')

    # Commands
    indent_size = 2
    tab_size = 4
    doc_width = 50
    grid_len = max(len(i) for i in list(self._commands.keys())) + tab_size
    for name in self._commands_list:
        command = self._commands[name]
        line = ' ' * indent_size + name + ' ' * (grid_len - len(name))
        doc = command.doc
        pieces = [doc[i:i + doc_width] for i in range(0, len(doc), doc_width)]
        line += pieces[0]
        if len(pieces) > 1:
            line += '\n'
            line += '\n'.join(' ' * (grid_len + 2) + i for i in pieces[1:])
        buf.append(line)

    print('\n'.join(buf))
[ "def", "print_usage", "(", "self", ",", "hint", "=", "None", ")", ":", "buf", "=", "[", "]", "# Description", "if", "__doc__", ":", "buf", ".", "append", "(", "__doc__", ")", "# Usage", "script_name", "=", "sys", ".", "argv", "[", "0", "]", "buf", ".", "append", "(", "'Usage: %s [COMMAND] [ARGUMENTS]'", "%", "script_name", ")", "buf", ".", "append", "(", "''", ")", "buf", ".", "append", "(", "'Commands:'", ")", "# Commands", "indent_size", "=", "2", "tab_size", "=", "4", "doc_width", "=", "50", "grid_len", "=", "max", "(", "len", "(", "i", ")", "for", "i", "in", "list", "(", "self", ".", "_commands", ".", "keys", "(", ")", ")", ")", "+", "tab_size", "for", "name", "in", "self", ".", "_commands_list", ":", "command", "=", "self", ".", "_commands", "[", "name", "]", "line", "=", "' '", "*", "indent_size", "+", "name", "+", "' '", "*", "(", "grid_len", "-", "len", "(", "name", ")", ")", "doc", "=", "command", ".", "doc", "pieces", "=", "[", "doc", "[", "i", ":", "i", "+", "doc_width", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "doc", ")", ",", "doc_width", ")", "]", "line", "+=", "pieces", "[", "0", "]", "if", "len", "(", "pieces", ")", ">", "1", ":", "line", "+=", "'\\n'", "line", "+=", "'\\n'", ".", "join", "(", "' '", "*", "(", "grid_len", "+", "2", ")", "+", "i", "for", "i", "in", "pieces", "[", "1", ":", "]", ")", "buf", ".", "append", "(", "line", ")", "print", "(", "'\\n'", ".", "join", "(", "buf", ")", ")" ]
31.681818
0.002088
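The column arithmetic in isolation: names are padded to a common grid and long descriptions are sliced to `doc_width` and re-indented (standalone sketch with hypothetical commands):

commands = {'cmd1': 'cmd1 description.',
            'cmd2isverylong': 'a description long enough that it must wrap.'}
indent_size, tab_size, doc_width = 2, 4, 20
grid_len = max(len(n) for n in commands) + tab_size

for name, doc in commands.items():
    pieces = [doc[i:i + doc_width] for i in range(0, len(doc), doc_width)]
    line = ' ' * indent_size + name + ' ' * (grid_len - len(name)) + pieces[0]
    for piece in pieces[1:]:
        line += '\n' + ' ' * (grid_len + 2) + piece
    print(line)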
def convert_to_sympy_matrix(expr, full_space=None): """Convert a QNET expression to an explicit ``n x n`` instance of `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries of the matrix may contain symbols. Parameters: expr: a QNET expression full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The Hilbert space in which `expr` is defined. If not given, ``expr.space`` is used. The Hilbert space must have a well-defined basis. Raises: qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space` does not have a defined basis ValueError: if `expr` is not in `full_space`, or if `expr` cannot be converted. """ if full_space is None: full_space = expr.space if not expr.space.is_tensor_factor_of(full_space): raise ValueError("expr must be in full_space") if expr is IdentityOperator: return sympy.eye(full_space.dimension) elif expr is ZeroOperator: return 0 elif isinstance(expr, LocalOperator): n = full_space.dimension if full_space != expr.space: all_spaces = full_space.local_factors own_space_index = all_spaces.index(expr.space) factors = [sympy.eye(s.dimension) for s in all_spaces[:own_space_index]] factors.append(convert_to_sympy_matrix(expr, expr.space)) factors.extend([sympy.eye(s.dimension) for s in all_spaces[own_space_index + 1:]]) return tensor(*factors) if isinstance(expr, (Create, Jz, Jplus)): return SympyCreate(n) elif isinstance(expr, (Destroy, Jminus)): return SympyCreate(n).H elif isinstance(expr, Phase): phi = expr.phase result = sympy.zeros(n) for i in range(n): result[i, i] = sympy.exp(sympy.I * i * phi) return result elif isinstance(expr, Displace): alpha = expr.operands[1] a = SympyCreate(n) return (alpha * a - alpha.conjugate() * a.H).exp() elif isinstance(expr, Squeeze): eta = expr.operands[1] a = SympyCreate(n) return ((eta/2) * a**2 - (eta.conjugate()/2) * (a.H)**2).exp() elif isinstance(expr, LocalSigma): ket = basis_state(expr.index_j, n) bra = basis_state(expr.index_k, n).H return ket * bra else: raise ValueError("Cannot convert '%s' of type %s" % (str(expr), type(expr))) elif (isinstance(expr, Operator) and isinstance(expr, Operation)): if isinstance(expr, OperatorPlus): s = convert_to_sympy_matrix(expr.operands[0], full_space) for op in expr.operands[1:]: s += convert_to_sympy_matrix(op, full_space) return s elif isinstance(expr, OperatorTimes): # if any factor acts non-locally, we need to expand distributively. 
if any(len(op.space) > 1 for op in expr.operands): se = expr.expand() if se == expr: raise ValueError("Cannot represent as sympy matrix: %s" % expr) return convert_to_sympy_matrix(se, full_space) all_spaces = full_space.local_factors by_space = [] ck = 0 for ls in all_spaces: # group factors by associated local space ls_ops = [convert_to_sympy_matrix(o, o.space) for o in expr.operands if o.space == ls] if len(ls_ops): # compute factor associated with local space by_space.append(ls_ops[0]) for ls_op in ls_ops[1:]: by_space[-1] *= ls_op ck += len(ls_ops) else: # if trivial action, take identity matrix by_space.append(sympy.eye(ls.dimension)) assert ck == len(expr.operands) # combine local factors in tensor product if len(by_space) == 1: return by_space[0] else: return tensor(*by_space) elif isinstance(expr, Adjoint): return convert_to_sympy_matrix(expr.operand, full_space).H elif isinstance(expr, PseudoInverse): raise NotImplementedError( 'Cannot convert PseudoInverse to sympy matrix') elif isinstance(expr, NullSpaceProjector): raise NotImplementedError( 'Cannot convert NullSpaceProjector to sympy') elif isinstance(expr, ScalarTimesOperator): return expr.coeff * convert_to_sympy_matrix(expr.term, full_space) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr))) else: raise ValueError( "Cannot convert '%s' of type %s" % (str(expr), type(expr)))
[ "def", "convert_to_sympy_matrix", "(", "expr", ",", "full_space", "=", "None", ")", ":", "if", "full_space", "is", "None", ":", "full_space", "=", "expr", ".", "space", "if", "not", "expr", ".", "space", ".", "is_tensor_factor_of", "(", "full_space", ")", ":", "raise", "ValueError", "(", "\"expr must be in full_space\"", ")", "if", "expr", "is", "IdentityOperator", ":", "return", "sympy", ".", "eye", "(", "full_space", ".", "dimension", ")", "elif", "expr", "is", "ZeroOperator", ":", "return", "0", "elif", "isinstance", "(", "expr", ",", "LocalOperator", ")", ":", "n", "=", "full_space", ".", "dimension", "if", "full_space", "!=", "expr", ".", "space", ":", "all_spaces", "=", "full_space", ".", "local_factors", "own_space_index", "=", "all_spaces", ".", "index", "(", "expr", ".", "space", ")", "factors", "=", "[", "sympy", ".", "eye", "(", "s", ".", "dimension", ")", "for", "s", "in", "all_spaces", "[", ":", "own_space_index", "]", "]", "factors", ".", "append", "(", "convert_to_sympy_matrix", "(", "expr", ",", "expr", ".", "space", ")", ")", "factors", ".", "extend", "(", "[", "sympy", ".", "eye", "(", "s", ".", "dimension", ")", "for", "s", "in", "all_spaces", "[", "own_space_index", "+", "1", ":", "]", "]", ")", "return", "tensor", "(", "*", "factors", ")", "if", "isinstance", "(", "expr", ",", "(", "Create", ",", "Jz", ",", "Jplus", ")", ")", ":", "return", "SympyCreate", "(", "n", ")", "elif", "isinstance", "(", "expr", ",", "(", "Destroy", ",", "Jminus", ")", ")", ":", "return", "SympyCreate", "(", "n", ")", ".", "H", "elif", "isinstance", "(", "expr", ",", "Phase", ")", ":", "phi", "=", "expr", ".", "phase", "result", "=", "sympy", ".", "zeros", "(", "n", ")", "for", "i", "in", "range", "(", "n", ")", ":", "result", "[", "i", ",", "i", "]", "=", "sympy", ".", "exp", "(", "sympy", ".", "I", "*", "i", "*", "phi", ")", "return", "result", "elif", "isinstance", "(", "expr", ",", "Displace", ")", ":", "alpha", "=", "expr", ".", "operands", "[", "1", "]", "a", "=", "SympyCreate", "(", "n", ")", "return", "(", "alpha", "*", "a", "-", "alpha", ".", "conjugate", "(", ")", "*", "a", ".", "H", ")", ".", "exp", "(", ")", "elif", "isinstance", "(", "expr", ",", "Squeeze", ")", ":", "eta", "=", "expr", ".", "operands", "[", "1", "]", "a", "=", "SympyCreate", "(", "n", ")", "return", "(", "(", "eta", "/", "2", ")", "*", "a", "**", "2", "-", "(", "eta", ".", "conjugate", "(", ")", "/", "2", ")", "*", "(", "a", ".", "H", ")", "**", "2", ")", ".", "exp", "(", ")", "elif", "isinstance", "(", "expr", ",", "LocalSigma", ")", ":", "ket", "=", "basis_state", "(", "expr", ".", "index_j", ",", "n", ")", "bra", "=", "basis_state", "(", "expr", ".", "index_k", ",", "n", ")", ".", "H", "return", "ket", "*", "bra", "else", ":", "raise", "ValueError", "(", "\"Cannot convert '%s' of type %s\"", "%", "(", "str", "(", "expr", ")", ",", "type", "(", "expr", ")", ")", ")", "elif", "(", "isinstance", "(", "expr", ",", "Operator", ")", "and", "isinstance", "(", "expr", ",", "Operation", ")", ")", ":", "if", "isinstance", "(", "expr", ",", "OperatorPlus", ")", ":", "s", "=", "convert_to_sympy_matrix", "(", "expr", ".", "operands", "[", "0", "]", ",", "full_space", ")", "for", "op", "in", "expr", ".", "operands", "[", "1", ":", "]", ":", "s", "+=", "convert_to_sympy_matrix", "(", "op", ",", "full_space", ")", "return", "s", "elif", "isinstance", "(", "expr", ",", "OperatorTimes", ")", ":", "# if any factor acts non-locally, we need to expand distributively.", "if", "any", "(", "len", "(", "op", ".", "space", ")", ">", 
"1", "for", "op", "in", "expr", ".", "operands", ")", ":", "se", "=", "expr", ".", "expand", "(", ")", "if", "se", "==", "expr", ":", "raise", "ValueError", "(", "\"Cannot represent as sympy matrix: %s\"", "%", "expr", ")", "return", "convert_to_sympy_matrix", "(", "se", ",", "full_space", ")", "all_spaces", "=", "full_space", ".", "local_factors", "by_space", "=", "[", "]", "ck", "=", "0", "for", "ls", "in", "all_spaces", ":", "# group factors by associated local space", "ls_ops", "=", "[", "convert_to_sympy_matrix", "(", "o", ",", "o", ".", "space", ")", "for", "o", "in", "expr", ".", "operands", "if", "o", ".", "space", "==", "ls", "]", "if", "len", "(", "ls_ops", ")", ":", "# compute factor associated with local space", "by_space", ".", "append", "(", "ls_ops", "[", "0", "]", ")", "for", "ls_op", "in", "ls_ops", "[", "1", ":", "]", ":", "by_space", "[", "-", "1", "]", "*=", "ls_op", "ck", "+=", "len", "(", "ls_ops", ")", "else", ":", "# if trivial action, take identity matrix", "by_space", ".", "append", "(", "sympy", ".", "eye", "(", "ls", ".", "dimension", ")", ")", "assert", "ck", "==", "len", "(", "expr", ".", "operands", ")", "# combine local factors in tensor product", "if", "len", "(", "by_space", ")", "==", "1", ":", "return", "by_space", "[", "0", "]", "else", ":", "return", "tensor", "(", "*", "by_space", ")", "elif", "isinstance", "(", "expr", ",", "Adjoint", ")", ":", "return", "convert_to_sympy_matrix", "(", "expr", ".", "operand", ",", "full_space", ")", ".", "H", "elif", "isinstance", "(", "expr", ",", "PseudoInverse", ")", ":", "raise", "NotImplementedError", "(", "'Cannot convert PseudoInverse to sympy matrix'", ")", "elif", "isinstance", "(", "expr", ",", "NullSpaceProjector", ")", ":", "raise", "NotImplementedError", "(", "'Cannot convert NullSpaceProjector to sympy'", ")", "elif", "isinstance", "(", "expr", ",", "ScalarTimesOperator", ")", ":", "return", "expr", ".", "coeff", "*", "convert_to_sympy_matrix", "(", "expr", ".", "term", ",", "full_space", ")", "else", ":", "raise", "ValueError", "(", "\"Cannot convert '%s' of type %s\"", "%", "(", "str", "(", "expr", ")", ",", "type", "(", "expr", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "\"Cannot convert '%s' of type %s\"", "%", "(", "str", "(", "expr", ")", ",", "type", "(", "expr", ")", ")", ")" ]
44.122807
0.000194
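The embedding in the LocalOperator branch is a Kronecker product; with explicit sympy matrices the same effect can be reproduced via TensorProduct (a sketch; the module's own `tensor` helper is assumed to do the equivalent):

import sympy
from sympy.physics.quantum import TensorProduct

a = sympy.Matrix([[0, 1], [0, 0]])         # local operator on a 2-dim space
embedded = TensorProduct(sympy.eye(2), a)  # identity on the first factor
print(embedded.shape)  # (4, 4)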
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: WorkspaceCumulativeStatisticsContext for this WorkspaceCumulativeStatisticsInstance :rtype: twilio.rest.taskrouter.v1.workspace.workspace_cumulative_statistics.WorkspaceCumulativeStatisticsContext """ if self._context is None: self._context = WorkspaceCumulativeStatisticsContext( self._version, workspace_sid=self._solution['workspace_sid'], ) return self._context
[ "def", "_proxy", "(", "self", ")", ":", "if", "self", ".", "_context", "is", "None", ":", "self", ".", "_context", "=", "WorkspaceCumulativeStatisticsContext", "(", "self", ".", "_version", ",", "workspace_sid", "=", "self", ".", "_solution", "[", "'workspace_sid'", "]", ",", ")", "return", "self", ".", "_context" ]
46.785714
0.008982
def build_uri(endpoint, api_version, uri_parts, uri_args={}):
    """
    Build the URL using the endpoint, the api version, the uri parts and
    the args.

    :param dict uri_args: parameters to include in the URL query string.
    :param tuple uri_parts: parts of the URL path.
    :return: A string that represents the absolute URL of the request
    :rtype : str

    The resulting uri is as follows:
        {endpoint}/{api_version}/{uri_part1}/.../{uri_partn}?{uri_args}
    The `uri_args` and the `uri_parts` are url encoded.
    """
    # to unicode
    uri_parts = [unicode(x) for x in uri_parts]
    # and encoded
    uri_parts = [urllib.quote(x) for x in uri_parts]

    # Add endpoint and version
    all_uri_parts = [endpoint, api_version, ] + uri_parts
    # join parts
    url_to_call = "/".join(all_uri_parts)
    # add params if any
    if uri_args:
        url_to_call = "{}?{}".format(url_to_call, urllib.urlencode(uri_args))
    # return
    return url_to_call
[ "def", "build_uri", "(", "endpoint", ",", "api_version", ",", "uri_parts", ",", "uri_args", "=", "{", "}", ")", ":", "# to unicode", "uri_parts", "=", "[", "unicode", "(", "x", ")", "for", "x", "in", "uri_parts", "]", "# and encoded ", "uri_parts", "=", "[", "urllib", ".", "quote", "(", "x", ")", "for", "x", "in", "uri_parts", "]", "# Add enpoint and version ", "all_uri_parts", "=", "[", "endpoint", ",", "api_version", ",", "]", "+", "uri_parts", "# join parts", "url_to_call", "=", "\"/\"", ".", "join", "(", "all_uri_parts", ")", "# add params if any", "if", "uri_args", ":", "url_to_call", "=", "\"{}?{}\"", ".", "format", "(", "url_to_call", ",", "urllib", ".", "urlencode", "(", "uri_args", ")", ")", "# return", "return", "url_to_call" ]
36.740741
0.008841
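A Python 3 rendering of the same logic (the original targets Python 2's `unicode`/`urllib.quote`), with a worked example of the resulting URL:

from urllib.parse import quote, urlencode  # Python 3 equivalents

def build_uri_py3(endpoint, api_version, uri_parts, uri_args={}):
    parts = [quote(str(x)) for x in uri_parts]
    url = "/".join([endpoint, api_version] + parts)
    if uri_args:
        url = "{}?{}".format(url, urlencode(uri_args))
    return url

print(build_uri_py3('https://api.example.com', 'v1',
                    ('users', 'john doe'), {'page': 2}))
# https://api.example.com/v1/users/john%20doe?page=2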
def _run_dnb_normalization(self, dnb_data, sza_data):
    """Scale the DNB data using an adaptive histogram equalization method.

    Args:
        dnb_data (ndarray): Day/Night Band data array
        sza_data (ndarray): Solar Zenith Angle data array

    """
    # convert dask arrays to DataArray objects
    dnb_data = xr.DataArray(dnb_data, dims=('y', 'x'))
    sza_data = xr.DataArray(sza_data, dims=('y', 'x'))

    good_mask = ~(dnb_data.isnull() | sza_data.isnull())
    # good_mask = ~(dnb_data.mask | sza_data.mask)
    output_dataset = dnb_data.where(good_mask)
    # we only need the numpy array
    output_dataset = output_dataset.values.copy()
    dnb_data = dnb_data.values
    sza_data = sza_data.values

    day_mask, mixed_mask, night_mask = make_day_night_masks(
        sza_data,
        good_mask.values,
        self.high_angle_cutoff,
        self.low_angle_cutoff,
        stepsDegrees=self.mixed_degree_step)

    did_equalize = False
    has_multi_times = len(mixed_mask) > 0
    if day_mask.any():
        did_equalize = True
        if self.adaptive_day == "always" or (
                has_multi_times and self.adaptive_day == "multiple"):
            LOG.debug("Adaptive histogram equalizing DNB day data...")
            local_histogram_equalization(
                dnb_data,
                day_mask,
                valid_data_mask=good_mask.values,
                local_radius_px=self.day_radius_pixels,
                out=output_dataset)
        else:
            LOG.debug("Histogram equalizing DNB day data...")
            histogram_equalization(dnb_data,
                                   day_mask,
                                   out=output_dataset)
    if mixed_mask:
        for mask in mixed_mask:
            if mask.any():
                did_equalize = True
                if self.adaptive_mixed == "always" or (
                        has_multi_times and
                        self.adaptive_mixed == "multiple"):
                    LOG.debug(
                        "Adaptive histogram equalizing DNB mixed data...")
                    local_histogram_equalization(
                        dnb_data,
                        mask,
                        valid_data_mask=good_mask.values,
                        local_radius_px=self.mixed_radius_pixels,
                        out=output_dataset)
                else:
                    LOG.debug("Histogram equalizing DNB mixed data...")
                    # use the current mixed-step mask here, not day_mask
                    histogram_equalization(dnb_data,
                                           mask,
                                           out=output_dataset)
    if night_mask.any():
        did_equalize = True
        if self.adaptive_night == "always" or (
                has_multi_times and self.adaptive_night == "multiple"):
            LOG.debug("Adaptive histogram equalizing DNB night data...")
            local_histogram_equalization(
                dnb_data,
                night_mask,
                valid_data_mask=good_mask.values,
                local_radius_px=self.night_radius_pixels,
                out=output_dataset)
        else:
            LOG.debug("Histogram equalizing DNB night data...")
            histogram_equalization(dnb_data,
                                   night_mask,
                                   out=output_dataset)

    if not did_equalize:
        raise RuntimeError("No valid data found to histogram equalize")

    return output_dataset
[ "def", "_run_dnb_normalization", "(", "self", ",", "dnb_data", ",", "sza_data", ")", ":", "# convert dask arrays to DataArray objects", "dnb_data", "=", "xr", ".", "DataArray", "(", "dnb_data", ",", "dims", "=", "(", "'y'", ",", "'x'", ")", ")", "sza_data", "=", "xr", ".", "DataArray", "(", "sza_data", ",", "dims", "=", "(", "'y'", ",", "'x'", ")", ")", "good_mask", "=", "~", "(", "dnb_data", ".", "isnull", "(", ")", "|", "sza_data", ".", "isnull", "(", ")", ")", "# good_mask = ~(dnb_data.mask | sza_data.mask)", "output_dataset", "=", "dnb_data", ".", "where", "(", "good_mask", ")", "# we only need the numpy array", "output_dataset", "=", "output_dataset", ".", "values", ".", "copy", "(", ")", "dnb_data", "=", "dnb_data", ".", "values", "sza_data", "=", "sza_data", ".", "values", "day_mask", ",", "mixed_mask", ",", "night_mask", "=", "make_day_night_masks", "(", "sza_data", ",", "good_mask", ".", "values", ",", "self", ".", "high_angle_cutoff", ",", "self", ".", "low_angle_cutoff", ",", "stepsDegrees", "=", "self", ".", "mixed_degree_step", ")", "did_equalize", "=", "False", "has_multi_times", "=", "len", "(", "mixed_mask", ")", ">", "0", "if", "day_mask", ".", "any", "(", ")", ":", "did_equalize", "=", "True", "if", "self", ".", "adaptive_day", "==", "\"always\"", "or", "(", "has_multi_times", "and", "self", ".", "adaptive_day", "==", "\"multiple\"", ")", ":", "LOG", ".", "debug", "(", "\"Adaptive histogram equalizing DNB day data...\"", ")", "local_histogram_equalization", "(", "dnb_data", ",", "day_mask", ",", "valid_data_mask", "=", "good_mask", ".", "values", ",", "local_radius_px", "=", "self", ".", "day_radius_pixels", ",", "out", "=", "output_dataset", ")", "else", ":", "LOG", ".", "debug", "(", "\"Histogram equalizing DNB day data...\"", ")", "histogram_equalization", "(", "dnb_data", ",", "day_mask", ",", "out", "=", "output_dataset", ")", "if", "mixed_mask", ":", "for", "mask", "in", "mixed_mask", ":", "if", "mask", ".", "any", "(", ")", ":", "did_equalize", "=", "True", "if", "self", ".", "adaptive_mixed", "==", "\"always\"", "or", "(", "has_multi_times", "and", "self", ".", "adaptive_mixed", "==", "\"multiple\"", ")", ":", "LOG", ".", "debug", "(", "\"Adaptive histogram equalizing DNB mixed data...\"", ")", "local_histogram_equalization", "(", "dnb_data", ",", "mask", ",", "valid_data_mask", "=", "good_mask", ".", "values", ",", "local_radius_px", "=", "self", ".", "mixed_radius_pixels", ",", "out", "=", "output_dataset", ")", "else", ":", "LOG", ".", "debug", "(", "\"Histogram equalizing DNB mixed data...\"", ")", "histogram_equalization", "(", "dnb_data", ",", "day_mask", ",", "out", "=", "output_dataset", ")", "if", "night_mask", ".", "any", "(", ")", ":", "did_equalize", "=", "True", "if", "self", ".", "adaptive_night", "==", "\"always\"", "or", "(", "has_multi_times", "and", "self", ".", "adaptive_night", "==", "\"multiple\"", ")", ":", "LOG", ".", "debug", "(", "\"Adaptive histogram equalizing DNB night data...\"", ")", "local_histogram_equalization", "(", "dnb_data", ",", "night_mask", ",", "valid_data_mask", "=", "good_mask", ".", "values", ",", "local_radius_px", "=", "self", ".", "night_radius_pixels", ",", "out", "=", "output_dataset", ")", "else", ":", "LOG", ".", "debug", "(", "\"Histogram equalizing DNB night data...\"", ")", "histogram_equalization", "(", "dnb_data", ",", "night_mask", ",", "out", "=", "output_dataset", ")", "if", "not", "did_equalize", ":", "raise", "RuntimeError", "(", "\"No valid data found to histogram equalize\"", ")", "return", 
"output_dataset" ]
42.837209
0.000531
def values(self): """ Return all values as numpy-array (mean, var, min, max, num). """ return np.array([self.mean, self.var, self.min, self.max, self.num])
[ "def", "values", "(", "self", ")", ":", "return", "np", ".", "array", "(", "[", "self", ".", "mean", ",", "self", ".", "var", ",", "self", ".", "min", ",", "self", ".", "max", ",", "self", ".", "num", "]", ")" ]
36.6
0.010695
def _check_no_current_table(new_obj, current_table): """ Raises exception if we try to add a relation or a column with no current table. """ if current_table is None: msg = 'Cannot add {} before adding table' if isinstance(new_obj, Relation): raise NoCurrentTableException(msg.format('relation')) if isinstance(new_obj, Column): raise NoCurrentTableException(msg.format('column'))
[ "def", "_check_no_current_table", "(", "new_obj", ",", "current_table", ")", ":", "if", "current_table", "is", "None", ":", "msg", "=", "'Cannot add {} before adding table'", "if", "isinstance", "(", "new_obj", ",", "Relation", ")", ":", "raise", "NoCurrentTableException", "(", "msg", ".", "format", "(", "'relation'", ")", ")", "if", "isinstance", "(", "new_obj", ",", "Column", ")", ":", "raise", "NoCurrentTableException", "(", "msg", ".", "format", "(", "'column'", ")", ")" ]
48
0.002273
def start(self): """ Begins the job by kicking off all tasks with no dependencies. """ logger.info('Job {0} starting job run'.format(self.name)) if not self.state.allow_start: raise DagobahError('job cannot be started in its current state; ' + 'it is probably already running') self.initialize_snapshot() # don't increment if the job was run manually if self.cron_iter and datetime.utcnow() > self.next_run: self.next_run = self.cron_iter.get_next(datetime) self.run_log = {'job_id': self.job_id, 'name': self.name, 'parent_id': self.parent.dagobah_id, 'log_id': self.backend.get_new_log_id(), 'start_time': datetime.utcnow(), 'tasks': {}} self._set_status('running') logger.debug('Job {0} resetting all tasks prior to start'.format(self.name)) for task in self.tasks.itervalues(): task.reset() logger.debug('Job {0} seeding run logs'.format(self.name)) for task_name in self.ind_nodes(self.snapshot): self._put_task_in_run_log(task_name) self.tasks[task_name].start() self._commit_run_log()
[ "def", "start", "(", "self", ")", ":", "logger", ".", "info", "(", "'Job {0} starting job run'", ".", "format", "(", "self", ".", "name", ")", ")", "if", "not", "self", ".", "state", ".", "allow_start", ":", "raise", "DagobahError", "(", "'job cannot be started in its current state; '", "+", "'it is probably already running'", ")", "self", ".", "initialize_snapshot", "(", ")", "# don't increment if the job was run manually", "if", "self", ".", "cron_iter", "and", "datetime", ".", "utcnow", "(", ")", ">", "self", ".", "next_run", ":", "self", ".", "next_run", "=", "self", ".", "cron_iter", ".", "get_next", "(", "datetime", ")", "self", ".", "run_log", "=", "{", "'job_id'", ":", "self", ".", "job_id", ",", "'name'", ":", "self", ".", "name", ",", "'parent_id'", ":", "self", ".", "parent", ".", "dagobah_id", ",", "'log_id'", ":", "self", ".", "backend", ".", "get_new_log_id", "(", ")", ",", "'start_time'", ":", "datetime", ".", "utcnow", "(", ")", ",", "'tasks'", ":", "{", "}", "}", "self", ".", "_set_status", "(", "'running'", ")", "logger", ".", "debug", "(", "'Job {0} resetting all tasks prior to start'", ".", "format", "(", "self", ".", "name", ")", ")", "for", "task", "in", "self", ".", "tasks", ".", "itervalues", "(", ")", ":", "task", ".", "reset", "(", ")", "logger", ".", "debug", "(", "'Job {0} seeding run logs'", ".", "format", "(", "self", ".", "name", ")", ")", "for", "task_name", "in", "self", ".", "ind_nodes", "(", "self", ".", "snapshot", ")", ":", "self", ".", "_put_task_in_run_log", "(", "task_name", ")", "self", ".", "tasks", "[", "task_name", "]", ".", "start", "(", ")", "self", ".", "_commit_run_log", "(", ")" ]
40.0625
0.002285
def validate_args(api_key, *, rate="informers", **kwargs):
    "Validates the arguments and builds the request parameters"
    rate = Rate.validate(rate)
    headers = {"X-Yandex-API-Key": api_key}
    url = "https://api.weather.yandex.ru/v1/{}".format(rate)
    if rate == "informers":
        params = ARGS_SCHEMA(kwargs)
    else:
        params = ARGS_FORECAST_SCHEMA(kwargs)
    return (url,), {"headers": headers, "params": params}
[ "def", "validate_args", "(", "api_key", ",", "*", ",", "rate", "=", "\"informers\"", ",", "*", "*", "kwargs", ")", ":", "rate", "=", "Rate", ".", "validate", "(", "rate", ")", "headers", "=", "{", "\"X-Yandex-API-Key\"", ":", "api_key", "}", "url", "=", "\"https://api.weather.yandex.ru/v1/{}\"", ".", "format", "(", "rate", ")", "if", "rate", "==", "\"informers\"", ":", "params", "=", "ARGS_SCHEMA", "(", "kwargs", ")", "else", ":", "params", "=", "ARGS_FORECAST_SCHEMA", "(", "kwargs", ")", "return", "(", "url", ",", ")", ",", "{", "\"headers\"", ":", "headers", ",", "\"params\"", ":", "params", "}" ]
41.4
0.002364
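A minimal usage sketch for validate_args above, not part of the dataset row: the lat/lon keys and the API key value are assumptions (the ARGS_SCHEMA contents are not shown here), and the returned args/kwargs feed straight into requests.get.

import requests

# Hypothetical parameters; only rate="informers" is confirmed by the row above.
args, kwargs = validate_args("my-api-key", rate="informers", lat=55.75, lon=37.62)
response = requests.get(*args, **kwargs)  # GET https://api.weather.yandex.ru/v1/informers
print(response.status_code)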
def _encrypted_data_keys_hash(hasher, encrypted_data_keys):
    """Generates the expected hash for the provided encrypted data keys.

    :param hasher: Existing hasher to use
    :type hasher: cryptography.hazmat.primitives.hashes.Hash
    :param iterable encrypted_data_keys: Encrypted data keys to hash
    :returns: Concatenated, sorted list of all hashes
    :rtype: bytes
    """
    hashed_keys = []
    for edk in encrypted_data_keys:
        serialized_edk = serialize_encrypted_data_key(edk)
        _hasher = hasher.copy()
        _hasher.update(serialized_edk)
        hashed_keys.append(_hasher.finalize())
    return b"".join(sorted(hashed_keys))
[ "def", "_encrypted_data_keys_hash", "(", "hasher", ",", "encrypted_data_keys", ")", ":", "hashed_keys", "=", "[", "]", "for", "edk", "in", "encrypted_data_keys", ":", "serialized_edk", "=", "serialize_encrypted_data_key", "(", "edk", ")", "_hasher", "=", "hasher", ".", "copy", "(", ")", "_hasher", ".", "update", "(", "serialized_edk", ")", "hashed_keys", ".", "append", "(", "_hasher", ".", "finalize", "(", ")", ")", "return", "b\"\"", ".", "join", "(", "sorted", "(", "hashed_keys", ")", ")" ]
40.375
0.001513
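A sketch of how a hasher for _encrypted_data_keys_hash is typically built with the cryptography package; the choice of SHA-384 and the encrypted_data_keys iterable are assumptions, and serialize_encrypted_data_key comes from the surrounding project.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

# The seed hasher is copied per key inside the function, so it is never
# finalized itself.
hasher = hashes.Hash(hashes.SHA384(), backend=default_backend())
digest = _encrypted_data_keys_hash(hasher, encrypted_data_keys)  # iterable assumed in scope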
def parse_info(response): "Parse the result of Redis's INFO command into a Python dict" info = {} response = nativestr(response) def get_value(value): if ',' not in value or '=' not in value: try: if '.' in value: return float(value) else: return int(value) except ValueError: return value else: sub_dict = {} for item in value.split(','): k, v = item.rsplit('=', 1) sub_dict[k] = get_value(v) return sub_dict for line in response.splitlines(): if line and not line.startswith('#'): if line.find(':') != -1: key, value = line.split(':', 1) info[key] = get_value(value) else: # if the line isn't splittable, append it to the "__raw__" key info.setdefault('__raw__', []).append(line) return info
[ "def", "parse_info", "(", "response", ")", ":", "info", "=", "{", "}", "response", "=", "nativestr", "(", "response", ")", "def", "get_value", "(", "value", ")", ":", "if", "','", "not", "in", "value", "or", "'='", "not", "in", "value", ":", "try", ":", "if", "'.'", "in", "value", ":", "return", "float", "(", "value", ")", "else", ":", "return", "int", "(", "value", ")", "except", "ValueError", ":", "return", "value", "else", ":", "sub_dict", "=", "{", "}", "for", "item", "in", "value", ".", "split", "(", "','", ")", ":", "k", ",", "v", "=", "item", ".", "rsplit", "(", "'='", ",", "1", ")", "sub_dict", "[", "k", "]", "=", "get_value", "(", "v", ")", "return", "sub_dict", "for", "line", "in", "response", ".", "splitlines", "(", ")", ":", "if", "line", "and", "not", "line", ".", "startswith", "(", "'#'", ")", ":", "if", "line", ".", "find", "(", "':'", ")", "!=", "-", "1", ":", "key", ",", "value", "=", "line", ".", "split", "(", "':'", ",", "1", ")", "info", "[", "key", "]", "=", "get_value", "(", "value", ")", "else", ":", "# if the line isn't splittable, append it to the \"__raw__\" key", "info", ".", "setdefault", "(", "'__raw__'", ",", "[", "]", ")", ".", "append", "(", "line", ")", "return", "info" ]
31.677419
0.000988
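A worked example of parse_info above on a small INFO snippet, assuming nativestr is an identity on str input: the comma/equals value becomes a nested dict, the comment line is skipped, and the bare line lands under __raw__.

sample = ("# Keyspace\n"
          "db0:keys=2,expires=0,avg_ttl=0\n"
          "redis_version:5.0.7\n"
          "some raw diagnostic line\n")
info = parse_info(sample)
# info["db0"] == {"keys": 2, "expires": 0, "avg_ttl": 0}
# info["redis_version"] == "5.0.7"  (not a valid float, so kept as a string)
# info["__raw__"] == ["some raw diagnostic line"]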
def _rpt_unused_sections(self, prt): """Report unused sections.""" sections_unused = set(self.sections_seen).difference(self.section2goids.keys()) for sec in sections_unused: prt.write(" UNUSED SECTION: {SEC}\n".format(SEC=sec))
[ "def", "_rpt_unused_sections", "(", "self", ",", "prt", ")", ":", "sections_unused", "=", "set", "(", "self", ".", "sections_seen", ")", ".", "difference", "(", "self", ".", "section2goids", ".", "keys", "(", ")", ")", "for", "sec", "in", "sections_unused", ":", "prt", ".", "write", "(", "\" UNUSED SECTION: {SEC}\\n\"", ".", "format", "(", "SEC", "=", "sec", ")", ")" ]
52.2
0.011321
def _field_accessor(name, docstring=None, min_cftime_version='0.0'): """Adapted from pandas.tseries.index._field_accessor""" def f(self, min_cftime_version=min_cftime_version): import cftime version = cftime.__version__ if LooseVersion(version) >= LooseVersion(min_cftime_version): return get_date_field(self._data, name) else: raise ImportError('The {!r} accessor requires a minimum ' 'version of cftime of {}. Found an ' 'installed version of {}.'.format( name, min_cftime_version, version)) f.__name__ = name f.__doc__ = docstring return property(f)
[ "def", "_field_accessor", "(", "name", ",", "docstring", "=", "None", ",", "min_cftime_version", "=", "'0.0'", ")", ":", "def", "f", "(", "self", ",", "min_cftime_version", "=", "min_cftime_version", ")", ":", "import", "cftime", "version", "=", "cftime", ".", "__version__", "if", "LooseVersion", "(", "version", ")", ">=", "LooseVersion", "(", "min_cftime_version", ")", ":", "return", "get_date_field", "(", "self", ".", "_data", ",", "name", ")", "else", ":", "raise", "ImportError", "(", "'The {!r} accessor requires a minimum '", "'version of cftime of {}. Found an '", "'installed version of {}.'", ".", "format", "(", "name", ",", "min_cftime_version", ",", "version", ")", ")", "f", ".", "__name__", "=", "name", "f", ".", "__doc__", "=", "docstring", "return", "property", "(", "f", ")" ]
37.263158
0.001377
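A sketch of how the _field_accessor factory above is used when defining an index class; the class name is made up, and get_date_field plus the _data attribute are assumed from the surrounding module.

class CFTimeIndexSketch:
    # Illustrative only; the real class wires up _data and many accessors.
    def __init__(self, data):
        self._data = data

    year = _field_accessor('year', 'The year of the datetime')
    month = _field_accessor('month', 'The month of the datetime', '1.0.0')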
def verify_checksum(*lines): """Verify the checksum of one or more TLE lines. Raises `ValueError` if any of the lines fails its checksum, and includes the failing line in the error message. """ for line in lines: checksum = line[68:69] if not checksum.isdigit(): continue checksum = int(checksum) computed = compute_checksum(line) if checksum != computed: complaint = ('TLE line gives its checksum as {}' ' but in fact tallies to {}:\n{}') raise ValueError(complaint.format(checksum, computed, line))
[ "def", "verify_checksum", "(", "*", "lines", ")", ":", "for", "line", "in", "lines", ":", "checksum", "=", "line", "[", "68", ":", "69", "]", "if", "not", "checksum", ".", "isdigit", "(", ")", ":", "continue", "checksum", "=", "int", "(", "checksum", ")", "computed", "=", "compute_checksum", "(", "line", ")", "if", "checksum", "!=", "computed", ":", "complaint", "=", "(", "'TLE line gives its checksum as {}'", "' but in fact tallies to {}:\\n{}'", ")", "raise", "ValueError", "(", "complaint", ".", "format", "(", "checksum", ",", "computed", ",", "line", ")", ")" ]
35.705882
0.001605
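compute_checksum is referenced above but not shown in this row; the standard TLE rule (each digit counts as its value, '-' counts as 1, everything else as 0, summed mod 10 over the first 68 characters) would look roughly like this.

def compute_checksum(line):
    # Standard TLE checksum over columns 1-68.
    return sum(int(c) if c.isdigit() else 1 if c == '-' else 0
               for c in line[0:68]) % 10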
def otherrole(self, otherrole):
    """
    Set the ``OTHERROLE`` attribute value.
    """
    if otherrole is not None:
        self._el.set('ROLE', 'OTHER')
        self._el.set('OTHERROLE', otherrole)
[ "def", "otherrole", "(", "self", ",", "otherrole", ")", ":", "if", "otherrole", "is", "not", "None", ":", "self", ".", "_el", ".", "set", "(", "'ROLE'", ",", "'OTHER'", ")", "self", ".", "_el", ".", "set", "(", "'OTHERROLE'", ",", "otherrole", ")" ]
31.571429
0.008811
def rcm_chip_order(machine): """A generator which iterates over a set of chips in a machine in Reverse-Cuthill-McKee order. For use as a chip ordering for the sequential placer. """ # Convert the Machine description into a placement-problem-style-graph # where the vertices are chip coordinate tuples (x, y) and each net # represents the links leaving each chip. This allows us to re-use the # rcm_vertex_order function above to generate an RCM ordering of chips in # the machine. vertices = list(machine) nets = [] for (x, y) in vertices: neighbours = [] for link in Links: if (x, y, link) in machine: dx, dy = link.to_vector() neighbour = ((x + dx) % machine.width, (y + dy) % machine.height) # In principle if the link to chip is marked as working, that # chip should be working. In practice this might not be the # case (especially for carelessly hand-defined Machine # objects). if neighbour in machine: neighbours.append(neighbour) nets.append(Net((x, y), neighbours)) return rcm_vertex_order(vertices, nets)
[ "def", "rcm_chip_order", "(", "machine", ")", ":", "# Convert the Machine description into a placement-problem-style-graph", "# where the vertices are chip coordinate tuples (x, y) and each net", "# represents the links leaving each chip. This allows us to re-use the", "# rcm_vertex_order function above to generate an RCM ordering of chips in", "# the machine.", "vertices", "=", "list", "(", "machine", ")", "nets", "=", "[", "]", "for", "(", "x", ",", "y", ")", "in", "vertices", ":", "neighbours", "=", "[", "]", "for", "link", "in", "Links", ":", "if", "(", "x", ",", "y", ",", "link", ")", "in", "machine", ":", "dx", ",", "dy", "=", "link", ".", "to_vector", "(", ")", "neighbour", "=", "(", "(", "x", "+", "dx", ")", "%", "machine", ".", "width", ",", "(", "y", "+", "dy", ")", "%", "machine", ".", "height", ")", "# In principle if the link to chip is marked as working, that", "# chip should be working. In practice this might not be the", "# case (especially for carelessly hand-defined Machine", "# objects).", "if", "neighbour", "in", "machine", ":", "neighbours", ".", "append", "(", "neighbour", ")", "nets", ".", "append", "(", "Net", "(", "(", "x", ",", "y", ")", ",", "neighbours", ")", ")", "return", "rcm_vertex_order", "(", "vertices", ",", "nets", ")" ]
41.233333
0.00079
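For intuition only: the same reverse-Cuthill-McKee idea is available as a generic graph utility in networkx; this is not the rig machinery used above, just the ordering concept on a small grid.

import networkx as nx
from networkx.utils import reverse_cuthill_mckee_ordering

g = nx.grid_2d_graph(3, 3)                       # chips as (x, y) nodes
order = list(reverse_cuthill_mckee_ordering(g))  # bandwidth-reducing visit order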
def add_user(username, deployment_name, token_manager=None, app_url=defaults.APP_URL): """ add user to deployment """ deployment_id = get_deployment_id(deployment_name, token_manager=token_manager, app_url=app_url) account_id = accounts.get_account_id(username, token_manager=token_manager, app_url=app_url) headers = token_manager.get_access_token_headers() deployment_url = environment.get_deployment_url(app_url=app_url) response = requests.put('%s/api/v1/deployments/%s/accounts/%s' % (deployment_url, deployment_id, account_id), headers=headers) if response.status_code == 204: return response.text else: raise JutException('Error %s: %s' % (response.status_code, response.text))
[ "def", "add_user", "(", "username", ",", "deployment_name", ",", "token_manager", "=", "None", ",", "app_url", "=", "defaults", ".", "APP_URL", ")", ":", "deployment_id", "=", "get_deployment_id", "(", "deployment_name", ",", "token_manager", "=", "token_manager", ",", "app_url", "=", "app_url", ")", "account_id", "=", "accounts", ".", "get_account_id", "(", "username", ",", "token_manager", "=", "token_manager", ",", "app_url", "=", "app_url", ")", "headers", "=", "token_manager", ".", "get_access_token_headers", "(", ")", "deployment_url", "=", "environment", ".", "get_deployment_url", "(", "app_url", "=", "app_url", ")", "response", "=", "requests", ".", "put", "(", "'%s/api/v1/deployments/%s/accounts/%s'", "%", "(", "deployment_url", ",", "deployment_id", ",", "account_id", ")", ",", "headers", "=", "headers", ")", "if", "response", ".", "status_code", "==", "204", ":", "return", "response", ".", "text", "else", ":", "raise", "JutException", "(", "'Error %s: %s'", "%", "(", "response", ".", "status_code", ",", "response", ".", "text", ")", ")" ]
37.384615
0.002006
def plotcdf(x,xmin,alpha): """ Plots CDF and powerlaw """ x=sort(x) n=len(x) xcdf = arange(n,0,-1,dtype='float')/float(n) q = x[x>=xmin] fcdf = (q/xmin)**(1-alpha) nc = xcdf[argmax(x>=xmin)] fcdf_norm = nc*fcdf loglog(x,xcdf) loglog(q,fcdf_norm)
[ "def", "plotcdf", "(", "x", ",", "xmin", ",", "alpha", ")", ":", "x", "=", "sort", "(", "x", ")", "n", "=", "len", "(", "x", ")", "xcdf", "=", "arange", "(", "n", ",", "0", ",", "-", "1", ",", "dtype", "=", "'float'", ")", "/", "float", "(", "n", ")", "q", "=", "x", "[", "x", ">=", "xmin", "]", "fcdf", "=", "(", "q", "/", "xmin", ")", "**", "(", "1", "-", "alpha", ")", "nc", "=", "xcdf", "[", "argmax", "(", "x", ">=", "xmin", ")", "]", "fcdf_norm", "=", "nc", "*", "fcdf", "loglog", "(", "x", ",", "xcdf", ")", "loglog", "(", "q", ",", "fcdf_norm", ")" ]
17.5625
0.040541
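A quick sanity check of plotcdf above on synthetic Pareto draws; it assumes the pylab-style star import (sort, arange, argmax, loglog, rand) that the function itself relies on.

from pylab import *  # provides sort, arange, argmax, loglog, rand, show

xmin, alpha = 1.5, 2.5
data = xmin * (1 - rand(5000)) ** (-1.0 / (alpha - 1))  # inverse-CDF Pareto draws
plotcdf(data, xmin, alpha)  # fitted line should track the empirical tail
show()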
def unlock_wallet(self, *args, **kwargs):
    """ Unlock the library's internal wallet """
    self.blockchain.wallet.unlock(*args, **kwargs)
    return self
[ "def", "unlock_wallet", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "blockchain", ".", "wallet", ".", "unlock", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self" ]
34.2
0.011429
def _check_stop_conditions(self, sensor_graph): """Check if any of our stop conditions are met. Args: sensor_graph (SensorGraph): The sensor graph we are currently simulating Returns: bool: True if we should stop the simulation """ for stop in self.stop_conditions: if stop.should_stop(self.tick_count, self.tick_count - self._start_tick, sensor_graph): return True return False
[ "def", "_check_stop_conditions", "(", "self", ",", "sensor_graph", ")", ":", "for", "stop", "in", "self", ".", "stop_conditions", ":", "if", "stop", ".", "should_stop", "(", "self", ".", "tick_count", ",", "self", ".", "tick_count", "-", "self", ".", "_start_tick", ",", "sensor_graph", ")", ":", "return", "True", "return", "False" ]
31.2
0.008299
def local_2d_halo_exchange(k, v, num_h_blocks, h_dim, num_w_blocks, w_dim, mask_right): """Halo exchange for keys and values for Local 2D attention.""" for blocks_dim, block_size_dim, halo_size in [ (num_h_blocks, h_dim, h_dim.size), (num_w_blocks, w_dim, w_dim.size)]: # shape of k is [num_h_blocks, num_w_blocks, h_dim, w_dim, kv_channels] if halo_size > 0: if blocks_dim is not None: if mask_right: k = mtf.left_halo_exchange(k, blocks_dim, block_size_dim, halo_size) v = mtf.left_halo_exchange(v, blocks_dim, block_size_dim, halo_size) else: k = mtf.halo_exchange(k, blocks_dim, block_size_dim, halo_size) v = mtf.halo_exchange(v, blocks_dim, block_size_dim, halo_size) else: if mask_right: k = mtf.pad(k, [halo_size, None], block_size_dim.name) v = mtf.pad(v, [halo_size, None], block_size_dim.name) else: k = mtf.pad(k, [halo_size, halo_size], block_size_dim.name) v = mtf.pad(v, [halo_size, halo_size], block_size_dim.name) return k, v
[ "def", "local_2d_halo_exchange", "(", "k", ",", "v", ",", "num_h_blocks", ",", "h_dim", ",", "num_w_blocks", ",", "w_dim", ",", "mask_right", ")", ":", "for", "blocks_dim", ",", "block_size_dim", ",", "halo_size", "in", "[", "(", "num_h_blocks", ",", "h_dim", ",", "h_dim", ".", "size", ")", ",", "(", "num_w_blocks", ",", "w_dim", ",", "w_dim", ".", "size", ")", "]", ":", "# shape of k is [num_h_blocks, num_w_blocks, h_dim, w_dim, kv_channels]", "if", "halo_size", ">", "0", ":", "if", "blocks_dim", "is", "not", "None", ":", "if", "mask_right", ":", "k", "=", "mtf", ".", "left_halo_exchange", "(", "k", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "v", "=", "mtf", ".", "left_halo_exchange", "(", "v", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "else", ":", "k", "=", "mtf", ".", "halo_exchange", "(", "k", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "v", "=", "mtf", ".", "halo_exchange", "(", "v", ",", "blocks_dim", ",", "block_size_dim", ",", "halo_size", ")", "else", ":", "if", "mask_right", ":", "k", "=", "mtf", ".", "pad", "(", "k", ",", "[", "halo_size", ",", "None", "]", ",", "block_size_dim", ".", "name", ")", "v", "=", "mtf", ".", "pad", "(", "v", ",", "[", "halo_size", ",", "None", "]", ",", "block_size_dim", ".", "name", ")", "else", ":", "k", "=", "mtf", ".", "pad", "(", "k", ",", "[", "halo_size", ",", "halo_size", "]", ",", "block_size_dim", ".", "name", ")", "v", "=", "mtf", ".", "pad", "(", "v", ",", "[", "halo_size", ",", "halo_size", "]", ",", "block_size_dim", ".", "name", ")", "return", "k", ",", "v" ]
47.695652
0.013405
def from_json(graph_json_dict: Mapping[str, Any], check_version=True) -> BELGraph:
    """Build a graph from a Node-Link JSON object."""
    graph = node_link_graph(graph_json_dict)
    return ensure_version(graph, check_version=check_version)
[ "def", "from_json", "(", "graph_json_dict", ":", "Mapping", "[", "str", ",", "Any", "]", ",", "check_version", "=", "True", ")", "->", "BELGraph", ":", "graph", "=", "node_link_graph", "(", "graph_json_dict", ")", "return", "ensure_version", "(", "graph", ",", "check_version", "=", "check_version", ")" ]
59.5
0.008299
def _get_uniprot_id(agent): """Return the UniProt ID for an agent, looking up in HGNC if necessary. If the UniProt ID is a list then return the first ID by default. """ up_id = agent.db_refs.get('UP') hgnc_id = agent.db_refs.get('HGNC') if up_id is None: if hgnc_id is None: # If both UniProt and HGNC refs are missing we can't # sequence check and so don't report a failure. return None # Try to get UniProt ID from HGNC up_id = hgnc_client.get_uniprot_id(hgnc_id) # If this fails, again, we can't sequence check if up_id is None: return None # If the UniProt ID is a list then choose the first one. if not isinstance(up_id, basestring) and \ isinstance(up_id[0], basestring): up_id = up_id[0] return up_id
[ "def", "_get_uniprot_id", "(", "agent", ")", ":", "up_id", "=", "agent", ".", "db_refs", ".", "get", "(", "'UP'", ")", "hgnc_id", "=", "agent", ".", "db_refs", ".", "get", "(", "'HGNC'", ")", "if", "up_id", "is", "None", ":", "if", "hgnc_id", "is", "None", ":", "# If both UniProt and HGNC refs are missing we can't", "# sequence check and so don't report a failure.", "return", "None", "# Try to get UniProt ID from HGNC", "up_id", "=", "hgnc_client", ".", "get_uniprot_id", "(", "hgnc_id", ")", "# If this fails, again, we can't sequence check", "if", "up_id", "is", "None", ":", "return", "None", "# If the UniProt ID is a list then choose the first one.", "if", "not", "isinstance", "(", "up_id", ",", "basestring", ")", "and", "isinstance", "(", "up_id", "[", "0", "]", ",", "basestring", ")", ":", "up_id", "=", "up_id", "[", "0", "]", "return", "up_id" ]
37.545455
0.001181
def purge_url(self, host, path): """Purge an individual URL.""" content = self._fetch(path, method="PURGE", headers={ "Host": host }) return FastlyPurge(self, content)
[ "def", "purge_url", "(", "self", ",", "host", ",", "path", ")", ":", "content", "=", "self", ".", "_fetch", "(", "path", ",", "method", "=", "\"PURGE\"", ",", "headers", "=", "{", "\"Host\"", ":", "host", "}", ")", "return", "FastlyPurge", "(", "self", ",", "content", ")" ]
42.75
0.051724
def transform_position_array(array, pos, euler, is_normal, reverse=False):
    """
    Transform any Nx3 position array by translating to a center-of-mass 'pos'
    and applying an euler transformation

    :parameter array array: numpy array of Nx3 positions in the original
        (star) coordinate frame
    :parameter array pos: numpy array with length 3 giving Cartesian
        coordinates to offset all positions
    :parameter array euler: euler angles (etheta, elongan, eincl) in radians
    :parameter bool is_normal: whether each entry is a normal vector rather
        than a position vector.  If true, the quantities won't be offset by
        'pos'
    :parameter bool reverse: whether to apply the euler transformation in the
        reverse direction
    :return: new positions array with same shape as 'array'.
    """
    trans_matrix = euler_trans_matrix(*euler)
    if not reverse:
        trans_matrix = trans_matrix.T

    if isinstance(array, ComputedColumn):
        array = array.for_computations

    if is_normal:
        # then we don't do an offset by the position
        return np.dot(np.asarray(array), trans_matrix)
    else:
        return np.dot(np.asarray(array), trans_matrix) + np.asarray(pos)
[ "def", "transform_position_array", "(", "array", ",", "pos", ",", "euler", ",", "is_normal", ",", "reverse", "=", "False", ")", ":", "trans_matrix", "=", "euler_trans_matrix", "(", "*", "euler", ")", "if", "not", "reverse", ":", "trans_matrix", "=", "trans_matrix", ".", "T", "if", "isinstance", "(", "array", ",", "ComputedColumn", ")", ":", "array", "=", "array", ".", "for_computations", "if", "is_normal", ":", "# then we don't do an offset by the position", "return", "np", ".", "dot", "(", "np", ".", "asarray", "(", "array", ")", ",", "trans_matrix", ")", "else", ":", "return", "np", ".", "dot", "(", "np", ".", "asarray", "(", "array", ")", ",", "trans_matrix", ")", "+", "np", ".", "asarray", "(", "pos", ")" ]
39.285714
0.000887
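An analogy for transform_position_array above using a plain numpy rotation about the z axis; euler_trans_matrix and ComputedColumn are bypassed, so the matrix here is an assumption, not the module's.

import numpy as np

theta = np.pi / 2
rot_z = np.array([[np.cos(theta), -np.sin(theta), 0.0],
                  [np.sin(theta),  np.cos(theta), 0.0],
                  [0.0,            0.0,           1.0]])
points = np.array([[1.0, 0.0, 0.0]])
shifted = points.dot(rot_z.T) + np.array([10.0, 0.0, 0.0])  # rotate, then offset
# shifted -> [[10., 1., 0.]]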
def _find_by(self, key): """Find devices.""" by_path = glob.glob('/dev/input/by-{key}/*-event-*'.format(key=key)) for device_path in by_path: self._parse_device_path(device_path)
[ "def", "_find_by", "(", "self", ",", "key", ")", ":", "by_path", "=", "glob", ".", "glob", "(", "'/dev/input/by-{key}/*-event-*'", ".", "format", "(", "key", "=", "key", ")", ")", "for", "device_path", "in", "by_path", ":", "self", ".", "_parse_device_path", "(", "device_path", ")" ]
42
0.009346
def download(self, force=False, silent=False): """Download from URL.""" def _download(): if self.url.startswith("http"): self._download_http(silent=silent) elif self.url.startswith("ftp"): self._download_ftp(silent=silent) else: raise ValueError("Invalid URL %s" % self.url) logger.debug("Moving %s to %s" % ( self._temp_file_name, self.destination)) shutil.move(self._temp_file_name, self.destination) logger.debug("Successfully downloaded %s" % self.url) try: is_already_downloaded = os.path.isfile(self.destination) if is_already_downloaded: if force: try: os.remove(self.destination) except Exception: logger.error("Cannot delete %s" % self.destination) logger.info( "Downloading %s to %s" % (self.url, self.destination)) logger.debug( "Downloading %s to %s" % (self.url, self._temp_file_name)) _download() else: logger.info(("File %s already exist. Use force=True if you" " would like to overwrite it.") % self.destination) else: _download() finally: try: os.remove(self._temp_file_name) except OSError: pass
[ "def", "download", "(", "self", ",", "force", "=", "False", ",", "silent", "=", "False", ")", ":", "def", "_download", "(", ")", ":", "if", "self", ".", "url", ".", "startswith", "(", "\"http\"", ")", ":", "self", ".", "_download_http", "(", "silent", "=", "silent", ")", "elif", "self", ".", "url", ".", "startswith", "(", "\"ftp\"", ")", ":", "self", ".", "_download_ftp", "(", "silent", "=", "silent", ")", "else", ":", "raise", "ValueError", "(", "\"Invalid URL %s\"", "%", "self", ".", "url", ")", "logger", ".", "debug", "(", "\"Moving %s to %s\"", "%", "(", "self", ".", "_temp_file_name", ",", "self", ".", "destination", ")", ")", "shutil", ".", "move", "(", "self", ".", "_temp_file_name", ",", "self", ".", "destination", ")", "logger", ".", "debug", "(", "\"Successfully downloaded %s\"", "%", "self", ".", "url", ")", "try", ":", "is_already_downloaded", "=", "os", ".", "path", ".", "isfile", "(", "self", ".", "destination", ")", "if", "is_already_downloaded", ":", "if", "force", ":", "try", ":", "os", ".", "remove", "(", "self", ".", "destination", ")", "except", "Exception", ":", "logger", ".", "error", "(", "\"Cannot delete %s\"", "%", "self", ".", "destination", ")", "logger", ".", "info", "(", "\"Downloading %s to %s\"", "%", "(", "self", ".", "url", ",", "self", ".", "destination", ")", ")", "logger", ".", "debug", "(", "\"Downloading %s to %s\"", "%", "(", "self", ".", "url", ",", "self", ".", "_temp_file_name", ")", ")", "_download", "(", ")", "else", ":", "logger", ".", "info", "(", "(", "\"File %s already exist. Use force=True if you\"", "\" would like to overwrite it.\"", ")", "%", "self", ".", "destination", ")", "else", ":", "_download", "(", ")", "finally", ":", "try", ":", "os", ".", "remove", "(", "self", ".", "_temp_file_name", ")", "except", "OSError", ":", "pass" ]
40.75
0.001198
def parse_line(self, statement, element, mode):
    """As part of real-time update, parses the statement and adjusts the
    attributes of the specified CustomType instance to reflect the changes.

    :arg statement: the lines of code that were added/removed/changed on the
      element after it had already been parsed. The lines together form a
      single continuous code statement.
    :arg element: the CustomType instance to update.
    :arg mode: 'insert', or 'delete'.
    """
    if element.incomplete:
        #We need to check for the end_token so we can close up the incomplete
        #status for the instance.
        if element.end_token in statement:
            element.incomplete = False
        return

    #This method deals with updating the *body* of the type declaration. The only
    #possible entries in the body are member variable declarations and type
    #executable definitions.
    self._process_execs_contents(statement, element.module.name, element, mode)
    self._rt_parse_members(statement, element, mode)
[ "def", "parse_line", "(", "self", ",", "statement", ",", "element", ",", "mode", ")", ":", "if", "element", ".", "incomplete", ":", "#We need to check for the end_token so we can close up the incomplete", "#status for the instance.", "if", "element", ".", "end_token", "in", "statement", ":", "element", ".", "incomplete", "=", "False", "return", "#This method deals with updating the *body* of the type declaration. The only", "#possible entries in the body are member variable declarations and type", "#executable definitions.", "self", ".", "_process_execs_contents", "(", "statement", ",", "element", ".", "module", ".", "name", ",", "element", ",", "mode", ")", "self", ".", "_rt_parse_members", "(", "statement", ",", "element", ",", "mode", ")" ]
50.363636
0.0124
def envs(self): ''' Check the refs and return a list of the ones which can be used as salt environments. ''' ref_paths = [x.path for x in self.repo.refs] return self._get_envs_from_ref_paths(ref_paths)
[ "def", "envs", "(", "self", ")", ":", "ref_paths", "=", "[", "x", ".", "path", "for", "x", "in", "self", ".", "repo", ".", "refs", "]", "return", "self", ".", "_get_envs_from_ref_paths", "(", "ref_paths", ")" ]
34.714286
0.008032
def render(self): """Render the axes data into the dict data""" for opt,values in self.data.items(): if opt == 'ticks': self['chxtc'] = '|'.join(values) else: self['chx%s'%opt[0]] = '|'.join(values) return self
[ "def", "render", "(", "self", ")", ":", "for", "opt", ",", "values", "in", "self", ".", "data", ".", "items", "(", ")", ":", "if", "opt", "==", "'ticks'", ":", "self", "[", "'chxtc'", "]", "=", "'|'", ".", "join", "(", "values", ")", "else", ":", "self", "[", "'chx%s'", "%", "opt", "[", "0", "]", "]", "=", "'|'", ".", "join", "(", "values", ")", "return", "self" ]
35.375
0.013793
def s_to_ev(offset_us, source_to_detector_m, array):
    """convert time (s) to energy (eV)
    Parameters:
    ===========
    offset_us: float. Delay of detector in us
    source_to_detector_m: float. Distance source to detector in m
    array: numpy array of time in s

    Returns:
    ========
    numpy array of energy in eV
    """
    lambda_a = 3956. * (array + offset_us * 1e-6) / source_to_detector_m
    return (81.787 / pow(lambda_a, 2)) / 1000.
[ "def", "s_to_ev", "(", "offset_us", ",", "source_to_detector_m", ",", "array", ")", ":", "lambda_a", "=", "3956.", "*", "(", "array", "+", "offset_us", "*", "1e-6", ")", "/", "source_to_detector_m", "return", "(", "81.787", "/", "pow", "(", "lambda_a", ",", "2", ")", ")", "/", "1000." ]
31.285714
0.002217
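A worked number for s_to_ev above: with no detector delay and an assumed 16 m flight path, a 5 ms time of flight gives a wavelength of about 1.236 angstrom and therefore roughly 0.054 eV.

import numpy as np

tof_s = np.array([5.0e-3])  # 5 ms time of flight
ev = s_to_ev(offset_us=0.0, source_to_detector_m=16.0, array=tof_s)
# lambda = 3956 * 5e-3 / 16 ~= 1.236 angstrom
# ev ~= 81.787 / 1.236**2 / 1000 ~= 0.0535 eV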
def get_uaa(self):
    """
    Returns an instance of the UAA Service.
    """
    import predix.security.uaa
    uaa = predix.security.uaa.UserAccountAuthentication()
    return uaa
[ "def", "get_uaa", "(", "self", ")", ":", "import", "predix", ".", "security", ".", "uaa", "uaa", "=", "predix", ".", "security", ".", "uaa", ".", "UserAccountAuthentication", "(", ")", "return", "uaa" ]
28.714286
0.009662
def transform(line, known_fields=ENRICHED_EVENT_FIELD_TYPES, add_geolocation_data=True): """ Convert a Snowplow enriched event TSV into a JSON """ return jsonify_good_event(line.split('\t'), known_fields, add_geolocation_data)
[ "def", "transform", "(", "line", ",", "known_fields", "=", "ENRICHED_EVENT_FIELD_TYPES", ",", "add_geolocation_data", "=", "True", ")", ":", "return", "jsonify_good_event", "(", "line", ".", "split", "(", "'\\t'", ")", ",", "known_fields", ",", "add_geolocation_data", ")" ]
47.6
0.012397
def load_config(path=None, defaults=None):
    """
    Loads and parses an INI style configuration file using Python's built-in
    configparser module. If path is specified, load it.

    If ``defaults`` (a list of strings) is given, try to load each entry as a
    file, without throwing any error if the operation fails.

    If ``defaults`` is not given, the locations listed in the DEFAULT_FILES
    constant are tried.

    To completely disable defaults loading, pass in an empty list or
    ``False``.

    Returns the SafeConfigParser instance used to load and parse the files.
    """
    if defaults is None:
        defaults = DEFAULT_FILES

    config = ConfigParser(allow_no_value=True)

    if defaults:
        config.read(defaults)

    if path:
        with open(path) as fh:
            config.read_file(fh)

    return config
[ "def", "load_config", "(", "path", "=", "None", ",", "defaults", "=", "None", ")", ":", "if", "defaults", "is", "None", ":", "defaults", "=", "DEFAULT_FILES", "config", "=", "ConfigParser", "(", "allow_no_value", "=", "True", ")", "if", "defaults", ":", "config", ".", "read", "(", "defaults", ")", "if", "path", ":", "with", "open", "(", "path", ")", "as", "fh", ":", "config", ".", "read_file", "(", "fh", ")", "return", "config" ]
32.96
0.001179
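A self-contained use of load_config above; the section name and keys are made up, and defaults=False skips the DEFAULT_FILES lookup entirely.

import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.ini', delete=False) as tmp:
    tmp.write('[server]\nhost = 127.0.0.1\nport = 8080\n')
    path = tmp.name

config = load_config(path, defaults=False)
print(config.get('server', 'host'))  # -> 127.0.0.1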
def _iter_from_annotations_dict(graph: BELGraph,
                                annotations_dict: AnnotationsDict,
                                ) -> Iterable[Tuple[str, Set[str]]]:
    """Iterate over the key/value pairs in this edge data dictionary normalized to their source URLs."""
    for key, names in annotations_dict.items():
        if key in graph.annotation_url:
            url = graph.annotation_url[key]
        elif key in graph.annotation_list:
            continue  # skip those
        elif key in graph.annotation_pattern:
            log.debug('pattern annotation in database not implemented yet')  # FIXME
            continue
        else:
            raise ValueError('Graph resources does not contain keyword: {}'.format(key))

        yield url, set(names)
[ "def", "_iter_from_annotations_dict", "(", "graph", ":", "BELGraph", ",", "annotations_dict", ":", "AnnotationsDict", ",", ")", "->", "Iterable", "[", "Tuple", "[", "str", ",", "Set", "[", "str", "]", "]", "]", ":", "for", "key", ",", "names", "in", "annotations_dict", ".", "items", "(", ")", ":", "if", "key", "in", "graph", ".", "annotation_url", ":", "url", "=", "graph", ".", "annotation_url", "[", "key", "]", "elif", "key", "in", "graph", ".", "annotation_list", ":", "continue", "# skip those", "elif", "key", "in", "graph", ".", "annotation_pattern", ":", "log", ".", "debug", "(", "'pattern annotation in database not implemented yet not implemented'", ")", "# FIXME", "continue", "else", ":", "raise", "ValueError", "(", "'Graph resources does not contain keyword: {}'", ".", "format", "(", "key", ")", ")", "yield", "url", ",", "set", "(", "names", ")" ]
52.625
0.008168
def CopyToDict(self): """Copies the path specification to a dictionary. Returns: dict[str, object]: path specification attributes. """ path_spec_dict = {} for attribute_name, attribute_value in iter(self.__dict__.items()): if attribute_value is None: continue if attribute_name == 'parent': attribute_value = attribute_value.CopyToDict() path_spec_dict[attribute_name] = attribute_value return path_spec_dict
[ "def", "CopyToDict", "(", "self", ")", ":", "path_spec_dict", "=", "{", "}", "for", "attribute_name", ",", "attribute_value", "in", "iter", "(", "self", ".", "__dict__", ".", "items", "(", ")", ")", ":", "if", "attribute_value", "is", "None", ":", "continue", "if", "attribute_name", "==", "'parent'", ":", "attribute_value", "=", "attribute_value", ".", "CopyToDict", "(", ")", "path_spec_dict", "[", "attribute_name", "]", "=", "attribute_value", "return", "path_spec_dict" ]
27.058824
0.008403
def from_string(cls, s):
    """Create an instance from string s containing a YAML dictionary."""
    stream = cStringIO(s)
    stream.seek(0)
    return cls(**yaml.safe_load(stream))
[ "def", "from_string", "(", "cls", ",", "s", ")", ":", "stream", "=", "cStringIO", "(", "s", ")", "stream", ".", "seek", "(", "0", ")", "return", "cls", "(", "*", "*", "yaml", ".", "safe_load", "(", "stream", ")", ")" ]
38.8
0.010101
def read_config(config_path):
    """read config_path and return options as a dictionary"""
    result = {}
    with open(config_path, 'r') as fd:
        for line in fd.readlines():
            if '=' in line:
                key, value = line.split('=', 1)
                try:
                    result[key] = json.loads(value)
                except ValueError:
                    result[key] = value.rstrip('\n')
    return result
[ "def", "read_config", "(", "config_path", ")", ":", "result", "=", "{", "}", "with", "open", "(", "config_path", ",", "'r'", ")", "as", "fd", ":", "for", "line", "in", "fd", ".", "readlines", "(", ")", ":", "if", "'='", "in", "line", ":", "key", ",", "value", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "try", ":", "result", "[", "key", "]", "=", "json", ".", "loads", "(", "value", ")", "except", "ValueError", ":", "result", "[", "key", "]", "=", "value", ".", "rstrip", "(", "'\\n'", ")", "return", "result" ]
35.333333
0.002299
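An example of the mixed JSON/raw parsing in read_config above, with a hypothetical file name: values that json.loads accepts come back typed, everything else falls back to the raw string.

# Hypothetical contents of app.cfg:
#   retries=3
#   tags=["a", "b"]
#   name=plain text value
cfg = read_config('app.cfg')
# cfg == {'retries': 3, 'tags': ['a', 'b'], 'name': 'plain text value'}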
def assign_hosting_device_to_cfg_agent(self, context, cfg_agent_id, hosting_device_id): """Make config agent handle an (unassigned) hosting device.""" hd_db = self._get_hosting_device(context, hosting_device_id) if hd_db.cfg_agent_id: if hd_db.cfg_agent_id == cfg_agent_id: return LOG.debug('Hosting device %(hd_id)s has already been assigned to ' 'Cisco cfg agent %(agent_id)s', {'hd_id': hosting_device_id, 'agent_id': cfg_agent_id}) raise ciscocfgagentscheduler.HostingDeviceAssignedToCfgAgent( hosting_device_id=hosting_device_id, agent_id=cfg_agent_id) cfg_agent_db = get_agent_db_obj(self._get_agent(context, cfg_agent_id)) if (cfg_agent_db.agent_type != c_constants.AGENT_TYPE_CFG or cfg_agent_db.admin_state_up is not True): raise ciscocfgagentscheduler.InvalidCfgAgent(agent_id=cfg_agent_id) self._bind_hosting_device_to_cfg_agent(context, hd_db, cfg_agent_db) cfg_notifier = self.agent_notifiers.get(c_constants.AGENT_TYPE_CFG) if cfg_notifier: cfg_notifier.hosting_devices_assigned_to_cfg_agent( context, [hosting_device_id], cfg_agent_db.host)
[ "def", "assign_hosting_device_to_cfg_agent", "(", "self", ",", "context", ",", "cfg_agent_id", ",", "hosting_device_id", ")", ":", "hd_db", "=", "self", ".", "_get_hosting_device", "(", "context", ",", "hosting_device_id", ")", "if", "hd_db", ".", "cfg_agent_id", ":", "if", "hd_db", ".", "cfg_agent_id", "==", "cfg_agent_id", ":", "return", "LOG", ".", "debug", "(", "'Hosting device %(hd_id)s has already been assigned to '", "'Cisco cfg agent %(agent_id)s'", ",", "{", "'hd_id'", ":", "hosting_device_id", ",", "'agent_id'", ":", "cfg_agent_id", "}", ")", "raise", "ciscocfgagentscheduler", ".", "HostingDeviceAssignedToCfgAgent", "(", "hosting_device_id", "=", "hosting_device_id", ",", "agent_id", "=", "cfg_agent_id", ")", "cfg_agent_db", "=", "get_agent_db_obj", "(", "self", ".", "_get_agent", "(", "context", ",", "cfg_agent_id", ")", ")", "if", "(", "cfg_agent_db", ".", "agent_type", "!=", "c_constants", ".", "AGENT_TYPE_CFG", "or", "cfg_agent_db", ".", "admin_state_up", "is", "not", "True", ")", ":", "raise", "ciscocfgagentscheduler", ".", "InvalidCfgAgent", "(", "agent_id", "=", "cfg_agent_id", ")", "self", ".", "_bind_hosting_device_to_cfg_agent", "(", "context", ",", "hd_db", ",", "cfg_agent_db", ")", "cfg_notifier", "=", "self", ".", "agent_notifiers", ".", "get", "(", "c_constants", ".", "AGENT_TYPE_CFG", ")", "if", "cfg_notifier", ":", "cfg_notifier", ".", "hosting_devices_assigned_to_cfg_agent", "(", "context", ",", "[", "hosting_device_id", "]", ",", "cfg_agent_db", ".", "host", ")" ]
62.380952
0.002256
def _run_bubbletree(vcf_csv, cnv_csv, data, wide_lrr=False, do_plots=True,
                    handle_failures=True):
    """Create R script and run on input data

    BubbleTree has some internal hardcoded parameters that assume a smaller
    distribution of log2 scores. This is not true for tumor-only calls, so
    if we specify wide_lrr we scale the calculations to actually get calls.
    Need a better long-term solution with flexible parameters.
    """
    lrr_scale = 10.0 if wide_lrr else 1.0
    local_sitelib = utils.R_sitelib()
    base = utils.splitext_plus(vcf_csv)[0]
    r_file = "%s-run.R" % base
    bubbleplot_out = "%s-bubbleplot.pdf" % base
    trackplot_out = "%s-trackplot.pdf" % base
    calls_out = "%s-calls.rds" % base
    freqs_out = "%s-bubbletree_prevalence.txt" % base
    sample = dd.get_sample_name(data)
    do_plots = "yes" if do_plots else "no"
    with open(r_file, "w") as out_handle:
        out_handle.write(_script.format(**locals()))
    if not utils.file_exists(freqs_out):
        cmd = "%s && %s --no-environ %s" % (utils.get_R_exports(), utils.Rscript_cmd(), r_file)
        try:
            do.run(cmd, "Assess heterogeneity with BubbleTree")
        except subprocess.CalledProcessError as msg:
            if handle_failures and _allowed_bubbletree_errorstates(str(msg)):
                with open(freqs_out, "w") as out_handle:
                    out_handle.write('bubbletree failed:\n %s"\n' % (str(msg)))
            else:
                logger.exception()
                raise
    return {"caller": "bubbletree",
            "report": freqs_out,
            "plot": {"bubble": bubbleplot_out, "track": trackplot_out}}
[ "def", "_run_bubbletree", "(", "vcf_csv", ",", "cnv_csv", ",", "data", ",", "wide_lrr", "=", "False", ",", "do_plots", "=", "True", ",", "handle_failures", "=", "True", ")", ":", "lrr_scale", "=", "10.0", "if", "wide_lrr", "else", "1.0", "local_sitelib", "=", "utils", ".", "R_sitelib", "(", ")", "base", "=", "utils", ".", "splitext_plus", "(", "vcf_csv", ")", "[", "0", "]", "r_file", "=", "\"%s-run.R\"", "%", "base", "bubbleplot_out", "=", "\"%s-bubbleplot.pdf\"", "%", "base", "trackplot_out", "=", "\"%s-trackplot.pdf\"", "%", "base", "calls_out", "=", "\"%s-calls.rds\"", "%", "base", "freqs_out", "=", "\"%s-bubbletree_prevalence.txt\"", "%", "base", "sample", "=", "dd", ".", "get_sample_name", "(", "data", ")", "do_plots", "=", "\"yes\"", "if", "do_plots", "else", "\"no\"", "with", "open", "(", "r_file", ",", "\"w\"", ")", "as", "out_handle", ":", "out_handle", ".", "write", "(", "_script", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ")", "if", "not", "utils", ".", "file_exists", "(", "freqs_out", ")", ":", "cmd", "=", "\"%s && %s --no-environ %s\"", "%", "(", "utils", ".", "get_R_exports", "(", ")", ",", "utils", ".", "Rscript_cmd", "(", ")", ",", "r_file", ")", "try", ":", "do", ".", "run", "(", "cmd", ",", "\"Assess heterogeneity with BubbleTree\"", ")", "except", "subprocess", ".", "CalledProcessError", "as", "msg", ":", "if", "handle_failures", "and", "_allowed_bubbletree_errorstates", "(", "str", "(", "msg", ")", ")", ":", "with", "open", "(", "freqs_out", ",", "\"w\"", ")", "as", "out_handle", ":", "out_handle", ".", "write", "(", "'bubbletree failed:\\n %s\"\\n'", "%", "(", "str", "(", "msg", ")", ")", ")", "else", ":", "logger", ".", "exception", "(", ")", "raise", "return", "{", "\"caller\"", ":", "\"bubbletree\"", ",", "\"report\"", ":", "freqs_out", ",", "\"plot\"", ":", "{", "\"bubble\"", ":", "bubbleplot_out", ",", "\"track\"", ":", "trackplot_out", "}", "}" ]
46.857143
0.001195
def plot_slippage_sweep(returns, positions, transactions,
                        slippage_params=(3, 8, 10, 12, 15, 20, 50),
                        ax=None, **kwargs):
    """
    Plots equity curves at different per-dollar slippage assumptions.

    Parameters
    ----------
    returns : pd.Series
        Timeseries of portfolio returns to be adjusted for various
        degrees of slippage.
    positions : pd.DataFrame
        Daily net position values.
         - See full explanation in tears.create_full_tear_sheet.
    transactions : pd.DataFrame
        Prices and amounts of executed trades. One row per trade.
         - See full explanation in tears.create_full_tear_sheet.
    slippage_params : tuple
        Slippage parameters to apply to the return time series (in
        basis points).
    ax : matplotlib.Axes, optional
        Axes upon which to plot.
    **kwargs, optional
        Passed to seaborn plotting function.

    Returns
    -------
    ax : matplotlib.Axes
        The axes that were plotted on.
    """
    if ax is None:
        ax = plt.gca()

    slippage_sweep = pd.DataFrame()
    for bps in slippage_params:
        adj_returns = txn.adjust_returns_for_slippage(returns, positions,
                                                      transactions, bps)
        label = str(bps) + " bps"
        slippage_sweep[label] = ep.cum_returns(adj_returns, 1)

    slippage_sweep.plot(alpha=1.0, lw=0.5, ax=ax)

    ax.set_title('Cumulative returns given additional per-dollar slippage')
    ax.set_ylabel('')

    ax.legend(loc='center left', frameon=True, framealpha=0.5)

    return ax
[ "def", "plot_slippage_sweep", "(", "returns", ",", "positions", ",", "transactions", ",", "slippage_params", "=", "(", "3", ",", "8", ",", "10", ",", "12", ",", "15", ",", "20", ",", "50", ")", ",", "ax", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "ax", "is", "None", ":", "ax", "=", "plt", ".", "gca", "(", ")", "slippage_sweep", "=", "pd", ".", "DataFrame", "(", ")", "for", "bps", "in", "slippage_params", ":", "adj_returns", "=", "txn", ".", "adjust_returns_for_slippage", "(", "returns", ",", "positions", ",", "transactions", ",", "bps", ")", "label", "=", "str", "(", "bps", ")", "+", "\" bps\"", "slippage_sweep", "[", "label", "]", "=", "ep", ".", "cum_returns", "(", "adj_returns", ",", "1", ")", "slippage_sweep", ".", "plot", "(", "alpha", "=", "1.0", ",", "lw", "=", "0.5", ",", "ax", "=", "ax", ")", "ax", ".", "set_title", "(", "'Cumulative returns given additional per-dollar slippage'", ")", "ax", ".", "set_ylabel", "(", "''", ")", "ax", ".", "legend", "(", "loc", "=", "'center left'", ",", "frameon", "=", "True", ",", "framealpha", "=", "0.5", ")", "return", "ax" ]
32.142857
0.000616